diff --git a/.gitignore b/.gitignore index 6c03ec1b..7ca1dd25 100644 --- a/.gitignore +++ b/.gitignore @@ -9,7 +9,6 @@ dist build eggs -parts bin var sdist @@ -46,4 +45,4 @@ iterate.dat # git merge files # ################### -*.orig \ No newline at end of file +*.orig diff --git a/.travis.yml b/.travis.yml index fb8ddb2c..1f796285 100644 --- a/.travis.yml +++ b/.travis.yml @@ -7,7 +7,7 @@ virtualenv: system_site_packages: true # command to install dependencies, e.g. pip install -r requirements.txt --use-mirrors -before_install: +before_install: - sudo apt-get install -qq python-scipy python-pip - sudo apt-get install -qq python-matplotlib # Workaround for a permissions issue with Travis virtual machine images @@ -17,10 +17,10 @@ before_install: - sudo ln -s /run/shm /dev/shm install: - - pip install --upgrade numpy==1.7.1 - - pip install sphinx + - pip install --upgrade numpy==1.7.1 + - pip install sphinx - pip install nose - pip install . --use-mirrors # command to run tests, e.g. python setup.py test -script: +script: - nosetests GPy/testing diff --git a/GPy/FAQ.txt b/GPy/FAQ.txt new file mode 100644 index 00000000..66ba4834 --- /dev/null +++ b/GPy/FAQ.txt @@ -0,0 +1,8 @@ +Frequently Asked Questions +-------------------------- + +Unit tests are run through Travis-CI. They can be run locally by entering the GPy root directory and running + +nosetests testing/ + +Documentation is handled by Sphinx. To build the documentation: diff --git a/GPy/__init__.py b/GPy/__init__.py index 04129139..f35fda78 100644 --- a/GPy/__init__.py +++ b/GPy/__init__.py @@ -5,6 +5,7 @@ warnings.filterwarnings("ignore", category=DeprecationWarning) import core import models +import mappings import inference import util import examples diff --git a/GPy/coding_style_guide.txt b/GPy/coding_style_guide.txt new file mode 100644 index 00000000..0cc732e4 --- /dev/null +++ b/GPy/coding_style_guide.txt @@ -0,0 +1,10 @@ +In this text document we will describe coding conventions to be used in GPy to keep things consistent. + +All arrays containing data are two dimensional. The first dimension is the number of data, the second dimension is the number of features. This keeps things consistent with the idea of a design matrix. + +Input matrices are either X or t, output matrices are Y. + +Input dimensionality is input_dim, output dimensionality is output_dim, number of data is num_data. + +Data sets are preprocessed in the datasets.py file. This file also records where the data set was obtained from in the dictionary stored in the file. Long term we should move this dictionary to sqlite or similar. + diff --git a/GPy/core/__init__.py b/GPy/core/__init__.py index e9e049b0..9813d5ae 100644 --- a/GPy/core/__init__.py +++ b/GPy/core/__init__.py @@ -2,9 +2,10 @@ # Licensed under the BSD 3-clause license (see LICENSE.txt) from model import * -from parameterised import * +from parameterized import * import priors from gp import GP from sparse_gp import SparseGP from fitc import FITC from svigp import SVIGP +from mapping import * diff --git a/GPy/core/domains.py b/GPy/core/domains.py index dfc880f6..cefac6c2 100644 --- a/GPy/core/domains.py +++ b/GPy/core/domains.py @@ -2,6 +2,22 @@ Created on 4 Jun 2013 @author: maxz + +(Hyper-)Parameter domains defined for :py:mod:`~GPy.core.priors` and :py:mod:`~GPy.kern`. +These domains specify the legitimate realm of the parameters to live in.
+ +:const:`~GPy.core.domains.REAL` : + real domain, all values in the real numbers are allowed + +:const:`~GPy.core.domains.POSITIVE`: + positive domain, only positive real values are allowed + +:const:`~GPy.core.domains.NEGATIVE`: + analogous to :const:`~GPy.core.domains.POSITIVE`, but only negative values are allowed + +:const:`~GPy.core.domains.BOUNDED`: + only values within the bounded range are allowed, + the bounds are specified within the object with the bounded range ''' REAL = 'real' diff --git a/GPy/core/fitc.py b/GPy/core/fitc.py index ef171459..97b4fb1d 100644 --- a/GPy/core/fitc.py +++ b/GPy/core/fitc.py @@ -11,25 +11,27 @@ from sparse_gp import SparseGP class FITC(SparseGP): """ - sparse FITC approximation + + Sparse FITC approximation :param X: inputs :type X: np.ndarray (num_data x Q) :param likelihood: a likelihood instance, containing the observed data :type likelihood: GPy.likelihood.(Gaussian | EP) - :param kernel : the kernel (covariance function). See link kernels + :param kernel: the kernel (covariance function). See link kernels :type kernel: a GPy.kern.kern instance :param Z: inducing inputs (optional, see note) :type Z: np.ndarray (M x Q) | None - :param normalize_(X|Y) : whether to normalize the data before computing (predictions will be in original scales) + :param normalize_(X|Y): whether to normalize the data before computing (predictions will be in original scales) :type normalize_(X|Y): bool + """ def __init__(self, X, likelihood, kernel, Z, normalize_X=False): SparseGP.__init__(self, X, likelihood, kernel, Z, X_variance=None, normalize_X=False) assert self.output_dim == 1, "FITC model is not defined for handling multiple outputs" - def update_likelihood_approximation(self): + def update_likelihood_approximation(self, **kwargs): """ Approximates a non-Gaussian likelihood using Expectation Propagation @@ -37,7 +39,7 @@ class FITC(SparseGP): this function does nothing """ self.likelihood.restart() - self.likelihood.fit_FITC(self.Kmm,self.psi1,self.psi0) + self.likelihood.fit_FITC(self.Kmm,self.psi1,self.psi0, **kwargs) self._set_params(self._get_params()) def _compute_kernel_matrices(self): @@ -120,11 +122,11 @@ class FITC(SparseGP): _dKmm = .5*(V_n**2 + alpha_n + gamma_n**2 - 2.*gamma_k) * K_pp_K #Diag_dD_dKmm self._dpsi1_dtheta += self.kern.dK_dtheta(_dpsi1,self.X[i:i+1,:],self.Z) self._dKmm_dtheta += self.kern.dK_dtheta(_dKmm,self.Z) - self._dKmm_dX += 2.*self.kern.dK_dX(_dKmm ,self.Z) + self._dKmm_dX += self.kern.dK_dX(_dKmm ,self.Z) self._dpsi1_dX += self.kern.dK_dX(_dpsi1.T,self.Z,self.X[i:i+1,:]) # the partial derivative vector for the likelihood - if self.likelihood.Nparams == 0: + if self.likelihood.num_params == 0: # save computation here.
self.partial_for_likelihood = None elif self.likelihood.is_heteroscedastic: @@ -140,7 +142,6 @@ class FITC(SparseGP): dA_dnoise = 0.5 * self.input_dim * (dbstar_dnoise/self.beta_star).sum() - 0.5 * self.input_dim * np.sum(self.likelihood.Y**2 * dbstar_dnoise) dC_dnoise = -0.5 * np.sum(mdot(self.LBi.T,self.LBi,Lmi_psi1) * Lmi_psi1 * dbstar_dnoise.T) - dC_dnoise = -0.5 * np.sum(mdot(self.LBi.T,self.LBi,Lmi_psi1) * Lmi_psi1 * dbstar_dnoise.T) dD_dnoise_1 = mdot(self.V_star*LBiLmipsi1.T,LBiLmipsi1*dbstar_dnoise.T*self.likelihood.Y.T) alpha = mdot(LBiLmipsi1,self.V_star) @@ -158,7 +159,7 @@ class FITC(SparseGP): A = -0.5 * self.num_data * self.output_dim * np.log(2.*np.pi) + 0.5 * np.sum(np.log(self.beta_star)) - 0.5 * np.sum(self.V_star * self.likelihood.Y) C = -self.output_dim * (np.sum(np.log(np.diag(self.LB)))) D = 0.5 * np.sum(np.square(self._LBi_Lmi_psi1V)) - return A + C + D + return A + C + D + self.likelihood.Z def _log_likelihood_gradients(self): pass @@ -174,7 +175,7 @@ class FITC(SparseGP): def dL_dZ(self): dL_dZ = self.kern.dK_dX(self._dL_dpsi1.T,self.Z,self.X) - dL_dZ += 2. * self.kern.dK_dX(self._dL_dKmm,X=self.Z) + dL_dZ += self.kern.dK_dX(self._dL_dKmm,X=self.Z) dL_dZ += self._dpsi1_dX dL_dZ += self._dKmm_dX return dL_dZ diff --git a/GPy/core/gp.py b/GPy/core/gp.py index 5172d9e7..0d1b69a0 100644 --- a/GPy/core/gp.py +++ b/GPy/core/gp.py @@ -6,8 +6,7 @@ import numpy as np import pylab as pb from .. import kern from ..util.linalg import pdinv, mdot, tdot, dpotrs, dtrtrs -#from ..util.plot import gpplot, Tango -from ..likelihoods import EP +from ..likelihoods import EP, Laplace from gp_base import GPBase class GP(GPBase): @@ -16,50 +15,60 @@ class GP(GPBase): :param X: input observations :param kernel: a GPy kernel, defaults to rbf+white - :parm likelihood: a GPy likelihood + :param likelihood: a GPy likelihood :param normalize_X: whether to normalize the input data before computing (predictions will be in original scales) :type normalize_X: False|True :rtype: model object - :param epsilon_ep: convergence criterion for the Expectation Propagation algorithm, defaults to 0.1 - :param powerep: power-EP parameters [$\eta$,$\delta$], defaults to [1.,1.] - :type powerep: list .. 
Note:: Multiple independent outputs are allowed using columns of Y """ def __init__(self, X, likelihood, kernel, normalize_X=False): GPBase.__init__(self, X, likelihood, kernel, normalize_X=normalize_X) - self._set_params(self._get_params()) + self.update_likelihood_approximation() + def _set_params(self, p): - self.kern._set_params_transformed(p[:self.kern.num_params_transformed()]) - self.likelihood._set_params(p[self.kern.num_params_transformed():]) + new_kern_params = p[:self.kern.num_params_transformed()] + new_likelihood_params = p[self.kern.num_params_transformed():] + old_likelihood_params = self.likelihood._get_params() + + self.kern._set_params_transformed(new_kern_params) + self.likelihood._set_params_transformed(new_likelihood_params) self.K = self.kern.K(self.X) + + #Re fit likelihood approximation (if it is an approx), as parameters have changed + if isinstance(self.likelihood, Laplace): + self.likelihood.fit_full(self.K) + self.K += self.likelihood.covariance_matrix self.Ki, self.L, self.Li, self.K_logdet = pdinv(self.K) # the gradient of the likelihood wrt the covariance matrix if self.likelihood.YYT is None: - #alpha = np.dot(self.Ki, self.likelihood.Y) - alpha,_ = dpotrs(self.L, self.likelihood.Y,lower=1) + # alpha = np.dot(self.Ki, self.likelihood.Y) + alpha, _ = dpotrs(self.L, self.likelihood.Y, lower=1) self.dL_dK = 0.5 * (tdot(alpha) - self.output_dim * self.Ki) else: - #tmp = mdot(self.Ki, self.likelihood.YYT, self.Ki) + # tmp = mdot(self.Ki, self.likelihood.YYT, self.Ki) tmp, _ = dpotrs(self.L, np.asfortranarray(self.likelihood.YYT), lower=1) tmp, _ = dpotrs(self.L, np.asfortranarray(tmp.T), lower=1) self.dL_dK = 0.5 * (tmp - self.output_dim * self.Ki) + #Adding dZ_dK (0 for a non-approximate likelihood, compensates for + #additional gradients of K when log-likelihood has non-zero Z term) + self.dL_dK += self.likelihood.dZ_dK + def _get_params(self): return np.hstack((self.kern._get_params_transformed(), self.likelihood._get_params())) - def _get_param_names(self): return self.kern._get_param_names_transformed() + self.likelihood._get_param_names() - def update_likelihood_approximation(self): + def update_likelihood_approximation(self, **kwargs): """ Approximates a non-gaussian likelihood using Expectation Propagation @@ -67,8 +76,8 @@ class GP(GPBase): this function does nothing """ self.likelihood.restart() - self.likelihood.fit_full(self.kern.K(self.X)) - self._set_params(self._get_params()) # update the GP + self.likelihood.fit_full(self.kern.K(self.X), **kwargs) + self._set_params(self._get_params()) # update the GP def _model_fit_term(self): """ @@ -77,7 +86,7 @@ class GP(GPBase): if self.likelihood.YYT is None: tmp, _ = dtrtrs(self.L, np.asfortranarray(self.likelihood.Y), lower=1) return -0.5 * np.sum(np.square(tmp)) - #return -0.5 * np.sum(np.square(np.dot(self.Li, self.likelihood.Y))) + # return -0.5 * np.sum(np.square(np.dot(self.Li, self.likelihood.Y))) else: return -0.5 * np.sum(np.multiply(self.Ki, self.likelihood.YYT)) @@ -89,8 +98,8 @@ class GP(GPBase): model for a new variable Y* = v_tilde/tau_tilde, with a covariance matrix K* = K + diag(1./tau_tilde) plus a normalization term. 
""" - return -0.5 * self.output_dim * self.K_logdet + self._model_fit_term() + self.likelihood.Z - + return (-0.5 * self.num_data * self.output_dim * np.log(2.*np.pi) - + 0.5 * self.output_dim * self.K_logdet + self._model_fit_term() + self.likelihood.Z) def _log_likelihood_gradients(self): """ @@ -100,13 +109,13 @@ class GP(GPBase): """ return np.hstack((self.kern.dK_dtheta(dL_dK=self.dL_dK, X=self.X), self.likelihood._gradients(partial=np.diag(self.dL_dK)))) - def _raw_predict(self, _Xnew, which_parts='all', full_cov=False,stop=False): + def _raw_predict(self, _Xnew, which_parts='all', full_cov=False, stop=False): """ Internal helper function for making predictions, does not account for normalization or likelihood """ - Kx = self.kern.K(_Xnew,self.X,which_parts=which_parts).T - #KiKx = np.dot(self.Ki, Kx) + Kx = self.kern.K(_Xnew, self.X, which_parts=which_parts).T + # KiKx = np.dot(self.Ki, Kx) KiKx, _ = dpotrs(self.L, np.asfortranarray(Kx), lower=1) mu = np.dot(KiKx.T, self.likelihood.Y) if full_cov: @@ -120,20 +129,19 @@ class GP(GPBase): debug_this # @UndefinedVariable return mu, var - def predict(self, Xnew, which_parts='all', full_cov=False): + def predict(self, Xnew, which_parts='all', full_cov=False, **likelihood_args): """ Predict the function(s) at the new point(s) Xnew. - Arguments - --------- + :param Xnew: The points at which to make a prediction :type Xnew: np.ndarray, Nnew x self.input_dim :param which_parts: specifies which outputs kernel(s) to use in prediction :type which_parts: ('all', list of bools) - :param full_cov: whether to return the folll covariance matrix, or just the diagonal + :param full_cov: whether to return the full covariance matrix, or just the diagonal :type full_cov: bool - :rtype: posterior mean, a Numpy array, Nnew x self.input_dim - :rtype: posterior variance, a Numpy array, Nnew x 1 if full_cov=False, Nnew x Nnew otherwise - :rtype: lower and upper boundaries of the 95% confidence intervals, Numpy arrays, Nnew x self.input_dim + :returns: mean: posterior mean, a Numpy array, Nnew x self.input_dim + :returns: var: posterior variance, a Numpy array, Nnew x 1 if full_cov=False, Nnew x Nnew otherwise + :returns: lower and upper boundaries of the 95% confidence intervals, Numpy arrays, Nnew x self.input_dim If full_cov and self.input_dim > 1, the return shape of var is Nnew x Nnew x self.input_dim. If self.input_dim == 1, the return shape is Nnew x Nnew. @@ -145,6 +153,52 @@ class GP(GPBase): mu, var = self._raw_predict(Xnew, full_cov=full_cov, which_parts=which_parts) # now push through likelihood - mean, var, _025pm, _975pm = self.likelihood.predictive_values(mu, var, full_cov) - + mean, var, _025pm, _975pm = self.likelihood.predictive_values(mu, var, full_cov, **likelihood_args) return mean, var, _025pm, _975pm + + def _raw_predict_single_output(self, _Xnew, output, which_parts='all', full_cov=False,stop=False): + """ + For a specific output, calls _raw_predict() at the new point(s) _Xnew. + This functions calls _add_output_index(), so _Xnew should not have an index column specifying the output. + --------- + + :param Xnew: The points at which to make a prediction + :type Xnew: np.ndarray, Nnew x self.input_dim + :param output: output to predict + :type output: integer in {0,..., output_dim-1} + :param which_parts: specifies which outputs kernel(s) to use in prediction + :type which_parts: ('all', list of bools) + :param full_cov: whether to return the full covariance matrix, or just the diagonal + + .. 
Note:: For models with multiple non-independent outputs only. + """ + _Xnew = self._add_output_index(_Xnew, output) + return self._raw_predict(_Xnew, which_parts=which_parts,full_cov=full_cov, stop=stop) + + def predict_single_output(self, Xnew,output=0, which_parts='all', full_cov=False, likelihood_args=dict()): + """ + For a specific output, calls predict() at the new point(s) Xnew. + This function calls _add_output_index(), so Xnew should not have an index column specifying the output. + + :param Xnew: The points at which to make a prediction + :type Xnew: np.ndarray, Nnew x self.input_dim + :param which_parts: specifies which outputs kernel(s) to use in prediction + :type which_parts: ('all', list of bools) + :param full_cov: whether to return the full covariance matrix, or just the diagonal + :type full_cov: bool + :returns: mean: posterior mean, a Numpy array, Nnew x self.input_dim + :returns: var: posterior variance, a Numpy array, Nnew x 1 if full_cov=False, Nnew x Nnew otherwise + :returns: lower and upper boundaries of the 95% confidence intervals, Numpy arrays, Nnew x self.input_dim + + .. Note:: For models with multiple non-independent outputs only. + """ + Xnew = self._add_output_index(Xnew, output) + return self.predict(Xnew, which_parts=which_parts, full_cov=full_cov, likelihood_args=likelihood_args) + + def getstate(self): + return GPBase.getstate(self) + + def setstate(self, state): + GPBase.setstate(self, state) + self._set_params(self._get_params()) + diff --git a/GPy/core/gp_base.py b/GPy/core/gp_base.py index b82f3298..2577e06c 100644 --- a/GPy/core/gp_base.py +++ b/GPy/core/gp_base.py @@ -3,16 +3,22 @@ from .. import kern from ..util.plot import gpplot, Tango, x_frame1D, x_frame2D import pylab as pb from GPy.core.model import Model +import warnings +from ..likelihoods import Gaussian, Gaussian_Mixed_Noise class GPBase(Model): """ - Gaussian Process Model for holding shared behaviour between - sprase_GP and GP models - """ + Gaussian process base model for holding shared behaviour between + sparse_GP and GP models, and potentially other models in the future. + Here we define some functions that are used by both. + """ def __init__(self, X, likelihood, kernel, normalize_X=False): + if len(X.shape)==1: + X = X.reshape(-1,1) + warnings.warn("One-dimensional input (N,) being reshaped to (N,1)") self.X = X - assert len(self.X.shape) == 2 + assert len(self.X.shape) == 2, "too many dimensions for X input" self.num_data, self.input_dim = self.X.shape assert isinstance(kernel, kern.kern) self.kern = kernel @@ -29,108 +35,241 @@ class GPBase(Model): self._Xscale = np.ones((1, self.input_dim)) super(GPBase, self).__init__() - #Model.__init__(self) + # Model.__init__(self) # All leaf nodes should call self._set_params(self._get_params()) at # the end - def plot_f(self, samples=0, plot_limits=None, which_data='all', which_parts='all', resolution=None, full_cov=False, fignum=None, ax=None): + + def posterior_samples_f(self,X,size=10,which_parts='all'): """ - Plot the GP's view of the world, where the data is normalized and the - likelihood is Gaussian. + Samples the posterior GP at the points X. - :param samples: the number of a posteriori samples to plot - :param which_data: which if the training data to plot (default all) - :type which_data: 'all' or a slice object to slice self.X, self.Y - :param plot_limits: The limits of the plot. If 1D [xmin,xmax], if 2D [[xmin,ymin],[xmax,ymax]].
Defaluts to data limits - :param which_parts: which of the kernel functions to plot (additively) - :type which_parts: 'all', or list of bools - :param resolution: the number of intervals to sample the GP on. Defaults to 200 in 1D and 50 (a 50x50 grid) in 2D + :param X: The points at which to take the samples. + :type X: np.ndarray, Nnew x self.input_dim. + :param size: the number of a posteriori samples to draw. + :type size: int. + :param which_parts: which of the kernel functions to plot (additively). + :type which_parts: 'all', or list of bools. + :param full_cov: whether to return the full covariance matrix, or just the diagonal. + :type full_cov: bool. + :returns: Ysim: set of simulations, a Numpy array (N x samples). + """ + m, v = self._raw_predict(X, which_parts=which_parts, full_cov=True) + v = v.reshape(m.size,-1) if len(v.shape)==3 else v + Ysim = np.random.multivariate_normal(m.flatten(), v, size).T + return Ysim + + def posterior_samples(self,X,size=10,which_parts='all',noise_model=None): + """ + Samples the posterior GP at the points X. + + :param X: the points at which to take the samples. + :type X: np.ndarray, Nnew x self.input_dim. + :param size: the number of a posteriori samples to draw. + :type size: int. + :param which_parts: which of the kernel functions to plot (additively). + :type which_parts: 'all', or list of bools. + :param full_cov: whether to return the full covariance matrix, or just the diagonal. + :type full_cov: bool. + :param noise_model: for mixed noise likelihood, the noise model to use in the samples. + :type noise_model: integer. + :returns: Ysim: set of simulations, a Numpy array (N x samples). + """ + Ysim = self.posterior_samples_f(X, size, which_parts=which_parts) + if isinstance(self.likelihood,Gaussian): + noise_std = np.sqrt(self.likelihood._get_params()) + Ysim += np.random.normal(0,noise_std,Ysim.shape) + elif isinstance(self.likelihood,Gaussian_Mixed_Noise): + assert noise_model is not None, "A noise model must be specified." + noise_std = np.sqrt(self.likelihood._get_params()[noise_model]) + Ysim += np.random.normal(0,noise_std,Ysim.shape) + else: + Ysim = self.likelihood.noise_model.samples(Ysim) + + return Ysim + + def plot_f(self, *args, **kwargs): + """ + Plot the GP's view of the world, where the data is normalized and before applying a likelihood. + + This is a convenience function: we simply call self.plot with the + argument plot_raw set True. All args and kwargs are passed on to + plot. + + see also: gp_base.plot + """ + kwargs['plot_raw'] = True + self.plot(*args, **kwargs) + + def plot(self, plot_limits=None, which_data_rows='all', + which_data_ycols='all', which_parts='all', fixed_inputs=[], + levels=20, samples=0, fignum=None, ax=None, resolution=None, + plot_raw=False, + linecol=Tango.colorsHex['darkBlue'],fillcol=Tango.colorsHex['lightBlue']): + """ Plot the posterior of the GP. - In one dimension, the function is plotted with a shaded region identifying two standard deviations. - In two dimensions, a contour-plot shows the mean predicted function - In higher dimensions, we've no implemented this yet !TODO! + - In higher dimensions, use fixed_inputs to plot the GP with some of the inputs fixed. Can plot only part of the data and part of the posterior functions - using which_data and which_functions + using which_data_rows, which_data_ycols and which_parts + :param plot_limits: The limits of the plot. If 1D [xmin,xmax], if 2D [[xmin,ymin],[xmax,ymax]].
Defaults to data limits + :type plot_limits: np.array + :param which_data_rows: which of the training data to plot (default all) + :type which_data_rows: 'all' or a slice object to slice self.X, self.Y + :param which_data_ycols: when the data has several columns (independent outputs), only plot these + :type which_data_ycols: 'all' or a list of integers + :param which_parts: which of the kernel functions to plot (additively) + :type which_parts: 'all', or list of bools + :param fixed_inputs: a list of tuple [(i,v), (i,v)...], specifying that input index i should be set to value v. + :type fixed_inputs: a list of tuples + :param resolution: the number of intervals to sample the GP on. Defaults to 200 in 1D and 50 (a 50x50 grid) in 2D + :type resolution: int + :param levels: number of levels to plot in a contour plot. + :type levels: int + :param samples: the number of a posteriori samples to plot + :type samples: int + :param fignum: figure to plot on. + :type fignum: figure number + :param ax: axes to plot on. + :type ax: axes handle + :type output: integer (first output is 0) + :param linecol: color of line to plot. + :type linecol: + :param fillcol: color of fill + :param levels: for 2D plotting, the number of contour levels to use. If ax is None, create a new figure + """ + #deal with optional arguments + if which_data_rows == 'all': + which_data_rows = slice(None) + if which_data_ycols == 'all': + which_data_ycols = np.arange(self.output_dim) + if len(which_data_ycols)==0: + raise ValueError('No data selected for plotting') if ax is None: fig = pb.figure(num=fignum) ax = fig.add_subplot(111) - if self.X.shape[1] == 1: - Xnew, xmin, xmax = x_frame1D(self.X, plot_limits=plot_limits) - if samples == 0: - m, v = self._raw_predict(Xnew, which_parts=which_parts) - gpplot(Xnew, m, m - 2 * np.sqrt(v), m + 2 * np.sqrt(v), axes=ax) - ax.plot(self.X[which_data], self.likelihood.Y[which_data], 'kx', mew=1.5) + #work out what the inputs are for plotting (1D or 2D) + fixed_dims = np.array([i for i,v in fixed_inputs]) + free_dims = np.setdiff1d(np.arange(self.input_dim),fixed_dims) + + #one dimensional plotting + if len(free_dims) == 1: + + #define the frame on which to plot + resolution = resolution or 200 + Xu = self.X * self._Xscale + self._Xoffset #NOTE self.X are the normalized values now + Xnew, xmin, xmax = x_frame1D(Xu[:,free_dims], plot_limits=plot_limits) + Xgrid = np.empty((Xnew.shape[0],self.input_dim)) + Xgrid[:,free_dims] = Xnew + for i,v in fixed_inputs: + Xgrid[:,i] = v + + #make a prediction on the frame and plot it + if plot_raw: + m, v = self._raw_predict(Xgrid, which_parts=which_parts) + lower = m - 2*np.sqrt(v) + upper = m + 2*np.sqrt(v) + Y = self.likelihood.Y else: - m, v = self._raw_predict(Xnew, which_parts=which_parts, full_cov=True) - Ysim = np.random.multivariate_normal(m.flatten(), v, samples) - gpplot(Xnew, m, m - 2 * np.sqrt(np.diag(v)[:, None]), m + 2 * np.sqrt(np.diag(v))[:, None, ], axes=ax) - for i in range(samples): - ax.plot(Xnew, Ysim[i, :], Tango.colorsHex['darkBlue'], linewidth=0.25) - ax.plot(self.X[which_data], self.likelihood.Y[which_data], 'kx', mew=1.5) - ax.set_xlim(xmin, xmax) - ymin, ymax = min(np.append(self.likelihood.Y, m - 2 * np.sqrt(np.diag(v)[:, None]))), max(np.append(self.likelihood.Y, m + 2 * np.sqrt(np.diag(v)[:, None]))) - ymin, ymax = ymin - 0.1 * (ymax - ymin), ymax + 0.1 * (ymax - ymin) - ax.set_ylim(ymin, ymax) + m, v, lower, upper = self.predict(Xgrid, which_parts=which_parts, sampling=False) #Compute the exact mean + m_, v_, lower, upper
= self.predict(Xgrid, which_parts=which_parts, sampling=True, num_samples=15000) #Approximate the percentiles + Y = self.likelihood.data + for d in which_data_ycols: + gpplot(Xnew, m[:, d], lower[:, d], upper[:, d], axes=ax, edgecol=linecol, fillcol=fillcol) + ax.plot(Xu[which_data_rows,free_dims], Y[which_data_rows, d], 'kx', mew=1.5) - elif self.X.shape[1] == 2: - resolution = resolution or 50 - Xnew, xmin, xmax, xx, yy = x_frame2D(self.X, plot_limits, resolution) - m, v = self._raw_predict(Xnew, which_parts=which_parts) - m = m.reshape(resolution, resolution).T - ax.contour(xx, yy, m, vmin=m.min(), vmax=m.max(), cmap=pb.cm.jet) # @UndefinedVariable - ax.scatter(self.X[:, 0], self.X[:, 1], 40, self.likelihood.Y, linewidth=0, cmap=pb.cm.jet, vmin=m.min(), vmax=m.max()) # @UndefinedVariable - ax.set_xlim(xmin[0], xmax[0]) - ax.set_ylim(xmin[1], xmax[1]) - else: - raise NotImplementedError, "Cannot define a frame with more than two input dimensions" + #optionally plot some samples + if samples: #NOTE not tested with fixed_inputs + Ysim = self.posterior_samples(Xgrid, samples, which_parts=which_parts) + for yi in Ysim.T: + ax.plot(Xnew, yi[:,None], Tango.colorsHex['darkBlue'], linewidth=0.25) + #ax.plot(Xnew, yi[:,None], marker='x', linestyle='--',color=Tango.colorsHex['darkBlue']) #TODO apply this line for discrete outputs. - def plot(self, plot_limits=None, which_data='all', which_parts='all', resolution=None, levels=20, samples=0, fignum=None, ax=None): - """ - TODO: Docstrings! - - :param levels: for 2D plotting, the number of contour levels to use - is ax is None, create a new figure - """ - # TODO include samples - if which_data == 'all': - which_data = slice(None) - - if ax is None: - fig = pb.figure(num=fignum) - ax = fig.add_subplot(111) - - if self.X.shape[1] == 1: - - Xu = self.X * self._Xscale + self._Xoffset # NOTE self.X are the normalized values now - - Xnew, xmin, xmax = x_frame1D(Xu, plot_limits=plot_limits) - m, _, lower, upper = self.predict(Xnew, which_parts=which_parts) - for d in range(m.shape[1]): - gpplot(Xnew, m[:, d], lower[:, d], upper[:, d], axes=ax) - ax.plot(Xu[which_data], self.likelihood.data[which_data, d], 'kx', mew=1.5) - ymin, ymax = min(np.append(self.likelihood.data, lower)), max(np.append(self.likelihood.data, upper)) + #set the limits of the plot to some sensible values ymin, ymax = min(np.append(Y[which_data_rows, which_data_ycols].flatten(), lower)), max(np.append(Y[which_data_rows, which_data_ycols].flatten(), upper)) ymin, ymax = ymin - 0.1 * (ymax - ymin), ymax + 0.1 * (ymax - ymin) ax.set_xlim(xmin, xmax) ax.set_ylim(ymin, ymax) - elif self.X.shape[1] == 2: # FIXME + #2D plotting + elif len(free_dims) == 2: + + #define the frame for plotting on resolution = resolution or 50 - Xnew, _, _, xmin, xmax = x_frame2D(self.X, plot_limits, resolution) + Xu = self.X * self._Xscale + self._Xoffset #NOTE self.X are the normalized values now + Xnew, _, _, xmin, xmax = x_frame2D(Xu[:,free_dims], plot_limits, resolution) + Xgrid = np.empty((Xnew.shape[0],self.input_dim)) + Xgrid[:,free_dims] = Xnew + for i,v in fixed_inputs: + Xgrid[:,i] = v x, y = np.linspace(xmin[0], xmax[0], resolution), np.linspace(xmin[1], xmax[1], resolution) - m, _, lower, upper = self.predict(Xnew, which_parts=which_parts) - m = m.reshape(resolution, resolution).T - ax.contour(x, y, m, levels, vmin=m.min(), vmax=m.max(), cmap=pb.cm.jet) # @UndefinedVariable - Yf = self.likelihood.Y.flatten() - ax.scatter(self.X[:, 0], self.X[:, 1], 40, Yf, cmap=pb.cm.jet, vmin=m.min(), vmax=m.max(),
linewidth=0.) # @UndefinedVariable + + #predict on the frame and plot + if plot_raw: + m, _ = self._raw_predict(Xgrid, which_parts=which_parts) + Y = self.likelihood.Y + else: + m, _, _, _ = self.predict(Xgrid, which_parts=which_parts,sampling=False) + Y = self.likelihood.data + for d in which_data_ycols: + m_d = m[:,d].reshape(resolution, resolution).T + ax.contour(x, y, m_d, levels, vmin=m.min(), vmax=m.max(), cmap=pb.cm.jet) + ax.scatter(self.X[which_data_rows, free_dims[0]], self.X[which_data_rows, free_dims[1]], 40, Y[which_data_rows, d], cmap=pb.cm.jet, vmin=m.min(), vmax=m.max(), linewidth=0.) + + #set the limits of the plot to some sensible values ax.set_xlim(xmin[0], xmax[0]) ax.set_ylim(xmin[1], xmax[1]) + if samples: + warnings.warn("Samples are rather difficult to plot for 2D inputs...") + else: raise NotImplementedError, "Cannot define a frame with more than two input dimensions" + + def getstate(self): + """ + Get the current state of the class. This is only used to efficiently + pickle the model. See also self.setstate + """ + return Model.getstate(self) + [self.X, + self.num_data, + self.input_dim, + self.kern, + self.likelihood, + self.output_dim, + self._Xoffset, + self._Xscale] + + def setstate(self, state): + """ + Set the state of the model. Used for efficient pickling + """ + self._Xscale = state.pop() + self._Xoffset = state.pop() + self.output_dim = state.pop() + self.likelihood = state.pop() + self.kern = state.pop() + self.input_dim = state.pop() + self.num_data = state.pop() + self.X = state.pop() + Model.setstate(self, state) + + def log_predictive_density(self, x_test, y_test): + """ + Calculation of the log predictive density + + .. math: + p(y_{*}|D) = \int p(y_{*}|f_{*})p(f_{*}|\mu_{*},\sigma^{2}_{*})df_{*} + + :param x_test: test observations (x_{*}) + :type x_test: (Nx1) array + :param y_test: test observations (y_{*}) + :type y_test: (Nx1) array + """ + mu_star, var_star = self._raw_predict(x_test) + return self.likelihood.log_predictive_density(y_test, mu_star, var_star) diff --git a/GPy/core/mapping.py b/GPy/core/mapping.py new file mode 100644 index 00000000..7b2c89b9 --- /dev/null +++ b/GPy/core/mapping.py @@ -0,0 +1,192 @@ +# Copyright (c) 2013, GPy authors (see AUTHORS.txt). +# Licensed under the BSD 3-clause license (see LICENSE.txt) + +from ..util.plot import Tango, x_frame1D, x_frame2D +from parameterized import Parameterized +import numpy as np +import pylab as pb + +class Mapping(Parameterized): + """ + Base model for shared behavior between models that can act like a mapping. + """ + + def __init__(self, input_dim, output_dim): + self.input_dim = input_dim + self.output_dim = output_dim + + super(Mapping, self).__init__() + # Model.__init__(self) + # All leaf nodes should call self._set_params(self._get_params()) at + # the end + + def f(self, X): + raise NotImplementedError + + def df_dX(self, dL_df, X): + """Evaluate derivatives of mapping outputs with respect to inputs. + + :param dL_df: gradient of the objective with respect to the function. + :type dL_df: ndarray (num_data x output_dim) + :param X: the input locations where derivatives are to be evaluated. + :type X: ndarray (num_data x input_dim) + :returns: matrix containing gradients of the function with respect to the inputs. + """ + raise NotImplementedError + + def df_dtheta(self, dL_df, X): + """The gradient of the outputs of the mapping with respect to each of the parameters. + :param dL_df: gradient of the objective with respect to the function.
+ +:type dL_df: ndarray (num_data x output_dim) + :param X: input locations where the function is evaluated. + :type X: ndarray (num_data x input_dim) + :returns: Matrix containing gradients with respect to parameters of each output for each input data. + :rtype: ndarray (num_params length) + """ + raise NotImplementedError + + def plot(self, plot_limits=None, which_data='all', which_parts='all', resolution=None, levels=20, samples=0, fignum=None, ax=None, fixed_inputs=[], linecol=Tango.colorsHex['darkBlue']): + """ + + Plot the mapping. + + Plots the mapping associated with the model. + - In one dimension, the function is plotted. + - In two dimensions, a contour-plot shows the function + - In higher dimensions, we've not implemented this yet !TODO! + + Can plot only part of the data and part of the posterior functions + using which_data and which_functions + + :param plot_limits: The limits of the plot. If 1D [xmin,xmax], if 2D [[xmin,ymin],[xmax,ymax]]. Defaults to data limits + :type plot_limits: np.array + :param which_data: which of the training data to plot (default all) + :type which_data: 'all' or a slice object to slice self.X, self.Y + :param which_parts: which of the kernel functions to plot (additively) + :type which_parts: 'all', or list of bools + :param resolution: the number of intervals to sample the GP on. Defaults to 200 in 1D and 50 (a 50x50 grid) in 2D + :type resolution: int + :param levels: number of levels to plot in a contour plot. + :type levels: int + :param samples: the number of a posteriori samples to plot + :type samples: int + :param fignum: figure to plot on. + :type fignum: figure number + :param ax: axes to plot on. + :type ax: axes handle + :param fixed_inputs: a list of tuple [(i,v), (i,v)...], specifying that input index i should be set to value v. + :type fixed_inputs: a list of tuples + :param linecol: color of line to plot.
+ +:type linecol: + :param levels: for 2D plotting, the number of contour levels to use. If ax is None, create a new figure + + """ + # TODO include samples + if which_data == 'all': + which_data = slice(None) + + if ax is None: + fig = pb.figure(num=fignum) + ax = fig.add_subplot(111) + + plotdims = self.input_dim - len(fixed_inputs) + + if plotdims == 1: + + Xu = self.X * self._Xscale + self._Xoffset # NOTE self.X are the normalized values now + + fixed_dims = np.array([i for i,v in fixed_inputs]) + freedim = np.setdiff1d(np.arange(self.input_dim),fixed_dims) + + Xnew, xmin, xmax = x_frame1D(Xu[:,freedim], plot_limits=plot_limits) + Xgrid = np.empty((Xnew.shape[0],self.input_dim)) + Xgrid[:,freedim] = Xnew + for i,v in fixed_inputs: + Xgrid[:,i] = v + + f = self.f(Xgrid) + for d in range(f.shape[1]): + ax.plot(Xnew, f[:, d], color=linecol) + + elif self.X.shape[1] == 2: + resolution = resolution or 50 + Xnew, _, _, xmin, xmax = x_frame2D(self.X, plot_limits, resolution) + x, y = np.linspace(xmin[0], xmax[0], resolution), np.linspace(xmin[1], xmax[1], resolution) + f = self.f(Xnew) + f = f.reshape(resolution, resolution).T + ax.contour(x, y, f, levels, vmin=f.min(), vmax=f.max(), cmap=pb.cm.jet) # @UndefinedVariable + ax.set_xlim(xmin[0], xmax[0]) + ax.set_ylim(xmin[1], xmax[1]) + + else: + raise NotImplementedError, "Cannot define a frame with more than two input dimensions" + +from GPy.core.model import Model + +class Mapping_check_model(Model): + """ + This is a dummy model class used as a base class for checking that the + gradients of a given mapping are implemented correctly. It enables + checkgrad() to be called independently on each mapping. + """ + def __init__(self, mapping=None, dL_df=None, X=None): + num_samples = 20 + if mapping==None: + mapping = GPy.mapping.linear(1, 1) + if X==None: + X = np.random.randn(num_samples, mapping.input_dim) + if dL_df==None: + dL_df = np.ones((num_samples, mapping.output_dim)) + + self.mapping=mapping + self.X = X + self.dL_df = dL_df + self.num_params = self.mapping.num_params + Model.__init__(self) + + + def _get_params(self): + return self.mapping._get_params() + + def _get_param_names(self): + return self.mapping._get_param_names() + + def _set_params(self, x): + self.mapping._set_params(x) + + def log_likelihood(self): + return (self.dL_df*self.mapping.f(self.X)).sum() + + def _log_likelihood_gradients(self): + raise NotImplementedError, "This needs to be implemented to use the Mapping_check_model class." + +class Mapping_check_df_dtheta(Mapping_check_model): + """This class allows gradient checks for the gradient of a mapping with respect to parameters. """ + def __init__(self, mapping=None, dL_df=None, X=None): + Mapping_check_model.__init__(self,mapping=mapping,dL_df=dL_df, X=X) + + def _log_likelihood_gradients(self): + return self.mapping.df_dtheta(self.dL_df, self.X) + + +class Mapping_check_df_dX(Mapping_check_model): + """This class allows gradient checks for the gradient of a mapping with respect to X.
""" + def __init__(self, mapping=None, dL_df=None, X=None): + Mapping_check_model.__init__(self,mapping=mapping,dL_df=dL_df, X=X) + + if dL_df==None: + dL_df = np.ones((self.X.shape[0],self.mapping.output_dim)) + self.num_params = self.X.shape[0]*self.mapping.input_dim + + def _log_likelihood_gradients(self): + return self.mapping.df_dX(self.dL_df, self.X).flatten() + + def _get_param_names(self): + return ['X_' +str(i) + ','+str(j) for j in range(self.X.shape[1]) for i in range(self.X.shape[0])] + + def _get_params(self): + return self.X.flatten() + + def _set_params(self, x): + self.X=x.reshape(self.X.shape) + diff --git a/GPy/core/model.py b/GPy/core/model.py index 05375b2a..6fbc9623 100644 --- a/GPy/core/model.py +++ b/GPy/core/model.py @@ -1,4 +1,4 @@ -# Copyright (c) 2012, GPy authors (see AUTHORS.txt). +# Copyright (c) 2012, 2013, GPy authors (see AUTHORS.txt). # Licensed under the BSD 3-clause license (see LICENSE.txt) @@ -6,49 +6,74 @@ from .. import likelihoods from ..inference import optimization from ..util.linalg import jitchol from GPy.util.misc import opt_wrapper -from parameterised import Parameterised +from parameterized import Parameterized import multiprocessing as mp import numpy as np from GPy.core.domains import POSITIVE, REAL from numpy.linalg.linalg import LinAlgError # import numdifftools as ndt -class Model(Parameterised): +class Model(Parameterized): _fail_count = 0 # Count of failed optimization steps (see objective) _allowed_failures = 10 # number of allowed failures def __init__(self): - Parameterised.__init__(self) + Parameterized.__init__(self) self.priors = None self.optimization_runs = [] self.sampling_runs = [] self.preferred_optimizer = 'scg' # self._set_params(self._get_params()) has been taken out as it should only be called on leaf nodes - def _get_params(self): - raise NotImplementedError, "this needs to be implemented to use the Model class" - def _set_params(self, x): - raise NotImplementedError, "this needs to be implemented to use the Model class" def log_likelihood(self): - raise NotImplementedError, "this needs to be implemented to use the Model class" + raise NotImplementedError, "this needs to be implemented to use the model class" def _log_likelihood_gradients(self): - raise NotImplementedError, "this needs to be implemented to use the Model class" + raise NotImplementedError, "this needs to be implemented to use the model class" + + def getstate(self): + """ + Get the current state of the class. + Inherited from Parameterized, so add those parameters to the state + + :return: list of states from the model. + + """ + return Parameterized.getstate(self) + \ + [self.priors, self.optimization_runs, + self.sampling_runs, self.preferred_optimizer] + + def setstate(self, state): + """ + set state from previous call to getstate + call Parameterized with the rest of the state + + :param state: the state of the model. + :type state: list as returned from getstate. + + """ + self.preferred_optimizer = state.pop() + self.sampling_runs = state.pop() + self.optimization_runs = state.pop() + self.priors = state.pop() + Parameterized.setstate(self, state) def set_prior(self, regexp, what): """ - Sets priors on the Model parameters. - Arguments - --------- - regexp -- string, regexp, or integer array - what -- instance of a Prior class + Sets priors on the model parameters. - Notes - ----- - Asserts that the Prior is suitable for the constraint. If the + **Notes** + + Asserts that the prior is suitable for the constraint. 
If the wrong constraint is in place, an error is raised. If no constraint is in place, one is added (warning printed). - For tied parameters, the Prior will only be "counted" once, thus - a Prior object is only inserted on the first tied index + For tied parameters, the prior will only be "counted" once, thus + a prior object is only inserted on the first tied index + + :param regexp: regular expression of parameters on which priors need to be set. + :type regexp: string, regexp, or integer array + :param what: prior to set on parameter. + :type what: GPy.core.Prior type + + """ if self.priors is None: self.priors = [None for i in range(self._get_params().size)] @@ -58,12 +83,12 @@ class Model(Parameterised): # check tied situation tie_partial_matches = [tie for tie in self.tied_indices if (not set(tie).isdisjoint(set(which))) & (not set(tie) == set(which))] if len(tie_partial_matches): - raise ValueError, "cannot place Prior across partial ties" + raise ValueError, "cannot place prior across partial ties" tie_matches = [tie for tie in self.tied_indices if set(which) == set(tie) ] if len(tie_matches) > 1: - raise ValueError, "cannot place Prior across multiple ties" + raise ValueError, "cannot place prior across multiple ties" elif len(tie_matches) == 1: - which = which[:1] # just place a Prior object on the first parameter + which = which[:1] # just place a prior object on the first parameter # check constraints are okay @@ -75,7 +100,7 @@ else: constrained_positive_indices = np.zeros(shape=(0,)) bad_constraints = np.setdiff1d(self.all_constrained_indices(), constrained_positive_indices) - assert not np.any(which[:, None] == bad_constraints), "constraint and Prior incompatible" + assert not np.any(which[:, None] == bad_constraints), "constraint and prior incompatible" unconst = np.setdiff1d(which, constrained_positive_indices) if len(unconst): print "Warning: constraining parameters to be positive:" @@ -83,17 +108,22 @@ print '\n' self.constrain_positive(unconst) elif what.domain is REAL: - assert not np.any(which[:, None] == self.all_constrained_indices()), "constraint and Prior incompatible" + assert not np.any(which[:, None] == self.all_constrained_indices()), "constraint and prior incompatible" else: - raise ValueError, "Prior not recognised" + raise ValueError, "prior not recognised" - # store the Prior in a local list + # store the prior in a local list for w in which: self.priors[w] = what def get_gradient(self, name, return_names=False): """ - Get Model gradient(s) by name. The name is applied as a regular expression and all parameters that match that regular expression are returned. + Get model gradient(s) by name. The name is applied as a regular expression and all parameters that match that regular expression are returned. + + :param name: the name of parameters required (as a regular expression). + :type name: regular expression + :param return_names: whether or not to return the names matched (default False) + :type return_names: bool """ matches = self.grep_param_names(name) if len(matches): @@ -133,14 +163,14 @@ def randomize(self): """ - Randomize the Model.
+ Make this draw from the prior if one exists, else draw from N(0,1) """ # first take care of all parameters (from N(0,1)) x = self._get_params_transformed() x = np.random.randn(x.size) self._set_params_transformed(x) - # now draw from Prior where possible + # now draw from prior where possible x = self._get_params() if self.priors is not None: [np.put(x, i, p.rvs(1)) for i, p in enumerate(self.priors) if not p is None] @@ -150,25 +180,34 @@ def optimize_restarts(self, num_restarts=10, robust=False, verbose=True, parallel=False, num_processes=None, **kwargs): """ - Perform random restarts of the Model, and set the Model to the best + Perform random restarts of the model, and set the model to the best seen solution. If the robust flag is set, exceptions raised during optimizations will - be handled silently. If _all_ runs fail, the Model is reset to the + be handled silently. If _all_ runs fail, the model is reset to the existing parameter values. - Notes - ----- - **kwargs are passed to the optimizer. They can be: - :max_f_eval: maximum number of function evaluations - :messages: whether to display during optimisation - :verbose: whether to show informations about the current restart - :parallel: whether to run each restart as a separate process. It relies on the multiprocessing module. - :num_processes: number of workers in the multiprocessing pool + **Notes** - ..Note: If num_processes is None, the number of workes in the multiprocessing pool is automatically - set to the number of processors on the current machine. + :param num_restarts: number of restarts to use (default 10) + :type num_restarts: int + :param robust: whether to handle exceptions silently or not (default False) + :type robust: bool + :param parallel: whether to run each restart as a separate process. It relies on the multiprocessing module. + :type parallel: bool + :param num_processes: number of workers in the multiprocessing pool + :type num_processes: int + \*\*kwargs are passed to the optimizer. They can be: + + :param max_f_eval: maximum number of function evaluations + :type max_f_eval: int + :param max_iters: maximum number of iterations + :type max_iters: int + :param messages: whether to display during optimisation + :type messages: bool + + .. note:: If num_processes is None, the number of workers in the multiprocessing pool is automatically set to the number of processors on the current machine. """ initial_parameters = self._get_params_transformed() @@ -213,9 +252,14 @@ def ensure_default_constraints(self): """ - Ensure that any variables which should clearly be positive have been constrained somehow. + Ensure that any variables which should clearly be positive + have been constrained somehow. The method performs a regular + expression search on parameter names looking for the terms + 'variance', 'lengthscale', 'precision', 'decay' and 'kappa'. If any of + these terms are present in the name the parameter is + constrained positive. """ - positive_strings = ['variance', 'lengthscale', 'precision', 'kappa'] + positive_strings = ['variance', 'lengthscale', 'precision', 'decay', 'kappa'] # param_names = self._get_param_names() currently_constrained = self.all_constrained_indices() to_make_positive = [] @@ -228,11 +272,15 @@ def objective_function(self, x): """ - The objective function passed to the optimizer. It combines the likelihood and the priors. - + The objective function passed to the optimizer.
It combines + the likelihood and the priors. + Failures are handled robustly. The algorithm will try several times to return the objective, and will raise the original exception if the objective cannot be computed. + + :param x: the parameters of the model. + :type x: np.array """ try: self._set_params_transformed(x) @@ -249,39 +297,53 @@ Gets the gradients from the likelihood and the priors. Failures are handled robustly. The algorithm will try several times to - return the objective, and will raise the original exception if it + return the gradients, and will raise the original exception if the objective cannot be computed. + + :param x: the parameters of the model. + :type x: np.array """ try: self._set_params_transformed(x) + obj_grads = -self._transform_gradients(self._log_likelihood_gradients() + self._log_prior_gradients()) self._fail_count = 0 except (LinAlgError, ZeroDivisionError, ValueError) as e: if self._fail_count >= self._allowed_failures: raise e self._fail_count += 1 - obj_grads = -self._transform_gradients(self._log_likelihood_gradients() + self._log_prior_gradients()) + obj_grads = np.clip(-self._transform_gradients(self._log_likelihood_gradients() + self._log_prior_gradients()), -1e100, 1e100) return obj_grads def objective_and_gradients(self, x): + """ + Compute the objective function of the model and the gradient of the model at the point given by x. + + :param x: the point at which gradients are to be computed. + :type x: np.array + """ + try: self._set_params_transformed(x) obj_f = -self.log_likelihood() - self.log_prior() self._fail_count = 0 + obj_grads = -self._transform_gradients(self._log_likelihood_gradients() + self._log_prior_gradients()) except (LinAlgError, ZeroDivisionError, ValueError) as e: if self._fail_count >= self._allowed_failures: raise e self._fail_count += 1 obj_f = np.inf - obj_grads = -self._transform_gradients(self._log_likelihood_gradients() + self._log_prior_gradients()) + obj_grads = np.clip(-self._transform_gradients(self._log_likelihood_gradients() + self._log_prior_gradients()), -1e100, 1e100) return obj_f, obj_grads def optimize(self, optimizer=None, start=None, **kwargs): """ - Optimize the Model using self.log_likelihood and self.log_likelihood_gradient, as well as self.priors. + Optimize the model using self.log_likelihood and self.log_likelihood_gradient, as well as self.priors. kwargs are passed to the optimizer. They can be: - :max_f_eval: maximum number of function evaluations + :param max_f_eval: maximum number of function evaluations + :type max_f_eval: int :param messages: whether to display during optimisation + :type messages: bool :param optimizer: which optimizer to use (defaults to self.preferred_optimizer) :type optimizer: string TODO: valid strings?
""" @@ -293,7 +355,9 @@ class Model(Parameterised): optimizer = optimization.get_optimizer(optimizer) opt = optimizer(start, model=self, **kwargs) + opt.run(f_fp=self.objective_and_gradients, f=self.objective_function, fp=self.objective_function_gradients) + self.optimization_runs.append(opt) self._set_params_transformed(opt.x_opt) @@ -305,14 +369,14 @@ class Model(Parameterised): self.optimization_runs.append(sgd) def Laplace_covariance(self): - """return the covariance matric of a Laplace approximatino at the current (stationary) point""" - # TODO add in the Prior contributions for MAP estimation + """return the covariance matrix of a Laplace approximation at the current (stationary) point.""" + # TODO add in the prior contributions for MAP estimation # TODO fix the hessian for tied, constrained and fixed components if hasattr(self, 'log_likelihood_hessian'): A = -self.log_likelihood_hessian() else: - print "numerically calculating hessian. please be patient!" + print "numerically calculating Hessian. please be patient!" x = self._get_params() def f(x): self._set_params(x) @@ -326,8 +390,8 @@ class Model(Parameterised): return A def Laplace_evidence(self): - """Returns an estiamte of the Model evidence based on the Laplace approximation. - Uses a numerical estimate of the hessian if none is available analytically""" + """Returns an estiamte of the model evidence based on the Laplace approximation. + Uses a numerical estimate of the Hessian if none is available analytically.""" A = self.Laplace_covariance() try: hld = np.sum(np.log(np.diag(jitchol(A)[0]))) @@ -336,39 +400,47 @@ class Model(Parameterised): return 0.5 * self._get_params().size * np.log(2 * np.pi) + self.log_likelihood() - hld def __str__(self): - s = Parameterised.__str__(self).split('\n') + s = Parameterized.__str__(self).split('\n') + #def __str__(self, names=None): + # if names is None: + # names = self._get_print_names() + #s = Parameterized.__str__(self, names=names).split('\n') # add priors to the string if self.priors is not None: strs = [str(p) if p is not None else '' for p in self.priors] else: strs = [''] * len(self._get_params()) + # strs = [''] * len(self._get_param_names()) + # name_indices = self.grep_param_names("|".join(names)) + # strs = np.array(strs)[name_indices] width = np.array(max([len(p) for p in strs] + [5])) + 4 log_like = self.log_likelihood() log_prior = self.log_prior() obj_funct = '\nLog-likelihood: {0:.3e}'.format(log_like) if len(''.join(strs)) != 0: - obj_funct += ', Log Prior: {0:.3e}, LL+Prior = {0:.3e}'.format(log_prior, log_like + log_prior) + obj_funct += ', Log prior: {0:.3e}, LL+prior = {0:.3e}'.format(log_prior, log_like + log_prior) obj_funct += '\n\n' s[0] = obj_funct + s[0] - s[0] += "|{h:^{col}}".format(h='Prior', col=width) + s[0] += "|{h:^{col}}".format(h='prior', col=width) s[1] += '-' * (width + 1) for p in range(2, len(strs) + 2): - s[p] += '|{Prior:^{width}}'.format(Prior=strs[p - 2], width=width) + s[p] += '|{prior:^{width}}'.format(prior=strs[p - 2], width=width) return '\n'.join(s) def checkgrad(self, target_param=None, verbose=False, step=1e-6, tolerance=1e-3): """ - Check the gradient of the Model by comparing to a numerical estimate. - If the verbose flag is passed, invividual components are tested (and printed) + Check the gradient of the ,odel by comparing to a numerical + estimate. 
If the verbose flag is passed, individual + components are tested (and printed) :param verbose: If True, print a "full" checking of each parameter :type verbose: bool :param step: The size of the step around which to linearise the objective - :type step: float (defaul 1e-6) + :type step: float (default 1e-6) :param tolerance: the tolerance allowed (see note) :type tolerance: float (default 1e-3) @@ -381,7 +453,12 @@ if not verbose: # just check the global ratio - dx = step * np.sign(np.random.uniform(-1, 1, x.size)) + + #choose a random direction to find the linear approximation in + if x.size==2: + dx = step * np.ones(2) # random direction for 2 parameters can fail due to symmetry + else: + dx = step * np.sign(np.random.uniform(-1, 1, x.size)) # evaluate around the point x f1, g1 = self.objective_and_gradients(x + dx) @@ -389,9 +466,9 @@ gradient = self.objective_function_gradients(x) numerical_gradient = (f1 - f2) / (2 * dx) - global_ratio = (f1 - f2) / (2 * np.dot(dx, gradient)) + global_ratio = (f1 - f2) / (2 * np.dot(dx, np.where(gradient==0, 1e-32, gradient))) - return (np.abs(1. - global_ratio) < tolerance) or (np.abs(gradient - numerical_gradient).mean() - 1) < tolerance + return (np.abs(1. - global_ratio) < tolerance) or (np.abs(gradient - numerical_gradient).mean() < tolerance) else: # check the gradient of each parameter individually, and do some pretty printing try: @@ -429,7 +506,7 @@ gradient = self.objective_function_gradients(x)[i] numerical_gradient = (f1 - f2) / (2 * step) - ratio = (f1 - f2) / (2 * step * gradient) + ratio = (f1 - f2) / (2 * step * np.where(gradient==0, 1e-312, gradient)) difference = np.abs((f1 - f2) / 2 / step - gradient) if (np.abs(1. - ratio) < tolerance) or np.abs(difference) < tolerance: @@ -445,43 +522,40 @@ def input_sensitivity(self): """ - return an array describing the sesitivity of the Model to each input + return an array describing the sensitivity of the model to each input NB. Right now, we're basing this on the lengthscales (or variances) of the kernel. TODO: proper sensitivity analysis - where we integrate across the Model inputs and evaluate the - effect on the variance of the Model output. """ + where we integrate across the model inputs and evaluate the + effect on the variance of the model output. """ if not hasattr(self, 'kern'): - raise ValueError, "this Model has no kernel" + raise ValueError, "this model has no kernel" - k = [p for p in self.kern.parts if p.name in ['rbf', 'linear']] + k = [p for p in self.kern.parts if p.name in ['rbf', 'linear', 'rbf_inv']] if (not len(k) == 1) or (not k[0].ARD): raise ValueError, "cannot determine sensitivity for this kernel" k = k[0] if k.name == 'rbf': - return k.lengthscale + return 1. / k.lengthscale + elif k.name == 'rbf_inv': + return k.inv_lengthscale elif k.name == 'linear': - return 1. / k.variances + return k.variances - def pseudo_EM(self, epsilon=.1, **kwargs): + def pseudo_EM(self, stop_crit=.1, **kwargs): """ - TODO: Should this not bein the GP class? EM - like algorithm for Expectation Propagation and Laplace approximation - kwargs are passed to the optimize function. They can be: - - :epsilon: convergence criterion - :max_f_eval: maximum number of function evaluations - :messages: whether to display during optimisation - :param optimzer: whice optimizer to use (defaults to self.preferred optimizer) - :type optimzer: string TODO: valid strings?
+ :param stop_crit: convergence criterion + :type stop_crit: float + .. Note: kwargs are passed to update_likelihood and optimize functions. """ - assert isinstance(self.likelihood, likelihoods.EP), "pseudo_EM is only available for EP likelihoods" - ll_change = epsilon + 1. + assert isinstance(self.likelihood, (likelihoods.EP, likelihoods.EP_Mixed_Noise, likelihoods.Laplace)), "pseudo_EM is only available for approximate likelihoods" + ll_change = stop_crit + 1. iteration = 0 last_ll = -np.inf @@ -489,10 +563,25 @@ class Model(Parameterised): alpha = 0 stop = False + #Handle **kwargs + ep_args = {} + for arg in kwargs.keys(): + if arg in ('epsilon','power_ep'): + ep_args[arg] = kwargs[arg] + del kwargs[arg] + while not stop: last_approximation = self.likelihood.copy() last_params = self._get_params() - self.update_likelihood_approximation() + if len(ep_args) == 2: + self.update_likelihood_approximation(epsilon=ep_args['epsilon'],power_ep=ep_args['power_ep']) + elif len(ep_args) == 1: + if ep_args.keys()[0] == 'epsilon': + self.update_likelihood_approximation(epsilon=ep_args['epsilon']) + elif ep_args.keys()[0] == 'power_ep': + self.update_likelihood_approximation(power_ep=ep_args['power_ep']) + else: + self.update_likelihood_approximation() new_ll = self.log_likelihood() ll_change = new_ll - last_ll @@ -504,7 +593,7 @@ class Model(Parameterised): else: self.optimize(**kwargs) last_ll = self.log_likelihood() - if ll_change < epsilon: + if ll_change < stop_crit: stop = True iteration += 1 if stop: diff --git a/GPy/core/parameterised.py b/GPy/core/parameterized.py similarity index 66% rename from GPy/core/parameterised.py rename to GPy/core/parameterized.py index b3a5712a..de1adaf8 100644 --- a/GPy/core/parameterised.py +++ b/GPy/core/parameterized.py @@ -9,7 +9,7 @@ import cPickle import warnings import transformations -class Parameterised(object): +class Parameterized(object): def __init__(self): """ This is the base class for model and kernel. 
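Stripped of the kwargs plumbing, the pseudo_EM loop above is plain coordinate ascent; a sketch only (model stands for any GPy model with an EP or Laplace likelihood; the real method additionally restores the previous approximation when the objective drops):

import numpy as np

def pseudo_em_sketch(model, stop_crit=.1, max_iters=100):
    last_ll = -np.inf
    for _ in range(max_iters):
        model.update_likelihood_approximation()  # E-like step: re-fit the site parameters
        model.optimize()                         # M-like step: optimize hyperparameters
        ll = model.log_likelihood()
        if ll - last_ll < stop_crit:             # converged (or no longer improving)
            break
        last_ll = ll
    return model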
Mostly just handles tieing and constraining of parameters @@ -20,55 +20,66 @@ class Parameterised(object): self.constrained_indices = [] self.constraints = [] - def pickle(self, filename, protocol= -1): - f = file(filename, 'w') - cPickle.dump(self, f, protocol) - f.close() + def _get_params(self): + raise NotImplementedError, "this needs to be implemented to use the Parameterized class" + def _set_params(self, x): + raise NotImplementedError, "this needs to be implemented to use the Parameterized class" + + def _get_param_names(self): + raise NotImplementedError, "this needs to be implemented to use the Parameterized class" + #def _get_print_names(self): + # """ Override for which names to print out, when using print m """ + # return self._get_param_names() + + def pickle(self, filename, protocol=None): + if protocol is None: + if self._has_get_set_state(): + protocol = 0 + else: + protocol = -1 + with open(filename, 'w') as f: + cPickle.dump(self, f, protocol) def copy(self): """Returns a (deep) copy of the current model """ return copy.deepcopy(self) - @property - def params(self): + def __getstate__(self): + if self._has_get_set_state(): + return self.getstate() + return self.__dict__ + + def __setstate__(self, state): + if self._has_get_set_state(): + self.setstate(state) # set state + self._set_params(self._get_params()) # restore all values + return + self.__dict__ = state + + def _has_get_set_state(self): + return 'getstate' in vars(self.__class__) and 'setstate' in vars(self.__class__) + + def getstate(self): """ - Returns a **copy** of parameters in non transformed space + Get the current state of the class, + here just all the indices, rest can get recomputed + For inheriting from Parameterized: - :see_also: :py:func:`GPy.core.Parameterised.params_transformed` + Always append the state of the inherited object + and call down to the inherited object in setstate!! """ - return self._get_params() + return [self.tied_indices, + self.fixed_indices, + self.fixed_values, + self.constrained_indices, + self.constraints] - @params.setter - def params(self, params): - self._set_params(params) - - @property - def params_transformed(self): - """ - Returns a **copy** of parameters in transformed space - - :see_also: :py:func:`GPy.core.Parameterised.params` - """ - return self._get_params_transformed() - - @params_transformed.setter - def params_transformed(self, params): - self._set_params_transformed(params) - - _get_set_deprecation = """get and set methods wont be available at next minor release - in the next releases you will get and set with following syntax: - Assume m is a model class: - print m['var'] # > prints all parameters matching 'var' - m['var'] = 2. # > sets all parameters matching 'var' to 2.
- m['var'] = # > sets parameters matching 'var' to - """ - def get(self, regexp): - warnings.warn(self._get_set_deprecation, FutureWarning, stacklevel=2) - return self[regexp] - - def set(self, regexp, val): - warnings.warn(self._get_set_deprecation, FutureWarning, stacklevel=2) - self[regexp] = val + def setstate(self, state): + self.constraints = state.pop() + self.constrained_indices = state.pop() + self.fixed_values = state.pop() + self.fixed_indices = state.pop() + self.tied_indices = state.pop() def __getitem__(self, regexp, return_names=False): """ @@ -95,13 +106,16 @@ class Parameterised(object): if len(matches): val = np.array(val) assert (val.size == 1) or val.size == len(matches), "Shape mismatch: {}:({},)".format(val.size, len(matches)) - x = self.params + x = self._get_params() x[matches] = val - self.params = x + self._set_params(x) else: raise AttributeError, "no parameter matches %s" % name def tie_params(self, regexp): + """ + Tie (all!) parameters matching the regular expression `regexp`. + """ matches = self.grep_param_names(regexp) assert matches.size > 0, "need at least something to tie together" if len(self.tied_indices): @@ -154,7 +168,7 @@ class Parameterised(object): return len(self._get_params()) - removed def unconstrain(self, regexp): - """Unconstrain matching parameters. does not untie parameters""" + """Unconstrain matching parameters. Does not untie parameters""" matches = self.grep_param_names(regexp) # tranformed contraints: @@ -181,7 +195,7 @@ class Parameterised(object): def constrain_negative(self, regexp): """ Set negative constraints. """ - self.constrain(regexp, transformations.negative_exponent()) + self.constrain(regexp, transformations.negative_logexp()) def constrain_positive(self, regexp): """ Set positive constraints. """ @@ -217,16 +231,19 @@ class Parameterised(object): def constrain_fixed(self, regexp, value=None): """ - Arguments - --------- - :param regexp: np.array(dtype=int), or regular expression object or string - :param value: a float to fix the matched values to. If the value is not specified, - the parameter is fixed to the current value - Notes - ----- + :param regexp: which parameters need to be fixed. + :type regexp: ndarray(dtype=int) or regular expression object or string + :param value: the value to fix the parameters to. If the value is not specified, + the parameter is fixed to the current value + :type value: float + + **Notes** + Fixing a parameter which is tied to another, or constrained in some way will result in an error. - To fix multiple parameters to the same value, simply pass a regular expression which matches both parameter names, or pass both of the indexes + + To fix multiple parameters to the same value, simply pass a regular expression which matches both parameter names, or pass both of the indexes. + """ matches = self.grep_param_names(regexp) overlap = set(matches).intersection(set(self.all_constrained_indices())) @@ -321,19 +338,30 @@ class Parameterised(object): n = [nn for i, nn in enumerate(n) if not i in remove] return n + #@property + #def all(self): + # return self.__str__(self._get_param_names()) + + + #def __str__(self, names=None, nw=30): def __str__(self, nw=30): """ Return a string describing the parameter names and their ties and constraints """ names = self._get_param_names() + #if names is None: + # names = self._get_print_names() + #name_indices = self.grep_param_names("|".join(names)) N = len(names) if not N: return "This object has no free parameters."
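For reference, the regexp machinery above is what drives the parameter interface used throughout these examples; a hypothetical session (m is any GPy model):

m['.*len']                          # print every parameter whose name matches '.*len'
m['.*len'] = 10.                    # set all matching parameters to 10.
m.tie_params('.*len')               # make matching parameters share a single value
m.constrain_positive('.*variance')  # constrain via the logexp transform
m.constrain_fixed('white', 1e-4)    # fix matching parameters at a given value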
header = ['Name', 'Value', 'Constraints', 'Ties'] values = self._get_params() # map(str,self._get_params()) + #values = self._get_params()[name_indices] # map(str,self._get_params()) # sort out the constraints constraints = [''] * len(names) + #constraints = [''] * len(self._get_param_names()) for i, t in zip(self.constrained_indices, self.constraints): for ii in i: constraints[ii] = t.__str__() @@ -346,7 +374,10 @@ class Parameterised(object): for j in tie: ties[j] = '(' + str(i) + ')' - values = ['%.4f' % float(v) for v in values] + if values.size == 1: + values = ['%.4f' %float(values)] + else: + values = ['%.4f' % float(v) for v in values] max_names = max([len(names[i]) for i in range(len(names))] + [len(header[0])]) max_values = max([len(values[i]) for i in range(len(values))] + [len(header[1])]) max_constraint = max([len(constraints[i]) for i in range(len(constraints))] + [len(header[2])]) @@ -361,3 +392,77 @@ class Parameterised(object): return ('\n'.join([header_string[0], separator] + param_string)) + '\n' + + def grep_model(self,regexp): + regexp_indices = self.grep_param_names(regexp) + all_names = self._get_param_names() + + names = [all_names[pj] for pj in regexp_indices] + N = len(names) + + if not N: + return "Match not found." + + header = ['Name', 'Value', 'Constraints', 'Ties'] + all_values = self._get_params() + values = np.array([all_values[pj] for pj in regexp_indices]) + constraints = [''] * len(names) + + _constrained_indices,aux = self._pick_elements(regexp_indices,self.constrained_indices) + _constraints = [self.constraints[pj] for pj in aux] + + for i, t in zip(_constrained_indices, _constraints): + for ii in i: + iii = regexp_indices.tolist().index(ii) + constraints[iii] = t.__str__() + + _fixed_indices,aux = self._pick_elements(regexp_indices,self.fixed_indices) + for i in _fixed_indices: + for ii in i: + iii = regexp_indices.tolist().index(ii) + constraints[iii] = 'Fixed' + + _tied_indices,aux = self._pick_elements(regexp_indices,self.tied_indices) + ties = [''] * len(names) + for i,ti in zip(_tied_indices,aux): + for ii in i: + iii = regexp_indices.tolist().index(ii) + ties[iii] = '(' + str(ti) + ')' + + if values.size == 1: + values = ['%.4f' %float(values)] + else: + values = ['%.4f' % float(v) for v in values] + + max_names = max([len(names[i]) for i in range(len(names))] + [len(header[0])]) + max_values = max([len(values[i]) for i in range(len(values))] + [len(header[1])]) + max_constraint = max([len(constraints[i]) for i in range(len(constraints))] + [len(header[2])]) + max_ties = max([len(ties[i]) for i in range(len(ties))] + [len(header[3])]) + cols = np.array([max_names, max_values, max_constraint, max_ties]) + 4 + + header_string = ["{h:^{col}}".format(h=header[i], col=cols[i]) for i in range(len(cols))] + header_string = map(lambda x: '|'.join(x), [header_string]) + separator = '-' * len(header_string[0]) + param_string = ["{n:^{c0}}|{v:^{c1}}|{c:^{c2}}|{t:^{c3}}".format(n=names[i], v=values[i], c=constraints[i], t=ties[i], c0=cols[0], c1=cols[1], c2=cols[2], c3=cols[3]) for i in range(len(values))] + + print header_string[0] + print separator + for string in param_string: + print string + + def _pick_elements(self,regexp_ind,array_list): + """Keep only the elements of array_list that also appear in regexp_ind""" + new_array_list = [] #New list with elements matching regexp_ind + array_indices = [] #Indices that matches the arrays in new_array_list and array_list + + array_index = 0 + for array in array_list: + _new = [] + for ai in array: + if ai in
regexp_ind: + _new.append(ai) + if len(_new): + new_array_list.append(np.array(_new)) + array_indices.append(array_index) + array_index += 1 + return new_array_list, array_indices diff --git a/GPy/core/sparse_gp.py b/GPy/core/sparse_gp.py index 3183cff0..5e381110 100644 --- a/GPy/core/sparse_gp.py +++ b/GPy/core/sparse_gp.py @@ -5,7 +5,7 @@ import numpy as np import pylab as pb from ..util.linalg import mdot, jitchol, tdot, symmetrify, backsub_both_sides, chol_inv, dtrtrs, dpotrs, dpotri from scipy import linalg -from ..likelihoods import Gaussian +from ..likelihoods import Gaussian, EP,EP_Mixed_Noise from gp_base import GPBase class SparseGP(GPBase): @@ -16,16 +16,17 @@ class SparseGP(GPBase): :type X: np.ndarray (num_data x input_dim) :param likelihood: a likelihood instance, containing the observed data :type likelihood: GPy.likelihood.(Gaussian | EP | Laplace) - :param kernel : the kernel (covariance function). See link kernels + :param kernel: the kernel (covariance function). See link kernels :type kernel: a GPy.kern.kern instance :param X_variance: The uncertainty in the measurements of X (Gaussian variance) :type X_variance: np.ndarray (num_data x input_dim) | None :param Z: inducing inputs (optional, see note) :type Z: np.ndarray (num_inducing x input_dim) | None - :param num_inducing : Number of inducing points (optional, default 10. Ignored if Z is not None) + :param num_inducing: Number of inducing points (optional, default 10. Ignored if Z is not None) :type num_inducing: int - :param normalize_(X|Y) : whether to normalize the data before computing (predictions will be in original scales) + :param normalize_(X|Y): whether to normalize the data before computing (predictions will be in original scales) :type normalize_(X|Y): bool + """ def __init__(self, X, likelihood, kernel, Z, X_variance=None, normalize_X=False): @@ -33,10 +34,10 @@ class SparseGP(GPBase): self.Z = Z self.num_inducing = Z.shape[0] - self.likelihood = likelihood if X_variance is None: self.has_uncertain_inputs = False + self.X_variance = None else: assert X_variance.shape == X.shape self.has_uncertain_inputs = True @@ -49,6 +50,8 @@ class SparseGP(GPBase): if self.has_uncertain_inputs: self.X_variance /= np.square(self._Xscale) + self._const_jitter = None + def _compute_kernel_matrices(self): # kernel computations, using BGPLVM notation self.Kmm = self.kern.K(self.Z) @@ -62,11 +65,13 @@ class SparseGP(GPBase): self.psi2 = None def _computations(self): + if self._const_jitter is None or not(self._const_jitter.shape[0] == self.num_inducing): + self._const_jitter = np.eye(self.num_inducing) * 1e-7 # factor Kmm - self.Lm = jitchol(self.Kmm) + self._Lm = jitchol(self.Kmm + self._const_jitter) - # The rather complex computations of self.A + # The rather complex computations of self._A if self.has_uncertain_inputs: if self.likelihood.is_heteroscedastic: psi2_beta = (self.psi2 * (self.likelihood.precision.flatten().reshape(self.num_data, 1, 1))).sum(0) @@ -74,44 +79,48 @@ class SparseGP(GPBase): psi2_beta = self.psi2.sum(0) * self.likelihood.precision evals, evecs = linalg.eigh(psi2_beta) clipped_evals = np.clip(evals, 0., 1e6) # TODO: make clipping configurable + if not np.array_equal(evals, clipped_evals): + pass # print evals tmp = evecs * np.sqrt(clipped_evals) tmp = tmp.T else: if self.likelihood.is_heteroscedastic: - tmp = self.psi1 * (np.sqrt(self.likelihood.precision.flatten().reshape(self.num_data,1))) + tmp = self.psi1 * (np.sqrt(self.likelihood.precision.flatten().reshape(self.num_data, 1))) else: tmp = 
self.psi1 * (np.sqrt(self.likelihood.precision)) - tmp, _ = dtrtrs(self.Lm, np.asfortranarray(tmp.T), lower=1) - self.A = tdot(tmp) - + tmp, _ = dtrtrs(self._Lm, np.asfortranarray(tmp.T), lower=1) + self._A = tdot(tmp) # factor B - self.B = np.eye(self.num_inducing) + self.A + self.B = np.eye(self.num_inducing) + self._A self.LB = jitchol(self.B) - # TODO: make a switch for either first compute psi1V, or VV.T - self.psi1V = np.dot(self.psi1.T, self.likelihood.V) + # VVT_factor is a matrix such that tdot(VVT_factor) = VVT...this is for efficiency! + self.psi1Vf = np.dot(self.psi1.T, self.likelihood.VVT_factor) - # back substutue C into psi1V - tmp, info1 = dtrtrs(self.Lm, np.asfortranarray(self.psi1V), lower=1, trans=0) - self._LBi_Lmi_psi1V, _ = dtrtrs(self.LB, np.asfortranarray(tmp), lower=1, trans=0) - tmp, info2 = dpotrs(self.LB, tmp, lower=1) - self.Cpsi1V, info3 = dtrtrs(self.Lm, tmp, lower=1, trans=1) + # back substitute C into psi1Vf + tmp, info1 = dtrtrs(self._Lm, np.asfortranarray(self.psi1Vf), lower=1, trans=0) + self._LBi_Lmi_psi1Vf, _ = dtrtrs(self.LB, np.asfortranarray(tmp), lower=1, trans=0) + # tmp, info2 = dpotrs(self.LB, tmp, lower=1) + tmp, info2 = dtrtrs(self.LB, self._LBi_Lmi_psi1Vf, lower=1, trans=1) + self.Cpsi1Vf, info3 = dtrtrs(self._Lm, tmp, lower=1, trans=1) # Compute dL_dKmm - tmp = tdot(self._LBi_Lmi_psi1V) + tmp = tdot(self._LBi_Lmi_psi1Vf) + self.data_fit = np.trace(tmp) self.DBi_plus_BiPBi = backsub_both_sides(self.LB, self.output_dim * np.eye(self.num_inducing) + tmp) tmp = -0.5 * self.DBi_plus_BiPBi tmp += -0.5 * self.B * self.output_dim tmp += self.output_dim * np.eye(self.num_inducing) - self.dL_dKmm = backsub_both_sides(self.Lm, tmp) + self.dL_dKmm = backsub_both_sides(self._Lm, tmp) # Compute dL_dpsi # FIXME: this is untested for the heterscedastic + uncertain inputs case self.dL_dpsi0 = -0.5 * self.output_dim * (self.likelihood.precision * np.ones([self.num_data, 1])).flatten() - self.dL_dpsi1 = np.dot(self.Cpsi1V, self.likelihood.V.T).T - dL_dpsi2_beta = 0.5 * backsub_both_sides(self.Lm, self.output_dim * np.eye(self.num_inducing) - self.DBi_plus_BiPBi) + self.dL_dpsi1 = np.dot(self.likelihood.VVT_factor, self.Cpsi1Vf.T) + dL_dpsi2_beta = 0.5 * backsub_both_sides(self._Lm, self.output_dim * np.eye(self.num_inducing) - self.DBi_plus_BiPBi) if self.likelihood.is_heteroscedastic: + if self.has_uncertain_inputs: self.dL_dpsi2 = self.likelihood.precision.flatten()[:, None, None] * dL_dpsi2_beta[None, :, :] else: @@ -129,27 +138,45 @@ class SparseGP(GPBase): # the partial derivative vector for the likelihood - if self.likelihood.Nparams == 0: + if self.likelihood.num_params == 0: # save computation here.
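In matrix terms, the block above forms A = beta * Lm^-1 Psi1' Psi1 Lm^-T and B = I + A for the homoscedastic case; the following dense numpy sketch mirrors those quantities without the LAPACK shortcuts (illustration only, not the class's code path):

import numpy as np

def bound_factors(Kmm, psi1, beta, jitter=1e-7):
    # Kmm: (M x M) inducing covariance, psi1: (N x M), beta: scalar noise precision
    M = Kmm.shape[0]
    Lm = np.linalg.cholesky(Kmm + jitter * np.eye(M))
    tmp = np.linalg.solve(Lm, (psi1 * np.sqrt(beta)).T)  # Lm^-1 Psi1' sqrt(beta)
    A = tmp.dot(tmp.T)
    B = np.eye(M) + A
    LB = np.linalg.cholesky(B)
    # per-output log-determinant piece of the collapsed bound (term C in log_likelihood)
    return A, B, -np.sum(np.log(np.diag(LB)))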
self.partial_for_likelihood = None elif self.likelihood.is_heteroscedastic: - raise NotImplementedError, "heteroscedatic derivates not implemented" + + if self.has_uncertain_inputs: + raise NotImplementedError, "heteroscedastic derivatives with uncertain inputs not implemented" + + else: + + LBi = chol_inv(self.LB) + Lmi_psi1, nil = dtrtrs(self._Lm, np.asfortranarray(self.psi1.T), lower=1, trans=0) + _LBi_Lmi_psi1, _ = dtrtrs(self.LB, np.asfortranarray(Lmi_psi1), lower=1, trans=0) + + + self.partial_for_likelihood = -0.5 * self.likelihood.precision + 0.5 * self.likelihood.V**2 + self.partial_for_likelihood += 0.5 * self.output_dim * (self.psi0 - np.sum(Lmi_psi1**2,0))[:,None] * self.likelihood.precision**2 + + self.partial_for_likelihood += 0.5*np.sum(mdot(LBi.T,LBi,Lmi_psi1)*Lmi_psi1,0)[:,None]*self.likelihood.precision**2 + + self.partial_for_likelihood += -np.dot(self._LBi_Lmi_psi1Vf.T,_LBi_Lmi_psi1).T * self.likelihood.Y * self.likelihood.precision**2 + self.partial_for_likelihood += 0.5*np.dot(self._LBi_Lmi_psi1Vf.T,_LBi_Lmi_psi1).T**2 * self.likelihood.precision**2 + else: - # likelihood is not heterscedatic + # likelihood is not heteroscedastic self.partial_for_likelihood = -0.5 * self.num_data * self.output_dim * self.likelihood.precision + 0.5 * self.likelihood.trYYT * self.likelihood.precision ** 2 - self.partial_for_likelihood += 0.5 * self.output_dim * (self.psi0.sum() * self.likelihood.precision ** 2 - np.trace(self.A) * self.likelihood.precision) - self.partial_for_likelihood += self.likelihood.precision * (0.5 * np.sum(self.A * self.DBi_plus_BiPBi) - np.sum(np.square(self._LBi_Lmi_psi1V))) + self.partial_for_likelihood += 0.5 * self.output_dim * (self.psi0.sum() * self.likelihood.precision ** 2 - np.trace(self._A) * self.likelihood.precision) + self.partial_for_likelihood += self.likelihood.precision * (0.5 * np.sum(self._A * self.DBi_plus_BiPBi) - self.data_fit) def log_likelihood(self): """ Compute the (lower bound on the) log marginal likelihood """ if self.likelihood.is_heteroscedastic: A = -0.5 * self.num_data * self.output_dim * np.log(2.*np.pi) + 0.5 * np.sum(np.log(self.likelihood.precision)) - 0.5 * np.sum(self.likelihood.V * self.likelihood.Y) - B = -0.5 * self.output_dim * (np.sum(self.likelihood.precision.flatten() * self.psi0) - np.trace(self.A)) + B = -0.5 * self.output_dim * (np.sum(self.likelihood.precision.flatten() * self.psi0) - np.trace(self._A)) else: A = -0.5 * self.num_data * self.output_dim * (np.log(2.*np.pi) - np.log(self.likelihood.precision)) - 0.5 * self.likelihood.precision * self.likelihood.trYYT - B = -0.5 * self.output_dim * (np.sum(self.likelihood.precision * self.psi0) - np.trace(self.A)) + B = -0.5 * self.output_dim * (np.sum(self.likelihood.precision * self.psi0) - np.trace(self._A)) C = -self.output_dim * (np.sum(np.log(np.diag(self.LB)))) # + 0.5 * self.num_inducing * np.log(sf2)) - D = 0.5 * np.sum(np.square(self._LBi_Lmi_psi1V)) + D = 0.5 * self.data_fit return A + B + C + D + self.likelihood.Z def _set_params(self, p): @@ -158,15 +185,19 @@ class SparseGP(GPBase): self.likelihood._set_params(p[self.Z.size + self.kern.num_params:]) self._compute_kernel_matrices() self._computations() + self.Cpsi1V = None def _get_params(self): return np.hstack([self.Z.flatten(), self.kern._get_params_transformed(), self.likelihood._get_params()]) def _get_param_names(self): - return sum([['iip_%i_%i' % (i, j) for j in range(self.Z.shape[1])] for i in range(self.Z.shape[0])],[])\ + return sum([['iip_%i_%i' % (i, j) for j in range(self.Z.shape[1])] for i in
range(self.Z.shape[0])], [])\ + self.kern._get_param_names_transformed() + self.likelihood._get_param_names() - def update_likelihood_approximation(self): + #def _get_print_names(self): + # return self.kern._get_param_names_transformed() + self.likelihood._get_param_names() + + def update_likelihood_approximation(self, **kwargs): """ Approximates a non-gaussian likelihood using Expectation Propagation @@ -176,14 +207,14 @@ class SparseGP(GPBase): if not isinstance(self.likelihood, Gaussian): # Updates not needed for Gaussian likelihood self.likelihood.restart() if self.has_uncertain_inputs: - Lmi = chol_inv(self.Lm) + Lmi = chol_inv(self._Lm) Kmmi = tdot(Lmi.T) diag_tr_psi2Kmmi = np.array([np.trace(psi2_Kmmi) for psi2_Kmmi in np.dot(self.psi2, Kmmi)]) - self.likelihood.fit_FITC(self.Kmm, self.psi1.T, diag_tr_psi2Kmmi) # This uses the fit_FITC code, but does not perfomr a FITC-EP.#TODO solve potential confusion + self.likelihood.fit_FITC(self.Kmm, self.psi1.T, diag_tr_psi2Kmmi, **kwargs) # This uses the fit_FITC code, but does not perform a FITC-EP. #TODO solve potential confusion # raise NotImplementedError, "EP approximation not implemented for uncertain inputs" else: - self.likelihood.fit_DTC(self.Kmm, self.psi1.T) + self.likelihood.fit_DTC(self.Kmm, self.psi1.T, **kwargs) # self.likelihood.fit_FITC(self.Kmm,self.psi1,self.psi0) self._set_params(self._get_params()) # update the GP @@ -209,7 +240,7 @@ class SparseGP(GPBase): """ The derivative of the bound wrt the inducing inputs Z """ - dL_dZ = 2.*self.kern.dK_dX(self.dL_dKmm, self.Z) # factor of two becase of vertical and horizontal 'stripes' in dKmm_dZ + dL_dZ = self.kern.dK_dX(self.dL_dKmm, self.Z) if self.has_uncertain_inputs: dL_dZ += self.kern.dpsi1_dZ(self.dL_dpsi1, self.Z, self.X, self.X_variance) dL_dZ += self.kern.dpsi2_dZ(self.dL_dpsi2, self.Z, self.X, self.X_variance) @@ -218,11 +249,20 @@ class SparseGP(GPBase): return dL_dZ def _raw_predict(self, Xnew, X_variance_new=None, which_parts='all', full_cov=False): - """Internal helper function for making predictions, does not account for normalization""" + """ + Internal helper function for making predictions, does not account for + normalization or likelihood function + """ - Bi, _ = dpotri(self.LB, lower=0) # WTH? this lower switch should be 1, but that doesn't work! + Bi, _ = dpotri(self.LB, lower=0) # WTH? this lower switch should be 1, but that doesn't work!
symmetrify(Bi) - Kmmi_LmiBLmi = backsub_both_sides(self.Lm, np.eye(self.num_inducing) - Bi) + Kmmi_LmiBLmi = backsub_both_sides(self._Lm, np.eye(self.num_inducing) - Bi) + + if self.Cpsi1V is None: + psi1V = np.dot(self.psi1.T, self.likelihood.V) + tmp, _ = dtrtrs(self._Lm, np.asfortranarray(psi1V), lower=1, trans=0) + tmp, _ = dpotrs(self.LB, tmp, lower=1) + self.Cpsi1V, _ = dtrtrs(self._Lm, tmp, lower=1, trans=1) if X_variance_new is None: Kx = self.kern.K(self.Z, Xnew, which_parts=which_parts) @@ -234,7 +274,7 @@ class SparseGP(GPBase): Kxx = self.kern.Kdiag(Xnew, which_parts=which_parts) var = Kxx - np.sum(Kx * np.dot(Kmmi_LmiBLmi, Kx), 0) else: - # assert which_p.Tarts=='all', "swithching out parts of variational kernels is not implemented" + # assert which_parts=='all', "switching out parts of variational kernels is not implemented" Kx = self.kern.psi1(self.Z, Xnew, X_variance_new) # , which_parts=which_parts) TODO: which_parts mu = np.dot(Kx, self.Cpsi1V) if full_cov: @@ -246,19 +286,19 @@ class SparseGP(GPBase): return mu, var[:, None] - def predict(self, Xnew, X_variance_new=None, which_parts='all', full_cov=False): + def predict(self, Xnew, X_variance_new=None, which_parts='all', full_cov=False, **likelihood_args): """ Predict the function(s) at the new point(s) Xnew. - Arguments - --------- + **Arguments** + :param Xnew: The points at which to make a prediction :type Xnew: np.ndarray, Nnew x self.input_dim :param X_variance_new: The uncertainty in the prediction points :type X_variance_new: np.ndarray, Nnew x self.input_dim :param which_parts: specifies which outputs kernel(s) to use in prediction :type which_parts: ('all', list of bools) - :param full_cov: whether to return the folll covariance matrix, or just the diagonal + :param full_cov: whether to return the full covariance matrix, or just the diagonal :type full_cov: bool :rtype: posterior mean, a Numpy array, Nnew x self.input_dim :rtype: posterior variance, a Numpy array, Nnew x 1 if full_cov=False, Nnew x Nnew otherwise @@ -278,22 +318,52 @@ class SparseGP(GPBase): mu, var = self._raw_predict(Xnew, X_variance_new, full_cov=full_cov, which_parts=which_parts) # now push through likelihood - mean, var, _025pm, _975pm = self.likelihood.predictive_values(mu, var, full_cov) + mean, var, _025pm, _975pm = self.likelihood.predictive_values(mu, var, full_cov, **likelihood_args) return mean, var, _025pm, _975pm - def plot(self, samples=0, plot_limits=None, which_data='all', which_parts='all', resolution=None, levels=20, fignum=None, ax=None): + + def plot_f(self, samples=0, plot_limits=None, which_data_rows='all', + which_data_ycols='all', which_parts='all', resolution=None, + full_cov=False, fignum=None, ax=None): + + """ + Plot the GP's view of the world, where the data is normalized and the likelihood is Gaussian. + - In one dimension, the function is plotted with a shaded region identifying two standard deviations. + - In two dimensions, a contour-plot shows the mean predicted function + - Not implemented in higher dimensions + + :param samples: the number of a posteriori samples to plot + :param plot_limits: The limits of the plot. If 1D [xmin,xmax], if 2D [[xmin,ymin],[xmax,ymax]]. Defaults to data limits + :param which_data_rows: which of the training data to plot (default all) + :type which_data_rows: 'all' or a slice object to slice self.X, self.Y + :param which_parts: which of the kernel functions to plot (additively) + :type which_parts: 'all', or list of bools + :param resolution: the number of intervals to sample the GP on.
Defaults to 200 in 1D and 50 (a 50x50 grid) in 2D + :type resolution: int + :param full_cov: + :type full_cov: bool + :param fignum: figure to plot on. + :type fignum: figure number + :param ax: axes to plot on. + :type ax: axes handle + + :param output: which output to plot (for multiple output models only) + :type output: integer (first output is 0) + """ if ax is None: fig = pb.figure(num=fignum) ax = fig.add_subplot(111) + if fignum is None and ax is None: + fignum = fig.num + if which_data_rows is 'all': + which_data_rows = slice(None) - if which_data is 'all': - which_data = slice(None) + GPBase.plot_f(self, samples=samples, plot_limits=plot_limits, which_data_rows=which_data_rows, which_data_ycols=which_data_ycols, which_parts=which_parts, resolution=resolution, fignum=fignum, ax=ax) - GPBase.plot(self, samples=0, plot_limits=None, which_data='all', which_parts='all', resolution=None, levels=20, ax=ax) if self.X.shape[1] == 1: if self.has_uncertain_inputs: - Xu = self.X * self._Xscale + self._Xoffset # NOTE self.X are the normalized values now + Xu = self.X * self._Xscale + self._Xoffset # NOTE self.X are the normalized values now ax.errorbar(Xu[which_data, 0], self.likelihood.data[which_data, 0], xerr=2 * np.sqrt(self.X_variance[which_data, 0]), ecolor='k', fmt=None, elinewidth=.5, alpha=.5) @@ -303,3 +373,99 @@ class SparseGP(GPBase): elif self.X.shape[1] == 2: Zu = self.Z * self._Xscale + self._Xoffset ax.plot(Zu[:, 0], Zu[:, 1], 'wo') + + else: + raise NotImplementedError, "Cannot define a frame with more than two input dimensions" + + def plot(self, plot_limits=None, which_data_rows='all', + which_data_ycols='all', which_parts='all', fixed_inputs=[], + plot_raw=False, + levels=20, samples=0, fignum=None, ax=None, resolution=None): + """ + Plot the posterior of the sparse GP. + - In one dimension, the function is plotted with a shaded region identifying two standard deviations. + - In two dimensions, a contour-plot shows the mean predicted function + - In higher dimensions, use fixed_inputs to plot the GP with some of the inputs fixed. + + Can plot only part of the data and part of the posterior functions + using which_data_rows, which_data_ycols and which_parts + + :param plot_limits: The limits of the plot. If 1D [xmin,xmax], if 2D [[xmin,ymin],[xmax,ymax]]. Defaults to data limits + :type plot_limits: np.array + :param which_data_rows: which of the training data to plot (default all) + :type which_data_rows: 'all' or a slice object to slice self.X, self.Y + :param which_data_ycols: when the data has several columns (independent outputs), only plot these + :type which_data_ycols: 'all' or a list of integers + :param which_parts: which of the kernel functions to plot (additively) + :type which_parts: 'all', or list of bools + :param fixed_inputs: a list of tuple [(i,v), (i,v)...], specifying that input index i should be set to value v. + :type fixed_inputs: a list of tuples + :param resolution: the number of intervals to sample the GP on. Defaults to 200 in 1D and 50 (a 50x50 grid) in 2D + :type resolution: int + :param levels: number of levels to plot in a contour plot. + :type levels: int + :param samples: the number of a posteriori samples to plot + :type samples: int + :param fignum: figure to plot on. + :type fignum: figure number + :param ax: axes to plot on. + :type ax: axes handle + :type output: integer (first output is 0) + :param linecol: color of line to plot.
+ :type linecol: + :param fillcol: color of fill + :param levels: for 2D plotting, the number of contour levels to use + """ + # work out which ax to plot on; if ax is None, create a new figure + if ax is None: + fig = pb.figure(num=fignum) + ax = fig.add_subplot(111) + + #work out what the inputs are for plotting (1D or 2D) + fixed_dims = np.array([i for i,v in fixed_inputs]) + free_dims = np.setdiff1d(np.arange(self.input_dim),fixed_dims) + + #call the base plotting + GPBase.plot(self, samples=samples, plot_limits=plot_limits, + which_data_rows=which_data_rows, + which_data_ycols=which_data_ycols, fixed_inputs=fixed_inputs, + which_parts=which_parts, resolution=resolution, levels=20, + fignum=fignum, ax=ax) + + if len(free_dims) == 1: + #plot errorbars for the uncertain inputs + if self.has_uncertain_inputs: + Xu = self.X * self._Xscale + self._Xoffset # NOTE self.X are the normalized values now + ax.errorbar(Xu[which_data_rows, 0], self.likelihood.data[which_data_rows, 0], + xerr=2 * np.sqrt(self.X_variance[which_data_rows, 0]), + ecolor='k', fmt=None, elinewidth=.5, alpha=.5) + + #plot the inducing inputs + Zu = self.Z * self._Xscale + self._Xoffset + ax.plot(Zu, np.zeros_like(Zu) + ax.get_ylim()[0], 'r|', mew=1.5, markersize=12) + + elif len(free_dims) == 2: + Zu = self.Z * self._Xscale + self._Xoffset + ax.plot(Zu[:, 0], Zu[:, 1], 'wo') + + else: + raise NotImplementedError, "Cannot define a frame with more than two input dimensions" + + def getstate(self): + """ + Get the current state of the class, + here just all the indices, rest can get recomputed + """ + return GPBase.getstate(self) + [self.Z, + self.num_inducing, + self.has_uncertain_inputs, + self.X_variance] + + def setstate(self, state): + self.X_variance = state.pop() + self.has_uncertain_inputs = state.pop() + self.num_inducing = state.pop() + self.Z = state.pop() + GPBase.setstate(self, state) + + diff --git a/GPy/core/svigp.py b/GPy/core/svigp.py index 1db0e26f..fdd95aa8 100644 --- a/GPy/core/svigp.py +++ b/GPy/core/svigp.py @@ -14,35 +14,22 @@ import sys class SVIGP(GPBase): """ + Stochastic Variational inference in a Gaussian Process :param X: inputs - :type X: np.ndarray (N x Q) + :type X: np.ndarray (num_data x num_inputs) :param Y: observed data - :type Y: np.ndarray of observations (N x D) - :param batchsize: the size of a h - - Additional kwargs are used as for a sparse GP. They include - + :type Y: np.ndarray of observations (num_data x output_dim) + :param batchsize: the size of a minibatch :param q_u: canonical parameters of the distribution squasehd into a 1D array :type q_u: np.ndarray - :param M : Number of inducing points (optional, default 10. Ignored if Z is not None) - :type M: int - :param kernel : the kernel/covariance function. See link kernels + :param kernel: the kernel/covariance function. See link kernels :type kernel: a GPy kernel - :param Z: inducing inputs (optional, see note) - :type Z: np.ndarray (M x Q) | None - :param X_uncertainty: The uncertainty in the measurements of X (Gaussian variance) - :type X_uncertainty: np.ndarray (N x Q) | None - :param Zslices: slices for the inducing inputs (see slicing TODO: link) - :param M : Number of inducing points (optional, default 10. Ignored if Z is not None) - :type M: int - :param beta: noise precision.
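The getstate/setstate pairs added above keep pickles small: only irreducible state is stored, and derived matrices are rebuilt through _set_params on load. A sketch of the round trip, assuming m is an existing model:

import cPickle

m.pickle('model.pickle')     # pickle() chooses protocol 0 when getstate() is defined
with open('model.pickle') as f:
    m2 = cPickle.load(f)     # __setstate__ calls setstate(), then _set_params()
                             # recomputes the kernel matrices and factorizations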
TODO> ignore beta if doing EP - :type beta: float - :param normalize_(X|Y) : whether to normalize the data before computing (predictions will be in original scales) - :type normalize_(X|Y): bool - """ + :param Z: inducing inputs + :type Z: np.ndarray (num_inducing x num_inputs) + """ def __init__(self, X, likelihood, kernel, Z, q_u=None, batchsize=10, X_variance=None): GPBase.__init__(self, X, likelihood, kernel, normalize_X=False) @@ -91,6 +78,58 @@ class SVIGP(GPBase): self._param_steplength_trace = [] self._vb_steplength_trace = [] + def getstate(self): + steplength_params = [self.hbar_t, self.tau_t, self.gbar_t, self.gbar_t1, self.gbar_t2, self.hbar_tp, self.tau_tp, self.gbar_tp, self.adapt_param_steplength, self.adapt_vb_steplength, self.vb_steplength, self.param_steplength] + return GPBase.getstate(self) + \ + [self.get_vb_param(), + self.Z, + self.num_inducing, + self.has_uncertain_inputs, + self.X_variance, + self.X_batch, + self.X_variance_batch, + steplength_params, + self.batchcounter, + self.batchsize, + self.epochs, + self.momentum, + self.data_prop, + self._param_trace, + self._param_steplength_trace, + self._vb_steplength_trace, + self._ll_trace, + self._grad_trace, + self.Y, + self._permutation, + self.iterations + ] + + def setstate(self, state): + self.iterations = state.pop() + self._permutation = state.pop() + self.Y = state.pop() + self._grad_trace = state.pop() + self._ll_trace = state.pop() + self._vb_steplength_trace = state.pop() + self._param_steplength_trace = state.pop() + self._param_trace = state.pop() + self.data_prop = state.pop() + self.momentum = state.pop() + self.epochs = state.pop() + self.batchsize = state.pop() + self.batchcounter = state.pop() + steplength_params = state.pop() + (self.hbar_t, self.tau_t, self.gbar_t, self.gbar_t1, self.gbar_t2, self.hbar_tp, self.tau_tp, self.gbar_tp, self.adapt_param_steplength, self.adapt_vb_steplength, self.vb_steplength, self.param_steplength) = steplength_params + self.X_variance_batch = state.pop() + self.X_batch = state.pop() + self.X_variance = state.pop() + self.has_uncertain_inputs = state.pop() + self.num_inducing = state.pop() + self.Z = state.pop() + vb_param = state.pop() + GPBase.setstate(self, state) + self.set_vb_param(vb_param) + def _compute_kernel_matrices(self): # kernel computations, using BGPLVM notation self.Kmm = self.kern.K(self.Z) @@ -166,7 +205,7 @@ class SVIGP(GPBase): psi2_beta = (self.psi2 * (self.likelihood.precision.flatten().reshape(self.batchsize, 1, 1))).sum(0) else: psi2_beta = self.psi2.sum(0) * self.likelihood.precision - evals, evecs = linalg.eigh(psi2_beta) + evals, evecs = np.linalg.eigh(psi2_beta) clipped_evals = np.clip(evals, 0., 1e6) # TODO: make clipping configurable tmp = evecs * np.sqrt(clipped_evals) else: @@ -296,8 +335,8 @@ class SVIGP(GPBase): #callback if i and not i%callback_interval: - callback() - time.sleep(0.1) + callback(self) # Change this to callback() + time.sleep(0.01) if self.epochs > 10: self._adapt_steplength() @@ -313,13 +352,13 @@ class SVIGP(GPBase): assert self.vb_steplength > 0 if self.adapt_param_steplength: - # self._adaptive_param_steplength() + self._adaptive_param_steplength() # self._adaptive_param_steplength_log() - self._adaptive_param_steplength_from_vb() + # self._adaptive_param_steplength_from_vb() self._param_steplength_trace.append(self.param_steplength) def _adaptive_param_steplength(self): - decr_factor = 0.1 + decr_factor = 0.02 g_tp = self._transform_gradients(self._log_likelihood_gradients()) self.gbar_tp = 
(1-1/self.tau_tp)*self.gbar_tp + 1/self.tau_tp * g_tp self.hbar_tp = (1-1/self.tau_tp)*self.hbar_tp + 1/self.tau_tp * np.dot(g_tp.T, g_tp) @@ -353,7 +392,7 @@ class SVIGP(GPBase): self.tau_t = self.tau_t*(1-self.vb_steplength) + 1 def _adaptive_vb_steplength_KL(self): - decr_factor = 1 #0.1 + decr_factor = 0.1 natgrad = self.vb_grad_natgrad() g_t1 = natgrad[0] g_t2 = natgrad[1] @@ -393,7 +432,7 @@ class SVIGP(GPBase): else: return mu, diag_var[:,None] - def predict(self, Xnew, X_variance_new=None, which_parts='all', full_cov=False): + def predict(self, Xnew, X_variance_new=None, which_parts='all', full_cov=False, sampling=False, num_samples=15000): # normalize X values Xnew = (Xnew.copy() - self._Xoffset) / self._Xscale if X_variance_new is not None: @@ -403,7 +442,7 @@ class SVIGP(GPBase): mu, var = self._raw_predict(Xnew, X_variance_new, full_cov=full_cov, which_parts=which_parts) # now push through likelihood - mean, var, _025pm, _975pm = self.likelihood.predictive_values(mu, var, full_cov) + mean, var, _025pm, _975pm = self.likelihood.predictive_values(mu, var, full_cov, sampling=sampling, num_samples=num_samples) return mean, var, _025pm, _975pm @@ -449,7 +488,7 @@ class SVIGP(GPBase): ax.plot(Zu, np.zeros_like(Zu) + Z_height, 'r|', mew=1.5, markersize=12) if self.input_dim==2: - ax.scatter(self.X_all[:,0], self.X_all[:,1], 20., self.Y[:,0], linewidth=0, cmap=pb.cm.jet) + ax.scatter(self.X[:,0], self.X[:,1], 20., self.Y[:,0], linewidth=0, cmap=pb.cm.jet) ax.plot(Zu[:,0], Zu[:,1], 'w^') def plot_traces(self): diff --git a/GPy/core/transformations.py b/GPy/core/transformations.py index 2520a33b..59c6a563 100644 --- a/GPy/core/transformations.py +++ b/GPy/core/transformations.py @@ -4,6 +4,8 @@ import numpy as np from GPy.core.domains import POSITIVE, NEGATIVE, BOUNDED +import sys +lim_val = -np.log(sys.float_info.epsilon) class transformation(object): domain = None @@ -16,27 +18,43 @@ class transformation(object): def gradfactor(self, f): """ df_dx evaluated at self.f(x)=f""" raise NotImplementedError + def initialize(self, f): - """ produce a sensible initial values for f(x)""" + """ produce a sensible initial value for f(x)""" raise NotImplementedError + def __str__(self): raise NotImplementedError class logexp(transformation): domain = POSITIVE def f(self, x): - return np.log(1. + np.exp(x)) + return np.where(x>lim_val, x, np.log(1. + np.exp(x))) def finv(self, f): - return np.log(np.exp(f) - 1.) + return np.where(f>lim_val, f, np.log(np.exp(f) - 1.)) def gradfactor(self, f): - ef = np.exp(f) - return (ef - 1.) 
/ ef + return np.where(f>lim_val, 1., 1 - np.exp(-f)) def initialize(self, f): + if np.any(f < 0.): + print "Warning: changing parameters to satisfy constraints" return np.abs(f) def __str__(self): return '(+ve)' -class logexp_clipped(transformation): +class negative_logexp(transformation): + domain = NEGATIVE + def f(self, x): + return -logexp.f(x) + def finv(self, f): + return logexp.finv(-f) + def gradfactor(self, f): + return -logexp.gradfactor(-f) + def initialize(self, f): + return -logexp.initialize(f) + def __str__(self): + return '(-ve)' + +class logexp_clipped(logexp): max_bound = 1e100 min_bound = 1e-10 log_max_bound = np.log(max_bound) @@ -66,7 +84,7 @@ class logexp_clipped(transformation): class exponent(transformation): domain = POSITIVE def f(self, x): - return np.exp(x) + return np.where(x<lim_val, np.where(x>-lim_val, np.exp(x), np.exp(-lim_val)), np.exp(lim_val)) def finv(self, x): return np.log(x) def gradfactor(self, f): @@ -78,18 +96,16 @@ class exponent(transformation): def __str__(self): return '(+ve)' -class negative_exponent(transformation): +class negative_exponent(exponent): domain = NEGATIVE def f(self, x): - return -np.exp(x) - def finv(self, x): - return np.log(-x) + return -exponent.f(x) + def finv(self, f): + return exponent.finv(-f) def gradfactor(self, f): return f def initialize(self, f): - if np.any(f > 0.): - print "Warning: changing parameters to satisfy constraints" - return -np.abs(f) + return -exponent.initialize(f) #np.abs(f) def __str__(self): return '(-ve)' diff --git a/GPy/core/variational.py b/GPy/core/variational.py new file mode 100644 index 00000000..74287dcf --- /dev/null +++ b/GPy/core/variational.py @@ -0,0 +1,19 @@ +''' +Created on 6 Nov 2013 + +@author: maxz +''' +from parameterized import Parameterized +from parameter import Param + +class Normal(Parameterized): + ''' + Normal distribution for variational approximations. + + holds the means and variances for a factorizing multivariate normal distribution + ''' + def __init__(self, name, means, variances): + Parameterized.__init__(self, name=name) + self.means = Param("mean", means) + self.variances = Param('variance', variances) + self.add_parameters(self.means, self.variances) \ No newline at end of file diff --git a/GPy/examples/classification.py b/GPy/examples/classification.py index c7daa26b..f9aaddd1 100644 --- a/GPy/examples/classification.py +++ b/GPy/examples/classification.py @@ -6,66 +6,48 @@ Gaussian Processes classification """ import pylab as pb -import numpy as np import GPy default_seed = 10000 -def crescent_data(seed=default_seed): # FIXME - """Run a Gaussian process classification on the crescent data. The demonstration calls the basic GP classification model and uses EP to approximate the likelihood.
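The lim_val guard introduced in transformations.py above exploits the identity log(1 + exp(x)) = x + log(1 + exp(-x)): once x exceeds -log(eps), the correction term is below machine precision, so returning x directly avoids overflowing exp. A quick standalone check (illustration only):

import sys
import numpy as np

lim_val = -np.log(sys.float_info.epsilon)   # about 36 for float64
x = np.linspace(lim_val - 2., lim_val + 2., 5)
exact = np.log(1. + np.exp(x))              # exp is still finite in this range
print np.allclose(np.where(x > lim_val, x, exact), exact)  # True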
- data = GPy.util.datasets.crescent_data(seed=seed) - Y = data['Y'] - Y[Y.flatten()==-1] = 0 - - m = GPy.models.GPClassification(data['X'], Y) - #m.update_likelihood_approximation() - #m.optimize() - m.pseudo_EM() - print(m) - m.plot() - return m - -def oil(num_inducing=50): - """ - Run a Gaussian process classification on the oil data. The demonstration calls the basic GP classification model and uses EP to approximate the likelihood. """ data = GPy.util.datasets.oil() - X = data['X'][:600,:] - X_test = data['X'][600:,:] - Y = data['Y'][:600, 0:1] + X = data['X'] + Xtest = data['Xtest'] + Y = data['Y'][:, 0:1] + Ytest = data['Ytest'][:, 0:1] Y[Y.flatten()==-1] = 0 - Y_test = data['Y'][600:, 0:1] + Ytest[Ytest.flatten()==-1] = 0 # Create GP model - m = GPy.models.SparseGPClassification(X, Y,num_inducing=num_inducing) + m = GPy.models.SparseGPClassification(X, Y, kernel=kernel, num_inducing=num_inducing) # Contrain all parameters to be positive - m.constrain_positive('') m.tie_params('.*len') m['.*len'] = 10. m.update_likelihood_approximation() # Optimize - m.optimize() + if optimize: + m.optimize(max_iters=max_iters) print(m) #Test - probs = m.predict(X_test)[0] - GPy.util.classification.conf_matrix(probs,Y_test) + probs = m.predict(Xtest)[0] + GPy.util.classification.conf_matrix(probs, Ytest) return m -def toy_linear_1d_classification(seed=default_seed): +def toy_linear_1d_classification(seed=default_seed, optimize=True, plot=True): """ - Simple 1D classification example - :param seed : seed value for data generation (default is 4). + Simple 1D classification example using EP approximation + + :param seed: seed value for data generation (default is 4). :type seed: int + """ data = GPy.util.datasets.toy_linear_1d_classification(seed=seed) @@ -76,24 +58,65 @@ def toy_linear_1d_classification(seed=default_seed): m = GPy.models.GPClassification(data['X'], Y) # Optimize - #m.update_likelihood_approximation() - # Parameters optimization: - #m.optimize() - m.pseudo_EM() + if optimize: + #m.update_likelihood_approximation() + # Parameters optimization: + #m.optimize() + #m.update_likelihood_approximation() + m.pseudo_EM() # Plot - fig, axes = pb.subplots(2,1) - m.plot_f(ax=axes[0]) - m.plot(ax=axes[1]) - print(m) + if plot: + fig, axes = pb.subplots(2, 1) + m.plot_f(ax=axes[0]) + m.plot(ax=axes[1]) + print m return m -def sparse_toy_linear_1d_classification(num_inducing=10,seed=default_seed): +def toy_linear_1d_classification_laplace(seed=default_seed, optimize=True, plot=True): + """ + Simple 1D classification example using Laplace approximation + + :param seed: seed value for data generation (default is 4). + :type seed: int + + """ + + data = GPy.util.datasets.toy_linear_1d_classification(seed=seed) + Y = data['Y'][:, 0:1] + Y[Y.flatten() == -1] = 0 + + bern_noise_model = GPy.likelihoods.bernoulli() + laplace_likelihood = GPy.likelihoods.Laplace(Y.copy(), bern_noise_model) + + # Model definition + m = GPy.models.GPClassification(data['X'], Y, likelihood=laplace_likelihood) + print m + + # Optimize + if optimize: + #m.update_likelihood_approximation() + # Parameters optimization: + m.optimize('bfgs', messages=1) + #m.pseudo_EM() + + # Plot + if plot: + fig, axes = pb.subplots(2, 1) + m.plot_f(ax=axes[0]) + m.plot(ax=axes[1]) + + print m + return m + +def sparse_toy_linear_1d_classification(num_inducing=10, seed=default_seed, optimize=True, plot=True): """ Sparse 1D classification example - :param seed : seed value for data generation (default is 4). 
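With the new optimize/plot/max_iters flags, the demos above can be driven non-interactively, e.g. (a sketch):

import GPy.examples.classification as demos

m1 = demos.oil(num_inducing=50, max_iters=100, plot=False)         # sparse EP on the oil data
m2 = demos.toy_linear_1d_classification(optimize=True, plot=False) # full GP, pseudo-EM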
+ + :param seed: seed value for data generation (default is 4). :type seed: int + """ data = GPy.util.datasets.toy_linear_1d_classification(seed=seed) @@ -101,68 +124,91 @@ def sparse_toy_linear_1d_classification(num_inducing=10,seed=default_seed): Y[Y.flatten() == -1] = 0 # Model definition - m = GPy.models.SparseGPClassification(data['X'], Y,num_inducing=num_inducing) - m['.*len']= 4. + m = GPy.models.SparseGPClassification(data['X'], Y, num_inducing=num_inducing) + m['.*len'] = 4. # Optimize - #m.update_likelihood_approximation() - # Parameters optimization: - #m.optimize() - m.pseudo_EM() + if optimize: + #m.update_likelihood_approximation() + # Parameters optimization: + #m.optimize() + m.pseudo_EM() # Plot - fig, axes = pb.subplots(2,1) - m.plot_f(ax=axes[0]) - m.plot(ax=axes[1]) - print(m) + if plot: + fig, axes = pb.subplots(2, 1) + m.plot_f(ax=axes[0]) + m.plot(ax=axes[1]) + print m return m -def sparse_crescent_data(num_inducing=10, seed=default_seed): +def toy_heaviside(seed=default_seed, optimize=True, plot=True): """ - Run a Gaussian process classification with DTC approxiamtion on the crescent data. The demonstration calls the basic GP classification model and uses EP to approximate the likelihood. + Simple 1D classification example using a Heaviside GP transformation + + :param seed: seed value for data generation (default is 4). + :type seed: int + + """ + + data = GPy.util.datasets.toy_linear_1d_classification(seed=seed) + Y = data['Y'][:, 0:1] + Y[Y.flatten() == -1] = 0 + + # Model definition + noise_model = GPy.likelihoods.bernoulli(GPy.likelihoods.noise_models.gp_transformations.Heaviside()) + likelihood = GPy.likelihoods.EP(Y, noise_model) + m = GPy.models.GPClassification(data['X'], likelihood=likelihood) + + # Optimize + if optimize: + m.update_likelihood_approximation() + # Parameters optimization: + m.optimize() + #m.pseudo_EM() + + # Plot + if plot: + fig, axes = pb.subplots(2, 1) + m.plot_f(ax=axes[0]) + m.plot(ax=axes[1]) + + print m + return m + +def crescent_data(model_type='Full', num_inducing=10, seed=default_seed, kernel=None, optimize=True, plot=True): + """ + Run a Gaussian process classification on the crescent data. The demonstration calls the basic GP classification model and uses EP to approximate the likelihood. :param model_type: type of model to fit ['Full', 'FITC', 'DTC']. - :param seed : seed value for data generation. - :type seed: int - :param inducing : number of inducing variables (only used for 'FITC' or 'DTC'). + :param inducing: number of inducing variables (only used for 'FITC' or 'DTC'). :type inducing: int - """ - - data = GPy.util.datasets.crescent_data(seed=seed) - Y = data['Y'] - Y[Y.flatten()==-1]=0 - - m = GPy.models.SparseGPClassification(data['X'], Y,num_inducing=num_inducing) - m['.*len'] = 10. - #m.update_likelihood_approximation() - #m.optimize() - m.pseudo_EM() - print(m) - m.plot() - return m - -def FITC_crescent_data(num_inducing=10, seed=default_seed): - """ - Run a Gaussian process classification with FITC approximation on the crescent data. The demonstration uses EP to approximate the likelihood. - - :param model_type: type of model to fit ['Full', 'FITC', 'DTC']. - :param seed : seed value for data generation. + :param seed: seed value for data generation. :type seed: int - :param inducing : number of inducing variables (only used for 'FITC' or 'DTC').
- :type num_inducing: int + :param kernel: kernel to use in the model + :type kernel: a GPy kernel """ - data = GPy.util.datasets.crescent_data(seed=seed) Y = data['Y'] - Y[Y.flatten()==-1]=0 + Y[Y.flatten()==-1] = 0 - m = GPy.models.FITCClassification(data['X'], Y,num_inducing=num_inducing) - m.constrain_bounded('.*len',1.,1e3) - m['.*len'] = 3. - #m.update_likelihood_approximation() - #m.optimize() - m.pseudo_EM() - print(m) - m.plot() + if model_type == 'Full': + m = GPy.models.GPClassification(data['X'], Y, kernel=kernel) + + elif model_type == 'DTC': + m = GPy.models.SparseGPClassification(data['X'], Y, kernel=kernel, num_inducing=num_inducing) + m['.*len'] = 10. + + elif model_type == 'FITC': + m = GPy.models.FITCClassification(data['X'], Y, kernel=kernel, num_inducing=num_inducing) + m['.*len'] = 3. + + if optimize: + m.pseudo_EM() + + if plot: + m.plot() + + print m return m diff --git a/GPy/examples/dimensionality_reduction.py b/GPy/examples/dimensionality_reduction.py index 16afe5eb..94bb4955 100644 --- a/GPy/examples/dimensionality_reduction.py +++ b/GPy/examples/dimensionality_reduction.py @@ -1,70 +1,93 @@ # Copyright (c) 2012, GPy authors (see AUTHORS.txt). # Licensed under the BSD 3-clause license (see LICENSE.txt) +import numpy as _np +default_seed = _np.random.seed(123344) -import numpy as np -from matplotlib import pyplot as plt, cm +def bgplvm_test_model(seed=default_seed, optimize=False, verbose=1, plot=False): + """ + Model for testing purposes. Samples from a GP with rbf kernel and learns + the samples with a new kernel. Normally not for optimization, just model checking + """ + from GPy.likelihoods.gaussian import Gaussian + import GPy -import GPy -from GPy.core.transformations import logexp -from GPy.models.bayesian_gplvm import BayesianGPLVM + num_inputs = 13 + num_inducing = 5 + if plot: + output_dim = 1 + input_dim = 2 + else: + input_dim = 2 + output_dim = 25 -default_seed = np.random.seed(123344) - -def BGPLVM(seed=default_seed): - N = 10 - num_inducing = 3 - Q = 2 - D = 4 # generate GPLVM-like data - X = np.random.rand(N, Q) - k = GPy.kern.rbf(Q) + GPy.kern.white(Q, 0.00001) + X = _np.random.rand(num_inputs, input_dim) + lengthscales = _np.random.rand(input_dim) + k = (GPy.kern.rbf(input_dim, .5, lengthscales, ARD=True) + + GPy.kern.white(input_dim, 0.01)) K = k.K(X) - Y = np.random.multivariate_normal(np.zeros(N), K, Q).T + Y = _np.random.multivariate_normal(_np.zeros(num_inputs), K, output_dim).T + lik = Gaussian(Y, normalize=True) - k = GPy.kern.rbf(Q, ARD=True) + GPy.kern.linear(Q, ARD=True) + GPy.kern.rbf(Q, ARD=True) + GPy.kern.white(Q) - # k = GPy.kern.rbf(Q) + GPy.kern.rbf(Q) + GPy.kern.white(Q) - # k = GPy.kern.rbf(Q) + GPy.kern.bias(Q) + GPy.kern.white(Q, 0.00001) - # k = GPy.kern.rbf(Q, ARD = False) + GPy.kern.white(Q, 0.00001) + k = GPy.kern.rbf_inv(input_dim, .5, _np.ones(input_dim) * 2., ARD=True) + GPy.kern.bias(input_dim) + GPy.kern.white(input_dim) + # k = GPy.kern.linear(input_dim) + GPy.kern.bias(input_dim) + GPy.kern.white(input_dim, 0.00001) + # k = GPy.kern.rbf(input_dim, ARD = False) + GPy.kern.white(input_dim, 0.00001) + # k = GPy.kern.rbf(input_dim, .5, _np.ones(input_dim) * 2., ARD=True) + GPy.kern.rbf(input_dim, .3, _np.ones(input_dim) * .2, ARD=True) + # k = GPy.kern.rbf(input_dim, .5, 2., ARD=0) + GPy.kern.rbf(input_dim, .3, .2, ARD=0) + # k = GPy.kern.rbf(input_dim, .5, _np.ones(input_dim) * 2., ARD=True) + GPy.kern.linear(input_dim, _np.ones(input_dim) * .2, ARD=True) - m = GPy.models.BayesianGPLVM(Y, Q, kernel=k,
num_inducing=num_inducing) - # m.constrain_positive('(rbf|bias|noise|white|S)') - # m.constrain_fixed('S', 1) + m = GPy.models.BayesianGPLVM(lik, input_dim, kernel=k, num_inducing=num_inducing) + m.lengthscales = lengthscales - # pb.figure() - # m.plot() - # pb.title('PCA initialisation') - # pb.figure() - # m.optimize(messages = 1) - # m.plot() - # pb.title('After optimisation') - m.randomize() - m.checkgrad(verbose=1) + if plot: + import matplotlib.pyplot as pb + m.plot() + pb.title('PCA initialisation') + + if optimize: + m.optimize('scg', messages=verbose) + if plot: + m.plot() + pb.title('After optimisation') return m -def GPLVM_oil_100(optimize=True): +def gplvm_oil_100(optimize=True, verbose=1, plot=True): + import GPy data = GPy.util.datasets.oil_100() Y = data['X'] - # create simple GP model kernel = GPy.kern.rbf(6, ARD=True) + GPy.kern.bias(6) m = GPy.models.GPLVM(Y, 6, kernel=kernel) m.data_labels = data['Y'].argmax(axis=1) - - # optimize - if optimize: - m.optimize('scg', messages=1) - - # plot - print(m) - m.plot_latent(labels=m.data_labels) + if optimize: m.optimize('scg', messages=verbose) + if plot: m.plot_latent(labels=m.data_labels) return m -def swiss_roll(optimize=True, N=1000, num_inducing=15, Q=4, sigma=.2, plot=False): - from GPy.util.datasets import swiss_roll_generated - from GPy.core.transformations import logexp_clipped +def sparse_gplvm_oil(optimize=True, verbose=0, plot=True, N=100, Q=6, num_inducing=15, max_iters=50): + import GPy + _np.random.seed(0) + data = GPy.util.datasets.oil() + Y = data['X'][:N] + Y = Y - Y.mean(0) + Y /= Y.std(0) + # Create the model + kernel = GPy.kern.rbf(Q, ARD=True) + GPy.kern.bias(Q) + m = GPy.models.SparseGPLVM(Y, Q, kernel=kernel, num_inducing=num_inducing) + m.data_labels = data['Y'][:N].argmax(axis=1) - data = swiss_roll_generated(N=N, sigma=sigma) + if optimize: m.optimize('scg', messages=verbose, max_iters=max_iters) + if plot: + m.plot_latent(labels=m.data_labels) + m.kern.plot_ARD() + return m + +def swiss_roll(optimize=True, verbose=1, plot=True, N=1000, num_inducing=15, Q=4, sigma=.2): + import GPy + from GPy.util.datasets import swiss_roll_generated + from GPy.models import BayesianGPLVM + + data = swiss_roll_generated(num_samples=N, sigma=sigma) Y = data['Y'] Y -= Y.mean() Y /= Y.std() @@ -77,116 +100,98 @@ def swiss_roll(optimize=True, N=1000, num_inducing=15, Q=4, sigma=.2, plot=False iso = Isomap().fit(Y) X = iso.embedding_ if Q > 2: - X = np.hstack((X, np.random.randn(N, Q - 2))) + X = _np.hstack((X, _np.random.randn(N, Q - 2))) except ImportError: - X = np.random.randn(N, Q) + X = _np.random.randn(N, Q) if plot: - from mpl_toolkits import mplot3d - import pylab - fig = pylab.figure("Swiss Roll Data") + import matplotlib.pyplot as plt + from mpl_toolkits.mplot3d import Axes3D # @UnusedImport + fig = plt.figure("Swiss Roll Data") ax = fig.add_subplot(121, projection='3d') ax.scatter(*Y.T, c=c) ax.set_title("Swiss Roll") ax = fig.add_subplot(122) ax.scatter(*X.T[:2], c=c) - ax.set_title("Initialization") - + ax.set_title("BGPLVM init") var = .5 - S = (var * np.ones_like(X) + np.clip(np.random.randn(N, Q) * var ** 2, + S = (var * _np.ones_like(X) + _np.clip(_np.random.randn(N, Q) * var ** 2, - (1 - var), (1 - var))) + .001 - Z = np.random.permutation(X)[:num_inducing] + Z = _np.random.permutation(X)[:num_inducing] - kernel = GPy.kern.rbf(Q, ARD=True) + GPy.kern.bias(Q, np.exp(-2)) + GPy.kern.white(Q, np.exp(-2)) + kernel = GPy.kern.rbf(Q, ARD=True) + GPy.kern.bias(Q, _np.exp(-2)) + GPy.kern.white(Q, _np.exp(-2)) m = 
BayesianGPLVM(Y, Q, X=X, X_variance=S, num_inducing=num_inducing, Z=Z, kernel=kernel) m.data_colors = c m.data_t = t - - m['rbf_lengthscale'] = 1. # X.var(0).max() / X.var(0) m['noise_variance'] = Y.var() / 100. - m['bias_variance'] = 0.05 if optimize: - m.optimize('scg', messages=1) + m.optimize('scg', messages=verbose, max_iters=2e3) + + if plot: + fig = plt.figure('fitted') + ax = fig.add_subplot(111) + s = m.input_sensitivity().argsort()[::-1][:2] + ax.scatter(*m.X.T[s], c=c) + return m -def BGPLVM_oil(optimize=True, N=200, Q=10, num_inducing=15, max_f_eval=50, plot=False, **k): - np.random.seed(0) +def bgplvm_oil(optimize=True, verbose=1, plot=True, N=200, Q=7, num_inducing=40, max_iters=1000, **k): + import GPy + from GPy.likelihoods import Gaussian + from matplotlib import pyplot as plt + + _np.random.seed(0) data = GPy.util.datasets.oil() - # create simple GP model - kernel = GPy.kern.rbf(Q, ARD=True) + GPy.kern.bias(Q, np.exp(-2)) + GPy.kern.white(Q, np.exp(-2)) + kernel = GPy.kern.rbf_inv(Q, 1., [.1] * Q, ARD=True) + GPy.kern.bias(Q, _np.exp(-2)) Y = data['X'][:N] - Yn = Y - Y.mean(0) - Yn /= Yn.std(0) - + Yn = Gaussian(Y, normalize=True) m = GPy.models.BayesianGPLVM(Yn, Q, kernel=kernel, num_inducing=num_inducing, **k) m.data_labels = data['Y'][:N].argmax(axis=1) + m['noise'] = Yn.Y.var() / 100. - # m.constrain('variance|leng', logexp_clipped()) - m['.*lengt'] = 1. # m.X.var(0).max() / m.X.var(0) - m['noise'] = Yn.var() / 100. - - - # optimize if optimize: - m.constrain_fixed('noise') - m.optimize('scg', messages=1, max_f_eval=100, gtol=.05) - m.constrain_positive('noise') - m.optimize('scg', messages=1, max_f_eval=max_f_eval, gtol=.05) + m.optimize('scg', messages=verbose, max_iters=max_iters, gtol=.05) if plot: y = m.likelihood.Y[0, :] fig, (latent_axes, sense_axes) = plt.subplots(1, 2) - plt.sca(latent_axes) - m.plot_latent() + m.plot_latent(ax=latent_axes) data_show = GPy.util.visualize.vector_show(y) - lvm_visualizer = GPy.util.visualize.lvm_dimselect(m.X[0, :], m, data_show, latent_axes=latent_axes) # , sense_axes=sense_axes) + lvm_visualizer = GPy.util.visualize.lvm_dimselect(m.X[0, :], # @UnusedVariable + m, data_show, latent_axes=latent_axes, sense_axes=sense_axes) raw_input('Press enter to finish') plt.close(fig) return m -def oil_100(): - data = GPy.util.datasets.oil_100() - m = GPy.models.GPLVM(data['X'], 2) - - # optimize - m.optimize(messages=1, max_iters=2) - - # plot - print(m) - # m.plot_latent(labels=data['Y'].argmax(axis=1)) - return m - - - def _simulate_sincos(D1, D2, D3, N, num_inducing, Q, plot_sim=False): - x = np.linspace(0, 4 * np.pi, N)[:, None] - s1 = np.vectorize(lambda x: np.sin(x)) - s2 = np.vectorize(lambda x: np.cos(x)) - s3 = np.vectorize(lambda x:-np.exp(-np.cos(2 * x))) - sS = np.vectorize(lambda x: np.sin(2 * x)) + x = _np.linspace(0, 4 * _np.pi, N)[:, None] + s1 = _np.vectorize(lambda x: _np.sin(x)) + s2 = _np.vectorize(lambda x: _np.cos(x)) + s3 = _np.vectorize(lambda x:-_np.exp(-_np.cos(2 * x))) + sS = _np.vectorize(lambda x: _np.sin(2 * x)) s1 = s1(x) s2 = s2(x) s3 = s3(x) sS = sS(x) - S1 = np.hstack([s1, sS]) - S2 = np.hstack([s2, s3, sS]) - S3 = np.hstack([s3, sS]) + S1 = _np.hstack([s1, sS]) + S2 = _np.hstack([s2, s3, sS]) + S3 = _np.hstack([s3, sS]) - Y1 = S1.dot(np.random.randn(S1.shape[1], D1)) - Y2 = S2.dot(np.random.randn(S2.shape[1], D2)) - Y3 = S3.dot(np.random.randn(S3.shape[1], D3)) + Y1 = S1.dot(_np.random.randn(S1.shape[1], D1)) + Y2 = S2.dot(_np.random.randn(S2.shape[1], D2)) + Y3 = S3.dot(_np.random.randn(S3.shape[1], D3)) 
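+    # Each observed view Yi mixes its latent signals through a random linear
+    # map; sS is shared across all three views while s1, s2 and s3 are
+    # private to one view each -- the structure these examples try to recover.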
- Y1 += .3 * np.random.randn(*Y1.shape) - Y2 += .2 * np.random.randn(*Y2.shape) - Y3 += .1 * np.random.randn(*Y3.shape) + Y1 += .3 * _np.random.randn(*Y1.shape) + Y2 += .2 * _np.random.randn(*Y2.shape) + Y3 += .25 * _np.random.randn(*Y3.shape) Y1 -= Y1.mean(0) Y2 -= Y2.mean(0) @@ -201,6 +206,7 @@ def _simulate_sincos(D1, D2, D3, N, num_inducing, Q, plot_sim=False): if plot_sim: import pylab + import matplotlib.cm as cm import itertools fig = pylab.figure("MRD Simulation Data", figsize=(8, 6)) fig.clf() @@ -211,179 +217,252 @@ def _simulate_sincos(D1, D2, D3, N, num_inducing, Q, plot_sim=False): ax.legend() for i, Y in enumerate(Ylist): ax = fig.add_subplot(2, len(Ylist), len(Ylist) + 1 + i) - ax.imshow(Y, aspect='auto', cmap=cm.gray) # @UndefinedVariable + ax.imshow(Y, aspect='auto', cmap=cm.gray) ax.set_title("Y{}".format(i + 1)) pylab.draw() pylab.tight_layout() return slist, [S1, S2, S3], Ylist -def bgplvm_simulation_matlab_compare(): - from GPy.util.datasets import simulation_BGPLVM - sim_data = simulation_BGPLVM() - Y = sim_data['Y'] - S = sim_data['S'] - mu = sim_data['mu'] - num_inducing, [_, Q] = 3, mu.shape +# def bgplvm_simulation_matlab_compare(): +# from GPy.util.datasets import simulation_BGPLVM +# from GPy import kern +# from GPy.models import BayesianGPLVM +# +# sim_data = simulation_BGPLVM() +# Y = sim_data['Y'] +# mu = sim_data['mu'] +# num_inducing, [_, Q] = 3, mu.shape +# +# k = kern.linear(Q, ARD=True) + kern.bias(Q, _np.exp(-2)) + kern.white(Q, _np.exp(-2)) +# m = BayesianGPLVM(Y, Q, init="PCA", num_inducing=num_inducing, kernel=k, +# _debug=False) +# m.auto_scale_factor = True +# m['noise'] = Y.var() / 100. +# m['linear_variance'] = .01 +# return m - from GPy.models import mrd +def bgplvm_simulation(optimize=True, verbose=1, + plot=True, plot_sim=False, + max_iters=2e4, + ): from GPy import kern - reload(mrd); reload(kern) - k = kern.linear(Q, ARD=True) + kern.bias(Q, np.exp(-2)) + kern.white(Q, np.exp(-2)) - m = BayesianGPLVM(Y, Q, init="PCA", num_inducing=num_inducing, kernel=k, -# X=mu, -# X_variance=S, - _debug=False) - m.auto_scale_factor = True - m['noise'] = Y.var() / 100. - m['linear_variance'] = .01 - return m - -def bgplvm_simulation(optimize='scg', - plot=True, - max_f_eval=2e4): -# from GPy.core.transformations import logexp_clipped - D1, D2, D3, N, num_inducing, Q = 15, 8, 8, 100, 3, 5 - slist, Slist, Ylist = _simulate_sincos(D1, D2, D3, N, num_inducing, Q, plot) - - from GPy.models import mrd - from GPy import kern - reload(mrd); reload(kern) - + from GPy.models import BayesianGPLVM + D1, D2, D3, N, num_inducing, Q = 15, 5, 8, 30, 3, 10 + _, _, Ylist = _simulate_sincos(D1, D2, D3, N, num_inducing, Q, plot_sim) Y = Ylist[0] - - k = kern.linear(Q, ARD=True) + kern.bias(Q, np.exp(-2)) + kern.white(Q, np.exp(-2)) # + kern.bias(Q) - m = BayesianGPLVM(Y, Q, init="PCA", num_inducing=num_inducing, kernel=k, _debug=True) - # m.constrain('variance|noise', logexp_clipped()) + k = kern.linear(Q, ARD=True) + kern.bias(Q, _np.exp(-2)) + kern.white(Q, _np.exp(-2)) # + kern.bias(Q) + m = BayesianGPLVM(Y, Q, init="PCA", num_inducing=num_inducing, kernel=k) m['noise'] = Y.var() / 100. 
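     # starting the noise at 1% of the data variance is a heuristic that stops
     # the optimiser from explaining the signal away as noise early on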
- m['linear_variance'] = .01 if optimize: print "Optimizing model:" - m.optimize(optimize, max_iters=max_f_eval, - max_f_eval=max_f_eval, - messages=True, gtol=.05) + m.optimize('scg', messages=verbose, max_iters=max_iters, + gtol=.05) if plot: m.plot_X_1d("BGPLVM Latent Space 1D") m.kern.plot_ARD('BGPLVM Simulation ARD Parameters') return m -def mrd_simulation(optimize=True, plot=True, plot_sim=True, **kw): - D1, D2, D3, N, num_inducing, Q = 150, 200, 400, 500, 3, 7 - slist, Slist, Ylist = _simulate_sincos(D1, D2, D3, N, num_inducing, Q, plot_sim) - - from GPy.models import mrd +def mrd_simulation(optimize=True, verbose=True, plot=True, plot_sim=True, **kw): from GPy import kern + from GPy.models import MRD + from GPy.likelihoods import Gaussian - reload(mrd); reload(kern) + D1, D2, D3, N, num_inducing, Q = 60, 20, 36, 60, 6, 5 + _, _, Ylist = _simulate_sincos(D1, D2, D3, N, num_inducing, Q, plot_sim) + likelihood_list = [Gaussian(x, normalize=True) for x in Ylist] - k = kern.linear(Q, [.05] * Q, ARD=True) + kern.bias(Q, np.exp(-2)) + kern.white(Q, np.exp(-2)) - m = mrd.MRD(Ylist, input_dim=Q, num_inducing=num_inducing, kernels=k, initx="", initz='permute', **kw) + k = kern.linear(Q, ARD=True) + kern.bias(Q, _np.exp(-2)) + kern.white(Q, _np.exp(-2)) + m = MRD(likelihood_list, input_dim=Q, num_inducing=num_inducing, kernels=k, initx="", initz='permute', **kw) + m.ensure_default_constraints() - for i, Y in enumerate(Ylist): - m['{}_noise'.format(i + 1)] = Y.var() / 100. - - - # DEBUG - # np.seterr("raise") + for i, bgplvm in enumerate(m.bgplvms): + m['{}_noise'.format(i)] = bgplvm.likelihood.Y.var() / 500. if optimize: print "Optimizing Model:" - m.optimize(messages=1, max_iters=8e3, max_f_eval=8e3, gtol=.1) + m.optimize(messages=verbose, max_iters=8e3, gtol=.1) if plot: m.plot_X_1d("MRD Latent Space 1D") m.plot_scales("MRD Scales") return m -def brendan_faces(): - from GPy import kern +def brendan_faces(optimize=True, verbose=True, plot=True): + import GPy + data = GPy.util.datasets.brendan_faces() Q = 2 - Y = data['Y'][0:-1:10, :] - # Y = data['Y'] + Y = data['Y'] Yn = Y - Y.mean() Yn /= Yn.std() m = GPy.models.GPLVM(Yn, Q) - # m = GPy.models.BayesianGPLVM(Yn, Q, num_inducing=100) # optimize m.constrain('rbf|noise|white', GPy.core.transformations.logexp_clipped()) - m.optimize('scg', messages=1, max_f_eval=10000) + if optimize: m.optimize('scg', messages=verbose, max_iters=1000) - ax = m.plot_latent(which_indices=(0, 1)) - y = m.likelihood.Y[0, :] - data_show = GPy.util.visualize.image_show(y[None, :], dimensions=(20, 28), transpose=True, invert=False, scale=False) - lvm_visualizer = GPy.util.visualize.lvm(m.X[0, :].copy(), m, data_show, ax) - raw_input('Press enter to finish') - lvm_visualizer.close() + if plot: + ax = m.plot_latent(which_indices=(0, 1)) + y = m.likelihood.Y[0, :] + data_show = GPy.util.visualize.image_show(y[None, :], dimensions=(20, 28), transpose=True, order='F', invert=False, scale=False) + GPy.util.visualize.lvm(m.X[0, :].copy(), m, data_show, ax) + raw_input('Press enter to finish') return m -def stick(): - data = GPy.util.datasets.stick() - m = GPy.models.GPLVM(data['Y'], 2) +def olivetti_faces(optimize=True, verbose=True, plot=True): + import GPy + data = GPy.util.datasets.olivetti_faces() + Q = 2 + Y = data['Y'] + Yn = Y - Y.mean() + Yn /= Yn.std() + + m = GPy.models.GPLVM(Yn, Q) + if optimize: m.optimize('scg', messages=verbose, max_iters=1000) + if plot: + ax = m.plot_latent(which_indices=(0, 1)) + y = m.likelihood.Y[0, :] + data_show = 
GPy.util.visualize.image_show(y[None, :], dimensions=(112, 92), transpose=False, invert=False, scale=False)
+        GPy.util.visualize.lvm(m.X[0, :].copy(), m, data_show, ax)
+        raw_input('Press enter to finish')
+
+    return m
+
+def stick_play(range=None, frame_rate=15, optimize=False, verbose=True, plot=True):
+    import GPy
+    data = GPy.util.datasets.osu_run1()
+    # optimize
+    if range is None:
+        Y = data['Y'].copy()
+    else:
+        Y = data['Y'][range[0]:range[1], :].copy()
+    if plot:
+        y = Y[0, :]
+        data_show = GPy.util.visualize.stick_show(y[None, :], connect=data['connect'])
+        GPy.util.visualize.data_play(Y, data_show, frame_rate)
+    return Y
-    ax = m.plot_latent()
-    y = m.likelihood.Y[0, :]
-    data_show = GPy.util.visualize.stick_show(y[None, :], connect=data['connect'])
-    lvm_visualizer = GPy.util.visualize.lvm(m.X[0, :].copy(), m, data_show, ax)
-    raw_input('Press enter to finish')
-    lvm_visualizer.close()
+def stick(kernel=None, optimize=True, verbose=True, plot=True):
+    from matplotlib import pyplot as plt
+    import GPy
+
+    data = GPy.util.datasets.osu_run1()
+    # optimize
+    m = GPy.models.GPLVM(data['Y'], 2, kernel=kernel)
+    if optimize: m.optimize(messages=verbose, max_f_eval=10000)
+    if plot and GPy.util.visualize.visual_available:
+        plt.clf()
+        ax = m.plot_latent()
+        y = m.likelihood.Y[0, :]
+        data_show = GPy.util.visualize.stick_show(y[None, :], connect=data['connect'])
+        GPy.util.visualize.lvm(m.X[0, :].copy(), m, data_show, ax)
+        raw_input('Press enter to finish')
     return m

-def cmu_mocap(subject='35', motion=['01'], in_place=True):
+def bcgplvm_linear_stick(kernel=None, optimize=True, verbose=True, plot=True):
+    from matplotlib import pyplot as plt
+    import GPy
+
+    data = GPy.util.datasets.osu_run1()
+    # optimize
+    mapping = GPy.mappings.Linear(data['Y'].shape[1], 2)
+    m = GPy.models.BCGPLVM(data['Y'], 2, kernel=kernel, mapping=mapping)
+    if optimize: m.optimize(messages=verbose, max_f_eval=10000)
+    if plot and GPy.util.visualize.visual_available:
+        plt.clf()
+        ax = m.plot_latent()
+        y = m.likelihood.Y[0, :]
+        data_show = GPy.util.visualize.stick_show(y[None, :], connect=data['connect'])
+        GPy.util.visualize.lvm(m.X[0, :].copy(), m, data_show, ax)
+        raw_input('Press enter to finish')
+
+    return m
+
+def bcgplvm_stick(kernel=None, optimize=True, verbose=True, plot=True):
+    from matplotlib import pyplot as plt
+    import GPy
+
+    data = GPy.util.datasets.osu_run1()
+    # optimize
+    back_kernel = GPy.kern.rbf(data['Y'].shape[1], lengthscale=5.)
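+    # Back-constraint: the latent positions are tied to a smooth kernel
+    # mapping from data space, so similar observations get similar latent points.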
+    mapping = GPy.mappings.Kernel(X=data['Y'], output_dim=2, kernel=back_kernel)
+    m = GPy.models.BCGPLVM(data['Y'], 2, kernel=kernel, mapping=mapping)
+    if optimize: m.optimize(messages=verbose, max_f_eval=10000)
+    if plot and GPy.util.visualize.visual_available:
+        plt.clf()
+        ax = m.plot_latent()
+        y = m.likelihood.Y[0, :]
+        data_show = GPy.util.visualize.stick_show(y[None, :], connect=data['connect'])
+        GPy.util.visualize.lvm(m.X[0, :].copy(), m, data_show, ax)
+        raw_input('Press enter to finish')
+
+    return m
+
+def robot_wireless(optimize=True, verbose=True, plot=True):
+    from matplotlib import pyplot as plt
+    import GPy
+
+    data = GPy.util.datasets.robot_wireless()
+    # optimize
+    m = GPy.models.GPLVM(data['Y'], 2)
+    if optimize: m.optimize(messages=verbose, max_f_eval=10000)
+    m._set_params(m._get_params())
+    if plot:
+        m.plot_latent()
+
+    return m
+
+def stick_bgplvm(model=None, optimize=True, verbose=True, plot=True):
+    from GPy.models import BayesianGPLVM
+    from matplotlib import pyplot as plt
+    import GPy
+
+    data = GPy.util.datasets.osu_run1()
+    Q = 6
+    kernel = GPy.kern.rbf(Q, ARD=True) + GPy.kern.bias(Q, _np.exp(-2)) + GPy.kern.white(Q, _np.exp(-2))
+    m = BayesianGPLVM(data['Y'], Q, init="PCA", num_inducing=20, kernel=kernel)
+    # optimize
+    m.ensure_default_constraints()
+    if optimize: m.optimize('scg', messages=verbose, max_iters=200, xtol=1e-300, ftol=1e-300)
+    m._set_params(m._get_params())
+    if plot:
+        fig, (latent_axes, sense_axes) = plt.subplots(1, 2)
+        plt.sca(latent_axes)
+        m.plot_latent()
+        y = m.likelihood.Y[0, :].copy()
+        data_show = GPy.util.visualize.stick_show(y[None, :], connect=data['connect'])
+        GPy.util.visualize.lvm_dimselect(m.X[0, :].copy(), m, data_show, latent_axes=latent_axes, sense_axes=sense_axes)
+        raw_input('Press enter to finish')
+
+    return m
+
+
+def cmu_mocap(subject='35', motion=['01'], in_place=True, optimize=True, verbose=True, plot=True):
+    import GPy
     data = GPy.util.datasets.cmu_mocap(subject, motion)
-    Y = data['Y']
     if in_place:
         # Make figure move in place.
data['Y'][:, 0:3] = 0.0 + m = GPy.models.GPLVM(data['Y'], 2, normalize_Y=True) - # optimize - m.optimize(messages=1, max_f_eval=10000) + if optimize: + m.optimize(messages=verbose, max_f_eval=10000) - ax = m.plot_latent() - y = m.likelihood.Y[0, :] - data_show = GPy.util.visualize.skeleton_show(y[None, :], data['skel']) - lvm_visualizer = GPy.util.visualize.lvm(m.X[0, :].copy(), m, data_show, ax) - raw_input('Press enter to finish') - lvm_visualizer.close() + if plot: + ax = m.plot_latent() + y = m.likelihood.Y[0, :] + data_show = GPy.util.visualize.skeleton_show(y[None, :], data['skel']) + lvm_visualizer = GPy.util.visualize.lvm(m.X[0, :].copy(), m, data_show, ax) + raw_input('Press enter to finish') + lvm_visualizer.close() return m - -# def BGPLVM_oil(): -# data = GPy.util.datasets.oil() -# Y, X = data['Y'], data['X'] -# X -= X.mean(axis=0) -# X /= X.std(axis=0) -# -# Q = 10 -# num_inducing = 30 -# -# kernel = GPy.kern.rbf(Q, ARD=True) + GPy.kern.bias(Q) + GPy.kern.white(Q) -# m = GPy.models.BayesianGPLVM(X, Q, kernel=kernel, num_inducing=num_inducing) -# # m.scale_factor = 100.0 -# m.constrain_positive('(white|noise|bias|X_variance|rbf_variance|rbf_length)') -# from sklearn import cluster -# km = cluster.KMeans(num_inducing, verbose=10) -# Z = km.fit(m.X).cluster_centers_ -# # Z = GPy.util.misc.kmm_init(m.X, num_inducing) -# m.set('iip', Z) -# m.set('bias', 1e-4) -# # optimize -# -# import pdb; pdb.set_trace() -# m.optimize('tnc', messages=1) -# print m -# m.plot_latent(labels=data['Y'].argmax(axis=1)) -# return m - diff --git a/GPy/examples/non_gaussian.py b/GPy/examples/non_gaussian.py new file mode 100644 index 00000000..bda80137 --- /dev/null +++ b/GPy/examples/non_gaussian.py @@ -0,0 +1,286 @@ +import GPy +import numpy as np +import matplotlib.pyplot as plt +from GPy.util import datasets + +def student_t_approx(optimize=True, plot=True): + """ + Example of regressing with a student t likelihood using Laplace + """ + real_std = 0.1 + #Start a function, any function + X = np.linspace(0.0, np.pi*2, 100)[:, None] + Y = np.sin(X) + np.random.randn(*X.shape)*real_std + Y = Y/Y.max() + Yc = Y.copy() + + X_full = np.linspace(0.0, np.pi*2, 500)[:, None] + Y_full = np.sin(X_full) + Y_full = Y_full/Y_full.max() + + #Slightly noisy data + Yc[75:80] += 1 + + #Very noisy data + #Yc[10] += 100 + #Yc[25] += 10 + #Yc[23] += 10 + #Yc[26] += 1000 + #Yc[24] += 10 + #Yc = Yc/Yc.max() + + #Add student t random noise to datapoints + deg_free = 5 + print "Real noise: ", real_std + initial_var_guess = 0.5 + edited_real_sd = initial_var_guess + + # Kernel object + kernel1 = GPy.kern.rbf(X.shape[1]) + GPy.kern.white(X.shape[1]) + kernel2 = kernel1.copy() + kernel3 = kernel1.copy() + kernel4 = kernel1.copy() + + #Gaussian GP model on clean data + m1 = GPy.models.GPRegression(X, Y.copy(), kernel=kernel1) + # optimize + m1.ensure_default_constraints() + m1.constrain_fixed('white', 1e-5) + m1.randomize() + + #Gaussian GP model on corrupt data + m2 = GPy.models.GPRegression(X, Yc.copy(), kernel=kernel2) + m2.ensure_default_constraints() + m2.constrain_fixed('white', 1e-5) + m2.randomize() + + #Student t GP model on clean data + t_distribution = GPy.likelihoods.noise_model_constructors.student_t(deg_free=deg_free, sigma2=edited_real_sd) + stu_t_likelihood = GPy.likelihoods.Laplace(Y.copy(), t_distribution) + m3 = GPy.models.GPRegression(X, Y.copy(), kernel3, likelihood=stu_t_likelihood) + m3.ensure_default_constraints() + m3.constrain_bounded('t_noise', 1e-6, 10.) 
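+    # keeping the Student-t noise scale bounded (and fixing the white kernel
+    # to a small jitter just below) helps keep the Laplace approximation
+    # numerically stable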
+ m3.constrain_fixed('white', 1e-5) + m3.randomize() + + #Student t GP model on corrupt data + t_distribution = GPy.likelihoods.noise_model_constructors.student_t(deg_free=deg_free, sigma2=edited_real_sd) + corrupt_stu_t_likelihood = GPy.likelihoods.Laplace(Yc.copy(), t_distribution) + m4 = GPy.models.GPRegression(X, Yc.copy(), kernel4, likelihood=corrupt_stu_t_likelihood) + m4.ensure_default_constraints() + m4.constrain_bounded('t_noise', 1e-6, 10.) + m4.constrain_fixed('white', 1e-5) + m4.randomize() + + if optimize: + optimizer='scg' + print "Clean Gaussian" + m1.optimize(optimizer, messages=1) + print "Corrupt Gaussian" + m2.optimize(optimizer, messages=1) + print "Clean student t" + m3.optimize(optimizer, messages=1) + print "Corrupt student t" + m4.optimize(optimizer, messages=1) + + if plot: + plt.figure(1) + plt.suptitle('Gaussian likelihood') + ax = plt.subplot(211) + m1.plot(ax=ax) + plt.plot(X_full, Y_full) + plt.ylim(-1.5, 1.5) + plt.title('Gaussian clean') + + ax = plt.subplot(212) + m2.plot(ax=ax) + plt.plot(X_full, Y_full) + plt.ylim(-1.5, 1.5) + plt.title('Gaussian corrupt') + + plt.figure(2) + plt.suptitle('Student-t likelihood') + ax = plt.subplot(211) + m3.plot(ax=ax) + plt.plot(X_full, Y_full) + plt.ylim(-1.5, 1.5) + plt.title('Student-t rasm clean') + + ax = plt.subplot(212) + m4.plot(ax=ax) + plt.plot(X_full, Y_full) + plt.ylim(-1.5, 1.5) + plt.title('Student-t rasm corrupt') + + return m1, m2, m3, m4 + +def boston_example(optimize=True, plot=True): + import sklearn + from sklearn.cross_validation import KFold + optimizer='bfgs' + messages=0 + data = datasets.boston_housing() + degrees_freedoms = [3, 5, 8, 10] + X = data['X'].copy() + Y = data['Y'].copy() + X = X-X.mean(axis=0) + X = X/X.std(axis=0) + Y = Y-Y.mean() + Y = Y/Y.std() + num_folds = 10 + kf = KFold(len(Y), n_folds=num_folds, indices=True) + num_models = len(degrees_freedoms) + 3 #3 for baseline, gaussian, gaussian laplace approx + score_folds = np.zeros((num_models, num_folds)) + pred_density = score_folds.copy() + + def rmse(Y, Ystar): + return np.sqrt(np.mean((Y-Ystar)**2)) + + for n, (train, test) in enumerate(kf): + X_train, X_test, Y_train, Y_test = X[train], X[test], Y[train], Y[test] + print "Fold {}".format(n) + + noise = 1e-1 #np.exp(-2) + rbf_len = 0.5 + data_axis_plot = 4 + kernelstu = GPy.kern.rbf(X.shape[1]) + GPy.kern.white(X.shape[1]) + GPy.kern.bias(X.shape[1]) + kernelgp = GPy.kern.rbf(X.shape[1]) + GPy.kern.white(X.shape[1]) + GPy.kern.bias(X.shape[1]) + + #Baseline + score_folds[0, n] = rmse(Y_test, np.mean(Y_train)) + + #Gaussian GP + print "Gauss GP" + mgp = GPy.models.GPRegression(X_train.copy(), Y_train.copy(), kernel=kernelgp.copy()) + mgp.ensure_default_constraints() + mgp.constrain_fixed('white', 1e-5) + mgp['rbf_len'] = rbf_len + mgp['noise'] = noise + print mgp + if optimize: + mgp.optimize(optimizer=optimizer, messages=messages) + Y_test_pred = mgp.predict(X_test) + score_folds[1, n] = rmse(Y_test, Y_test_pred[0]) + pred_density[1, n] = np.mean(mgp.log_predictive_density(X_test, Y_test)) + print mgp + print pred_density + + print "Gaussian Laplace GP" + N, D = Y_train.shape + g_distribution = GPy.likelihoods.noise_model_constructors.gaussian(variance=noise, N=N, D=D) + g_likelihood = GPy.likelihoods.Laplace(Y_train.copy(), g_distribution) + mg = GPy.models.GPRegression(X_train.copy(), Y_train.copy(), kernel=kernelstu.copy(), likelihood=g_likelihood) + mg.ensure_default_constraints() + mg.constrain_positive('noise_variance') + mg.constrain_fixed('white', 1e-5) + mg['rbf_len'] = 
rbf_len + mg['noise'] = noise + print mg + if optimize: + mg.optimize(optimizer=optimizer, messages=messages) + Y_test_pred = mg.predict(X_test) + score_folds[2, n] = rmse(Y_test, Y_test_pred[0]) + pred_density[2, n] = np.mean(mg.log_predictive_density(X_test, Y_test)) + print pred_density + print mg + + for stu_num, df in enumerate(degrees_freedoms): + #Student T + print "Student-T GP {}df".format(df) + t_distribution = GPy.likelihoods.noise_model_constructors.student_t(deg_free=df, sigma2=noise) + stu_t_likelihood = GPy.likelihoods.Laplace(Y_train.copy(), t_distribution) + mstu_t = GPy.models.GPRegression(X_train.copy(), Y_train.copy(), kernel=kernelstu.copy(), likelihood=stu_t_likelihood) + mstu_t.ensure_default_constraints() + mstu_t.constrain_fixed('white', 1e-5) + mstu_t.constrain_bounded('t_noise', 0.0001, 1000) + mstu_t['rbf_len'] = rbf_len + mstu_t['t_noise'] = noise + print mstu_t + if optimize: + mstu_t.optimize(optimizer=optimizer, messages=messages) + Y_test_pred = mstu_t.predict(X_test) + score_folds[3+stu_num, n] = rmse(Y_test, Y_test_pred[0]) + pred_density[3+stu_num, n] = np.mean(mstu_t.log_predictive_density(X_test, Y_test)) + print pred_density + print mstu_t + + if plot: + plt.figure() + plt.scatter(X_test[:, data_axis_plot], Y_test_pred[0]) + plt.scatter(X_test[:, data_axis_plot], Y_test, c='r', marker='x') + plt.title('GP gauss') + + plt.figure() + plt.scatter(X_test[:, data_axis_plot], Y_test_pred[0]) + plt.scatter(X_test[:, data_axis_plot], Y_test, c='r', marker='x') + plt.title('Lap gauss') + + plt.figure() + plt.scatter(X_test[:, data_axis_plot], Y_test_pred[0]) + plt.scatter(X_test[:, data_axis_plot], Y_test, c='r', marker='x') + plt.title('Stu t {}df'.format(df)) + + print "Average scores: {}".format(np.mean(score_folds, 1)) + print "Average pred density: {}".format(np.mean(pred_density, 1)) + + if plot: + #Plotting + stu_t_legends = ['Student T, df={}'.format(df) for df in degrees_freedoms] + legends = ['Baseline', 'Gaussian', 'Laplace Approx Gaussian'] + stu_t_legends + + #Plot boxplots for RMSE density + fig = plt.figure() + ax=fig.add_subplot(111) + plt.title('RMSE') + bp = ax.boxplot(score_folds.T, notch=0, sym='+', vert=1, whis=1.5) + plt.setp(bp['boxes'], color='black') + plt.setp(bp['whiskers'], color='black') + plt.setp(bp['fliers'], color='red', marker='+') + xtickNames = plt.setp(ax, xticklabels=legends) + plt.setp(xtickNames, rotation=45, fontsize=8) + ax.set_ylabel('RMSE') + ax.set_xlabel('Distribution') + #Make grid and put it below boxes + ax.yaxis.grid(True, linestyle='-', which='major', color='lightgrey', + alpha=0.5) + ax.set_axisbelow(True) + + #Plot boxplots for predictive density + fig = plt.figure() + ax=fig.add_subplot(111) + plt.title('Predictive density') + bp = ax.boxplot(pred_density[1:,:].T, notch=0, sym='+', vert=1, whis=1.5) + plt.setp(bp['boxes'], color='black') + plt.setp(bp['whiskers'], color='black') + plt.setp(bp['fliers'], color='red', marker='+') + xtickNames = plt.setp(ax, xticklabels=legends[1:]) + plt.setp(xtickNames, rotation=45, fontsize=8) + ax.set_ylabel('Mean Log probability P(Y*|Y)') + ax.set_xlabel('Distribution') + #Make grid and put it below boxes + ax.yaxis.grid(True, linestyle='-', which='major', color='lightgrey', + alpha=0.5) + ax.set_axisbelow(True) + return mstu_t + +#def precipitation_example(): + #import sklearn + #from sklearn.cross_validation import KFold + #data = datasets.boston_housing() + #X = data['X'].copy() + #Y = data['Y'].copy() + #X = X-X.mean(axis=0) + #X = X/X.std(axis=0) + #Y = Y-Y.mean() + 
#Y = Y/Y.std() + #import ipdb; ipdb.set_trace() # XXX BREAKPOINT + #num_folds = 10 + #kf = KFold(len(Y), n_folds=num_folds, indices=True) + #score_folds = np.zeros((4, num_folds)) + #def rmse(Y, Ystar): + #return np.sqrt(np.mean((Y-Ystar)**2)) + ##for train, test in kf: + #for n, (train, test) in enumerate(kf): + #X_train, X_test, Y_train, Y_test = X[train], X[test], Y[train], Y[test] + #print "Fold {}".format(n) + diff --git a/GPy/examples/regression.py b/GPy/examples/regression.py index 21b435e7..9b910005 100644 --- a/GPy/examples/regression.py +++ b/GPy/examples/regression.py @@ -1,7 +1,6 @@ # Copyright (c) 2012, GPy authors (see AUTHORS.txt). # Licensed under the BSD 3-clause license (see LICENSE.txt) - """ Gaussian Processes regression examples """ @@ -9,192 +8,163 @@ import pylab as pb import numpy as np import GPy - -def toy_rbf_1d(optimizer='tnc', max_nb_eval_optim=100): - """Run a simple demonstration of a standard Gaussian process fitting it to data sampled from an RBF covariance.""" - data = GPy.util.datasets.toy_rbf_1d() +def olympic_marathon_men(optimize=True, plot=True): + """Run a standard Gaussian process regression on the Olympic marathon data.""" + data = GPy.util.datasets.olympic_marathon_men() # create simple GP Model - m = GPy.models.GPRegression(data['X'],data['Y']) + m = GPy.models.GPRegression(data['X'], data['Y']) - # optimize - m.optimize(optimizer, max_f_eval=max_nb_eval_optim) - # plot - m.plot() - print(m) - return m - -def rogers_girolami_olympics(optim_iters=100): - """Run a standard Gaussian process regression on the Rogers and Girolami olympics data.""" - data = GPy.util.datasets.rogers_girolami_olympics() - - # create simple GP Model - m = GPy.models.GPRegression(data['X'],data['Y']) - - #set the lengthscale to be something sensible (defaults to 1) + # set the lengthscale to be something sensible (defaults to 1) m['rbf_lengthscale'] = 10 - # optimize - m.optimize(max_f_eval=optim_iters) + if optimize: + m.optimize('bfgs', max_iters=200) + if plot: + m.plot(plot_limits=(1850, 2050)) - # plot - m.plot(plot_limits = (1850, 2050)) - print(m) return m -def toy_rbf_1d_50(optim_iters=100): - """Run a simple demonstration of a standard Gaussian process fitting it to data sampled from an RBF covariance.""" - data = GPy.util.datasets.toy_rbf_1d_50() - - # create simple GP Model - m = GPy.models.GPRegression(data['X'],data['Y']) - - # optimize - m.optimize(max_f_eval=optim_iters) - - # plot - m.plot() - print(m) - return m - -def silhouette(optim_iters=100): - """Predict the pose of a figure given a silhouette. This is a task from Agarwal and Triggs 2004 ICML paper.""" - data = GPy.util.datasets.silhouette() - - # create simple GP Model - m = GPy.models.GPRegression(data['X'],data['Y']) - - # optimize - m.optimize(messages=True,max_f_eval=optim_iters) - - print(m) - return m - -def coregionalisation_toy2(optim_iters=100): +def coregionalization_toy2(optimize=True, plot=True): """ - A simple demonstration of coregionalisation on two sinusoidal functions. + A simple demonstration of coregionalization on two sinusoidal functions. """ - X1 = np.random.rand(50,1)*8 - X2 = np.random.rand(30,1)*5 - index = np.vstack((np.zeros_like(X1),np.ones_like(X2))) - X = np.hstack((np.vstack((X1,X2)),index)) - Y1 = np.sin(X1) + np.random.randn(*X1.shape)*0.05 - Y2 = np.sin(X2) + np.random.randn(*X2.shape)*0.05 + 2. 
- Y = np.vstack((Y1,Y2)) + #build a design matrix with a column of integers indicating the output + X1 = np.random.rand(50, 1) * 8 + X2 = np.random.rand(30, 1) * 5 + index = np.vstack((np.zeros_like(X1), np.ones_like(X2))) + X = np.hstack((np.vstack((X1, X2)), index)) + #build a suitable set of observed variables + Y1 = np.sin(X1) + np.random.randn(*X1.shape) * 0.05 + Y2 = np.sin(X2) + np.random.randn(*X2.shape) * 0.05 + 2. + Y = np.vstack((Y1, Y2)) + + #build the kernel k1 = GPy.kern.rbf(1) + GPy.kern.bias(1) - k2 = GPy.kern.Coregionalise(2,1) - k = k1.prod(k2,tensor=True) - m = GPy.models.GPRegression(X,Y,kernel=k) - m.constrain_fixed('.*rbf_var',1.) - #m.constrain_positive('.*kappa') - m.optimize('sim',messages=1,max_f_eval=optim_iters) + k2 = GPy.kern.coregionalize(2,1) + k = k1**k2 + m = GPy.models.GPRegression(X, Y, kernel=k) + m.constrain_fixed('.*rbf_var', 1.) + + if optimize: + m.optimize('bfgs', max_iters=100) + + if plot: + m.plot(fixed_inputs=[(1,0)]) + m.plot(fixed_inputs=[(1,1)], ax=pb.gca()) - pb.figure() - Xtest1 = np.hstack((np.linspace(0,9,100)[:,None],np.zeros((100,1)))) - Xtest2 = np.hstack((np.linspace(0,9,100)[:,None],np.ones((100,1)))) - mean, var,low,up = m.predict(Xtest1) - GPy.util.plot.gpplot(Xtest1[:,0],mean,low,up) - mean, var,low,up = m.predict(Xtest2) - GPy.util.plot.gpplot(Xtest2[:,0],mean,low,up) - pb.plot(X1[:,0],Y1[:,0],'rx',mew=2) - pb.plot(X2[:,0],Y2[:,0],'gx',mew=2) return m -def coregionalisation_toy(optim_iters=100): - """ - A simple demonstration of coregionalisation on two sinusoidal functions. - """ - X1 = np.random.rand(50,1)*8 - X2 = np.random.rand(30,1)*5 - index = np.vstack((np.zeros_like(X1),np.ones_like(X2))) - X = np.hstack((np.vstack((X1,X2)),index)) - Y1 = np.sin(X1) + np.random.randn(*X1.shape)*0.05 - Y2 = -np.sin(X2) + np.random.randn(*X2.shape)*0.05 - Y = np.vstack((Y1,Y2)) +#FIXME: Needs recovering once likelihoods are consolidated +#def coregionalization_toy(optimize=True, plot=True): +# """ +# A simple demonstration of coregionalization on two sinusoidal functions. +# """ +# X1 = np.random.rand(50, 1) * 8 +# X2 = np.random.rand(30, 1) * 5 +# X = np.vstack((X1, X2)) +# Y1 = np.sin(X1) + np.random.randn(*X1.shape) * 0.05 +# Y2 = -np.sin(X2) + np.random.randn(*X2.shape) * 0.05 +# Y = np.vstack((Y1, Y2)) +# +# k1 = GPy.kern.rbf(1) +# m = GPy.models.GPMultioutputRegression(X_list=[X1,X2],Y_list=[Y1,Y2],kernel_list=[k1]) +# m.constrain_fixed('.*rbf_var', 1.) +# m.optimize(max_iters=100) +# +# fig, axes = pb.subplots(2,1) +# m.plot(fixed_inputs=[(1,0)],ax=axes[0]) +# m.plot(fixed_inputs=[(1,1)],ax=axes[1]) +# axes[0].set_title('Output 0') +# axes[1].set_title('Output 1') +# return m - k1 = GPy.kern.rbf(1) - k2 = GPy.kern.Coregionalise(2,2) - k = k1.prod(k2,tensor=True) - m = GPy.models.GPRegression(X,Y,kernel=k) - m.constrain_fixed('.*rbf_var',1.) - #m.constrain_positive('kappa') - m.optimize(max_f_eval=optim_iters) +def coregionalization_sparse(optimize=True, plot=True): + """ + A simple demonstration of coregionalization on two sinusoidal functions using sparse approximations. 
+    """
+    # fetch the data from the non sparse examples
+    m = coregionalization_toy2(optimize=False, plot=False)
+    X, Y = m.X, m.likelihood.Y
+
+    # construct a model
+    m = GPy.models.SparseGPRegression(X, Y)
+    m.constrain_fixed('iip_\d+_1') # don't optimize the inducing input indexes
+
+    if optimize:
+        m.optimize('bfgs', max_iters=100, messages=1)
+
+    if plot:
+        m.plot(fixed_inputs=[(1,0)])
+        m.plot(fixed_inputs=[(1,1)], ax=pb.gca())
-    pb.figure()
-    Xtest1 = np.hstack((np.linspace(0,9,100)[:,None],np.zeros((100,1))))
-    Xtest2 = np.hstack((np.linspace(0,9,100)[:,None],np.ones((100,1))))
-    mean, var,low,up = m.predict(Xtest1)
-    GPy.util.plot.gpplot(Xtest1[:,0],mean,low,up)
-    mean, var,low,up = m.predict(Xtest2)
-    GPy.util.plot.gpplot(Xtest2[:,0],mean,low,up)
-    pb.plot(X1[:,0],Y1[:,0],'rx',mew=2)
-    pb.plot(X2[:,0],Y2[:,0],'gx',mew=2)
     return m

-def coregionalisation_sparse(optim_iters=100):
+def epomeo_gpx(max_iters=200, optimize=True, plot=True):
     """
-    A simple demonstration of coregionalisation on two sinusoidal functions using sparse approximations.
+    Perform Gaussian process regression on the latitude and longitude data
+    from the Mount Epomeo runs. Requires gpxpy to be installed on your system
+    to load in the data.
     """
-    X1 = np.random.rand(500,1)*8
-    X2 = np.random.rand(300,1)*5
-    index = np.vstack((np.zeros_like(X1),np.ones_like(X2)))
-    X = np.hstack((np.vstack((X1,X2)),index))
-    Y1 = np.sin(X1) + np.random.randn(*X1.shape)*0.05
-    Y2 = -np.sin(X2) + np.random.randn(*X2.shape)*0.05
-    Y = np.vstack((Y1,Y2))
+    data = GPy.util.datasets.epomeo_gpx()
+    num_data_list = []
+    for Xpart in data['X']:
+        num_data_list.append(Xpart.shape[0])

-    num_inducing = 40
-    Z = np.hstack((np.random.rand(num_inducing,1)*8,np.random.randint(0,2,num_inducing)[:,None]))
+    num_data_array = np.array(num_data_list)
+    num_data = num_data_array.sum()
+    Y = np.zeros((num_data, 2))
+    t = np.zeros((num_data, 2))
+    start = 0
+    for Xpart, index in zip(data['X'], range(len(data['X']))):
+        end = start+Xpart.shape[0]
+        t[start:end, :] = np.hstack((Xpart[:, 0:1],
+                                     index*np.ones((Xpart.shape[0], 1))))
+        Y[start:end, :] = Xpart[:, 1:3]
+        start = end
+
+    num_inducing = 200
+    Z = np.hstack((np.linspace(t[:,0].min(), t[:, 0].max(), num_inducing)[:, None],
+                   np.random.randint(0, 4, num_inducing)[:, None]))

-    k1 = GPy.kern.rbf(1)
-    k2 = GPy.kern.Coregionalise(2,2)
-    k = k1.prod(k2,tensor=True)
+    k1 = GPy.kern.rbf(1) + GPy.kern.white(2, 0.001)
+    k2 = GPy.kern.coregionalize(output_dim=5, rank=5)
+    k = k1**k2

-    m = GPy.models.SparseGPRegression(X,Y,kernel=k,Z=Z)
-    m.constrain_fixed('.*rbf_var',1.)
+    m = GPy.models.SparseGPRegression(t, Y, kernel=k, Z=Z, normalize_Y=True)
+    m.constrain_fixed('.*rbf_var', 1.)
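+    # with a coregionalized kernel the per-output scales live in the
+    # coregionalization matrix, so the rbf variance is fixed to avoid redundancy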
m.constrain_fixed('iip') - m.constrain_bounded('noise_variance',1e-3,1e-1) - m.optimize_restarts(5, robust=True, messages=1, max_f_eval=optim_iters) + m.constrain_bounded('noise_variance', 1e-3, 1e-1) + m.optimize(max_iters=max_iters,messages=True) - #plotting: - pb.figure() - Xtest1 = np.hstack((np.linspace(0,9,100)[:,None],np.zeros((100,1)))) - Xtest2 = np.hstack((np.linspace(0,9,100)[:,None],np.ones((100,1)))) - mean, var,low,up = m.predict(Xtest1) - GPy.util.plot.gpplot(Xtest1[:,0],mean,low,up) - mean, var,low,up = m.predict(Xtest2) - GPy.util.plot.gpplot(Xtest2[:,0],mean,low,up) - pb.plot(X1[:,0],Y1[:,0],'rx',mew=2) - pb.plot(X2[:,0],Y2[:,0],'gx',mew=2) - y = pb.ylim()[0] - pb.plot(Z[:,0][Z[:,1]==0],np.zeros(np.sum(Z[:,1]==0))+y,'r|',mew=2) - pb.plot(Z[:,0][Z[:,1]==1],np.zeros(np.sum(Z[:,1]==1))+y,'g|',mew=2) return m - -def multiple_optima(gene_number=937,resolution=80, model_restarts=10, seed=10000, optim_iters=300): - """Show an example of a multimodal error surface for Gaussian process regression. Gene 939 has bimodal behaviour where the noisey mode is higher.""" +def multiple_optima(gene_number=937, resolution=80, model_restarts=10, seed=10000, max_iters=300, optimize=True, plot=True): + """ + Show an example of a multimodal error surface for Gaussian process + regression. Gene 939 has bimodal behaviour where the noisy mode is + higher. + """ # Contour over a range of length scales and signal/noise ratios. length_scales = np.linspace(0.1, 60., resolution) log_SNRs = np.linspace(-3., 4., resolution) - data = GPy.util.datasets.della_gatta_TRP63_gene_expression(gene_number) - #data['Y'] = data['Y'][0::2, :] - #data['X'] = data['X'][0::2, :] + data = GPy.util.datasets.della_gatta_TRP63_gene_expression(data_set='della_gatta',gene_number=gene_number) + # data['Y'] = data['Y'][0::2, :] + # data['X'] = data['X'][0::2, :] data['Y'] = data['Y'] - np.mean(data['Y']) lls = GPy.examples.regression._contour_data(data, length_scales, log_SNRs, GPy.kern.rbf) - pb.contour(length_scales, log_SNRs, np.exp(lls), 20, cmap=pb.cm.jet) - ax = pb.gca() - pb.xlabel('length scale') - pb.ylabel('log_10 SNR') + if plot: + pb.contour(length_scales, log_SNRs, np.exp(lls), 20, cmap=pb.cm.jet) + ax = pb.gca() + pb.xlabel('length scale') + pb.ylabel('log_10 SNR') - xlim = ax.get_xlim() - ylim = ax.get_ylim() + xlim = ax.get_xlim() + ylim = ax.get_ylim() # Now run a few optimizations models = [] @@ -202,124 +172,347 @@ def multiple_optima(gene_number=937,resolution=80, model_restarts=10, seed=10000 optim_point_y = np.empty(2) np.random.seed(seed=seed) for i in range(0, model_restarts): - #kern = GPy.kern.rbf(1, variance=np.random.exponential(1.), lengthscale=np.random.exponential(50.)) - kern = GPy.kern.rbf(1, variance=np.random.uniform(1e-3,1), lengthscale=np.random.uniform(5,50)) + # kern = GPy.kern.rbf(1, variance=np.random.exponential(1.), lengthscale=np.random.exponential(50.)) + kern = GPy.kern.rbf(1, variance=np.random.uniform(1e-3, 1), lengthscale=np.random.uniform(5, 50)) - m = GPy.models.GPRegression(data['X'],data['Y'], kernel=kern) - m['noise_variance'] = np.random.uniform(1e-3,1) + m = GPy.models.GPRegression(data['X'], data['Y'], kernel=kern) + m['noise_variance'] = np.random.uniform(1e-3, 1) optim_point_x[0] = m['rbf_lengthscale'] optim_point_y[0] = np.log10(m['rbf_variance']) - np.log10(m['noise_variance']); # optimize - m.optimize('scg', xtol=1e-6, ftol=1e-6, max_f_eval=optim_iters) + if optimize: + m.optimize('scg', xtol=1e-6, ftol=1e-6, max_iters=max_iters) optim_point_x[1] = 
m['rbf_lengthscale'] optim_point_y[1] = np.log10(m['rbf_variance']) - np.log10(m['noise_variance']); - pb.arrow(optim_point_x[0], optim_point_y[0], optim_point_x[1]-optim_point_x[0], optim_point_y[1]-optim_point_y[0], label=str(i), head_length=1, head_width=0.5, fc='k', ec='k') + if plot: + pb.arrow(optim_point_x[0], optim_point_y[0], optim_point_x[1] - optim_point_x[0], optim_point_y[1] - optim_point_y[0], label=str(i), head_length=1, head_width=0.5, fc='k', ec='k') models.append(m) - ax.set_xlim(xlim) - ax.set_ylim(ylim) - return m #(models, lls) + if plot: + ax.set_xlim(xlim) + ax.set_ylim(ylim) + return m # (models, lls) def _contour_data(data, length_scales, log_SNRs, kernel_call=GPy.kern.rbf): - """Evaluate the GP objective function for a given data set for a range of signal to noise ratios and a range of lengthscales. + """ + Evaluate the GP objective function for a given data set for a range of + signal to noise ratios and a range of lengthscales. :data_set: A data set from the utils.datasets director. :length_scales: a list of length scales to explore for the contour plot. :log_SNRs: a list of base 10 logarithm signal to noise ratios to explore for the contour plot. - :kernel: a kernel to use for the 'signal' portion of the data.""" + :kernel: a kernel to use for the 'signal' portion of the data. + """ lls = [] total_var = np.var(data['Y']) kernel = kernel_call(1, variance=1., lengthscale=1.) - Model = GPy.models.GPRegression(data['X'], data['Y'], kernel=kernel) + model = GPy.models.GPRegression(data['X'], data['Y'], kernel=kernel) for log_SNR in log_SNRs: SNR = 10.**log_SNR - noise_var = total_var/(1.+SNR) + noise_var = total_var / (1. + SNR) signal_var = total_var - noise_var - Model.kern['.*variance'] = signal_var - Model['noise_variance'] = noise_var + model.kern['.*variance'] = signal_var + model['noise_variance'] = noise_var length_scale_lls = [] for length_scale in length_scales: - Model['.*lengthscale'] = length_scale - length_scale_lls.append(Model.log_likelihood()) + model['.*lengthscale'] = length_scale + length_scale_lls.append(model.log_likelihood()) lls.append(length_scale_lls) return np.array(lls) -def sparse_GP_regression_1D(N = 400, num_inducing = 5, optim_iters=100): - """Run a 1D example of a sparse GP regression.""" - # sample inputs and outputs - X = np.random.uniform(-3.,3.,(N,1)) - Y = np.sin(X)+np.random.randn(N,1)*0.05 - # construct kernel - rbf = GPy.kern.rbf(1) - noise = GPy.kern.white(1) - kernel = rbf + noise + +def olympic_100m_men(optimize=True, plot=True): + """Run a standard Gaussian process regression on the Rogers and Girolami olympics data.""" + data = GPy.util.datasets.olympic_100m_men() + # create simple GP Model - m = GPy.models.SparseGPRegression(X, Y, kernel, num_inducing=num_inducing) + m = GPy.models.GPRegression(data['X'], data['Y']) + # set the lengthscale to be something sensible (defaults to 1) + m['rbf_lengthscale'] = 10 - m.checkgrad(verbose=1) - m.optimize('tnc', messages = 1, max_f_eval=optim_iters) - m.plot() + if optimize: + m.optimize('bfgs', max_iters=200) + + if plot: + m.plot(plot_limits=(1850, 2050)) return m -def sparse_GP_regression_2D(N = 400, num_inducing = 50, optim_iters=100): - """Run a 2D example of a sparse GP regression.""" - X = np.random.uniform(-3.,3.,(N,2)) - Y = np.sin(X[:,0:1]) * np.sin(X[:,1:2])+np.random.randn(N,1)*0.05 - - # construct kernel - rbf = GPy.kern.rbf(2) - noise = GPy.kern.white(2) - kernel = rbf + noise +def toy_rbf_1d(optimize=True, plot=True): + """Run a simple demonstration of a standard 
Gaussian process fitting it to data sampled from an RBF covariance."""
+    data = GPy.util.datasets.toy_rbf_1d()

     # create simple GP Model
-    m = GPy.models.SparseGPRegression(X,Y,kernel, num_inducing = num_inducing)
+    m = GPy.models.GPRegression(data['X'], data['Y'])
+
+    if optimize:
+        m.optimize('bfgs')
+    if plot:
+        m.plot()
+
+    return m
+
+def toy_rbf_1d_50(optimize=True, plot=True):
+    """Run a simple demonstration of a standard Gaussian process fitting it to data sampled from an RBF covariance."""
+    data = GPy.util.datasets.toy_rbf_1d_50()
+
+    # create simple GP Model
+    m = GPy.models.GPRegression(data['X'], data['Y'])
+
+    if optimize:
+        m.optimize('bfgs')
+    if plot:
+        m.plot()
+
+    return m
+
+
+def toy_poisson_rbf_1d(optimize=True, plot=True):
+    """Run a simple demonstration of a GP with a Poisson likelihood, fitted with EP to counts drawn from a log-Gaussian intensity with an RBF covariance."""
+    x_len = 400
+    X = np.linspace(0, 10, x_len)[:, None]
+    f_true = np.random.multivariate_normal(np.zeros(x_len), GPy.kern.rbf(1).K(X))
+    Y = np.array([np.random.poisson(np.exp(f)) for f in f_true]).reshape(x_len,1)
+
+    noise_model = GPy.likelihoods.poisson()
+    likelihood = GPy.likelihoods.EP(Y,noise_model)
+
+    # create simple GP Model
+    m = GPy.models.GPRegression(X, Y, likelihood=likelihood)
+
+    if optimize:
+        m.optimize('bfgs')
+    if plot:
+        m.plot()
+
+    return m
+
+def toy_poisson_rbf_1d_laplace(optimize=True, plot=True):
+    """Run a simple demonstration of a GP with a Poisson likelihood, fitted with a Laplace approximation to counts drawn from a log-Gaussian intensity with an RBF covariance."""
+    optimizer='scg'
+    x_len = 30
+    X = np.linspace(0, 10, x_len)[:, None]
+    f_true = np.random.multivariate_normal(np.zeros(x_len), GPy.kern.rbf(1).K(X))
+    Y = np.array([np.random.poisson(np.exp(f)) for f in f_true])[:,None]
+
+    noise_model = GPy.likelihoods.poisson()
+    likelihood = GPy.likelihoods.Laplace(Y,noise_model)
+
+    # create simple GP Model
+    m = GPy.models.GPRegression(X, Y, likelihood=likelihood)
+
+    if optimize:
+        m.optimize(optimizer)
+    if plot:
+        m.plot()
+        # plot the real underlying rate function
+        pb.plot(X, np.exp(f_true), '--k', linewidth=2)
+
+    return m
+
+def toy_ARD(max_iters=1000, kernel_type='linear', num_samples=300, D=4, optimize=True, plot=True):
+    # Create an artificial dataset where the values in the targets (Y)
+    # only depend on dimensions 1 and 3 of the inputs (X). Run ARD to
+    # see if this dependency can be recovered
+    X1 = np.sin(np.sort(np.random.rand(num_samples, 1) * 10, 0))
+    X2 = np.cos(np.sort(np.random.rand(num_samples, 1) * 10, 0))
+    X3 = np.exp(np.sort(np.random.rand(num_samples, 1), 0))
+    X4 = np.log(np.sort(np.random.rand(num_samples, 1), 0))
+    X = np.hstack((X1, X2, X3, X4))
+
+    Y1 = np.asarray(2 * X[:, 0] + 3).reshape(-1, 1)
+    Y2 = np.asarray(4 * (X[:, 2] - 1.5 * X[:, 0])).reshape(-1, 1)
+    Y = np.hstack((Y1, Y2))
+
+    Y = np.dot(Y, np.random.rand(2, D))
+    Y = Y + 0.2 * np.random.randn(Y.shape[0], Y.shape[1])
+    Y -= Y.mean()
+    Y /= Y.std()
+
+    if kernel_type == 'linear':
+        kernel = GPy.kern.linear(X.shape[1], ARD=1)
+    elif kernel_type == 'rbf_inv':
+        kernel = GPy.kern.rbf_inv(X.shape[1], ARD=1)
+    else:
+        kernel = GPy.kern.rbf(X.shape[1], ARD=1)
+    kernel += GPy.kern.white(X.shape[1]) + GPy.kern.bias(X.shape[1])
+    m = GPy.models.GPRegression(X, Y, kernel)
+    # len_prior = GPy.priors.inverse_gamma(1,18) # 1, 25
+    # m.set_prior('.*lengthscale',len_prior)
+
+    if optimize:
+        m.optimize(optimizer='scg', max_iters=max_iters, messages=1)
+
+    if plot:
+        m.kern.plot_ARD()
+
+    print m
+    return m
+
+def toy_ARD_sparse(max_iters=1000, kernel_type='linear', num_samples=300, D=4, optimize=True, plot=True):
+    # Create an artificial dataset where the values in the targets (Y)
+    # only depend on dimensions 1 and 3 of the inputs (X). Run ARD to
+    # see if this dependency can be recovered
+    X1 = np.sin(np.sort(np.random.rand(num_samples, 1) * 10, 0))
+    X2 = np.cos(np.sort(np.random.rand(num_samples, 1) * 10, 0))
+    X3 = np.exp(np.sort(np.random.rand(num_samples, 1), 0))
+    X4 = np.log(np.sort(np.random.rand(num_samples, 1), 0))
+    X = np.hstack((X1, X2, X3, X4))
+
+    Y1 = np.asarray(2 * X[:, 0] + 3)[:, None]
+    Y2 = np.asarray(4 * (X[:, 2] - 1.5 * X[:, 0]))[:, None]
+    Y = np.hstack((Y1, Y2))
+
+    Y = np.dot(Y, np.random.rand(2, D))
+    Y = Y + 0.2 * np.random.randn(Y.shape[0], Y.shape[1])
+    Y -= Y.mean()
+    Y /= Y.std()
+
+    if kernel_type == 'linear':
+        kernel = GPy.kern.linear(X.shape[1], ARD=1)
+    elif kernel_type == 'rbf_inv':
+        kernel = GPy.kern.rbf_inv(X.shape[1], ARD=1)
+    else:
+        kernel = GPy.kern.rbf(X.shape[1], ARD=1)
+    kernel += GPy.kern.bias(X.shape[1])
+    X_variance = np.ones(X.shape) * 0.5
+    m = GPy.models.SparseGPRegression(X, Y, kernel, X_variance=X_variance)
+    # len_prior = GPy.priors.inverse_gamma(1,18) # 1, 25
+    # m.set_prior('.*lengthscale',len_prior)
+
+    if optimize:
+        m.optimize(optimizer='scg', max_iters=max_iters, messages=1)
+
+    if plot:
+        m.kern.plot_ARD()
+
+    print m
+    return m
+
+def robot_wireless(max_iters=100, kernel=None, optimize=True, plot=True):
+    """Predict the location of a robot given wireless signal strength readings."""
+    data = GPy.util.datasets.robot_wireless()
+
+    # create simple GP Model
+    m = GPy.models.GPRegression(data['Y'], data['X'], kernel=kernel)
+
+    # optimize
+    if optimize:
+        m.optimize(messages=True, max_iters=max_iters)
+
+    Xpredict = m.predict(data['Ytest'])[0]
+    if plot:
+        pb.plot(data['Xtest'][:, 0], data['Xtest'][:, 1], 'r-')
+        pb.plot(Xpredict[:, 0], Xpredict[:, 1], 'b-')
+        pb.axis('equal')
+        pb.title('WiFi Localization with Gaussian Processes')
+        pb.legend(('True Location', 'Predicted Location'))
+
+    sse = ((data['Xtest'] - Xpredict)**2).sum()
+
+    print m
+    print('Sum of squares error on test data: ' + str(sse))
+    return m
+
+def silhouette(max_iters=100, optimize=True, plot=True):
+    """Predict the pose of a figure given a silhouette. This is a task from Agarwal and Triggs 2004 ICML paper."""
+    data = GPy.util.datasets.silhouette()
+
+    # create simple GP Model
+    m = GPy.models.GPRegression(data['X'], data['Y'])
+
+    # optimize
+    if optimize:
+        m.optimize(messages=True, max_iters=max_iters)
+
+    print m
+    return m
+
+def sparse_GP_regression_1D(num_samples=400, num_inducing=5, max_iters=100, optimize=True, plot=True):
+    """Run a 1D example of a sparse GP regression."""
+    # sample inputs and outputs
+    X = np.random.uniform(-3., 3., (num_samples, 1))
+    Y = np.sin(X) + np.random.randn(num_samples, 1) * 0.05
+    # construct kernel
+    rbf = GPy.kern.rbf(1)
+    # create simple GP Model
+    m = GPy.models.SparseGPRegression(X, Y, kernel=rbf, num_inducing=num_inducing)
+    m.checkgrad(verbose=1)
+
+    if optimize:
+        m.optimize('tnc', messages=1, max_iters=max_iters)
+
+    if plot:
+        m.plot()
+
+    return m
+
+def sparse_GP_regression_2D(num_samples=400, num_inducing=50, max_iters=100, optimize=True, plot=True):
+    """Run a 2D example of a sparse GP regression."""
+    X = np.random.uniform(-3., 3., (num_samples, 2))
+    Y = np.sin(X[:, 0:1]) * np.sin(X[:, 1:2]) + np.random.randn(num_samples, 1) * 0.05
+
+    # construct kernel
+    rbf = GPy.kern.rbf(2)
+
+    # create simple GP Model
+    m = GPy.models.SparseGPRegression(X, Y, kernel=rbf, num_inducing=num_inducing)
     # initialise the lengthscales to something sensible (the inducing inputs are left free)
-    m.set('.*len',2.)
+    m['.*len'] = 2.
     m.checkgrad()

-    # optimize and plot
-    m.optimize('tnc', messages = 1, max_f_eval=optim_iters)
-    m.plot()
-    print(m)
+    # optimize
+    if optimize:
+        m.optimize('tnc', messages=1, max_iters=max_iters)
+
+    # plot
+    if plot:
+        m.plot()
+
+    print m
     return m

-def uncertain_inputs_sparse_regression(optim_iters=100):
+def uncertain_inputs_sparse_regression(max_iters=200, optimize=True, plot=True):
     """Run a 1D example of a sparse GP regression with uncertain inputs."""
-    fig, axes = pb.subplots(1,2,figsize=(12,5))
+    fig, axes = pb.subplots(1, 2, figsize=(12, 5))
     # sample inputs and outputs
-    S = np.ones((20,1))
-    X = np.random.uniform(-3.,3.,(20,1))
-    Y = np.sin(X)+np.random.randn(20,1)*0.05
-    #likelihood = GPy.likelihoods.Gaussian(Y)
-    Z = np.random.uniform(-3.,3.,(7,1))
+    S = np.ones((20, 1))
+    X = np.random.uniform(-3., 3., (20, 1))
+    Y = np.sin(X) + np.random.randn(20, 1) * 0.05
+    # likelihood = GPy.likelihoods.Gaussian(Y)
+    Z = np.random.uniform(-3., 3., (7, 1))

-    k = GPy.kern.rbf(1) + GPy.kern.white(1)
+    k = GPy.kern.rbf(1)
     # create simple GP Model - no input uncertainty on this one
     m = GPy.models.SparseGPRegression(X, Y, kernel=k, Z=Z)
-    m.optimize('scg', messages=1, max_f_eval=optim_iters)
-    m.plot(ax=axes[0])
-    axes[0].set_title('no input uncertainty')
+    if optimize:
+        m.optimize('scg', messages=1, max_iters=max_iters)
+
+    if plot:
+        m.plot(ax=axes[0])
+        axes[0].set_title('no input uncertainty')
+    print m

-    #the same Model with uncertainty
+    # the same Model with uncertainty
     m = GPy.models.SparseGPRegression(X, Y, kernel=k, Z=Z, X_variance=S)
-    m.optimize('scg', messages=1, max_f_eval=optim_iters)
-    m.plot(ax=axes[1])
-    axes[1].set_title('with input uncertainty')
-    print(m)
-
-    fig.canvas.draw()
+    if optimize:
+        m.optimize('scg', messages=1, max_iters=max_iters)
+    if plot:
+        m.plot(ax=axes[1])
+        axes[1].set_title('with input uncertainty')
+        fig.canvas.draw()
+    print m
     return m
diff --git a/GPy/examples/stochastic.py b/GPy/examples/stochastic.py
index 533904d5..c302ec7d 100644
--- a/GPy/examples/stochastic.py
+++ b/GPy/examples/stochastic.py
@@ -5,7 +5,7 @@
 import pylab as pb
 import numpy as np
import GPy -def toy_1d(): +def toy_1d(optimize=True, plot=True): N = 2000 M = 20 @@ -16,25 +16,22 @@ def toy_1d(): m = GPy.models.SVIGPRegression(X,Y, batchsize=10, Z=Z) m.constrain_bounded('noise_variance',1e-3,1e-1) + m.constrain_bounded('white_variance',1e-3,1e-1) m.param_steplength = 1e-4 - fig = pb.figure() - ax = fig.add_subplot(111) - def cb(): - ax.cla() - m.plot(ax=ax,Z_height=-3) - ax.set_ylim(-3,3) - fig.canvas.draw() + if plot: + fig = pb.figure() + ax = fig.add_subplot(111) + def cb(foo): + ax.cla() + m.plot(ax=ax,Z_height=-3) + ax.set_ylim(-3,3) + fig.canvas.draw() - m.optimize(500, callback=cb, callback_interval=1) + if optimize: + m.optimize(500, callback=cb, callback_interval=1) - m.plot_traces() + if plot: + m.plot_traces() return m - - - - - - - diff --git a/GPy/examples/tutorials.py b/GPy/examples/tutorials.py index 69fc2aaf..7825992d 100644 --- a/GPy/examples/tutorials.py +++ b/GPy/examples/tutorials.py @@ -11,7 +11,7 @@ pb.ion() import numpy as np import GPy -def tuto_GP_regression(): +def tuto_GP_regression(optimize=True, plot=True): """The detailed explanations of the commands used in this file can be found in the tutorial section""" X = np.random.uniform(-3.,3.,(20,1)) @@ -22,7 +22,8 @@ def tuto_GP_regression(): m = GPy.models.GPRegression(X, Y, kernel) print m - m.plot() + if plot: + m.plot() m.constrain_positive('') @@ -31,9 +32,9 @@ def tuto_GP_regression(): m.constrain_bounded('.*lengthscale',1.,10. ) m.constrain_fixed('.*noise',0.0025) - m.optimize() - - m.optimize_restarts(num_restarts = 10) + if optimize: + m.optimize() + m.optimize_restarts(num_restarts = 10) ####################################################### ####################################################### @@ -51,22 +52,26 @@ def tuto_GP_regression(): m.constrain_positive('') # optimize and plot - m.optimize('tnc', max_f_eval = 1000) - m.plot() - print(m) + if optimize: + m.optimize('tnc', max_f_eval = 1000) + if plot: + m.plot() + + print m return(m) -def tuto_kernel_overview(): +def tuto_kernel_overview(optimize=True, plot=True): """The detailed explanations of the commands used in this file can be found in the tutorial section""" ker1 = GPy.kern.rbf(1) # Equivalent to ker1 = GPy.kern.rbf(input_dim=1, variance=1., lengthscale=1.) ker2 = GPy.kern.rbf(input_dim=1, variance = .75, lengthscale=2.) ker3 = GPy.kern.rbf(1, .5, .5) - + print ker2 - ker1.plot() - ker2.plot() - ker3.plot() + if plot: + ker1.plot() + ker2.plot() + ker3.plot() k1 = GPy.kern.rbf(1,1.,2.) k2 = GPy.kern.Matern32(1, 0.5, 0.2) @@ -77,8 +82,8 @@ def tuto_kernel_overview(): # Sum of kernels k_add = k1.add(k2) # By default, tensor=False - k_addtens = k1.add(k2,tensor=True) - + k_addtens = k1.add(k2,tensor=True) + k1 = GPy.kern.rbf(1,1.,2) k2 = GPy.kern.periodic_Matern52(1,variance=1e3, lengthscale=1, period = 1.5, lower=-5., upper = 5) @@ -102,7 +107,7 @@ def tuto_kernel_overview(): k.unconstrain('white') k.constrain_bounded('white',lower=1e-5,upper=.5) print k - + k_cst = GPy.kern.bias(1,variance=1.) 
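     # The bias (constant) term above and the Matern52 term below are combined
     # by a tensor product: (cst + mat) x (cst + mat) expands into a constant,
     # two main-effect terms and an interaction term (an ANOVA-style kernel).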
k_mat = GPy.kern.Matern52(1,variance=1., lengthscale=3) Kanova = (k_cst + k_mat).prod(k_cst + k_mat,tensor=True) @@ -114,30 +119,32 @@ def tuto_kernel_overview(): # Create GP regression model m = GPy.models.GPRegression(X, Y, Kanova) - fig = pb.figure(figsize=(5,5)) - ax = fig.add_subplot(111) - m.plot(ax=ax) - - pb.figure(figsize=(20,3)) - pb.subplots_adjust(wspace=0.5) - axs = pb.subplot(1,5,1) - m.plot(ax=axs) - pb.subplot(1,5,2) - pb.ylabel("= ",rotation='horizontal',fontsize='30') - axs = pb.subplot(1,5,3) - m.plot(ax=axs, which_parts=[False,True,False,False]) - pb.ylabel("cst +",rotation='horizontal',fontsize='30') - axs = pb.subplot(1,5,4) - m.plot(ax=axs, which_parts=[False,False,True,False]) - pb.ylabel("+ ",rotation='horizontal',fontsize='30') - axs = pb.subplot(1,5,5) - pb.ylabel("+ ",rotation='horizontal',fontsize='30') - m.plot(ax=axs, which_parts=[False,False,False,True]) + + if plot: + fig = pb.figure(figsize=(5,5)) + ax = fig.add_subplot(111) + m.plot(ax=ax) + + pb.figure(figsize=(20,3)) + pb.subplots_adjust(wspace=0.5) + axs = pb.subplot(1,5,1) + m.plot(ax=axs) + pb.subplot(1,5,2) + pb.ylabel("= ",rotation='horizontal',fontsize='30') + axs = pb.subplot(1,5,3) + m.plot(ax=axs, which_parts=[False,True,False,False]) + pb.ylabel("cst +",rotation='horizontal',fontsize='30') + axs = pb.subplot(1,5,4) + m.plot(ax=axs, which_parts=[False,False,True,False]) + pb.ylabel("+ ",rotation='horizontal',fontsize='30') + axs = pb.subplot(1,5,5) + pb.ylabel("+ ",rotation='horizontal',fontsize='30') + m.plot(ax=axs, which_parts=[False,False,False,True]) return(m) -def model_interaction(): +def model_interaction(optimize=True, plot=True): X = np.random.randn(20,1) Y = np.sin(X) + np.random.randn(*X.shape)*0.01 + 5. k = GPy.kern.rbf(1) + GPy.kern.bias(1) diff --git a/GPy/gpy_config.cfg b/GPy/gpy_config.cfg new file mode 100644 index 00000000..d52edd28 --- /dev/null +++ b/GPy/gpy_config.cfg @@ -0,0 +1,7 @@ +# This is the configuration file for GPy + +[parallel] +# Enable openmp support. This speeds up some computations, depending on the number +# of cores available. Setting up a compiler with openmp support can be difficult on +# some platforms, hence this option. 
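+# Only set openmp=True if your compiler was set up with working OpenMP
+# support; otherwise leave it False.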
+openmp=False diff --git a/GPy/inference/conjugate_gradient_descent.py b/GPy/inference/conjugate_gradient_descent.py index 0f6603e5..9eabf5dd 100644 --- a/GPy/inference/conjugate_gradient_descent.py +++ b/GPy/inference/conjugate_gradient_descent.py @@ -233,7 +233,7 @@ class CGD(Async_Optimize): """ opt_async(self, f, df, x0, callback, update_rule=FletcherReeves, messages=0, maxiter=5e3, max_f_eval=15e3, gtol=1e-6, - report_every=10, *args, **kwargs) + report_every=10, \*args, \*\*kwargs) callback gets called every `report_every` iterations @@ -244,16 +244,14 @@ class CGD(Async_Optimize): f, and df will be called with - f(xi, *args, **kwargs) - df(xi, *args, **kwargs) + f(xi, \*args, \*\*kwargs) + df(xi, \*args, \*\*kwargs) - **returns** - ----------- + **Returns:** Started `Process` object, optimizing asynchronously - **calls** - --------- + **Calls:** callback(x_opt, f_opt, g_opt, iteration, function_calls, gradient_calls, status_message) @@ -265,7 +263,7 @@ class CGD(Async_Optimize): """ opt(self, f, df, x0, callback=None, update_rule=FletcherReeves, messages=0, maxiter=5e3, max_f_eval=15e3, gtol=1e-6, - report_every=10, *args, **kwargs) + report_every=10, \*args, \*\*kwargs) Minimize f, calling callback every `report_every` iterations with following syntax: @@ -276,11 +274,10 @@ class CGD(Async_Optimize): f, and df will be called with - f(xi, *args, **kwargs) - df(xi, *args, **kwargs) + f(xi, \*args, \*\*kwargs) + df(xi, \*args, \*\*kwargs) **returns** - --------- x_opt, f_opt, g_opt, iteration, function_calls, gradient_calls, status_message diff --git a/GPy/inference/optimization.py b/GPy/inference/optimization.py index 433d5f41..e65b862e 100644 --- a/GPy/inference/optimization.py +++ b/GPy/inference/optimization.py @@ -4,6 +4,7 @@ import pylab as pb import datetime as dt from scipy import optimize +from warnings import warn try: import rasmussens_minimize as rasm @@ -28,7 +29,7 @@ class Optimizer(): """ def __init__(self, x_init, messages=False, model=None, max_f_eval=1e4, max_iters=1e3, - ftol=None, gtol=None, xtol=None): + ftol=None, gtol=None, xtol=None, bfgs_factor=None): self.opt_name = None self.x_init = x_init self.messages = messages @@ -38,6 +39,7 @@ class Optimizer(): self.status = None self.max_f_eval = int(max_f_eval) self.max_iters = int(max_iters) + self.bfgs_factor = bfgs_factor self.trace = None self.time = "Not available" self.xtol = xtol @@ -127,9 +129,11 @@ class opt_lbfgsb(Optimizer): print "WARNING: l-bfgs-b doesn't have an ftol arg, so I'm going to ignore it" if self.gtol is not None: opt_dict['pgtol'] = self.gtol + if self.bfgs_factor is not None: + opt_dict['factr'] = self.bfgs_factor opt_result = optimize.fmin_l_bfgs_b(f_fp, self.x_init, iprint=iprint, - maxfun=self.max_f_eval, **opt_dict) + maxfun=self.max_iters, **opt_dict) self.x_opt = opt_result[0] self.f_opt = f_fp(self.x_opt)[0] self.funct_eval = opt_result[2]['funcalls'] @@ -198,17 +202,22 @@ class opt_rasm(Optimizer): class opt_SCG(Optimizer): def __init__(self, *args, **kwargs): + if 'max_f_eval' in kwargs: + warn("max_f_eval deprecated for SCG optimizer: use max_iters instead!\nIgnoring max_f_eval!", FutureWarning) Optimizer.__init__(self, *args, **kwargs) + self.opt_name = "Scaled Conjugate Gradients" def opt(self, f_fp=None, f=None, fp=None): assert not f is None assert not fp is None + opt_result = SCG(f, fp, self.x_init, display=self.messages, maxiters=self.max_iters, max_f_eval=self.max_f_eval, xtol=self.xtol, ftol=self.ftol, gtol=self.gtol) + self.x_opt = opt_result[0] self.trace = 
opt_result[1] self.f_opt = self.trace[-1] diff --git a/GPy/inference/scg.py b/GPy/inference/scg.py index 5753be7f..252f348e 100644 --- a/GPy/inference/scg.py +++ b/GPy/inference/scg.py @@ -26,13 +26,16 @@ import numpy as np import sys -def print_out(len_maxiters, display, fnow, current_grad, beta, iteration): - if display: - print '\r', - print '{0:>0{mi}g} {1:> 12e} {2:> 12e} {3:> 12e}'.format(iteration, float(fnow), float(beta), float(current_grad), mi=len_maxiters), # print 'Iteration:', iteration, ' Objective:', fnow, ' Scale:', beta, '\r', - sys.stdout.flush() +def print_out(len_maxiters, fnow, current_grad, beta, iteration): + print '\r', + print '{0:>0{mi}g} {1:> 12e} {2:> 12e} {3:> 12e}'.format(iteration, float(fnow), float(beta), float(current_grad), mi=len_maxiters), # print 'Iteration:', iteration, ' Objective:', fnow, ' Scale:', beta, '\r', + sys.stdout.flush() -def SCG(f, gradf, x, optargs=(), maxiters=500, max_f_eval=500, display=True, xtol=None, ftol=None, gtol=None): +def exponents(fnow, current_grad): + exps = [np.abs(fnow), current_grad] + return np.sign(exps) * np.log10(exps).astype(int) + +def SCG(f, gradf, x, optargs=(), maxiters=500, max_f_eval=np.inf, display=True, xtol=None, ftol=None, gtol=None): """ Optimisation through Scaled Conjugate Gradients (SCG) @@ -52,11 +55,14 @@ def SCG(f, gradf, x, optargs=(), maxiters=500, max_f_eval=500, display=True, xto ftol = 1e-6 if gtol is None: gtol = 1e-5 + sigma0 = 1.0e-8 fold = f(x, *optargs) # Initial function value. function_eval = 1 fnow = fold gradnew = gradf(x, *optargs) # Initial gradient. + if any(np.isnan(gradnew)): + raise UnexpectedInfOrNan, "Gradient contribution resulted in a NaN value" current_grad = np.dot(gradnew, gradnew) gradold = gradnew.copy() d = -gradnew # Initial search direction. @@ -64,7 +70,7 @@ def SCG(f, gradf, x, optargs=(), maxiters=500, max_f_eval=500, display=True, xto nsuccess = 0 # nsuccess counts number of successes. beta = 1.0 # Initial scale parameter. betamin = 1.0e-60 # Lower bound on scale. - betamax = 1.0e100 # Upper bound on scale. + betamax = 1.0e50 # Upper bound on scale. status = "Not converged" flog = [fold] @@ -74,6 +80,8 @@ def SCG(f, gradf, x, optargs=(), maxiters=500, max_f_eval=500, display=True, xto len_maxiters = len(str(maxiters)) if display: print ' {0:{mi}s} {1:11s} {2:11s} {3:11s}'.format("I", "F", "Scale", "|g|", mi=len_maxiters) + exps = exponents(fnow, current_grad) + p_iter = iteration # Main optimization loop. 
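# A minimal calling sketch (illustrative only, not part of this module; assumes a simple
# quadratic objective following the f/gradf signatures described above):
#   x_opt, flog, n_evals, status = SCG(f=lambda w: np.sum(w**2),
#                                      gradf=lambda w: 2*w,
#                                      x=np.ones(3), display=False, maxiters=100)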
while iteration < maxiters: @@ -103,9 +111,9 @@ def SCG(f, gradf, x, optargs=(), maxiters=500, max_f_eval=500, display=True, xto fnew = f(xnew, *optargs) function_eval += 1 - if function_eval >= max_f_eval: - status = "Maximum number of function evaluations exceeded" - break +# if function_eval >= max_f_eval: +# status = "maximum number of function evaluations exceeded" +# break # return x, flog, function_eval, status Delta = 2.*(fnew - fold) / (alpha * mu) @@ -122,15 +130,28 @@ def SCG(f, gradf, x, optargs=(), maxiters=500, max_f_eval=500, display=True, xto flog.append(fnow) # Current function value iteration += 1 - print_out(len_maxiters, display, fnow, current_grad, beta, iteration) + if display: + print_out(len_maxiters, fnow, current_grad, beta, iteration) + n_exps = exponents(fnow, current_grad) + if iteration - p_iter >= 20 * np.random.rand(): + a = iteration >= p_iter * 2.78 + b = np.any(n_exps < exps) + if a or b: + p_iter = iteration + print '' + if b: + exps = n_exps if success: # Test for termination - if (np.max(np.abs(alpha * d)) < xtol) or (np.abs(fnew - fold) < ftol): - status = 'converged' + + if (np.abs(fnew - fold) < ftol): + status = 'converged - relative reduction in objective' break # return x, flog, function_eval, status - + elif (np.max(np.abs(alpha * d)) < xtol): + status = 'converged - relative stepsize' + break else: # Update variables for new position gradnew = gradf(x, *optargs) @@ -139,7 +160,7 @@ def SCG(f, gradf, x, optargs=(), maxiters=500, max_f_eval=500, display=True, xto fold = fnew # If the gradient is zero then we are done. if current_grad <= gtol: - status = 'converged' + status = 'converged - relative reduction in gradient' break # return x, flog, function_eval, status @@ -164,6 +185,7 @@ def SCG(f, gradf, x, optargs=(), maxiters=500, max_f_eval=500, display=True, xto status = "maxiter exceeded" if display: - print_out(len_maxiters, display, fnow, current_grad, beta, iteration) + print_out(len_maxiters, fnow, current_grad, beta, iteration) print "" + print status return x, flog, function_eval, status diff --git a/GPy/inference/sgd.py b/GPy/inference/sgd.py index e443f45a..5cd144e8 100644 --- a/GPy/inference/sgd.py +++ b/GPy/inference/sgd.py @@ -10,11 +10,10 @@ class opt_SGD(Optimizer): """ Optimize using stochastic gradient descent. - *** Parameters *** - Model: reference to the Model object - iterations: number of iterations - learning_rate: learning rate - momentum: momentum + :param Model: reference to the Model object + :param iterations: number of iterations + :param learning_rate: learning rate + :param momentum: momentum """ diff --git a/GPy/kern/__init__.py b/GPy/kern/__init__.py index 97c1d88f..eb4076c3 100644 --- a/GPy/kern/__init__.py +++ b/GPy/kern/__init__.py @@ -1,10 +1,9 @@ -# Copyright (c) 2012, GPy authors (see AUTHORS.txt). +# Copyright (c) 2012, 2013 GPy authors (see AUTHORS.txt). 
# Licensed under the BSD 3-clause license (see LICENSE.txt) - -from constructors import rbf, Matern32, Matern52, exponential, linear, white, bias, finite_dimensional, spline, Brownian, periodic_exponential, periodic_Matern32, periodic_Matern52, prod, symmetric, Coregionalise, rational_quadratic, Fixed, rbfcos, IndependentOutputs +from constructors import * try: from constructors import rbf_sympy, sympykern # these depend on sympy except: pass -from kern import kern +from kern import * diff --git a/GPy/kern/constructors.py b/GPy/kern/constructors.py index e2c21f15..05eaa028 100644 --- a/GPy/kern/constructors.py +++ b/GPy/kern/constructors.py @@ -1,33 +1,27 @@ # Copyright (c) 2012, GPy authors (see AUTHORS.txt). # Licensed under the BSD 3-clause license (see LICENSE.txt) - import numpy as np from kern import kern +import parts -from rbf import rbf as rbfpart -from white import white as whitepart -from linear import linear as linearpart -from exponential import exponential as exponentialpart -from Matern32 import Matern32 as Matern32part -from Matern52 import Matern52 as Matern52part -from bias import bias as biaspart -from fixed import Fixed as fixedpart -from finite_dimensional import finite_dimensional as finite_dimensionalpart -from spline import spline as splinepart -from Brownian import Brownian as Brownianpart -from periodic_exponential import periodic_exponential as periodic_exponentialpart -from periodic_Matern32 import periodic_Matern32 as periodic_Matern32part -from periodic_Matern52 import periodic_Matern52 as periodic_Matern52part -from prod import prod as prodpart -from symmetric import symmetric as symmetric_part -from coregionalise import Coregionalise as coregionalise_part -from rational_quadratic import rational_quadratic as rational_quadraticpart -from rbfcos import rbfcos as rbfcospart -from independent_outputs import IndependentOutputs as independent_output_part -#TODO these s=constructors are not as clean as we'd like. Tidy the code up -#using meta-classes to make the objects construct properly wthout them. +def rbf_inv(input_dim,variance=1., inv_lengthscale=None,ARD=False): + """ + Construct an RBF kernel + + :param input_dim: dimensionality of the kernel, obligatory + :type input_dim: int + :param variance: the variance of the kernel + :type variance: float + :param lengthscale: the lengthscale of the kernel + :type lengthscale: float + :param ARD: Auto Relevance Determination (one lengthscale per dimension) + :type ARD: Boolean + + """ + part = parts.rbf_inv.RBFInv(input_dim,variance,inv_lengthscale,ARD) + return kern(input_dim, [part]) def rbf(input_dim,variance=1., lengthscale=None,ARD=False): """ @@ -41,35 +35,122 @@ def rbf(input_dim,variance=1., lengthscale=None,ARD=False): :type lengthscale: float :param ARD: Auto Relevance Determination (one lengthscale per dimension) :type ARD: Boolean + """ - part = rbfpart(input_dim,variance,lengthscale,ARD) + part = parts.rbf.RBF(input_dim,variance,lengthscale,ARD) return kern(input_dim, [part]) def linear(input_dim,variances=None,ARD=False): """ Construct a linear kernel. 
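+
+    A minimal usage sketch, assuming the constructors in this module are exposed as GPy.kern::
+
+        import numpy as np, GPy
+        k = GPy.kern.linear(2, ARD=True)
+        K = k.K(np.random.randn(10, 2))  # a 10 x 10 covariance matrix
+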
- Arguments - --------- - input_dimD (int), obligatory - variances (np.ndarray) - ARD (boolean) + :param input_dim: dimensionality of the kernel, obligatory + :type input_dim: int + :param variances: the variances of the kernel + :type variances: np.ndarray + :param ARD: Auto Relevance Determination (one variance per dimension) + :type ARD: Boolean + """ - part = linearpart(input_dim,variances,ARD) + part = parts.linear.Linear(input_dim,variances,ARD) + return kern(input_dim, [part]) + +def mlp(input_dim,variance=1., weight_variance=None,bias_variance=100.,ARD=False): + """ + Construct an MLP kernel + + :param input_dim: dimensionality of the kernel, obligatory + :type input_dim: int + :param variance: the variance of the kernel + :type variance: float + :param weight_variance: vector of weight variances for the input weights of the neural network (length 1 if the kernel is isotropic) + :type weight_variance: np.ndarray + :param bias_variance: the variance of the biases in the neural network. + :type bias_variance: float + :param ARD: Auto Relevance Determination (allows for ARD version of covariance) + :type ARD: Boolean + + """ + part = parts.mlp.MLP(input_dim,variance,weight_variance,bias_variance,ARD) + return kern(input_dim, [part]) + +def gibbs(input_dim,variance=1., mapping=None): + """ + + Gibbs and MacKay non-stationary covariance function. + + .. math:: + + r = \\sqrt{((x_i - x_j)'*(x_i - x_j))} + + k(x_i, x_j) = \\sigma^2*Z*exp(-r^2/(l(x)*l(x) + l(x')*l(x'))) + + Z = \\sqrt{2*l(x)*l(x')/(l(x)*l(x) + l(x')*l(x'))} + + Where :math:`l(x)` is a function giving the length scale as a function of space. + + This is the non-stationary kernel proposed by Mark Gibbs in his 1997 + thesis. It is similar to an RBF but has a length scale that varies + with input location. This leads to an additional term in front of + the kernel. + + The parameters are :math:`\\sigma^2`, the process variance, and the parameters of l(x), which is a function that can be specified by the user; by default a multi-layer perceptron is used. + + :param input_dim: the number of input dimensions + :type input_dim: int + :param variance: the variance :math:`\\sigma^2` + :type variance: float + :param mapping: the mapping that gives the lengthscale across the input space. + :type mapping: GPy.core.Mapping + :param ARD: Auto Relevance Determination. If equal to "False", the kernel is isotropic (i.e. one weight variance parameter :math:`\\sigma^2_w`), otherwise there is one weight variance parameter per dimension. + :type ARD: Boolean + :rtype: Kernpart object + + """ + part = parts.gibbs.Gibbs(input_dim,variance,mapping) + return kern(input_dim, [part]) + +def hetero(input_dim, mapping=None, transform=None): + """ + """ + part = parts.hetero.Hetero(input_dim,mapping,transform) + return kern(input_dim, [part]) + +def poly(input_dim,variance=1., weight_variance=None,bias_variance=1.,degree=2, ARD=False): + """ + Construct a polynomial kernel + + :param input_dim: dimensionality of the kernel, obligatory + :type input_dim: int + :param variance: the variance of the kernel + :type variance: float + :param weight_variance: vector of weight variances for the input weights. + :type weight_variance: np.ndarray + :param bias_variance: the variance of the biases.
+ :type bias_variance: float + :param degree: the degree of the polynomial + :type degree: int + :param ARD: Auto Relevance Determination (allows for ARD version of covariance) + :type ARD: Boolean + + """ + part = parts.poly.POLY(input_dim,variance,weight_variance,bias_variance,degree,ARD) return kern(input_dim, [part]) def white(input_dim,variance=1.): """ Construct a white kernel. - Arguments - --------- - input_dimD (int), obligatory - variance (float) + :param input_dim: dimensionality of the kernel, obligatory + :type input_dim: int + :param variance: the variance of the kernel + :type variance: float + """ - part = whitepart(input_dim,variance) + part = parts.white.White(input_dim,variance) return kern(input_dim, [part]) + def exponential(input_dim,variance=1., lengthscale=None, ARD=False): """ Construct an exponential kernel @@ -82,8 +163,9 @@ def exponential(input_dim,variance=1., lengthscale=None, ARD=False): :type lengthscale: float :param ARD: Auto Relevance Determination (one lengthscale per dimension) :type ARD: Boolean + """ - part = exponentialpart(input_dim,variance, lengthscale, ARD) + part = parts.exponential.Exponential(input_dim,variance, lengthscale, ARD) return kern(input_dim, [part]) def Matern32(input_dim,variance=1., lengthscale=None, ARD=False): @@ -98,8 +180,9 @@ def Matern32(input_dim,variance=1., lengthscale=None, ARD=False): :type lengthscale: float :param ARD: Auto Relevance Determination (one lengthscale per dimension) :type ARD: Boolean + """ - part = Matern32part(input_dim,variance, lengthscale, ARD) + part = parts.Matern32.Matern32(input_dim,variance, lengthscale, ARD) return kern(input_dim, [part]) def Matern52(input_dim, variance=1., lengthscale=None, ARD=False): @@ -114,31 +197,38 @@ def Matern52(input_dim, variance=1., lengthscale=None, ARD=False): :type lengthscale: float :param ARD: Auto Relevance Determination (one lengthscale per dimension) :type ARD: Boolean + """ - part = Matern52part(input_dim, variance, lengthscale, ARD) + part = parts.Matern52.Matern52(input_dim, variance, lengthscale, ARD) return kern(input_dim, [part]) def bias(input_dim, variance=1.): """ Construct a bias kernel. - Arguments - --------- - input_dim (int), obligatory - variance (float) + :param input_dim: dimensionality of the kernel, obligatory + :type input_dim: int + :param variance: the variance of the kernel + :type variance: float + """ - part = biaspart(input_dim, variance) + part = parts.bias.Bias(input_dim, variance) return kern(input_dim, [part]) def finite_dimensional(input_dim, F, G, variances=1., weights=None): """ Construct a finite dimensional kernel. 
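+
+    A minimal sketch, assuming two 1-d basis functions with an identity Gram matrix::
+
+        F = np.array([lambda x: np.ones_like(x), lambda x: x])
+        G = np.eye(2)
+        k = GPy.kern.finite_dimensional(1, F, G)
+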
- input_dim: int - the number of input dimensions - F: np.array of functions with shape (n,) - the n basis functions - G: np.array with shape (n,n) - the Gram matrix associated to F - variances : np.ndarray with shape (n,) + + :param input_dim: the number of input dimensions + :type input_dim: int + :param F: np.array of functions with shape (n,) - the n basis functions + :type F: np.array + :param G: np.array with shape (n,n) - the Gram matrix associated to F + :type G: np.array + :param variances: np.ndarray with shape (n,) + :type variances: np.ndarray """ - part = finite_dimensionalpart(input_dim, F, G, variances, weights) + part = parts.finite_dimensional.FiniteDimensional(input_dim, F, G, variances, weights) return kern(input_dim, [part]) def spline(input_dim, variance=1.): @@ -149,8 +239,9 @@ :type input_dim: int :param variance: the variance of the kernel :type variance: float + """ - part = splinepart(input_dim, variance) + part = parts.spline.Spline(input_dim, variance) return kern(input_dim, [part]) def Brownian(input_dim, variance=1.): @@ -161,43 +252,111 @@ :type input_dim: int :param variance: the variance of the kernel :type variance: float + """ - part = Brownianpart(input_dim, variance) + part = parts.Brownian.Brownian(input_dim, variance) return kern(input_dim, [part]) try: import sympy as sp - from sympykern import spkern - from sympy.parsing.sympy_parser import parse_expr sympy_available = True except ImportError: sympy_available = False if sympy_available: + from parts.sympykern import spkern + from sympy.parsing.sympy_parser import parse_expr + from GPy.util import symbolic + def rbf_sympy(input_dim, ARD=False, variance=1., lengthscale=1.): """ Radial Basis Function covariance. """ - X = [sp.var('x%i' % i) for i in range(input_dim)] - Z = [sp.var('z%i' % i) for i in range(input_dim)] - rbf_variance = sp.var('rbf_variance',positive=True) + X = sp.symbols('x_:' + str(input_dim)) + Z = sp.symbols('z_:' + str(input_dim)) + variance = sp.var('variance',positive=True) if ARD: - rbf_lengthscales = [sp.var('rbf_lengthscale_%i' % i, positive=True) for i in range(input_dim)] - dist_string = ' + '.join(['(x%i-z%i)**2/rbf_lengthscale_%i**2' % (i, i, i) for i in range(input_dim)]) + lengthscales = sp.symbols('lengthscale_:' + str(input_dim)) + dist_string = ' + '.join(['(x_%i-z_%i)**2/lengthscale_%i**2' % (i, i, i) for i in range(input_dim)]) + dist = parse_expr(dist_string) - f = rbf_variance*sp.exp(-dist/2.) + f = variance*sp.exp(-dist/2.) else: - rbf_lengthscale = sp.var('rbf_lengthscale',positive=True) - dist_string = ' + '.join(['(x%i-z%i)**2' % (i, i) for i in range(input_dim)]) + lengthscale = sp.var('lengthscale',positive=True) + dist_string = ' + '.join(['(x_%i-z_%i)**2' % (i, i) for i in range(input_dim)]) dist = parse_expr(dist_string) - f = rbf_variance*sp.exp(-dist/(2*rbf_lengthscale**2)) - return kern(input_dim, [spkern(input_dim, f)]) + f = variance*sp.exp(-dist/(2*lengthscale**2)) + return kern(input_dim, [spkern(input_dim, f, name='rbf_sympy')]) - def sympykern(input_dim, k): + def eq_sympy(input_dim, output_dim, ARD=False): """ - A kernel from a symbolic sympy representation + Latent force model covariance, exponentiated quadratic with multiple outputs. Derived from a diffusion equation with the initial spatial condition laid down by a Gaussian process with lengthscale given by shared_lengthscale. + + See IEEE Trans Pattern Anal Mach Intell. 2013 Nov;35(11):2693-705. doi: 10.1109/TPAMI.2013.86. Linear latent force models using Gaussian processes. Alvarez MA, Luengo D, Lawrence ND. + + :param input_dim: Dimensionality of the kernel + :type input_dim: int + :param output_dim: number of outputs in the covariance function. + :type output_dim: int + :param ARD: whether or not to use ARD (default False). + :type ARD: bool + + """ + real_input_dim = input_dim + if output_dim>1: + real_input_dim -= 1 + X = sp.symbols('x_:' + str(real_input_dim)) + Z = sp.symbols('z_:' + str(real_input_dim)) + scale = sp.var('scale_i scale_j',positive=True) + if ARD: + lengthscales = [sp.var('lengthscale%i_i lengthscale%i_j' % (i, i), positive=True) for i in range(real_input_dim)] + shared_lengthscales = [sp.var('shared_lengthscale%i' % i, positive=True) for i in range(real_input_dim)] + dist_string = ' + '.join(['(x_%i-z_%i)**2/(shared_lengthscale%i**2 + lengthscale%i_i**2 + lengthscale%i_j**2)' % (i, i, i, i, i) for i in range(real_input_dim)]) + dist = parse_expr(dist_string) + f = scale_i*scale_j*sp.exp(-dist/2.) + else: + lengthscales = sp.var('lengthscale_i lengthscale_j',positive=True) + shared_lengthscale = sp.var('shared_lengthscale',positive=True) + dist_string = ' + '.join(['(x_%i-z_%i)**2' % (i, i) for i in range(real_input_dim)]) + dist = parse_expr(dist_string) + f = scale_i*scale_j*sp.exp(-dist/(2*(lengthscale_i**2 + lengthscale_j**2 + shared_lengthscale**2))) + return kern(input_dim, [spkern(input_dim, f, output_dim=output_dim, name='eq_sympy')]) + + def ode1_eq(output_dim=1): + """ + Latent force model covariance, first order differential + equation driven by exponentiated quadratic. + + See N. D. Lawrence, G. Sanguinetti and M. Rattray. (2007) + 'Modelling transcriptional regulation using Gaussian + processes' in B. Schoelkopf, J. C. Platt and T. Hofmann (eds) + Advances in Neural Information Processing Systems, MIT Press, + Cambridge, MA, pp 785--792. + + :param output_dim: number of outputs in the covariance function. + :type output_dim: int + """ + input_dim = 2 + x_0, z_0, decay_i, decay_j, scale_i, scale_j, lengthscale = sp.symbols('x_0, z_0, decay_i, decay_j, scale_i, scale_j, lengthscale') + f = scale_i*scale_j*(symbolic.h(x_0, z_0, decay_i, decay_j, lengthscale) + + symbolic.h(z_0, x_0, decay_j, decay_i, lengthscale)) + return kern(input_dim, [spkern(input_dim, f, output_dim=output_dim, name='ode1_eq')]) + + def sympykern(input_dim, k=None, output_dim=1, name=None, param=None): + """ + A base kernel object, where all the hard work is done by sympy. + + :param k: the covariance function + :type k: a positive definite sympy function of x1, z1, x2, z2... + + To construct a new sympy kernel, you'll need to define: + - a kernel function using a sympy object. Ensure that the kernel is of the form k(x,z). + - that's it! we'll extract the variables from the function k. + + Note: + - to handle multiple inputs, call them x1, z1, etc + - to handle multiple correlated outputs, you'll need to define each covariance function and 'cross' variance function. TODO + """ + return kern(input_dim, [spkern(input_dim, k=k, output_dim=output_dim, name=name, param=param)]) del sympy_available def periodic_exponential(input_dim=1, variance=1., lengthscale=None, period=2 * np.pi, n_freq=10, lower=0., upper=4 * np.pi): @@ -214,8 +373,9 @@ def periodic_exponential(input_dim=1, variance=1., lengthscale=None, period=2 * :type period: float :param n_freq: the number of frequencies considered for the periodic subspace :type n_freq: int + """ - part = periodic_exponentialpart(input_dim, variance, lengthscale, period, n_freq, lower, upper) + part = parts.periodic_exponential.PeriodicExponential(input_dim, variance, lengthscale, period, n_freq, lower, upper) return kern(input_dim, [part]) def periodic_Matern32(input_dim, variance=1., lengthscale=None, period=2 * np.pi, n_freq=10, lower=0., upper=4 * np.pi): @@ -232,8 +392,9 @@ def periodic_Matern32(input_dim, variance=1., lengthscale=None, period=2 * np.pi :type period: float :param n_freq: the number of frequencies considered for the periodic subspace :type n_freq: int + """ - part = periodic_Matern32part(input_dim, variance, lengthscale, period, n_freq, lower, upper) + part = parts.periodic_Matern32.PeriodicMatern32(input_dim, variance, lengthscale, period, n_freq, lower, upper) return kern(input_dim, [part]) def periodic_Matern52(input_dim, variance=1., lengthscale=None, period=2 * np.pi, n_freq=10, lower=0., upper=4 * np.pi): @@ -250,8 +411,9 @@ def periodic_Matern52(input_dim, variance=1., lengthscale=None, period=2 * np.pi :type period: float :param n_freq: the number of frequencies considered for the periodic subspace :type n_freq: int + """ - part = periodic_Matern52part(input_dim, variance, lengthscale, period, n_freq, lower, upper) + part = parts.periodic_Matern52.PeriodicMatern52(input_dim, variance, lengthscale, period, n_freq, lower, upper) return kern(input_dim, [part]) def prod(k1,k2,tensor=False): @@ -260,21 +422,60 @@ :param k1, k2: the kernels to multiply :type k1, k2: kernpart + :param tensor: the kernels are either multiplied as functions defined on the same input space (default) or on the tensor product of the input spaces + :type tensor: Boolean :rtype: kernel object + """ - part = prodpart(k1,k2,tensor) + part = parts.prod.Prod(k1, k2, tensor) return kern(part.input_dim, [part]) def symmetric(k): """ - Construct a symmetrical kernel from an existing kernel + Construct a symmetric kernel from an existing kernel + + The symmetric kernel works by adding two GP functions together, and computing the overall covariance. + + Let f ~ GP(x | 0, k(x, x')). Now let g = f(x) + f(-x). + + It's easy to see that g is a symmetric function: g(x) = g(-x). + + By construction, g is a Gaussian process with mean 0 and covariance + + k(x, x') + k(-x, x') + k(x, -x') + k(-x, -x') + + This constructor builds a covariance function of this form from the initial kernel. """ k_ = k.copy() - k_.parts = [symmetric_part(p) for p in k.parts] + k_.parts = [parts.symmetric.Symmetric(p) for p in k.parts] return k_ -def Coregionalise(Nout,R=1, W=None, kappa=None): - p = coregionalise_part(Nout,R,W,kappa) +def coregionalize(output_dim,rank=1, W=None, kappa=None): + """ + Coregionalization matrix B, of the form: + + .. math:: + \mathbf{B} = \mathbf{W}\mathbf{W}^\top + \kappa \mathbf{I} + + An intrinsic/linear coregionalization kernel of the form: + + .. math:: + k_2(x, y)=\mathbf{B} k(x, y) + + It is obtained as the tensor product between a kernel k(x,y) and B.
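+
+    A minimal sketch, using the tensor-product operator ``**`` on kern objects (a shortcut for prod with tensor=True)::
+
+        B = GPy.kern.coregionalize(output_dim=2, rank=1)
+        k = GPy.kern.rbf(1)**B  # intrinsic coregionalization model over 2 outputs
+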
+ + :param output_dim: the number of outputs to corregionalize + :type output_dim: int + :param rank: number of columns of the W matrix (this parameter is ignored if parameter W is not None) + :type rank: int + :param W: a low rank matrix that determines the correlations between the different outputs, together with kappa it forms the coregionalization matrix B + :type W: numpy array of dimensionality (num_outpus, rank) + :param kappa: a vector which allows the outputs to behave independently + :type kappa: numpy array of dimensionality (output_dim,) + :rtype: kernel object + + """ + p = parts.coregionalize.Coregionalize(output_dim,rank,W,kappa) return kern(1,[p]) @@ -291,36 +492,111 @@ def rational_quadratic(input_dim, variance=1., lengthscale=1., power=1.): :rtype: kern object """ - part = rational_quadraticpart(input_dim, variance, lengthscale, power) + part = parts.rational_quadratic.RationalQuadratic(input_dim, variance, lengthscale, power) return kern(input_dim, [part]) -def Fixed(input_dim, K, variance=1.): +def fixed(input_dim, K, variance=1.): """ Construct a Fixed effect kernel. - Arguments - --------- - input_dim (int), obligatory - K (np.array), obligatory - variance (float) + :param input_dim: the number of input dimensions + :type input_dim: int (input_dim=1 is the only value currently supported) + :param K: the variance :math:`\sigma^2` + :type K: np.array + :param variance: kernel variance + :type variance: float + :rtype: kern object """ - part = fixedpart(input_dim, K, variance) + part = parts.fixed.Fixed(input_dim, K, variance) return kern(input_dim, [part]) def rbfcos(input_dim, variance=1., frequencies=None, bandwidths=None, ARD=False): """ construct a rbfcos kernel """ - part = rbfcospart(input_dim, variance, frequencies, bandwidths, ARD) + part = parts.rbfcos.RBFCos(input_dim, variance, frequencies, bandwidths, ARD) return kern(input_dim, [part]) -def IndependentOutputs(k): +def independent_outputs(k): """ Construct a kernel with independent outputs from an existing kernel """ for sl in k.input_slices: assert (sl.start is None) and (sl.stop is None), "cannot adjust input slices! (TODO)" - parts = [independent_output_part(p) for p in k.parts] - return kern(k.input_dim+1,parts) + _parts = [parts.independent_outputs.IndependentOutputs(p) for p in k.parts] + return kern(k.input_dim+1,_parts) +def hierarchical(k): + """ + TODO This can't be right! Construct a kernel with independent outputs from an existing kernel + """ + # for sl in k.input_slices: + # assert (sl.start is None) and (sl.stop is None), "cannot adjust input slices! 
(TODO)" + _parts = [parts.hierarchical.Hierarchical(k.parts)] + return kern(k.input_dim+len(k.parts),_parts) +def build_lcm(input_dim, output_dim, kernel_list = [], rank=1,W=None,kappa=None): + """ + Builds a kernel of a linear coregionalization model + + :param input_dim: input dimensionality + :param output_dim: number of outputs + :param kernel_list: list of coregionalized kernels; each element in the list will be multiplied by a different coregionalization matrix + :type kernel_list: list of GPy kernels + :param rank: number of columns of the coregionalization matrix parameter 'coregion_W' + :type rank: integer + + .. note:: the kernels' dimensionality is overwritten to fit input_dim + + """ + + for k in kernel_list: + if k.input_dim <> input_dim: + k.input_dim = input_dim + warnings.warn("kernel's input dimension overwritten to fit input_dim parameter.") + + k_coreg = coregionalize(output_dim,rank,W,kappa) + kernel = kernel_list[0]**k_coreg.copy() + + for k in kernel_list[1:]: + k_coreg = coregionalize(output_dim,rank,W,kappa) + kernel += k**k_coreg.copy() + + return kernel + +def ODE_1(input_dim=1, varianceU=1., varianceY=1., lengthscaleU=None, lengthscaleY=None): + """ + kernel resulting from a first order ODE with OU driving GP + + :param input_dim: the number of input dimensions, has to be equal to one + :type input_dim: int + :param varianceU: variance of the driving GP + :type varianceU: float + :param lengthscaleU: lengthscale of the driving GP + :type lengthscaleU: float + :param varianceY: 'variance' of the transfer function + :type varianceY: float + :param lengthscaleY: 'lengthscale' of the transfer function + :type lengthscaleY: float + :rtype: kernel object + + """ + part = parts.ODE_1.ODE_1(input_dim, varianceU, varianceY, lengthscaleU, lengthscaleY) + return kern(input_dim, [part]) + +def ODE_UY(input_dim=2, varianceU=1., varianceY=1., lengthscaleU=None, lengthscaleY=None): + """ + kernel resulting from a first order ODE with OU driving GP + :param input_dim: the number of input dimensions, has to be equal to two + :type input_dim: int + :param input_lengthU: the number of input U length + :param varianceU: variance of the driving GP + :type varianceU: float + :param varianceY: 'variance' of the transfer function + :type varianceY: float + :param lengthscaleY: 'lengthscale' of the transfer function + :type lengthscaleY: float + :rtype: kernel object + """ + part = parts.ODE_UY.ODE_UY(input_dim, varianceU, varianceY, lengthscaleU, lengthscaleY) + return kern(input_dim, [part]) diff --git a/GPy/kern/kern.py b/GPy/kern/kern.py index 76a0d99e..949df5ab 100644 --- a/GPy/kern/kern.py +++ b/GPy/kern/kern.py @@ -1,20 +1,26 @@ # Copyright (c) 2012, GPy authors (see AUTHORS.txt). # Licensed under the BSD 3-clause license (see LICENSE.txt) - +import sys import numpy as np import pylab as pb -from ..core.parameterised import Parameterised -from kernpart import Kernpart +from ..core.parameterized import Parameterized +from parts.kernpart import Kernpart import itertools -from prod import prod +from parts.prod import Prod as prod +from matplotlib.transforms import offset_copy -class kern(Parameterised): +class kern(Parameterized): def __init__(self, input_dim, parts=[], input_slices=None): """ - This is the main kernel class for GPy. It handles multiple (additive) kernel functions, and keeps track of variaous things like which parameters live where. + This is the main kernel class for GPy. It handles multiple + (additive) kernel functions, and keeps track of various things + like which parameters live where.
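+
+        A minimal sketch of the additive behaviour, assuming the usual GPy.kern constructors::
+
+            import numpy as np, GPy
+            k = GPy.kern.rbf(1) + GPy.kern.white(1)  # two parts, computed additively
+            K = k.K(np.random.randn(5, 1))
+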
- The technical code for kernels is divided into _parts_ (see e.g. rbf.py). This obnject contains a list of parts, which are computed additively. For multiplication, special _prod_ parts are used. + The technical code for kernels is divided into _parts_ (see + e.g. rbf.py). This object contains a list of parts, which are + computed additively. For multiplication, special _prod_ parts + are used. :param input_dim: The dimensionality of the kernel's input space :type input_dim: int @@ -25,11 +31,16 @@ class kern(Parameterised): """ self.parts = parts - self.Nparts = len(parts) + self.num_parts = len(parts) self.num_params = sum([p.num_params for p in self.parts]) self.input_dim = input_dim + part_names = [k.name for k in self.parts] + self.name='' + for name in part_names: + self.name += name + '+' + self.name = self.name[:-1] # deal with input_slices if input_slices is None: self.input_slices = [slice(None) for p in self.parts] @@ -42,29 +53,111 @@ class kern(Parameterised): self.compute_param_slices() - Parameterised.__init__(self) + Parameterized.__init__(self) + + def getstate(self): + """ + Get the current state of the class, + here just all the indices, rest can get recomputed + """ + return Parameterized.getstate(self) + [self.parts, + self.num_parts, + self.num_params, + self.input_dim, + self.input_slices, + self.param_slices + ] + + def setstate(self, state): + self.param_slices = state.pop() + self.input_slices = state.pop() + self.input_dim = state.pop() + self.num_params = state.pop() + self.num_parts = state.pop() + self.parts = state.pop() + Parameterized.setstate(self, state) - def plot_ARD(self, fignum=None, ax=None): - """If an ARD kernel is present, it bar-plots the ARD parameters""" + def plot_ARD(self, fignum=None, ax=None, title='', legend=False): + """If an ARD kernel is present, plot a bar representation using matplotlib + + :param fignum: figure number of the plot + :param ax: matplotlib axis to plot on + :param title: + title of the plot, + pass '' to not print a title + pass None for a generic title + """ if ax is None: fig = pb.figure(fignum) ax = fig.add_subplot(111) + else: + fig = ax.figure + from GPy.util import Tango + from matplotlib.textpath import TextPath + Tango.reset() + xticklabels = [] + bars = [] + x0 = 0 for p in self.parts: + c = Tango.nextMedium() if hasattr(p, 'ARD') and p.ARD: - ax.set_title('ARD parameters, %s kernel' % p.name) - + if title is None: + ax.set_title('ARD parameters, %s kernel' % p.name) + else: + ax.set_title(title) if p.name == 'linear': ard_params = p.variances else: ard_params = 1. / p.lengthscale - ax.bar(np.arange(len(ard_params)) - 0.4, ard_params) - ax.set_xticks(np.arange(len(ard_params))) - ax.set_xticklabels([r"${}$".format(i) for i in range(len(ard_params))]) + x = np.arange(x0, x0 + len(ard_params)) + bars.append(ax.bar(x, ard_params, align='center', color=c, edgecolor='k', linewidth=1.2, label=p.name)) + xticklabels.extend([r"$\mathrm{{{name}}}\ {x}$".format(name=p.name, x=i) for i in np.arange(len(ard_params))]) + x0 += len(ard_params) + x = np.arange(x0) + transOffset = offset_copy(ax.transData, fig=fig, + x=0., y= -2., units='points') + transOffsetUp = offset_copy(ax.transData, fig=fig, + x=0., y=1., units='points') + for bar in bars: + for patch, num in zip(bar.patches, np.arange(len(bar.patches))): + height = patch.get_height() + xi = patch.get_x() + patch.get_width() / 2. 
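+                    # Label placement for each bar (descriptive note): the index is drawn
+                    # inside the bar in white when the bar is tall enough, otherwise just
+                    # above the bar in black, using the offset transforms built above.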
+ va = 'top' + c = 'w' + t = TextPath((0, 0), "${xi}$".format(xi=xi), rotation=0, usetex=True, ha='center') + transform = transOffset + if patch.get_extents().height <= t.get_extents().height + 3: + va = 'bottom' + c = 'k' + transform = transOffsetUp + ax.text(xi, height, "${xi}$".format(xi=int(num)), color=c, rotation=0, ha='center', va=va, transform=transform) + # for xi, t in zip(x, xticklabels): + # ax.text(xi, maxi / 2, t, rotation=90, ha='center', va='center') + # ax.set_xticklabels(xticklabels, rotation=17) + ax.set_xticks([]) + ax.set_xlim(-.5, x0 - .5) + if legend: + if title is '': + mode = 'expand' + if len(bars) > 1: + mode = 'expand' + ax.legend(bbox_to_anchor=(0., 1.02, 1., 1.02), loc=3, + ncol=len(bars), mode=mode, borderaxespad=0.) + fig.tight_layout(rect=(0, 0, 1, .9)) + else: + ax.legend() return ax def _transform_gradients(self, g): + """ + Apply the transformations of the kernel so that the returned vector + represents the gradient in the transformed space (i.e. that given by + get_params_transformed()) + + :param g: the gradient vector for the current model, usually created by dK_dtheta + """ x = self._get_params() [np.put(x, i, x * t.gradfactor(x[i])) for i, t in zip(self.constrained_indices, self.constraints)] [np.put(g, i, v) for i, v in [(t[0], np.sum(g[t])) for t in self.tied_indices]] @@ -75,7 +168,9 @@ class kern(Parameterised): return g def compute_param_slices(self): - """create a set of slices that can index the parameters of each part""" + """ + Create a set of slices that can index the parameters of each part. + """ self.param_slices = [] count = 0 for p in self.parts: @@ -83,16 +178,23 @@ class kern(Parameterised): count += p.num_params def __add__(self, other): - """ - Shortcut for `add`. - """ + """ Overloading of the '+' operator. for more control, see self.add """ return self.add(other) def add(self, other, tensor=False): """ - Add another kernel to this one. Both kernels are defined on the same _space_ + Add another kernel to this one. + + If Tensor is False, both kernels are defined on the same _space_. then + the created kernel will have the same number of inputs as self and + other (which must be the same). + + If Tensor is True, then the dimensions are stacked 'horizontally', so + that the resulting kernel has self.input_dim + other.input_dim + :param other: the other kernel to be added :type other: GPy.kern + """ if tensor: D = self.input_dim + other.input_dim @@ -121,16 +223,24 @@ class kern(Parameterised): return newkern def __mul__(self, other): - """ - Shortcut for `prod`. - """ + """ Here we overload the '*' operator. See self.prod for more information""" return self.prod(other) + def __pow__(self, other, tensor=False): + """ + Shortcut for tensor `prod`. + """ + return self.prod(other, tensor=True) + def prod(self, other, tensor=False): """ - multiply two kernels (either on the same space, or on the tensor product of the input space) + Multiply two kernels (either on the same space, or on the tensor product of the input space). + :param other: the other kernel to be added :type other: GPy.kern + :param tensor: whether or not to use the tensor space (default is false). 
+ :type tensor: bool + """ K1 = self.copy() K2 = other.copy() @@ -199,7 +309,7 @@ class kern(Parameterised): [p._set_params(x[s]) for p, s in zip(self.parts, self.param_slices)] def _get_param_names(self): - # this is a bit nasty: we wat to distinguish between parts with the same name by appending a count + # this is a bit nasty: we want to distinguish between parts with the same name by appending a count part_names = np.array([k.name for k in self.parts], dtype=np.str) counts = [np.sum(part_names == ni) for i, ni in enumerate(part_names)] cum_counts = [np.sum(part_names[i:] == ni) for i, ni in enumerate(part_names)] @@ -208,8 +318,19 @@ return sum([[name + '_' + n for n in k._get_param_names()] for name, k in zip(names, self.parts)], []) def K(self, X, X2=None, which_parts='all'): + """ + Compute the kernel function. + + :param X: the first set of inputs to the kernel + :param X2: (optional) the second set of arguments to the kernel. If X2 + is None, this is passed through to the 'part' object, which + handles this as X2 == X. + :param which_parts: a list of booleans detailing whether to include + each of the part functions. By default, 'all' + indicates [True]*self.num_parts + """ if which_parts == 'all': - which_parts = [True] * self.Nparts + which_parts = [True] * self.num_parts assert X.shape[1] == self.input_dim if X2 is None: target = np.zeros((X.shape[0], X.shape[0])) @@ -221,12 +342,16 @@ def dK_dtheta(self, dL_dK, X, X2=None): """ - :param dL_dK: An array of dL_dK derivaties, dL_dK - :type dL_dK: Np.ndarray (N x num_inducing) + Compute the gradient of the covariance function with respect to the parameters. + + :param dL_dK: An array of gradients of the objective function with respect to the covariance function. + :type dL_dK: np.ndarray (num_samples x num_inducing) :param X: Observed data inputs - :type X: np.ndarray (N x input_dim) - :param X2: Observed dara inputs (optional, defaults to X) + :type X: np.ndarray (num_samples x input_dim) + :param X2: Observed data inputs (optional, defaults to X) :type X2: np.ndarray (num_inducing x input_dim) + + :returns: dL_dtheta """ assert X.shape[1] == self.input_dim target = np.zeros(self.num_params) @@ -238,24 +363,33 @@ return self._transform_gradients(target) def dK_dX(self, dL_dK, X, X2=None): - if X2 is None: - X2 = X + """Compute the gradient of the objective function with respect to X. + + :param dL_dK: An array of gradients of the objective function with respect to the covariance function.
+ :type dL_dK: np.ndarray (num_samples x num_inducing) + :param X: Observed data inputs + :type X: np.ndarray (num_samples x input_dim) + :param X2: Observed data inputs (optional, defaults to X) + :type X2: np.ndarray (num_inducing x input_dim)""" + target = np.zeros_like(X) - if X2 is None: + if X2 is None: [p.dK_dX(dL_dK, X[:, i_s], None, target[:, i_s]) for p, i_s in zip(self.parts, self.input_slices)] else: [p.dK_dX(dL_dK, X[:, i_s], X2[:, i_s], target[:, i_s]) for p, i_s in zip(self.parts, self.input_slices)] return target def Kdiag(self, X, which_parts='all'): + """Compute the diagonal of the covariance function for inputs X.""" if which_parts == 'all': - which_parts = [True] * self.Nparts + which_parts = [True] * self.num_parts assert X.shape[1] == self.input_dim target = np.zeros(X.shape[0]) [p.Kdiag(X[:, i_s], target=target) for p, i_s, part_on in zip(self.parts, self.input_slices, which_parts) if part_on] return target def dKdiag_dtheta(self, dL_dKdiag, X): + """Compute the gradient of the diagonal of the covariance function with respect to the parameters.""" assert X.shape[1] == self.input_dim assert dL_dKdiag.size == X.shape[0] target = np.zeros(self.num_params) @@ -278,6 +412,9 @@ class kern(Parameterised): [p.dpsi0_dtheta(dL_dpsi0, Z[:, i_s], mu[:, i_s], S[:, i_s], target[ps]) for p, ps, i_s in zip(self.parts, self.param_slices, self.input_slices)] return self._transform_gradients(target) + def dpsi0_dZ(self, dL_dpsi0, Z, mu, S): + return np.zeros_like(Z) + def dpsi0_dmuS(self, dL_dpsi0, Z, mu, S): target_mu, target_S = np.zeros_like(mu), np.zeros_like(S) [p.dpsi0_dmuS(dL_dpsi0, Z[:, i_s], mu[:, i_s], S[:, i_s], target_mu[:, i_s], target_S[:, i_s]) for p, i_s in zip(self.parts, self.input_slices)] @@ -299,90 +436,253 @@ class kern(Parameterised): return target def dpsi1_dmuS(self, dL_dpsi1, Z, mu, S): - """return shapes are N,num_inducing,input_dim""" + """return shapes are num_samples,num_inducing,input_dim""" target_mu, target_S = np.zeros((2, mu.shape[0], mu.shape[1])) [p.dpsi1_dmuS(dL_dpsi1, Z[:, i_s], mu[:, i_s], S[:, i_s], target_mu[:, i_s], target_S[:, i_s]) for p, i_s in zip(self.parts, self.input_slices)] return target_mu, target_S def psi2(self, Z, mu, S): """ - :param Z: np.ndarray of inducing inputs (num_inducing x input_dim) - :param mu, S: np.ndarrays of means and variances (each N x input_dim) - :returns psi2: np.ndarray (N,num_inducing,num_inducing) + :param Z: np.ndarray of inducing inputs (M x Q) + :param mu, S: np.ndarrays of means and variances (each N x Q) + :returns psi2: np.ndarray (N,M,M) """ target = np.zeros((mu.shape[0], Z.shape[0], Z.shape[0])) [p.psi2(Z[:, i_s], mu[:, i_s], S[:, i_s], target) for p, i_s in zip(self.parts, self.input_slices)] # compute the "cross" terms # TODO: input_slices needed - crossterms = 0 + from parts.white import White + from parts.rbf import RBF + from parts.rbf_inv import RBFInv + from parts.bias import Bias + from parts.linear import Linear + from parts.fixed import Fixed - for p1, p2 in itertools.combinations(self.parts, 2): + for (p1, i1), (p2, i2) in itertools.combinations(itertools.izip(self.parts, self.input_slices), 2): + # white doesn;t combine with anything + if isinstance(p1, White) or isinstance(p2, White): + pass + # rbf X bias + elif isinstance(p1, (Bias, Fixed)) and isinstance(p2, (RBF, RBFInv)): + target += p1.variance * (p2._psi1[:, :, None] + p2._psi1[:, None, :]) + elif isinstance(p2, (Bias, Fixed)) and isinstance(p1, (RBF, RBFInv)): + target += p2.variance * (p1._psi1[:, :, None] + p1._psi1[:, None, 
:]) + # linear X bias + elif isinstance(p1, (Bias, Fixed)) and isinstance(p2, (Linear, RBF, RBFInv)): + tmp = np.zeros((mu.shape[0], Z.shape[0])) + p2.psi1(Z, mu, S, tmp) + target += p1.variance * (tmp[:, :, None] + tmp[:, None, :]) + elif isinstance(p2, (Bias, Fixed)) and isinstance(p1, (Linear, RBF, RBFInv)): + tmp = np.zeros((mu.shape[0], Z.shape[0])) + p1.psi1(Z, mu, S, tmp) + target += p2.variance * (tmp[:, :, None] + tmp[:, None, :]) + # rbf X any + elif False:#isinstance(p1, (RBF, RBFInv)) or isinstance(p2, (RBF, RBFInv)): + if isinstance(p2, (RBF, RBFInv)) and not isinstance(p1, (RBF, RBFInv)): + p1t = p1; p1 = p2; p2 = p1t; del p1t + N, M = mu.shape[0], Z.shape[0]; NM=N*M + psi11 = np.zeros((N, M)) + psi12 = np.zeros((NM, M)) + p1.psi1(Z, mu, S, psi11) + Mu, Sigma = p1._crossterm_mu_S(Z, mu, S) + Mu, Sigma = Mu.reshape(NM,self.input_dim), Sigma.reshape(NM,self.input_dim) - # TODO psi1 this must be faster/better/precached/more nice - tmp1 = np.zeros((mu.shape[0], Z.shape[0])) - p1.psi1(Z, mu, S, tmp1) - tmp2 = np.zeros((mu.shape[0], Z.shape[0])) - p2.psi1(Z, mu, S, tmp2) - - prod = np.multiply(tmp1, tmp2) - crossterms += prod[:, :, None] + prod[:, None, :] - - target += crossterms - return target + p2.psi1(Z, Mu, Sigma, psi12) + eK2 = psi12.reshape(N, M, M) + crossterms = eK2 * (psi11[:, :, None] + psi11[:, None, :]) + target += crossterms + else: + raise NotImplementedError, "psi2 cannot be computed for this kernel" + return target def dpsi2_dtheta(self, dL_dpsi2, Z, mu, S): target = np.zeros(self.num_params) [p.dpsi2_dtheta(dL_dpsi2, Z[:, i_s], mu[:, i_s], S[:, i_s], target[ps]) for p, i_s, ps in zip(self.parts, self.input_slices, self.param_slices)] + from parts.white import White + from parts.rbf import RBF + from parts.rbf_inv import RBFInv + from parts.bias import Bias + from parts.linear import Linear + from parts.fixed import Fixed + # compute the "cross" terms # TODO: better looping, input_slices - for i1, i2 in itertools.permutations(range(len(self.parts)), 2): + for i1, i2 in itertools.combinations(range(len(self.parts)), 2): p1, p2 = self.parts[i1], self.parts[i2] -# ipsl1, ipsl2 = self.input_slices[i1], self.input_slices[i2] - ps1, ps2 = self.param_slices[i1], self.param_slices[i2] + #ipsl1, ipsl2 = self.input_slices[i1], self.input_slices[i2] + ps1, ps2 = self.param_slices[i1], self.param_slices[i2] + if isinstance(p1, White) or isinstance(p2, White): + pass + # rbf X bias + elif isinstance(p1, (Bias, Fixed)) and isinstance(p2, (RBF, RBFInv)): + p2.dpsi1_dtheta(dL_dpsi2.sum(1) * p1.variance * 2., Z, mu, S, target[ps2]) + p1.dpsi1_dtheta(dL_dpsi2.sum(1) * p2._psi1 * 2., Z, mu, S, target[ps1]) + elif isinstance(p2, (Bias, Fixed)) and isinstance(p1, (RBF, RBFInv)): + p1.dpsi1_dtheta(dL_dpsi2.sum(1) * p2.variance * 2., Z, mu, S, target[ps1]) + p2.dpsi1_dtheta(dL_dpsi2.sum(1) * p1._psi1 * 2., Z, mu, S, target[ps2]) + # linear X bias + elif isinstance(p1, (Bias, Fixed)) and isinstance(p2, Linear): + p2.dpsi1_dtheta(dL_dpsi2.sum(1) * p1.variance * 2., Z, mu, S, target[ps2]) # [ps1]) + psi1 = np.zeros((mu.shape[0], Z.shape[0])) + p2.psi1(Z, mu, S, psi1) + p1.dpsi1_dtheta(dL_dpsi2.sum(1) * psi1 * 2., Z, mu, S, target[ps1]) + elif isinstance(p2, (Bias, Fixed)) and isinstance(p1, Linear): + p1.dpsi1_dtheta(dL_dpsi2.sum(1) * p2.variance * 2., Z, mu, S, target[ps1]) + psi1 = np.zeros((mu.shape[0], Z.shape[0])) + p1.psi1(Z, mu, S, psi1) + p2.dpsi1_dtheta(dL_dpsi2.sum(1) * psi1 * 2., Z, mu, S, target[ps2]) + # rbf X any + elif False:#isinstance(p1, (RBF, RBFInv)) or isinstance(p2, 
(RBF, RBFInv)): + if isinstance(p2, (RBF, RBFInv)) and not isinstance(p1, (RBF, RBFInv)): + # turn around to have rbf in front + p1, p2 = self.parts[i2], self.parts[i1] + ps1, ps2 = self.param_slices[i2], self.param_slices[i1] - tmp = np.zeros((mu.shape[0], Z.shape[0])) - p1.psi1(Z, mu, S, tmp) - p2.dpsi1_dtheta((tmp[:, None, :] * dL_dpsi2).sum(1) * 2., Z, mu, S, target[ps2]) + N, M = mu.shape[0], Z.shape[0]; NM=N*M + + psi11 = np.zeros((N, M)) + p1.psi1(Z, mu, S, psi11) + + Mu, Sigma = p1._crossterm_mu_S(Z, mu, S) + Mu, Sigma = Mu.reshape(NM,self.input_dim), Sigma.reshape(NM,self.input_dim) + + tmp1 = np.zeros_like(target[ps1]) + tmp2 = np.zeros_like(target[ps2]) +# for n in range(N): +# for m in range(M): +# for m_prime in range(M): +# p1.dpsi1_dtheta((dL_dpsi2[n:n+1,m:m+1,m_prime:m_prime+1]*psi12_t.reshape(N,M,M)[n:n+1,m:m+1,m_prime:m_prime+1])[0], Z[m:m+1], mu[n:n+1], S[n:n+1], tmp2)#Z[m_prime:m_prime+1], mu[n:n+1], S[n:n+1], tmp2) +# p1.dpsi1_dtheta((dL_dpsi2[n:n+1,m:m+1,m_prime:m_prime+1]*psi12_t.reshape(N,M,M)[n:n+1,m_prime:m_prime+1,m:m+1])[0], Z[m_prime:m_prime+1], mu[n:n+1], S[n:n+1], tmp2) +# Mu, Sigma= Mu.reshape(N,M,self.input_dim), Sigma.reshape(N,M,self.input_dim) +# p2.dpsi1_dtheta((dL_dpsi2[n:n+1,m:m+1,m_prime:m_prime+1]*(psi11[n:n+1,m_prime:m_prime+1]))[0], Z[m:m+1], Mu[n:n+1,m], Sigma[n:n+1,m], target[ps2]) +# p2.dpsi1_dtheta((dL_dpsi2[n:n+1,m:m+1,m_prime:m_prime+1]*(psi11[n:n+1,m:m+1]))[0], Z[m_prime:m_prime+1], Mu[n:n+1, m_prime], Sigma[n:n+1, m_prime], target[ps2])#Z[m_prime:m_prime+1], Mu[n+m:(n+m)+1], Sigma[n+m:(n+m)+1], target[ps2]) + + if isinstance(p1, RBF) and isinstance(p2, RBF): + psi12 = np.zeros((N, M)) + p2.psi1(Z, mu, S, psi12) + Mu2, Sigma2 = p2._crossterm_mu_S(Z, mu, S) + Mu2, Sigma2 = Mu2.reshape(NM,self.input_dim), Sigma2.reshape(NM,self.input_dim) + p1.dpsi1_dtheta((dL_dpsi2*(psi12[:,:,None] + psi12[:,None,:])).reshape(NM,M), Z, Mu2, Sigma2, tmp1) + pass + + if isinstance(p1, RBF) and isinstance(p2, Linear): + #import ipdb;ipdb.set_trace() + pass + + p2.dpsi1_dtheta((dL_dpsi2*(psi11[:,:,None] + psi11[:,None,:])).reshape(NM,M), Z, Mu, Sigma, tmp2) + + target[ps1] += tmp1 + target[ps2] += tmp2 + else: + raise NotImplementedError, "psi2 cannot be computed for this kernel" return self._transform_gradients(target) def dpsi2_dZ(self, dL_dpsi2, Z, mu, S): target = np.zeros_like(Z) [p.dpsi2_dZ(dL_dpsi2, Z[:, i_s], mu[:, i_s], S[:, i_s], target[:, i_s]) for p, i_s in zip(self.parts, self.input_slices)] - # target *= 2 + + from parts.white import White + from parts.rbf import RBF + from parts.rbf_inv import RBFInv + from parts.bias import Bias + from parts.linear import Linear + from parts.fixed import Fixed # compute the "cross" terms - # TODO: we need input_slices here. 
- for p1, p2 in itertools.permutations(self.parts, 2): - if p1.name == 'linear' and p2.name == 'linear': - raise NotImplementedError("We don't handle linear/linear cross-terms") - tmp = np.zeros((mu.shape[0], Z.shape[0])) - p1.psi1(Z, mu, S, tmp) - p2.dpsi1_dZ((tmp[:, None, :] * dL_dpsi2).sum(1), Z, mu, S, target) + # TODO: better looping, input_slices + for p1, p2 in itertools.combinations(self.parts, 2): + if isinstance(p1, White) or isinstance(p2, White): + pass + # rbf X bias + elif isinstance(p1, (Bias, Fixed)) and isinstance(p2, (RBF, RBFInv)): + p2.dpsi1_dZ(dL_dpsi2.sum(1) * p1.variance, Z, mu, S, target) + elif isinstance(p2, (Bias, Fixed)) and isinstance(p1, (RBF, RBFInv)): + p1.dpsi1_dZ(dL_dpsi2.sum(1) * p2.variance, Z, mu, S, target) + # linear X bias + elif isinstance(p1, (Bias, Fixed)) and isinstance(p2, Linear): + p2.dpsi1_dZ(dL_dpsi2.sum(1) * p1.variance, Z, mu, S, target) + elif isinstance(p2, (Bias, Fixed)) and isinstance(p1, Linear): + p1.dpsi1_dZ(dL_dpsi2.sum(1) * p2.variance, Z, mu, S, target) + # rbf X any + elif False:#isinstance(p1, (RBF, RBFInv)) or isinstance(p2, (RBF, RBFInv)): + if isinstance(p2, (RBF, RBFInv)) and not isinstance(p1, (RBF, RBFInv)): + p1t = p1; p1 = p2; p2 = p1t; del p1t + N, M = mu.shape[0], Z.shape[0]; NM=N*M + psi11 = np.zeros((N, M)) + psi12 = np.zeros((NM, M)) + #psi12_t = np.zeros((N,M)) + p1.psi1(Z, mu, S, psi11) + Mu, Sigma = p1._crossterm_mu_S(Z, mu, S) + Mu, Sigma = Mu.reshape(NM,self.input_dim), Sigma.reshape(NM,self.input_dim) + + p2.psi1(Z, Mu, Sigma, psi12) + tmp1 = np.zeros_like(target) + p1.dpsi1_dZ((dL_dpsi2*psi12.reshape(N,M,M)).sum(1), Z, mu, S, tmp1) + p1.dpsi1_dZ((dL_dpsi2*psi12.reshape(N,M,M)).sum(2), Z, mu, S, tmp1) + target += tmp1 + + #p2.dpsi1_dtheta((dL_dpsi2*(psi11[:,:,None] + psi11[:,None,:])).reshape(NM,M), Z, Mu, Sigma, target) + p2.dpsi1_dZ((dL_dpsi2*(psi11[:,:,None] + psi11[:,None,:])).reshape(NM,M), Z, Mu, Sigma, target) + else: + raise NotImplementedError, "psi2 cannot be computed for this kernel" return target * 2 def dpsi2_dmuS(self, dL_dpsi2, Z, mu, S): target_mu, target_S = np.zeros((2, mu.shape[0], mu.shape[1])) [p.dpsi2_dmuS(dL_dpsi2, Z[:, i_s], mu[:, i_s], S[:, i_s], target_mu[:, i_s], target_S[:, i_s]) for p, i_s in zip(self.parts, self.input_slices)] + from parts.white import White + from parts.rbf import RBF + from parts.rbf_inv import RBFInv + from parts.bias import Bias + from parts.linear import Linear + from parts.fixed import Fixed + # compute the "cross" terms - # TODO: we need input_slices here. 
- for p1, p2 in itertools.permutations(self.parts, 2): - if p1.name == 'linear' and p2.name == 'linear': - raise NotImplementedError("We don't handle linear/linear cross-terms") + # TODO: better looping, input_slices + for p1, p2 in itertools.combinations(self.parts, 2): + if isinstance(p1, White) or isinstance(p2, White): + pass + # rbf X bias + elif isinstance(p1, (Bias, Fixed)) and isinstance(p2, (RBF, RBFInv)): + p2.dpsi1_dmuS(dL_dpsi2.sum(1) * p1.variance * 2., Z, mu, S, target_mu, target_S) + elif isinstance(p2, (Bias, Fixed)) and isinstance(p1, (RBF, RBFInv)): + p1.dpsi1_dmuS(dL_dpsi2.sum(1) * p2.variance * 2., Z, mu, S, target_mu, target_S) + # linear X bias + elif isinstance(p1, (Bias, Fixed)) and isinstance(p2, Linear): + p2.dpsi1_dmuS(dL_dpsi2.sum(1) * p1.variance * 2., Z, mu, S, target_mu, target_S) + elif isinstance(p2, (Bias, Fixed)) and isinstance(p1, Linear): + p1.dpsi1_dmuS(dL_dpsi2.sum(1) * p2.variance * 2., Z, mu, S, target_mu, target_S) + # rbf X any + elif False:#isinstance(p1, (RBF, RBFInv)) or isinstance(p2, (RBF, RBFInv)): + if isinstance(p2, (RBF, RBFInv)) and not isinstance(p1, (RBF, RBFInv)): + p1t = p1; p1 = p2; p2 = p1t; del p1t + N, M = mu.shape[0], Z.shape[0]; NM=N*M + psi11 = np.zeros((N, M)) + psi12 = np.zeros((NM, M)) + #psi12_t = np.zeros((N,M)) - tmp = np.zeros((mu.shape[0], Z.shape[0])) - p1.psi1(Z, mu, S, tmp) - p2.dpsi1_dmuS((tmp[:, None, :] * dL_dpsi2).sum(1) * 2., Z, mu, S, target_mu, target_S) + p1.psi1(Z, mu, S, psi11) + Mu, Sigma = p1._crossterm_mu_S(Z, mu, S) + Mu, Sigma = Mu.reshape(NM,self.input_dim), Sigma.reshape(NM,self.input_dim) + p2.psi1(Z, Mu, Sigma, psi12) + p1.dpsi1_dmuS((dL_dpsi2*psi12.reshape(N,M,M)).sum(1), Z, mu, S, target_mu, target_S) + p1.dpsi1_dmuS((dL_dpsi2*psi12.reshape(N,M,M)).sum(2), Z, mu, S, target_mu, target_S) + + #p2.dpsi1_dtheta((dL_dpsi2*(psi11[:,:,None] + psi11[:,None,:])).reshape(NM,M), Z, Mu, Sigma, target) + p2.dpsi1_dmuS((dL_dpsi2*(psi11[:,:,None])).sum(1)*2, Z, Mu.reshape(N,M,self.input_dim).sum(1), Sigma.reshape(N,M,self.input_dim).sum(1), target_mu, target_S) + else: + raise NotImplementedError, "psi2 cannot be computed for this kernel" return target_mu, target_S def plot(self, x=None, plot_limits=None, which_parts='all', resolution=None, *args, **kwargs): if which_parts == 'all': - which_parts = [True] * self.Nparts + which_parts = [True] * self.num_parts if self.input_dim == 1: if x is None: x = np.zeros((1, 1)) @@ -435,3 +735,232 @@ class kern(Parameterised): pb.title("k(x1,x2 ; %0.1f,%0.1f)" % (x[0, 0], x[0, 1])) else: raise NotImplementedError, "Cannot plot a kernel with more than two input dimensions" + +from ..core.model import Model +class Kern_check_model(Model): + """This is a dummy model class used as a base class for checking that the gradients of a given kernel are implemented correctly. 
It enables checkgradient() to be called independently on a kernel.""" + def __init__(self, kernel=None, dL_dK=None, X=None, X2=None): + num_samples = 20 + num_samples2 = 10 + if kernel==None: + import GPy + kernel = GPy.kern.rbf(1) + del GPy + if X==None: + X = np.random.normal(size=(num_samples, kernel.input_dim)) + if dL_dK==None: + if X2==None: + dL_dK = np.ones((X.shape[0], X.shape[0])) + else: + dL_dK = np.ones((X.shape[0], X2.shape[0])) + + self.kernel=kernel + self.X = X + self.X2 = X2 + self.dL_dK = dL_dK + #self.constrained_indices=[] + #self.constraints=[] + super(Kern_check_model, self).__init__() + + def is_positive_definite(self): + v = np.linalg.eig(self.kernel.K(self.X))[0] + if any(v<-10*sys.float_info.epsilon): + return False + else: + return True + + def _get_params(self): + return self.kernel._get_params() + + def _get_param_names(self): + return self.kernel._get_param_names() + + def _set_params(self, x): + self.kernel._set_params(x) + + def log_likelihood(self): + return (self.dL_dK*self.kernel.K(self.X, self.X2)).sum() + + def _log_likelihood_gradients(self): + raise NotImplementedError, "This needs to be implemented to use the kern_check_model class." + +class Kern_check_dK_dtheta(Kern_check_model): + """This class allows gradient checks for the gradient of a kernel with respect to parameters. """ + def __init__(self, kernel=None, dL_dK=None, X=None, X2=None): + Kern_check_model.__init__(self,kernel=kernel,dL_dK=dL_dK, X=X, X2=X2) + + def _log_likelihood_gradients(self): + return self.kernel.dK_dtheta(self.dL_dK, self.X, self.X2) + +class Kern_check_dKdiag_dtheta(Kern_check_model): + """This class allows gradient checks of the gradient of the diagonal of a kernel with respect to the parameters.""" + def __init__(self, kernel=None, dL_dK=None, X=None): + Kern_check_model.__init__(self,kernel=kernel,dL_dK=dL_dK, X=X, X2=None) + if dL_dK==None: + self.dL_dK = np.ones((self.X.shape[0])) + + def log_likelihood(self): + return (self.dL_dK*self.kernel.Kdiag(self.X)).sum() + + def _log_likelihood_gradients(self): + return self.kernel.dKdiag_dtheta(self.dL_dK, self.X) + +class Kern_check_dK_dX(Kern_check_model): + """This class allows gradient checks for the gradient of a kernel with respect to X. """ + def __init__(self, kernel=None, dL_dK=None, X=None, X2=None): + Kern_check_model.__init__(self,kernel=kernel,dL_dK=dL_dK, X=X, X2=X2) + + def _log_likelihood_gradients(self): + return self.kernel.dK_dX(self.dL_dK, self.X, self.X2).flatten() + + def _get_param_names(self): + return ['X_' +str(i) + ','+str(j) for j in range(self.X.shape[1]) for i in range(self.X.shape[0])] + + def _get_params(self): + return self.X.flatten() + + def _set_params(self, x): + self.X=x.reshape(self.X.shape) + +class Kern_check_dKdiag_dX(Kern_check_model): + """This class allows gradient checks for the gradient of a kernel diagonal with respect to X. 
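+    A minimal sketch of running the checks, via kern_test below::
+
+        import GPy
+        kern_test(GPy.kern.rbf(2), verbose=True)
+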
""" + def __init__(self, kernel=None, dL_dK=None, X=None, X2=None): + Kern_check_model.__init__(self,kernel=kernel,dL_dK=dL_dK, X=X, X2=None) + if dL_dK==None: + self.dL_dK = np.ones((self.X.shape[0])) + + def log_likelihood(self): + return (self.dL_dK*self.kernel.Kdiag(self.X)).sum() + + def _log_likelihood_gradients(self): + return self.kernel.dKdiag_dX(self.dL_dK, self.X).flatten() + + def _get_param_names(self): + return ['X_' +str(i) + ','+str(j) for j in range(self.X.shape[1]) for i in range(self.X.shape[0])] + + def _get_params(self): + return self.X.flatten() + + def _set_params(self, x): + self.X=x.reshape(self.X.shape) + +def kern_test(kern, X=None, X2=None, output_ind=None, verbose=False, X_positive=False): + """This function runs on kernels to check the correctness of their implementation. It checks that the covariance function is positive definite for a randomly generated data set. + + :param kern: the kernel to be tested. + :type kern: GPy.kern.Kernpart + :param X: X input values to test the covariance function. + :type X: ndarray + :param X2: X2 input values to test the covariance function. + :type X2: ndarray + + """ + pass_checks = True + if X==None: + X = np.random.randn(10, kern.input_dim) + if X_positive: + X = abs(X) + if output_ind is not None: + assert(output_ind>> index = np.asarray([0,0,0,1,1,1,2,2,2]) + returns + >>> [[slice(0,3,None)],[slice(3,6,None)],[slice(6,9,None)]] + + or, a more complicated example + >>> index = np.asarray([0,0,1,1,0,2,2,2,1,1]) + returns + >>> [[slice(0,2,None),slice(4,5,None)],[slice(2,4,None),slice(8,10,None)],[slice(5,8,None)]] + """ + + #contruct the return structure + ind = np.asarray(index,dtype=np.int64) + ret = [[] for i in range(ind.max()+1)] + + #find the switchpoints + ind_ = np.hstack((ind,ind[0]+ind[-1]+1)) + switchpoints = np.nonzero(ind_ - np.roll(ind_,+1))[0] + + [ret[ind_i].append(slice(*indexes_i)) for ind_i,indexes_i in zip(ind[switchpoints[:-1]],zip(switchpoints,switchpoints[1:]))] + return ret + +class ODE_UY(Kernpart): + """ + kernel resultiong from a first order ODE with OU driving GP + + :param input_dim: the number of input dimension, has to be equal to one + :type input_dim: int + :param input_lengthU: the number of input U length + :type input_dim: int + :param varianceU: variance of the driving GP + :type varianceU: float + :param lengthscaleU: lengthscale of the driving GP (sqrt(3)/lengthscaleU) + :type lengthscaleU: float + :param varianceY: 'variance' of the transfer function + :type varianceY: float + :param lengthscaleY: 'lengthscale' of the transfer function (1/lengthscaleY) + :type lengthscaleY: float + :rtype: kernel object + + """ + + + + + def __init__(self, input_dim=2,varianceU=1., varianceY=1., lengthscaleU=None, lengthscaleY=None): + assert input_dim==2, "Only defined for input_dim = 1" + self.input_dim = input_dim + self.num_params = 4 + self.name = 'ODE_UY' + + + if lengthscaleU is not None: + lengthscaleU = np.asarray(lengthscaleU) + assert lengthscaleU.size == 1, "lengthscaleU should be one dimensional" + else: + lengthscaleU = np.ones(1) + if lengthscaleY is not None: + lengthscaleY = np.asarray(lengthscaleY) + assert lengthscaleY.size == 1, "lengthscaleY should be one dimensional" + else: + lengthscaleY = np.ones(1) + #lengthscaleY = 0.5 + self._set_params(np.hstack((varianceU, varianceY, lengthscaleU,lengthscaleY))) + + def _get_params(self): + """return the value of the parameters.""" + return np.hstack((self.varianceU,self.varianceY, self.lengthscaleU,self.lengthscaleY)) + + def 
_set_params(self, x): + """set the value of the parameters.""" + assert x.size == self.num_params + + self.varianceU = x[0] + self.varianceY = x[1] + self.lengthscaleU = x[2] + self.lengthscaleY = x[3] + + + def _get_param_names(self): + """return parameter names.""" + return ['varianceU','varianceY', 'lengthscaleU', 'lengthscaleY'] + + + def K(self, X, X2, target): + """Compute the covariance matrix between X and X2.""" + + X,slices = X[:,:-1],index_to_slices(X[:,-1]) + if X2 is None: + X2,slices2 = X,slices + else: + X2,slices2 = X2[:,:-1],index_to_slices(X2[:,-1]) + + + #rdist = X[:,0][:,None] - X2[:,0][:,None].T + rdist = X - X2.T + ly=1/self.lengthscaleY + lu=np.sqrt(3)/self.lengthscaleU + #iu=self.input_lengthU #dimention of U + + Vu=self.varianceU + Vy=self.varianceY + + kuu = lambda dist:Vu * (1 + lu* np.abs(dist)) * np.exp(-lu * np.abs(dist)) + + k1 = lambda dist:np.exp(-ly*np.abs(dist))*(2*lu+ly)/(lu+ly)**2 + k2 = lambda dist:(np.exp(-lu*dist)*(ly-2*lu+lu*ly*dist-lu**2*dist) + np.exp(-ly*dist)*(2*lu-ly) ) / (ly-lu)**2 + k3 = lambda dist:np.exp(-lu*dist) * ( (1+lu*dist)/(lu+ly) + (lu)/(lu+ly)**2 ) + kyy = lambda dist:Vu*Vy*(k1(dist) + k2(dist) + k3(dist)) + + kyu3 = lambda dist:np.exp(-lu*dist)/(lu+ly)*(1+lu*(dist+1/(lu+ly))) + kyup = lambda dist:Vu*Vy*(k1(dist)+k2(dist)) #t>0 kyu + kyun = lambda dist:Vu*Vy*(kyu3(dist)) #t<0 kyu + + kuyp = lambda dist:Vu*Vy*(kyu3(dist)) #t>0 kuy + kuyn = lambda dist:Vu*Vy*(k1(dist)+k2(dist)) #t<0 kuy + + for i, s1 in enumerate(slices): + for j, s2 in enumerate(slices2): + for ss1 in s1: + for ss2 in s2: + if i==0 and j==0: + target[ss1,ss2] = kuu(np.abs(rdist[ss1,ss2])) + elif i==0 and j==1: + target[ss1,ss2] = np.where( rdist[ss1,ss2]>0 , kuyp(np.abs(rdist[ss1,ss2])), kuyn(np.abs(rdist[s1[0],s2[0]]) ) ) + elif i==1 and j==1: + target[ss1,ss2] = kyy(np.abs(rdist[ss1,ss2])) + else: + target[ss1,ss2] = np.where( rdist[ss1,ss2]>0 , kyup(np.abs(rdist[ss1,ss2])), kyun(np.abs(rdist[s1[0],s2[0]]) ) ) + + + #KUU = kuu(np.abs(rdist[:iu,:iu])) + + #KYY = kyy(np.abs(rdist[iu:,iu:])) + + #KYU = np.where(rdist[iu:,:iu]>0,kyup(np.abs(rdist[iu:,:iu])),kyun(np.abs(rdist[iu:,:iu]) )) + + #KUY = np.where(rdist[:iu,iu:]>0,kuyp(np.abs(rdist[:iu,iu:])),kuyn(np.abs(rdist[:iu,iu:]) )) + + #ker=np.vstack((np.hstack([KUU,KUY]),np.hstack([KYU,KYY]))) + + #np.add(ker, target, target) + + def Kdiag(self, X, target): + """Compute the diagonal of the covariance matrix associated to X.""" + ly=1/self.lengthscaleY + lu=np.sqrt(3)/self.lengthscaleU + #ly=self.lengthscaleY + #lu=self.lengthscaleU + + k1 = (2*lu+ly)/(lu+ly)**2 + k2 = (ly-2*lu + 2*lu-ly ) / (ly-lu)**2 + k3 = 1/(lu+ly) + (lu)/(lu+ly)**2 + + slices = index_to_slices(X[:,-1]) + + for i, ss1 in enumerate(slices): + for s1 in ss1: + if i==0: + target[s1]+= self.varianceU + elif i==1: + target[s1]+= self.varianceU*self.varianceY*(k1+k2+k3) + else: + raise ValueError, "invalid input/output index" + + #target[slices[0][0]]+= self.varianceU #matern32 diag + #target[slices[1][0]]+= self.varianceU*self.varianceY*(k1+k2+k3) # diag + + + + + + + def dK_dtheta(self, dL_dK, X, X2, target): + """derivative of the covariance matrix with respect to the parameters.""" + if X2 is None: X2 = X + dist = np.abs(X - X2.T) + + ly=1/self.lengthscaleY + lu=np.sqrt(3)/self.lengthscaleU + #ly=self.lengthscaleY + #lu=self.lengthscaleU + + dk1theta1 = lambda dist: np.exp(-ly*dist)*2*(-lu)/(lu+ly)**3 + #c=np.sqrt(3) + #t1=c/lu + #t2=1/ly + #dk1theta1=np.exp(-dist*ly)*t2*( (2*c*t2+2*t1)/(c*t2+t1)**2 -2*(2*c*t2*t1+t1**2)/(c*t2+t1)**3 ) + + dk2theta1 
= lambda dist: 1*( + np.exp(-lu*dist)*dist*(-ly+2*lu-lu*ly*dist+dist*lu**2)*(ly-lu)**(-2) + np.exp(-lu*dist)*(-2+ly*dist-2*dist*lu)*(ly-lu)**(-2) + +np.exp(-dist*lu)*(ly-2*lu+ly*lu*dist-dist*lu**2)*2*(ly-lu)**(-3) + +np.exp(-dist*ly)*2*(ly-lu)**(-2) + +np.exp(-dist*ly)*2*(2*lu-ly)*(ly-lu)**(-3) + ) + + dk3theta1 = lambda dist: np.exp(-dist*lu)*(lu+ly)**(-2)*((2*lu+ly+dist*lu**2+lu*ly*dist)*(-dist-2/(lu+ly))+2+2*lu*dist+ly*dist) + + # evaluate each term at dist: summing the lambda objects themselves would raise a TypeError + dktheta1 = self.varianceU*self.varianceY*(dk1theta1(dist)+dk2theta1(dist)+dk3theta1(dist)) + + + + + dk1theta2 = lambda dist: np.exp(-ly*dist) * ((lu+ly)**(-2)) * ( (-dist)*(2*lu+ly) + 1 + (-2)*(2*lu+ly)/(lu+ly) ) + + dk2theta2 = lambda dist: 1*( + np.exp(-dist*lu)*(ly-lu)**(-2) * ( 1+lu*dist+(-2)*(ly-2*lu+lu*ly*dist-dist*lu**2)*(ly-lu)**(-1) ) + +np.exp(-dist*ly)*(ly-lu)**(-2) * ( (-dist)*(2*lu-ly) -1+(2*lu-ly)*(-2)*(ly-lu)**(-1) ) + ) + + dk3theta2 = lambda dist: np.exp(-dist*lu) * (-3*lu-ly-dist*lu**2-lu*ly*dist)/(lu+ly)**3 + + dktheta2 = self.varianceU*self.varianceY*(dk1theta2(dist) + dk2theta2(dist) + dk3theta2(dist)) + + + + k1 = lambda dist: np.exp(-ly*dist)*(2*lu+ly)/(lu+ly)**2 + k2 = lambda dist: (np.exp(-lu*dist)*(ly-2*lu+lu*ly*dist-lu**2*dist) + np.exp(-ly*dist)*(2*lu-ly) ) / (ly-lu)**2 + k3 = lambda dist: np.exp(-lu*dist) * ( (1+lu*dist)/(lu+ly) + (lu)/(lu+ly)**2 ) + dkdvar = k1(dist)+k2(dist)+k3(dist) + + target[0] += np.sum(self.varianceY*dkdvar * dL_dK) + target[1] += np.sum(self.varianceU*dkdvar * dL_dK) + target[2] += np.sum(dktheta1*(-np.sqrt(3)*self.lengthscaleU**(-2)) * dL_dK) + target[3] += np.sum(dktheta2*(-self.lengthscaleY**(-2)) * dL_dK) + + + # def dKdiag_dtheta(self, dL_dKdiag, X, target): + # """derivative of the diagonal of the covariance matrix with respect to the parameters.""" + # # NB: derivative of diagonal elements wrt lengthscale is 0 + # target[0] += np.sum(dL_dKdiag) + + # def dK_dX(self, dL_dK, X, X2, target): + # """derivative of the covariance matrix with respect to X.""" + # if X2 is None: X2 = X + # dist = np.sqrt(np.sum(np.square((X[:, None, :] - X2[None, :, :]) / self.lengthscale), -1))[:, :, None] + # ddist_dX = (X[:, None, :] - X2[None, :, :]) / self.lengthscale ** 2 / np.where(dist != 0., dist, np.inf) + # dK_dX = -np.transpose(self.variance * np.exp(-dist) * ddist_dX, (1, 0, 2)) + # target += np.sum(dK_dX * dL_dK.T[:, :, None], 0) + + # def dKdiag_dX(self, dL_dKdiag, X, target): + # pass diff --git a/GPy/kern/parts/__init__.py b/GPy/kern/parts/__init__.py new file mode 100644 index 00000000..d8e7f8e6 --- /dev/null +++ b/GPy/kern/parts/__init__.py @@ -0,0 +1,31 @@ +import bias +import Brownian +import coregionalize +import exponential +import eq_ode1 +import finite_dimensional +import fixed +import gibbs +import hetero +import hierarchical +import independent_outputs +import linear +import Matern32 +import Matern52 +import mlp +import ODE_1 +import ODE_UY +import periodic_exponential +import periodic_Matern32 +import periodic_Matern52 +import poly +import prod_orthogonal +import prod +import rational_quadratic +import rbfcos +import rbf +import rbf_inv +import spline +import symmetric +import sympy_helpers +import white diff --git a/GPy/kern/bias.py b/GPy/kern/parts/bias.py similarity index 99% rename from GPy/kern/bias.py rename to GPy/kern/parts/bias.py index 8ec3741d..2b72e7c9 100644 --- a/GPy/kern/bias.py +++ b/GPy/kern/parts/bias.py @@ -6,7 +6,7 @@ from kernpart import Kernpart import numpy as np import hashlib -class bias(Kernpart): +class Bias(Kernpart): def __init__(self,input_dim,variance=1.): """ :param input_dim: the number of input
dimensions diff --git a/GPy/kern/coregionalise.py b/GPy/kern/parts/coregionalize.py similarity index 53% rename from GPy/kern/coregionalise.py rename to GPy/kern/parts/coregionalize.py index 8faceafe..4748d276 100644 --- a/GPy/kern/coregionalise.py +++ b/GPy/kern/parts/coregionalize.py @@ -7,26 +7,51 @@ from GPy.util.linalg import mdot, pdinv import pdb from scipy import weave -class Coregionalise(Kernpart): +class Coregionalize(Kernpart): """ - Kernel for Intrinsic Corregionalization Models + Covariance function for intrinsic/linear coregionalization models + + This covariance has the form: + .. math:: + \mathbf{B} = \mathbf{W}\mathbf{W}^\top + \text{diag}(kappa) + + An intrinsic/linear coregionalization covariance function of the form: + .. math:: + + k_2(x, y)=\mathbf{B} k(x, y) + + it is obtained as the tensor product between a covariance function + k(x,y) and B. + + :param output_dim: number of outputs to coregionalize + :type output_dim: int + :param rank: number of columns of the W matrix (this parameter is ignored if parameter W is not None) + :type rank: int + :param W: a low rank matrix that determines the correlations between the different outputs, together with kappa it forms the coregionalization matrix B + :type W: numpy array of dimensionality (num_outpus, W_columns) + :param kappa: a vector which allows the outputs to behave independently + :type kappa: numpy array of dimensionality (output_dim,) + + .. note: see coregionalization examples in GPy.examples.regression for some usage. """ - def __init__(self,Nout,R=1, W=None, kappa=None): + def __init__(self, output_dim, rank=1, W=None, kappa=None): self.input_dim = 1 self.name = 'coregion' - self.Nout = Nout - self.R = R + self.output_dim = output_dim + self.rank = rank + if self.rank>output_dim-1: + print("Warning: Unusual choice of rank, it should normally be less than the output_dim.") if W is None: - self.W = np.ones((self.Nout,self.R)) + self.W = 0.5*np.random.randn(self.output_dim,self.rank)/np.sqrt(self.rank) else: - assert W.shape==(self.Nout,self.R) + assert W.shape==(self.output_dim,self.rank) self.W = W if kappa is None: - kappa = np.ones(self.Nout) + kappa = 0.5*np.ones(self.output_dim) else: - assert kappa.shape==(self.Nout,) + assert kappa.shape==(self.output_dim,) self.kappa = kappa - self.num_params = self.Nout*(self.R + 1) + self.num_params = self.output_dim*(self.rank + 1) self._set_params(np.hstack([self.W.flatten(),self.kappa])) def _get_params(self): @@ -34,12 +59,12 @@ class Coregionalise(Kernpart): def _set_params(self,x): assert x.size == self.num_params - self.kappa = x[-self.Nout:] - self.W = x[:-self.Nout].reshape(self.Nout,self.R) + self.kappa = x[-self.output_dim:] + self.W = x[:-self.output_dim].reshape(self.output_dim,self.rank) self.B = np.dot(self.W,self.W.T) + np.diag(self.kappa) def _get_param_names(self): - return sum([['W%i_%i'%(i,j) for j in range(self.R)] for i in range(self.Nout)],[]) + ['kappa_%i'%i for i in range(self.Nout)] + return sum([['W%i_%i'%(i,j) for j in range(self.rank)] for i in range(self.output_dim)],[]) + ['kappa_%i'%i for i in range(self.output_dim)] def K(self,index,index2,target): index = np.asarray(index,dtype=np.int) @@ -57,26 +82,26 @@ class Coregionalise(Kernpart): if index2 is None: code=""" for(int i=0;i 2: + raise ValueError('Input matrix for ode1 covariance should have at most two columns, one containing times, the other output indices') + + self._K_computations(X, X2) + target += self._scale*self._K_dvar + + if self.gaussian_initial: + # Add covariance 
associated with initial condition. + t1_mat = self._t[self._rorder, None] + t2_mat = self._t2[None, self._rorder2] + target+=self.initial_variance * np.exp(- self.decay * (t1_mat + t2_mat)) + + def Kdiag(self,index,target): + #target += np.diag(self.B)[np.asarray(index,dtype=np.int).flatten()] + pass + + def dK_dtheta(self,dL_dK,X,X2,target): + + # First extract times and indices. + self._extract_t_indices(X, X2, dL_dK=dL_dK) + self._dK_ode_dtheta(target) + + + def _dK_ode_dtheta(self, target): + """Do all the computations for the ode parts of the covariance function.""" + t_ode = self._t[self._index>0] + dL_dK_ode = self._dL_dK[self._index>0, :] + index_ode = self._index[self._index>0]-1 + if self._t2 is None: + if t_ode.size==0: + return + t2_ode = t_ode + dL_dK_ode = dL_dK_ode[:, self._index>0] + index2_ode = index_ode + else: + t2_ode = self._t2[self._index2>0] + dL_dK_ode = dL_dK_ode[:, self._index2>0] + if t_ode.size==0 or t2_ode.size==0: + return + index2_ode = self._index2[self._index2>0]-1 + + h1 = self._compute_H(t_ode, index_ode, t2_ode, index2_ode, stationary=self.is_stationary, update_derivatives=True) + #self._dK_ddelay = self._dh_ddelay + self._dK_dsigma = self._dh_dsigma + + if self._t2 is None: + h2 = h1 + else: + h2 = self._compute_H(t2_ode, index2_ode, t_ode, index_ode, stationary=self.is_stationary, update_derivatives=True) + + #self._dK_ddelay += self._dh_ddelay.T + self._dK_dsigma += self._dh_dsigma.T + # C1 = self.sensitivity + # C2 = self.sensitivity + + # K = 0.5 * (h1 + h2.T) + # var2 = C1*C2 + # if self.is_normalized: + # dk_dD1 = (sum(sum(dL_dK.*dh1_dD1)) + sum(sum(dL_dK.*dh2_dD1.T)))*0.5*var2 + # dk_dD2 = (sum(sum(dL_dK.*dh1_dD2)) + sum(sum(dL_dK.*dh2_dD2.T)))*0.5*var2 + # dk_dsigma = 0.5 * var2 * sum(sum(dL_dK.*dK_dsigma)) + # dk_dC1 = C2 * sum(sum(dL_dK.*K)) + # dk_dC2 = C1 * sum(sum(dL_dK.*K)) + # else: + # K = np.sqrt(np.pi) * K + # dk_dD1 = (sum(sum(dL_dK.*dh1_dD1)) + * sum(sum(dL_dK.*K)) + # dk_dC2 = self.sigma * C1 * sum(sum(dL_dK.*K)) + + + # dk_dSim1Variance = dk_dC1 + # Last element is the length scale. + (dL_dK_ode[:, :, None]*self._dh_ddelay[:, None, :]).sum(2) + + target[-1] += (dL_dK_ode*self._dK_dsigma/np.sqrt(2)).sum() + + + # # only pass the gradient with respect to the inverse width to one + # # of the gradient vectors ... otherwise it is counted twice. + # g1 = real([dk_dD1 dk_dinvWidth dk_dSim1Variance]) + # g2 = real([dk_dD2 0 dk_dSim2Variance]) + # return g1, g2""" + + def dKdiag_dtheta(self,dL_dKdiag,index,target): + pass + + def dK_dX(self,dL_dK,X,X2,target): + pass + + def _extract_t_indices(self, X, X2=None, dL_dK=None): + """Extract times and output indices from the input matrix X. Times are ordered according to their index for convenience of computation, this ordering is stored in self._order and self.order2. These orderings are then mapped back to the original ordering (in X) using self._rorder and self._rorder2. """ + + # TODO: some fast checking here to see if this needs recomputing? + self._t = X[:, 0] + if not X.shape[1] == 2: + raise ValueError('Input matrix for ode1 covariance should have two columns, one containing times, the other output indices') + self._index = np.asarray(X[:, 1],dtype=np.int) + # Sort indices so that outputs are in blocks for computational + # convenience. 
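+        # (editor's note) An illustration of the ordering trick used below,
+        # with hypothetical values: for index = [1, 0, 1, 0],
+        # order = index.argsort() = [1, 3, 0, 2] groups the outputs into
+        # contiguous blocks, and rorder = order.argsort() = [2, 0, 3, 1]
+        # undoes the permutation, so x[order][rorder] == x for any x.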
+ self._order = self._index.argsort() + self._index = self._index[self._order] + self._t = self._t[self._order] + self._rorder = self._order.argsort() # rorder is for reversing the order + + if X2 is None: + self._t2 = None + self._index2 = None + self._order2 = self._order + self._rorder2 = self._rorder + else: + if not X2.shape[1] == 2: + raise ValueError('Input matrix for ode1 covariance should have two columns, one containing times, the other output indices') + self._t2 = X2[:, 0] + self._index2 = np.asarray(X2[:, 1],dtype=np.int) + self._order2 = self._index2.argsort() + self._index2 = self._index2[self._order2] + self._t2 = self._t2[self._order2] + self._rorder2 = self._order2.argsort() # rorder2 is for reversing order + + if dL_dK is not None: + self._dL_dK = dL_dK[self._order, :] + self._dL_dK = self._dL_dK[:, self._order2] + + def _K_computations(self, X, X2): + """Perform main body of computations for the ode1 covariance function.""" + # First extract times and indices. + self._extract_t_indices(X, X2) + + self._K_compute_eq() + self._K_compute_ode_eq() + if X2 is None: + self._K_eq_ode = self._K_ode_eq.T + else: + self._K_compute_ode_eq(transpose=True) + self._K_compute_ode() + + if X2 is None: + self._K_dvar = np.zeros((self._t.shape[0], self._t.shape[0])) + else: + self._K_dvar = np.zeros((self._t.shape[0], self._t2.shape[0])) + + # Reorder values of blocks for placing back into _K_dvar. + self._K_dvar = np.vstack((np.hstack((self._K_eq, self._K_eq_ode)), + np.hstack((self._K_ode_eq, self._K_ode)))) + self._K_dvar = self._K_dvar[self._rorder, :] + self._K_dvar = self._K_dvar[:, self._rorder2] + + + if X2 is None: + # Matrix giving scales of each output + self._scale = np.zeros((self._t.size, self._t.size)) + code=""" + for(int i=0;i0] + index_ode = self._index2[self._index2>0]-1 + else: + t_eq = self._t2[self._index2==0] + t_ode = self._t[self._index>0] + index_ode = self._index[self._index>0]-1 + else: + t_eq = self._t[self._index==0] + t_ode = self._t[self._index>0] + index_ode = self._index[self._index>0]-1 + + if t_ode.size==0 or t_eq.size==0: + if transpose: + self._K_eq_ode = np.zeros((t_eq.shape[0], t_ode.shape[0])) + else: + self._K_ode_eq = np.zeros((t_ode.shape[0], t_eq.shape[0])) + return + + t_ode_mat = t_ode[:, None] + t_eq_mat = t_eq[None, :] + if self.delay is not None: + t_ode_mat -= self.delay[index_ode, None] + diff_t = (t_ode_mat - t_eq_mat) + + inv_sigma_diff_t = 1./self.sigma*diff_t + decay_vals = self.decay[index_ode][:, None] + half_sigma_d_i = 0.5*self.sigma*decay_vals + + if self.is_stationary: + ln_part, signs = ln_diff_erfs(inf, half_sigma_d_i - inv_sigma_diff_t, return_sign=True) + else: + ln_part, signs = ln_diff_erfs(half_sigma_d_i + t_eq_mat/self.sigma, half_sigma_d_i - inv_sigma_diff_t, return_sign=True) + sK = signs*np.exp(half_sigma_d_i*half_sigma_d_i - decay_vals*diff_t + ln_part) + + sK *= 0.5 + + if not self.is_normalized: + sK *= np.sqrt(np.pi)*self.sigma + + + if transpose: + self._K_eq_ode = sK.T + else: + self._K_ode_eq = sK + + def _K_compute_ode(self): + # Compute covariances between outputs of the ODE models. 
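+        # (editor's note) This block is the lower-right corner of the full
+        # covariance assembled in _K_computations above:
+        #
+        #     K = [[ K_eq,     K_eq_ode ],
+        #          [ K_ode_eq, K_ode    ]]
+        #
+        # rows and columns are then mapped back to the original input
+        # ordering via self._rorder and self._rorder2.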
+ + t_ode = self._t[self._index>0] + index_ode = self._index[self._index>0]-1 + if self._t2 is None: + if t_ode.size==0: + self._K_ode = np.zeros((0, 0)) + return + t2_ode = t_ode + index2_ode = index_ode + else: + t2_ode = self._t2[self._index2>0] + if t_ode.size==0 or t2_ode.size==0: + self._K_ode = np.zeros((t_ode.size, t2_ode.size)) + return + index2_ode = self._index2[self._index2>0]-1 + + # When index is identical + h = self._compute_H(t_ode, index_ode, t2_ode, index2_ode, stationary=self.is_stationary) + + if self._t2 is None: + self._K_ode = 0.5 * (h + h.T) + else: + h2 = self._compute_H(t2_ode, index2_ode, t_ode, index_ode, stationary=self.is_stationary) + self._K_ode = 0.5 * (h + h2.T) + + if not self.is_normalized: + self._K_ode *= np.sqrt(np.pi)*self.sigma + def _compute_diag_H(self, t, index, update_derivatives=False, stationary=False): + """Helper function for computing H for the diagonal only. + :param t: time input. + :type t: array + :param index: first output indices + :type index: array of int. + :param index: second output indices + :type index: array of int. + :param update_derivatives: whether or not to update the derivative portions (default False). + :type update_derivatives: bool + :param stationary: whether to compute the stationary version of the covariance (default False). + :type stationary: bool""" + + """if delta_i~=delta_j: + [h, dh_dD_i, dh_dD_j, dh_dsigma] = np.diag(simComputeH(t, index, t, index, update_derivatives=True, stationary=self.is_stationary)) + else: + Decay = self.decay[index] + if self.delay is not None: + t = t - self.delay[index] + + t_squared = t*t + half_sigma_decay = 0.5*self.sigma*Decay + [ln_part_1, sign1] = ln_diff_erfs(half_sigma_decay + t/self.sigma, + half_sigma_decay) + + [ln_part_2, sign2] = ln_diff_erfs(half_sigma_decay, + half_sigma_decay - t/self.sigma) + + h = (sign1*np.exp(half_sigma_decay*half_sigma_decay + + ln_part_1 + - log(Decay + D_j)) + - sign2*np.exp(half_sigma_decay*half_sigma_decay + - (Decay + D_j)*t + + ln_part_2 + - log(Decay + D_j))) + + sigma2 = self.sigma*self.sigma + + if update_derivatives: + + dh_dD_i = ((0.5*Decay*sigma2*(Decay + D_j)-1)*h + + t*sign2*np.exp( + half_sigma_decay*half_sigma_decay-(Decay+D_j)*t + ln_part_2 + ) + + self.sigma/np.sqrt(np.pi)* + (-1 + np.exp(-t_squared/sigma2-Decay*t) + + np.exp(-t_squared/sigma2-D_j*t) + - np.exp(-(Decay + D_j)*t))) + + dh_dD_i = (dh_dD_i/(Decay+D_j)).real + + + + dh_dD_j = (t*sign2*np.exp( + half_sigma_decay*half_sigma_decay-(Decay + D_j)*t+ln_part_2 + ) + -h) + dh_dD_j = (dh_dD_j/(Decay + D_j)).real + + dh_dsigma = 0.5*Decay*Decay*self.sigma*h \ + + 2/(np.sqrt(np.pi)*(Decay+D_j))\ + *((-Decay/2) \ + + (-t/sigma2+Decay/2)*np.exp(-t_squared/sigma2 - Decay*t) \ + - (-t/sigma2-Decay/2)*np.exp(-t_squared/sigma2 - D_j*t) \ + - Decay/2*np.exp(-(Decay+D_j)*t))""" + pass + + def _compute_H(self, t, index, t2, index2, update_derivatives=False, stationary=False): + """Helper function for computing part of the ode1 covariance function. + + :param t: first time input. + :type t: array + :param index: Indices of first output. + :type index: array of int + :param t2: second time input. + :type t2: array + :param index2: Indices of second output. + :type index2: array of int + :param update_derivatives: whether to update derivatives (default is False) + :return h : result of this subcomponent of the kernel for the given values. + :rtype: ndarray +""" + + if stationary: + raise NotImplementedError, "Error, stationary version of this covariance not yet implemented." 
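+        # (editor's note) ln_diff_erfs below is assumed to return
+        # log|erf(a) - erf(b)| together with the sign of the difference;
+        # keeping the erf difference in log space lets it be combined with
+        # the exp(half_sigma_decay_i**2 - ...) factors without overflow
+        # or underflow.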
+ # Vector of decays and delays associated with each output. + Decay = self.decay[index] + Decay2 = self.decay[index2] + t_mat = t[:, None] + t2_mat = t2[None, :] + if self.delay is not None: + Delay = self.delay[index] + Delay2 = self.delay[index2] + t_mat-=Delay[:, None] + t2_mat-=Delay2[None, :] + + diff_t = (t_mat - t2_mat) + inv_sigma_diff_t = 1./self.sigma*diff_t + half_sigma_decay_i = 0.5*self.sigma*Decay[:, None] + + ln_part_1, sign1 = ln_diff_erfs(half_sigma_decay_i + t2_mat/self.sigma, + half_sigma_decay_i - inv_sigma_diff_t, + return_sign=True) + ln_part_2, sign2 = ln_diff_erfs(half_sigma_decay_i, + half_sigma_decay_i - t_mat/self.sigma, + return_sign=True) + + h = sign1*np.exp(half_sigma_decay_i + *half_sigma_decay_i + -Decay[:, None]*diff_t+ln_part_1 + -np.log(Decay[:, None] + Decay2[None, :])) + h -= sign2*np.exp(half_sigma_decay_i*half_sigma_decay_i + -Decay[:, None]*t_mat-Decay2[None, :]*t2_mat+ln_part_2 + -np.log(Decay[:, None] + Decay2[None, :])) + + if update_derivatives: + sigma2 = self.sigma*self.sigma + # Update ith decay gradient + + dh_ddecay = ((0.5*Decay[:, None]*sigma2*(Decay[:, None] + Decay2[None, :])-1)*h + + (-diff_t*sign1*np.exp( + half_sigma_decay_i*half_sigma_decay_i-Decay[:, None]*diff_t+ln_part_1 + ) + +t_mat*sign2*np.exp( + half_sigma_decay_i*half_sigma_decay_i-Decay[:, None]*t_mat + - Decay2*t2_mat+ln_part_2)) + +self.sigma/np.sqrt(np.pi)*( + -np.exp( + -diff_t*diff_t/sigma2 + )+np.exp( + -t2_mat*t2_mat/sigma2-Decay[:, None]*t_mat + )+np.exp( + -t_mat*t_mat/sigma2-Decay2[None, :]*t2_mat + )-np.exp( + -(Decay[:, None]*t_mat + Decay2[None, :]*t2_mat) + ) + )) + self._dh_ddecay = (dh_ddecay/(Decay[:, None]+Decay2[None, :])).real + + # Update jth decay gradient + dh_ddecay2 = (t2_mat*sign2 + *np.exp( + half_sigma_decay_i*half_sigma_decay_i + -(Decay[:, None]*t_mat + Decay2[None, :]*t2_mat) + +ln_part_2 + ) + -h) + self._dh_ddecay2 = (dh_ddecay/(Decay[:, None] + Decay2[None, :])).real + + # Update sigma gradient + self._dh_dsigma = (half_sigma_decay_i*Decay[:, None]*h + + 2/(np.sqrt(np.pi) + *(Decay[:, None]+Decay2[None, :])) + *((-diff_t/sigma2-Decay[:, None]/2) + *np.exp(-diff_t*diff_t/sigma2) + + (-t2_mat/sigma2+Decay[:, None]/2) + *np.exp(-t2_mat*t2_mat/sigma2-Decay[:, None]*t_mat) + - (-t_mat/sigma2-Decay[:, None]/2) + *np.exp(-t_mat*t_mat/sigma2-Decay2[None, :]*t2_mat) + - Decay[:, None]/2 + *np.exp(-(Decay[:, None]*t_mat+Decay2[None, :]*t2_mat)))) + + return h diff --git a/GPy/kern/exponential.py b/GPy/kern/parts/exponential.py similarity index 99% rename from GPy/kern/exponential.py rename to GPy/kern/parts/exponential.py index 5cb0f584..d8cf76f7 100644 --- a/GPy/kern/exponential.py +++ b/GPy/kern/parts/exponential.py @@ -6,7 +6,7 @@ from kernpart import Kernpart import numpy as np from scipy import integrate -class exponential(Kernpart): +class Exponential(Kernpart): """ Exponential kernel (aka Ornstein-Uhlenbeck or Matern 1/2) diff --git a/GPy/kern/finite_dimensional.py b/GPy/kern/parts/finite_dimensional.py similarity index 97% rename from GPy/kern/finite_dimensional.py rename to GPy/kern/parts/finite_dimensional.py index b23ddb16..6cc2325f 100644 --- a/GPy/kern/finite_dimensional.py +++ b/GPy/kern/parts/finite_dimensional.py @@ -4,9 +4,9 @@ from kernpart import Kernpart import numpy as np -from ..util.linalg import pdinv,mdot +from ...util.linalg import pdinv,mdot -class finite_dimensional(Kernpart): +class FiniteDimensional(Kernpart): def __init__(self, input_dim, F, G, variance=1., weights=None): """ Argumnents diff --git a/GPy/kern/fixed.py 
b/GPy/kern/parts/fixed.py similarity index 97% rename from GPy/kern/fixed.py rename to GPy/kern/parts/fixed.py index 9e8f6226..67baea91 100644 --- a/GPy/kern/fixed.py +++ b/GPy/kern/parts/fixed.py @@ -15,7 +15,7 @@ class Fixed(Kernpart): self.input_dim = input_dim self.fixed_K = K self.num_params = 1 - self.name = 'Fixed' + self.name = 'fixed' self._set_params(np.array([variance]).flatten()) def _get_params(self): diff --git a/GPy/kern/parts/gibbs.py b/GPy/kern/parts/gibbs.py new file mode 100644 index 00000000..f47144e1 --- /dev/null +++ b/GPy/kern/parts/gibbs.py @@ -0,0 +1,154 @@ +# Copyright (c) 2013, GPy authors (see AUTHORS.txt). +# Licensed under the BSD 3-clause license (see LICENSE.txt) + +from kernpart import Kernpart +import numpy as np +from ...util.linalg import tdot +from ...core.mapping import Mapping +import GPy + +class Gibbs(Kernpart): + """ + Gibbs non-stationary covariance function. + + .. math:: + + r = sqrt((x_i - x_j)'*(x_i - x_j)) + + k(x_i, x_j) = \sigma^2*Z*exp(-r^2/(l(x)*l(x) + l(x')*l(x'))) + + Z = (2*l(x)*l(x')/(l(x)*l(x) + l(x')*l(x')^{q/2} + + where :math:`l(x)` is a function giving the length scale as a function of space and :math:`q` is the dimensionality of the input space. + This is the non stationary kernel proposed by Mark Gibbs in his 1997 + thesis. It is similar to an RBF but has a length scale that varies + with input location. This leads to an additional term in front of + the kernel. + + The parameters are :math:`\sigma^2`, the process variance, and + the parameters of l(x) which is a function that can be + specified by the user, by default an multi-layer peceptron is + used. + + :param input_dim: the number of input dimensions + :type input_dim: int + :param variance: the variance :math:`\sigma^2` + :type variance: float + :param mapping: the mapping that gives the lengthscale across the input space (by default GPy.mappings.MLP is used with 20 hidden nodes). + :type mapping: GPy.core.Mapping + :param ARD: Auto Relevance Determination. If equal to "False", the kernel is isotropic (ie. one weight variance parameter \sigma^2_w), otherwise there is one weight variance parameter per dimension. + :type ARD: Boolean + :rtype: Kernpart object + + See Mark Gibbs's thesis for more details: Gibbs, + M. N. (1997). Bayesian Gaussian Processes for Regression and + Classification. PhD thesis, Department of Physics, University of + Cambridge. Or also see Page 93 of Gaussian Processes for Machine + Learning by Rasmussen and Williams. Although note that we do not + constrain the lengthscale to be positive by default. This allows + anticorrelation to occur. The positive constraint can be included + by the user manually. 
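+    As a reference for the formula above, a minimal NumPy sketch of the
+    q = 1 case (a hypothetical standalone function, not the class
+    interface, with any callable l(x) as the lengthscale function):
+
+        import numpy as np
+
+        def gibbs_k(x, x2, lengthscale_fn, variance=1.0):
+            l1, l2 = lengthscale_fn(x), lengthscale_fn(x2)
+            w2 = l1**2 + l2**2
+            Z = np.sqrt(2.0 * l1 * l2 / w2)
+            return variance * Z * np.exp(-(x - x2)**2 / w2)
+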
+ + """ + + def __init__(self, input_dim, variance=1., mapping=None, ARD=False): + self.input_dim = input_dim + self.ARD = ARD + if not mapping: + mapping = GPy.mappings.MLP(output_dim=1, hidden_dim=20, input_dim=input_dim) + if not ARD: + self.num_params=1+mapping.num_params + else: + raise NotImplementedError + + self.mapping = mapping + self.name='gibbs' + self._set_params(np.hstack((variance, self.mapping._get_params()))) + + def _get_params(self): + return np.hstack((self.variance, self.mapping._get_params())) + + def _set_params(self, x): + assert x.size == (self.num_params) + self.variance = x[0] + self.mapping._set_params(x[1:]) + + def _get_param_names(self): + return ['variance'] + self.mapping._get_param_names() + + def K(self, X, X2, target): + """Return covariance between X and X2.""" + self._K_computations(X, X2) + target += self.variance*self._K_dvar + + def Kdiag(self, X, target): + """Compute the diagonal of the covariance matrix for X.""" + np.add(target, self.variance, target) + + def dK_dtheta(self, dL_dK, X, X2, target): + """Derivative of the covariance with respect to the parameters.""" + self._K_computations(X, X2) + self._dK_computations(dL_dK) + if X2==None: + gmapping = self.mapping.df_dtheta(2*self._dL_dl[:, None], X) + else: + gmapping = self.mapping.df_dtheta(self._dL_dl[:, None], X) + gmapping += self.mapping.df_dtheta(self._dL_dl_two[:, None], X2) + + target+= np.hstack([(dL_dK*self._K_dvar).sum(), gmapping]) + + def dK_dX(self, dL_dK, X, X2, target): + """Derivative of the covariance matrix with respect to X.""" + # First account for gradients arising from presence of X in exponent. + self._K_computations(X, X2) + if X2 is None: + _K_dist = 2*(X[:, None, :] - X[None, :, :]) + else: + _K_dist = X[:, None, :] - X2[None, :, :] # don't cache this in _K_co + dK_dX = (-2.*self.variance)*np.transpose((self._K_dvar/self._w2)[:, :, None]*_K_dist, (1, 0, 2)) + target += np.sum(dK_dX*dL_dK.T[:, :, None], 0) + # Now account for gradients arising from presence of X in lengthscale. + self._dK_computations(dL_dK) + if X2 is None: + target += 2.*self.mapping.df_dX(self._dL_dl[:, None], X) + else: + target += self.mapping.df_dX(self._dL_dl[:, None], X) + + def dKdiag_dX(self, dL_dKdiag, X, target): + """Gradient of diagonal of covariance with respect to X.""" + pass + + def dKdiag_dtheta(self, dL_dKdiag, X, target): + """Gradient of diagonal of covariance with respect to parameters.""" + target[0] += np.sum(dL_dKdiag) + + + + def _K_computations(self, X, X2=None): + """Pre-computations for the covariance function (used both when computing the covariance and its gradients). 
Here self._dK_dvar and self._K_dist2 are updated.""" + self._lengthscales=self.mapping.f(X) + self._lengthscales2=np.square(self._lengthscales) + if X2==None: + self._lengthscales_two = self._lengthscales + self._lengthscales_two2 = self._lengthscales2 + Xsquare = np.square(X).sum(1) + self._K_dist2 = -2.*tdot(X) + Xsquare[:, None] + Xsquare[None, :] + else: + self._lengthscales_two = self.mapping.f(X2) + self._lengthscales_two2 = np.square(self._lengthscales_two) + self._K_dist2 = -2.*np.dot(X, X2.T) + np.square(X).sum(1)[:, None] + np.square(X2).sum(1)[None, :] + self._w2 = self._lengthscales2 + self._lengthscales_two2.T + prod_length = self._lengthscales*self._lengthscales_two.T + self._K_exponential = np.exp(-self._K_dist2/self._w2) + self._K_dvar = np.sign(prod_length)*(2*np.abs(prod_length)/self._w2)**(self.input_dim/2.)*np.exp(-self._K_dist2/self._w2) + + def _dK_computations(self, dL_dK): + """Pre-computations for the gradients of the covariance function. Here the gradient of the covariance with respect to all the individual lengthscales is computed. + :param dL_dK: the gradient of the objective with respect to the covariance function. + :type dL_dK: ndarray""" + + self._dL_dl = (dL_dK*self.variance*self._K_dvar*(self.input_dim/2.*(self._lengthscales_two.T**4 - self._lengthscales**4) + 2*self._lengthscales2*self._K_dist2)/(self._w2*self._w2*self._lengthscales)).sum(1) + if self._lengthscales_two is self._lengthscales: + self._dL_dl_two = None + else: + self._dL_dl_two = (dL_dK*self.variance*self._K_dvar*(self.input_dim/2.*(self._lengthscales**4 - self._lengthscales_two.T**4 ) + 2*self._lengthscales_two2.T*self._K_dist2)/(self._w2*self._w2*self._lengthscales_two.T)).sum(0) diff --git a/GPy/kern/parts/hetero.py b/GPy/kern/parts/hetero.py new file mode 100644 index 00000000..c716eaad --- /dev/null +++ b/GPy/kern/parts/hetero.py @@ -0,0 +1,103 @@ +# Copyright (c) 2013, GPy authors (see AUTHORS.txt). +# Licensed under the BSD 3-clause license (see LICENSE.txt) + +from kernpart import Kernpart +import numpy as np +from ...util.linalg import tdot +from ...core.mapping import Mapping +import GPy + +class Hetero(Kernpart): + """ + TODO: Need to constrain the function outputs to be positive (still thinking of the best way of doing this; the intention is to use transformations, but what's the *best* way?). Currently just squaring the output. + + Heteroscedastic noise which depends on input location. See, for example, the paper by Goldberg et al. cited below. + + .. math:: + + k(x_i, x_j) = \delta_{i,j} \sigma^2(x_i) + + where :math:`\sigma^2(x)` is a function giving the variance as a function of input space and :math:`\delta_{i,j}` is the Kronecker delta function. + + The parameters are the parameters of \sigma^2(x), which is a + function that can be specified by the user; by default a + multi-layer perceptron is used. + + :param input_dim: the number of input dimensions + :type input_dim: int + :param mapping: the mapping that gives the standard deviation of the noise across the input space; its output is squared to give the variance (by default GPy.mappings.MLP is used with 20 hidden nodes). + :type mapping: GPy.core.Mapping + :rtype: Kernpart object + + See this paper: + + Goldberg, P. W., Williams, C. K. I. and Bishop, + C. M. (1998) Regression with Input-dependent Noise: a Gaussian + Process Treatment. In Advances in Neural Information Processing + Systems, Volume 10, pp. 493-499. MIT Press + + for a Gaussian process treatment of this problem.
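+
+    As a reference, the noise model above amounts to the following
+    (a hypothetical standalone sketch; as in the class, the mapping
+    output is squared to give the variance):
+
+        import numpy as np
+
+        def hetero_K(X, sigma_fn):
+            # delta_{ij} * sigma^2(x_i): a diagonal covariance built from
+            # an input-dependent standard deviation function
+            return np.diag(sigma_fn(X).flatten()**2)
+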
+ + """ + + def __init__(self, input_dim, mapping=None, transform=None): + self.input_dim = input_dim + if not mapping: + mapping = GPy.mappings.MLP(output_dim=1, hidden_dim=20, input_dim=input_dim) + if not transform: + transform = GPy.core.transformations.logexp() + + self.transform = transform + self.mapping = mapping + self.name='hetero' + self.num_params=self.mapping.num_params + self._set_params(self.mapping._get_params()) + + def _get_params(self): + return self.mapping._get_params() + + def _set_params(self, x): + assert x.size == (self.num_params) + self.mapping._set_params(x) + + def _get_param_names(self): + return self.mapping._get_param_names() + + def K(self, X, X2, target): + """Return covariance between X and X2.""" + if (X2 is None) or (X2 is X): + target[np.diag_indices_from(target)] += self._Kdiag(X) + + def Kdiag(self, X, target): + """Compute the diagonal of the covariance matrix for X.""" + target+=self._Kdiag(X) + + def _Kdiag(self, X): + """Helper function for computing the diagonal elements of the covariance.""" + return self.mapping.f(X).flatten()**2 + + def dK_dtheta(self, dL_dK, X, X2, target): + """Derivative of the covariance with respect to the parameters.""" + if (X2 is None) or (X2 is X): + dL_dKdiag = dL_dK.flat[::dL_dK.shape[0]+1] + self.dKdiag_dtheta(dL_dKdiag, X, target) + + def dKdiag_dtheta(self, dL_dKdiag, X, target): + """Gradient of diagonal of covariance with respect to parameters.""" + target += 2.*self.mapping.df_dtheta(dL_dKdiag[:, None]*self.mapping.f(X), X) + + def dK_dX(self, dL_dK, X, X2, target): + """Derivative of the covariance matrix with respect to X.""" + if X2==None or X2 is X: + dL_dKdiag = dL_dK.flat[::dL_dK.shape[0]+1] + self.dKdiag_dX(dL_dKdiag, X, target) + + def dKdiag_dX(self, dL_dKdiag, X, target): + """Gradient of diagonal of covariance with respect to X.""" + target += 2.*self.mapping.df_dX(dL_dKdiag[:, None], X)*self.mapping.f(X) + + + diff --git a/GPy/kern/parts/hierarchical.py b/GPy/kern/parts/hierarchical.py new file mode 100644 index 00000000..c629f6b9 --- /dev/null +++ b/GPy/kern/parts/hierarchical.py @@ -0,0 +1,76 @@ +# Copyright (c) 2012, James Hesnsman +# Licensed under the BSD 3-clause license (see LICENSE.txt) + +from kernpart import Kernpart +import numpy as np +from independent_outputs import index_to_slices + +class Hierarchical(Kernpart): + """ + A kernel part which can reopresent a hierarchy of indepencnce: a generalisation of independent_outputs + + """ + def __init__(self,parts): + self.levels = len(parts) + self.input_dim = parts[0].input_dim + 1 + self.num_params = np.sum([k.num_params for k in parts]) + self.name = 'hierarchy' + self.parts = parts + + self.param_starts = np.hstack((0,np.cumsum([k.num_params for k in self.parts[:-1]]))) + self.param_stops = np.cumsum([k.num_params for k in self.parts]) + + def _get_params(self): + return np.hstack([k._get_params() for k in self.parts]) + + def _set_params(self,x): + [k._set_params(x[start:stop]) for k, start, stop in zip(self.parts, self.param_starts, self.param_stops)] + + def _get_param_names(self): + return sum([[str(i)+'_'+k.name+'_'+n for n in k._get_param_names()] for i,k in enumerate(self.parts)],[]) + + def _sort_slices(self,X,X2): + slices = [index_to_slices(x) for x in X[:,-self.levels:].T] + X = X[:,:-self.levels] + if X2 is None: + slices2 = slices + X2 = X + else: + slices2 = [index_to_slices(x) for x in X2[:,-self.levels:].T] + X2 = X2[:,:-self.levels] + return X, X2, slices, slices2 + + def K(self,X,X2,target): + X, X2, slices, slices2 = 
self._sort_slices(X,X2) + + [[[[k.K(X[s],X2[s2],target[s,s2]) for s in slices_i] for s2 in slices_j] for slices_i,slices_j in zip(slices_,slices2_)] for k, slices_, slices2_ in zip(self.parts,slices,slices2)] + + def Kdiag(self,X,target): + raise NotImplementedError + #X,slices = X[:,:-1],index_to_slices(X[:,-1]) + #[[self.k.Kdiag(X[s],target[s]) for s in slices_i] for slices_i in slices] + + def dK_dtheta(self,dL_dK,X,X2,target): + X, X2, slices, slices2 = self._sort_slices(X,X2) + [[[[k.dK_dtheta(dL_dK[s,s2],X[s],X2[s2],target[p_start:p_stop]) for s in slices_i] for s2 in slices_j] for slices_i,slices_j in zip(slices_, slices2_)] for k, p_start, p_stop, slices_, slices2_ in zip(self.parts, self.param_starts, self.param_stops, slices, slices2)] + + + def dK_dX(self,dL_dK,X,X2,target): + raise NotImplementedError + #X,slices = X[:,:-1],index_to_slices(X[:,-1]) + #if X2 is None: + #X2,slices2 = X,slices + #else: + #X2,slices2 = X2[:,:-1],index_to_slices(X2[:,-1]) + #[[[self.k.dK_dX(dL_dK[s,s2],X[s],X2[s2],target[s,:-1]) for s in slices_i] for s2 in slices_j] for slices_i,slices_j in zip(slices,slices2)] +# + def dKdiag_dX(self,dL_dKdiag,X,target): + raise NotImplementedError + #X,slices = X[:,:-1],index_to_slices(X[:,-1]) + #[[self.k.dKdiag_dX(dL_dKdiag[s],X[s],target[s,:-1]) for s in slices_i] for slices_i in slices] + + + def dKdiag_dtheta(self,dL_dKdiag,X,target): + raise NotImplementedError + #X,slices = X[:,:-1],index_to_slices(X[:,-1]) + #[[self.k.dKdiag_dX(dL_dKdiag[s],X[s],target) for s in slices_i] for slices_i in slices] diff --git a/GPy/kern/independent_outputs.py b/GPy/kern/parts/independent_outputs.py similarity index 100% rename from GPy/kern/independent_outputs.py rename to GPy/kern/parts/independent_outputs.py diff --git a/GPy/kern/parts/kernpart.py b/GPy/kern/parts/kernpart.py new file mode 100644 index 00000000..f6777083 --- /dev/null +++ b/GPy/kern/parts/kernpart.py @@ -0,0 +1,126 @@ +# Copyright (c) 2012, GPy authors (see AUTHORS.txt). +# Licensed under the BSD 3-clause license (see LICENSE.txt) + +import numpy as np  # needed by Kernpart_stationary below + + +class Kernpart(object): + def __init__(self,input_dim): + """ + The base class for a kernpart: a positive definite function which forms part of a covariance function (kernel). + + :param input_dim: the number of input dimensions to the function + :type input_dim: int + + Do not instantiate. + """ + # the input dimensionality for the covariance + self.input_dim = input_dim + # the number of optimisable parameters + self.num_params = 1 + # the name of the covariance function. + self.name = 'unnamed' + + def _get_params(self): + raise NotImplementedError + def _set_params(self,x): + raise NotImplementedError + def _get_param_names(self): + raise NotImplementedError + def K(self,X,X2,target): + raise NotImplementedError + def Kdiag(self,X,target): + raise NotImplementedError + def dK_dtheta(self,dL_dK,X,X2,target): + raise NotImplementedError + def dKdiag_dtheta(self,dL_dKdiag,X,target): + # In the base case compute this by calling dK_dtheta. Need to + # override for stationary covariances (for example) to save + # time.
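+        # (editor's note) This fallback treats each diagonal element
+        # K(x_i, x_i) as a 1x1 covariance and accumulates its parameter
+        # gradient with a separate dK_dtheta call; correct, but it costs
+        # O(num_data) kernel evaluations, hence the suggestion to override.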
+ for i in range(X.shape[0]): + self.dK_dtheta(dL_dKdiag[i], X[i, :][None, :], X2=None, target=target) + def psi0(self,Z,mu,S,target): + raise NotImplementedError + def dpsi0_dtheta(self,dL_dpsi0,Z,mu,S,target): + raise NotImplementedError + def dpsi0_dmuS(self,dL_dpsi0,Z,mu,S,target_mu,target_S): + raise NotImplementedError + def psi1(self,Z,mu,S,target): + raise NotImplementedError + def dpsi1_dtheta(self,Z,mu,S,target): + raise NotImplementedError + def dpsi1_dZ(self,dL_dpsi1,Z,mu,S,target): + raise NotImplementedError + def dpsi1_dmuS(self,dL_dpsi1,Z,mu,S,target_mu,target_S): + raise NotImplementedError + def psi2(self,Z,mu,S,target): + raise NotImplementedError + def dpsi2_dZ(self,dL_dpsi2,Z,mu,S,target): + raise NotImplementedError + def dpsi2_dtheta(self,dL_dpsi2,Z,mu,S,target): + raise NotImplementedError + def dpsi2_dmuS(self,dL_dpsi2,Z,mu,S,target_mu,target_S): + raise NotImplementedError + def dK_dX(self, dL_dK, X, X2, target): + raise NotImplementedError + def dKdiag_dX(self, dL_dK, X, target): + raise NotImplementedError + + + +class Kernpart_stationary(Kernpart): + def __init__(self, input_dim, lengthscale=None, ARD=False): + self.input_dim = input_dim + self.ARD = ARD + if not ARD: + self.num_params = 2 + if lengthscale is not None: + self.lengthscale = np.asarray(lengthscale) + assert self.lengthscale.size == 1, "Only one lengthscale needed for non-ARD kernel" + else: + self.lengthscale = np.ones(1) + else: + self.num_params = self.input_dim + 1 + if lengthscale is not None: + self.lengthscale = np.asarray(lengthscale) + assert self.lengthscale.size == self.input_dim, "bad number of lengthscales" + else: + self.lengthscale = np.ones(self.input_dim) + + # initialize cache + self._Z, self._mu, self._S = np.empty(shape=(3, 1)) + self._X, self._X2, self._params = np.empty(shape=(3, 1)) + + def _set_params(self, x): + self.lengthscale = x + self.lengthscale2 = np.square(self.lengthscale) + # reset cached results + self._X, self._X2, self._params = np.empty(shape=(3, 1)) + self._Z, self._mu, self._S = np.empty(shape=(3, 1)) # cached versions of Z,mu,S + + + def dKdiag_dtheta(self, dL_dKdiag, X, target): + # For stationary covariances, derivative of diagonal elements + # wrt lengthscale is 0. + target[0] += np.sum(dL_dKdiag) + + def dKdiag_dX(self, dL_dK, X, target): + pass # true for all stationary kernels + + +class Kernpart_inner(Kernpart): + def __init__(self,input_dim): + """ + The base class for a kernpart_inner: a positive definite function which forms part of a kernel that is based on the inner product between inputs. + + :param input_dim: the number of input dimensions to the function + :type input_dim: int + + Do not instantiate. 
+ """ + Kernpart.__init__(self, input_dim) + + # initialize cache + self._Z, self._mu, self._S = np.empty(shape=(3, 1)) + self._X, self._X2, self._params = np.empty(shape=(3, 1)) + + diff --git a/GPy/kern/linear.py b/GPy/kern/parts/linear.py similarity index 72% rename from GPy/kern/linear.py rename to GPy/kern/parts/linear.py index 0ed887d2..ab96bb31 100644 --- a/GPy/kern/linear.py +++ b/GPy/kern/parts/linear.py @@ -4,10 +4,12 @@ from kernpart import Kernpart import numpy as np -from ..util.linalg import tdot +from ...util.linalg import tdot +from ...util.misc import fast_array_equal from scipy import weave +from ...util.config import * -class linear(Kernpart): +class Linear(Kernpart): """ Linear kernel @@ -50,6 +52,26 @@ class linear(Kernpart): self._Z, self._mu, self._S = np.empty(shape=(3, 1)) self._X, self._X2, self._params = np.empty(shape=(3, 1)) + # a set of optional args to pass to weave + weave_options_openmp = {'headers' : [''], + 'extra_compile_args': ['-fopenmp -O3'], + 'extra_link_args' : ['-lgomp'], + 'libraries': ['gomp']} + weave_options_noopenmp = {'extra_compile_args': ['-O3']} + + + if config.getboolean('parallel', 'openmp'): + self.weave_options = weave_options_openmp + self.weave_support_code = """ + #include + #include + """ + else: + self.weave_options = weave_options_noopenmp + self.weave_support_code = """ + #include + """ + def _get_params(self): return self.variances @@ -98,7 +120,10 @@ class linear(Kernpart): target += tmp.sum() def dK_dX(self, dL_dK, X, X2, target): - target += (((X2[None,:, :] * self.variances)) * dL_dK[:, :, None]).sum(1) + if X2 is None: + target += 2*(((X[None,:, :] * self.variances)) * dL_dK[:, :, None]).sum(1) + else: + target += (((X2[None,:, :] * self.variances)) * dL_dK[:, :, None]).sum(1) def dKdiag_dX(self,dL_dKdiag,X,target): target += 2.*self.variances*dL_dKdiag[:,None]*X @@ -140,28 +165,24 @@ class linear(Kernpart): self.dK_dX(dL_dpsi1.T, Z, mu, target) def psi2(self, Z, mu, S, target): - """ - returns N,num_inducing,num_inducing matrix - """ self._psi_computations(Z, mu, S) -# psi2_old = self.ZZ * np.square(self.variances) * self.mu2_S[:, None, None, :] -# target += psi2.sum(-1) - # slow way of doing it, but right -# psi2_real = rm np.zeros((mu.shape[0], Z.shape[0], Z.shape[0])) -# for n in range(mu.shape[0]): -# for m_prime in range(Z.shape[0]): -# for m in range(Z.shape[0]): -# tmp = self._Z[m:m + 1] * self.variances -# tmp = np.dot(tmp, (tdot(self._mu[n:n + 1].T) + np.diag(S[n]))) -# psi2_real[n, m, m_prime] = np.dot(tmp, ( -# self._Z[m_prime:m_prime + 1] * self.variances).T) -# mu2_S = (self._mu[:, None, :] * self._mu[:, :, None]) -# mu2_S[:, np.arange(self.input_dim), np.arange(self.input_dim)] += self._S -# psi2 = (self.ZA[None, :, None, :] * mu2_S[:, None]).sum(-1) -# psi2 = (psi2[:, :, None] * self.ZA[None, None]).sum(-1) -# psi2_tensor = np.tensordot(self.ZZ[None, :, :, :] * np.square(self.variances), self.mu2_S[:, None, None, :], ((3), (3))).squeeze().T target += self._psi2 + def psi2_new(self,Z,mu,S,target): + tmp = np.zeros((mu.shape[0], Z.shape[0])) + self.K(mu,Z,tmp) + target += tmp[:,:,None]*tmp[:,None,:] + np.sum(S[:,None,None,:]*self.variances**2*Z[None,:,None,:]*Z[None,None,:,:],-1) + + def dpsi2_dtheta_new(self, dL_dpsi2, Z, mu, S, target): + tmp = np.zeros((mu.shape[0], Z.shape[0])) + self.K(mu,Z,tmp) + self.dK_dtheta(2.*np.sum(dL_dpsi2*tmp[:,None,:],2),mu,Z,target) + result= 2.*(dL_dpsi2[:,:,:,None]*S[:,None,None,:]*self.variances*Z[None,:,None,:]*Z[None,None,:,:]).sum(0).sum(0).sum(0) + if self.ARD: + 
target += result.sum(0).sum(0).sum(0) + else: + target += result.sum() + def dpsi2_dtheta(self, dL_dpsi2, Z, mu, S, target): self._psi_computations(Z, mu, S) tmp = dL_dpsi2[:, :, :, None] * (self.ZAinner[:, :, None, :] * (2 * Z)[None, None, :, :]) @@ -170,6 +191,15 @@ class linear(Kernpart): else: target += tmp.sum() + def dpsi2_dmuS_new(self, dL_dpsi2, Z, mu, S, target_mu, target_S): + tmp = np.zeros((mu.shape[0], Z.shape[0])) + self.K(mu,Z,tmp) + self.dK_dX(2.*np.sum(dL_dpsi2*tmp[:,None,:],2),mu,Z,target_mu) + + Zs = Z*self.variances + Zs_sq = Zs[:,None,:]*Zs[None,:,:] + target_S += (dL_dpsi2[:,:,:,None]*Zs_sq[None,:,:,:]).sum(1).sum(1) + def dpsi2_dmuS(self, dL_dpsi2, Z, mu, S, target_mu, target_S): """Think N,num_inducing,num_inducing,input_dim """ self._psi_computations(Z, mu, S) @@ -181,11 +211,17 @@ class linear(Kernpart): #target_mu_dummy += (dL_dpsi2[:, :, :, None] * muAZZA).sum(1).sum(1) #target_S_dummy += (dL_dpsi2[:, :, :, None] * self.ZA[None, :, None, :] * self.ZA[None, None, :, :]).sum(1).sum(1) + + if config.getboolean('parallel', 'openmp'): + pragma_string = "#pragma omp parallel for private(m,mm,q,qq,factor,tmp)" + else: + pragma_string = '' + #Using weave, we can exploiut the symmetry of this problem: code = """ int n, m, mm,q,qq; double factor,tmp; - #pragma omp parallel for private(m,mm,q,qq,factor,tmp) + %s for(n=0;n - #include - """ - weave_options = {'headers' : [''], - 'extra_compile_args': ['-fopenmp -O3'], #-march=native'], - 'extra_link_args' : ['-lgomp']} + """ % pragma_string - N,num_inducing,input_dim = mu.shape[0],Z.shape[0],mu.shape[1] - weave.inline(code, support_code=support_code, libraries=['gomp'], - arg_names=['N','num_inducing','input_dim','mu','AZZA','AZZA_2','target_mu','target_S','dL_dpsi2'], - type_converters=weave.converters.blitz,**weave_options) + + N,num_inducing,input_dim = int(mu.shape[0]),int(Z.shape[0]),int(mu.shape[1]) + weave.inline(code, support_code=self.weave_support_code, + arg_names=['N','num_inducing','input_dim','mu','AZZA','AZZA_2','target_mu','target_S','dL_dpsi2'], + type_converters=weave.converters.blitz,**self.weave_options) def dpsi2_dZ(self, dL_dpsi2, Z, mu, S, target): @@ -231,9 +261,15 @@ class linear(Kernpart): #dummy_target += psi2_dZ.sum(0).sum(0) AZA = self.variances*self.ZAinner + + if config.getboolean('parallel', 'openmp'): + pragma_string = '#pragma omp parallel for private(n,mm,q)' + else: + pragma_string = '' + code=""" int n,m,mm,q; - #pragma omp parallel for private(n,mm,q) + %s for(m=0;m - #include - """ - weave_options = {'headers' : [''], - 'extra_compile_args': ['-fopenmp -O3'], #-march=native'], - 'extra_link_args' : ['-lgomp']} + """ % pragma_string - N,num_inducing,input_dim = mu.shape[0],Z.shape[0],mu.shape[1] - weave.inline(code, support_code=support_code, libraries=['gomp'], + + N,num_inducing,input_dim = int(mu.shape[0]),int(Z.shape[0]),int(mu.shape[1]) + weave.inline(code, support_code=self.weave_support_code, arg_names=['N','num_inducing','input_dim','AZA','target','dL_dpsi2'], - type_converters=weave.converters.blitz,**weave_options) - - - + type_converters=weave.converters.blitz,**self.weave_options) #---------------------------------------# @@ -266,7 +293,7 @@ class linear(Kernpart): #---------------------------------------# def _K_computations(self, X, X2): - if not (np.array_equal(X, self._Xcache) and np.array_equal(X2, self._X2cache)): + if not (fast_array_equal(X, self._Xcache) and fast_array_equal(X2, self._X2cache)): self._Xcache = X.copy() if X2 is None: self._dot_product = tdot(X) @@ 
-277,8 +304,8 @@ class linear(Kernpart): def _psi_computations(self, Z, mu, S): # here are the "statistics" for psi1 and psi2 - Zv_changed = not (np.array_equal(Z, self._Z) and np.array_equal(self.variances, self._variances)) - muS_changed = not (np.array_equal(mu, self._mu) and np.array_equal(S, self._S)) + Zv_changed = not (fast_array_equal(Z, self._Z) and fast_array_equal(self.variances, self._variances)) + muS_changed = not (fast_array_equal(mu, self._mu) and fast_array_equal(S, self._S)) if Zv_changed: # Z has changed, compute Z specific stuff # self.ZZ = Z[:,None,:]*Z[None,:,:] # num_inducing,num_inducing,input_dim diff --git a/GPy/kern/parts/mlp.py b/GPy/kern/parts/mlp.py new file mode 100644 index 00000000..e68aaa72 --- /dev/null +++ b/GPy/kern/parts/mlp.py @@ -0,0 +1,162 @@ +# Copyright (c) 2013, GPy authors (see AUTHORS.txt). +# Licensed under the BSD 3-clause license (see LICENSE.txt) + +from kernpart import Kernpart +import numpy as np +four_over_tau = 2./np.pi + +class MLP(Kernpart): + """ + + Multi layer perceptron kernel (also known as arc sine kernel or neural network kernel) + + .. math:: + + k(x,y) = \\sigma^{2}\\frac{2}{\\pi } \\text{asin} \\left ( \\frac{ \\sigma_w^2 x^\\top y+\\sigma_b^2}{\\sqrt{\\sigma_w^2x^\\top x + \\sigma_b^2 + 1}\\sqrt{\\sigma_w^2 y^\\top y \\sigma_b^2 +1}} \\right ) + + + :param input_dim: the number of input dimensions + :type input_dim: int + :param variance: the variance :math:`\sigma^2` + :type variance: float + :param weight_variance: the vector of the variances of the prior over input weights in the neural network :math:`\sigma^2_w` + :type weight_variance: array or list of the appropriate size (or float if there is only one weight variance parameter) + :param bias_variance: the variance of the prior over bias parameters :math:`\sigma^2_b` + :param ARD: Auto Relevance Determination. If equal to "False", the kernel is isotropic (ie. one weight variance parameter \sigma^2_w), otherwise there is one weight variance parameter per dimension. 
+ :type ARD: Boolean + :rtype: Kernpart object + + + """ + + def __init__(self, input_dim, variance=1., weight_variance=None, bias_variance=100., ARD=False): + self.input_dim = input_dim + self.ARD = ARD + if not ARD: + self.num_params=3 + if weight_variance is not None: + weight_variance = np.asarray(weight_variance) + assert weight_variance.size == 1, "Only one weight variance needed for non-ARD kernel" + else: + weight_variance = 100.*np.ones(1) + else: + self.num_params = self.input_dim + 2 + if weight_variance is not None: + weight_variance = np.asarray(weight_variance) + assert weight_variance.size == self.input_dim, "bad number of weight variances" + else: + weight_variance = np.ones(self.input_dim) + raise NotImplementedError + + self.name='mlp' + self._set_params(np.hstack((variance, weight_variance.flatten(), bias_variance))) + + def _get_params(self): + return np.hstack((self.variance, self.weight_variance.flatten(), self.bias_variance)) + + def _set_params(self, x): + assert x.size == (self.num_params) + self.variance = x[0] + self.weight_variance = x[1:-1] + self.weight_std = np.sqrt(self.weight_variance) + self.bias_variance = x[-1] + + def _get_param_names(self): + if self.num_params == 3: + return ['variance', 'weight_variance', 'bias_variance'] + else: + return ['variance'] + ['weight_variance_%i' % i for i in range(self.lengthscale.size)] + ['bias_variance'] + + def K(self, X, X2, target): + """Return covariance between X and X2.""" + self._K_computations(X, X2) + target += self.variance*self._K_dvar + + def Kdiag(self, X, target): + """Compute the diagonal of the covariance matrix for X.""" + self._K_diag_computations(X) + target+= self.variance*self._K_diag_dvar + + def dK_dtheta(self, dL_dK, X, X2, target): + """Derivative of the covariance with respect to the parameters.""" + self._K_computations(X, X2) + denom3 = self._K_denom*self._K_denom*self._K_denom + base = four_over_tau*self.variance/np.sqrt(1-self._K_asin_arg*self._K_asin_arg) + base_cov_grad = base*dL_dK + + if X2 is None: + vec = np.diag(self._K_inner_prod) + target[1] += ((self._K_inner_prod/self._K_denom + -.5*self._K_numer/denom3 + *(np.outer((self.weight_variance*vec+self.bias_variance+1.), vec) + +np.outer(vec,(self.weight_variance*vec+self.bias_variance+1.))))*base_cov_grad).sum() + target[2] += ((1./self._K_denom + -.5*self._K_numer/denom3 + *((vec[None, :]+vec[:, None])*self.weight_variance + +2.*self.bias_variance + 2.))*base_cov_grad).sum() + else: + vec1 = (X*X).sum(1) + vec2 = (X2*X2).sum(1) + target[1] += ((self._K_inner_prod/self._K_denom + -.5*self._K_numer/denom3 + *(np.outer((self.weight_variance*vec1+self.bias_variance+1.), vec2) + np.outer(vec1, self.weight_variance*vec2 + self.bias_variance+1.)))*base_cov_grad).sum() + target[2] += ((1./self._K_denom + -.5*self._K_numer/denom3 + *((vec1[:, None]+vec2[None, :])*self.weight_variance + + 2*self.bias_variance + 2.))*base_cov_grad).sum() + + target[0] += np.sum(self._K_dvar*dL_dK) + + def dK_dX(self, dL_dK, X, X2, target): + """Derivative of the covariance matrix with respect to X""" + self._K_computations(X, X2) + arg = self._K_asin_arg + numer = self._K_numer + denom = self._K_denom + denom3 = denom*denom*denom + if X2 is not None: + vec2 = (X2*X2).sum(1)*self.weight_variance+self.bias_variance + 1. 
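+            # (editor's note) The update below is the chain rule
+            # d/dX arcsin(u) = u'(X)/sqrt(1 - u**2) with u = numer/denom;
+            # the numer/denom**3 term comes from differentiating the
+            # normalising denominator (quotient rule).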
+            target += four_over_tau*self.weight_variance*self.variance*((X2[None, :, :]/denom[:, :, None] - vec2[None, :, None]*X[:, None, :]*(numer/denom3)[:, :, None])*(dL_dK/np.sqrt(1-arg*arg))[:, :, None]).sum(1)
+        else:
+            vec = (X*X).sum(1)*self.weight_variance+self.bias_variance + 1.
+            target += 2*four_over_tau*self.weight_variance*self.variance*((X[None, :, :]/denom[:, :, None] - vec[None, :, None]*X[:, None, :]*(numer/denom3)[:, :, None])*(dL_dK/np.sqrt(1-arg*arg))[:, :, None]).sum(1)
+
+    def dKdiag_dX(self, dL_dKdiag, X, target):
+        """Gradient of diagonal of covariance with respect to X"""
+        self._K_diag_computations(X)
+        arg = self._K_diag_asin_arg
+        denom = self._K_diag_denom
+        numer = self._K_diag_numer
+        target += four_over_tau*2.*self.weight_variance*self.variance*X*(1/denom*(1 - arg)*dL_dKdiag/(np.sqrt(1-arg*arg)))[:, None]
+
+
+    def _K_computations(self, X, X2):
+        """Pre-computations for the covariance matrix (used for computing the covariance and its gradients)."""
+        if self.ARD:
+            pass
+        else:
+            if X2 is None:
+                self._K_inner_prod = np.dot(X,X.T)
+                self._K_numer = self._K_inner_prod*self.weight_variance+self.bias_variance
+                vec = np.diag(self._K_numer) + 1.
+                self._K_denom = np.sqrt(np.outer(vec,vec))
+                self._K_asin_arg = self._K_numer/self._K_denom
+                self._K_dvar = four_over_tau*np.arcsin(self._K_asin_arg)
+            else:
+                self._K_inner_prod = np.dot(X,X2.T)
+                self._K_numer = self._K_inner_prod*self.weight_variance + self.bias_variance
+                vec1 = (X*X).sum(1)*self.weight_variance + self.bias_variance + 1.
+                vec2 = (X2*X2).sum(1)*self.weight_variance + self.bias_variance + 1.
+                self._K_denom = np.sqrt(np.outer(vec1,vec2))
+                self._K_asin_arg = self._K_numer/self._K_denom
+                self._K_dvar = four_over_tau*np.arcsin(self._K_asin_arg)
+
+    def _K_diag_computations(self, X):
+        """Pre-computations concerning the diagonal terms (used for computation of the diagonal and its gradients)."""
+        if self.ARD:
+            pass
+        else:
+            self._K_diag_numer = (X*X).sum(1)*self.weight_variance + self.bias_variance
+            self._K_diag_denom = self._K_diag_numer+1.
+            self._K_diag_asin_arg = self._K_diag_numer/self._K_diag_denom
+            self._K_diag_dvar = four_over_tau*np.arcsin(self._K_diag_asin_arg)
diff --git a/GPy/kern/parts/odekern1.c b/GPy/kern/parts/odekern1.c
new file mode 100644
index 00000000..5aecf164
--- /dev/null
+++ b/GPy/kern/parts/odekern1.c
@@ -0,0 +1,38 @@
+#include <math.h>
+
+double k_uu(double t1, double t2, double theta1, double theta2, double sig1, double sig2)
+{
+    double kern = 0;
+    double dist = 0;
+
+    dist = sqrt(t2*t2 - t1*t1);
+
+    kern = sig1*(1 + theta1*dist)*exp(-theta1*dist);
+
+    return kern;
+}
+
+
+
+double k_yy(double t1, double t2, double theta1, double theta2, double sig1, double sig2)
+{
+    double kern = 0;
+    double dist = 0;
+
+    dist = sqrt(t2*t2 - t1*t1);
+
+    kern = sig1*sig2 * ( exp(-theta1*dist)*(theta2 - 2*theta1 + theta1*theta2*dist - theta1*theta1*dist)
+                         + exp(-dist) ) / ((theta2 - theta1)*(theta2 - theta1));
+
+    return kern;
+}
+
+
+
+
+
+
+
+
+
diff --git a/GPy/kern/periodic_Matern32.py b/GPy/kern/parts/periodic_Matern32.py
similarity index 99%
rename from GPy/kern/periodic_Matern32.py
rename to GPy/kern/parts/periodic_Matern32.py
index 664a5183..0de57f82 100644
--- a/GPy/kern/periodic_Matern32.py
+++ b/GPy/kern/parts/periodic_Matern32.py
@@ -7,7 +7,7 @@ import numpy as np
 from GPy.util.linalg import mdot
 from GPy.util.decorators import silence_errors
 
-class periodic_Matern32(Kernpart):
+class PeriodicMatern32(Kernpart):
     """
     Kernel of the periodic subspace (up to a given frequency) of a Matern 3/2 RKHS.
     Only defined for input_dim=1.
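Stepping back to the new mlp.py above for a moment: the vectorised arcsin computations in _K_computations are easiest to review against a scalar reference. The following is a minimal sketch of the docstring formula (plain NumPy, independent of GPy; the helper name mlp_k and the default parameter values mirroring MLP.__init__ are ours):

import numpy as np

def mlp_k(x, y, variance=1., w_var=100., b_var=100.):
    # scalar form of the arc-sine kernel from the MLP docstring
    numer = w_var*np.dot(x, y) + b_var
    denom = np.sqrt(w_var*np.dot(x, x) + b_var + 1.)*np.sqrt(w_var*np.dot(y, y) + b_var + 1.)
    return variance*(2./np.pi)*np.arcsin(numer/denom)

X = np.random.randn(5, 2)
K_ref = np.array([[mlp_k(xi, xj) for xj in X] for xi in X])
# K_ref should match, entry by entry, the target filled in by MLP.K(X, None, target)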
@@ -113,7 +113,7 @@ class periodic_Matern32(Kernpart):
 
     @silence_errors
     def dK_dtheta(self,dL_dK,X,X2,target):
-        """derivative of the covariance matrix with respect to the parameters (shape is Nxnum_inducingxNparam)"""
+        """derivative of the covariance matrix with respect to the parameters (shape is num_data x num_inducing x num_params)"""
         if X2 is None: X2 = X
         FX = self._cos(self.basis_alpha[None,:],self.basis_omega[None,:],self.basis_phi[None,:])(X)
         FX2 = self._cos(self.basis_alpha[None,:],self.basis_omega[None,:],self.basis_phi[None,:])(X2)
diff --git a/GPy/kern/periodic_Matern52.py b/GPy/kern/parts/periodic_Matern52.py
similarity index 99%
rename from GPy/kern/periodic_Matern52.py
rename to GPy/kern/parts/periodic_Matern52.py
index c01d2d26..882084fd 100644
--- a/GPy/kern/periodic_Matern52.py
+++ b/GPy/kern/parts/periodic_Matern52.py
@@ -7,7 +7,7 @@ import numpy as np
 from GPy.util.linalg import mdot
 from GPy.util.decorators import silence_errors
 
-class periodic_Matern52(Kernpart):
+class PeriodicMatern52(Kernpart):
     """
     Kernel of the periodic subspace (up to a given frequency) of a Matern 5/2 RKHS.
     Only defined for input_dim=1.
@@ -115,7 +115,7 @@ class periodic_Matern52(Kernpart):
 
     @silence_errors
     def dK_dtheta(self,dL_dK,X,X2,target):
-        """derivative of the covariance matrix with respect to the parameters (shape is Nxnum_inducingxNparam)"""
+        """derivative of the covariance matrix with respect to the parameters (shape is num_data x num_inducing x num_params)"""
         if X2 is None: X2 = X
         FX = self._cos(self.basis_alpha[None,:],self.basis_omega[None,:],self.basis_phi[None,:])(X)
         FX2 = self._cos(self.basis_alpha[None,:],self.basis_omega[None,:],self.basis_phi[None,:])(X2)
diff --git a/GPy/kern/periodic_exponential.py b/GPy/kern/parts/periodic_exponential.py
similarity index 99%
rename from GPy/kern/periodic_exponential.py
rename to GPy/kern/parts/periodic_exponential.py
index fcaf6420..201def6d 100644
--- a/GPy/kern/periodic_exponential.py
+++ b/GPy/kern/parts/periodic_exponential.py
@@ -7,7 +7,7 @@ import numpy as np
 from GPy.util.linalg import mdot
 from GPy.util.decorators import silence_errors
 
-class periodic_exponential(Kernpart):
+class PeriodicExponential(Kernpart):
     """
     Kernel of the periodic subspace (up to a given frequency) of a exponential (Matern 1/2) RKHS.
     Only defined for input_dim=1.
@@ -111,7 +111,7 @@ class periodic_exponential(Kernpart):
 
     @silence_errors
     def dK_dtheta(self,dL_dK,X,X2,target):
-        """derivative of the covariance matrix with respect to the parameters (shape is Nxnum_inducingxNparam)"""
+        """derivative of the covariance matrix with respect to the parameters (shape is num_data x num_inducing x num_params)"""
         if X2 is None: X2 = X
         FX = self._cos(self.basis_alpha[None,:],self.basis_omega[None,:],self.basis_phi[None,:])(X)
         FX2 = self._cos(self.basis_alpha[None,:],self.basis_omega[None,:],self.basis_phi[None,:])(X2)
diff --git a/GPy/kern/parts/poly.py b/GPy/kern/parts/poly.py
new file mode 100644
index 00000000..98c520f0
--- /dev/null
+++ b/GPy/kern/parts/poly.py
@@ -0,0 +1,138 @@
+# Copyright (c) 2013, GPy authors (see AUTHORS.txt).
+# Licensed under the BSD 3-clause license (see LICENSE.txt)
+
+from kernpart import Kernpart
+import numpy as np
+four_over_tau = 2./np.pi
+
+class POLY(Kernpart):
+    """
+
+    Polynomial kernel. Included for completeness, but generally not recommended:
+
+    .. math::
+        k(x, y) = \sigma^2 (\sigma_w^2 x^\top y + \sigma_b^2)^d
+
+    The kernel parameters are :math:`\sigma^2` (variance), :math:`\sigma^2_w`
+    (weight_variance), :math:`\sigma^2_b` (bias_variance) and :math:`d`
+    (degree). Only gradients of the first three are provided for
+    kernel optimisation; it is assumed that the polynomial degree would
+    be set by hand.
+
+    The kernel is not recommended as it is badly behaved when
+    :math:`\sigma^2_w x^\top y + \sigma^2_b` has a magnitude greater than one. For completeness
+    there is an automatic relevance determination version of this
+    kernel provided (NOT YET IMPLEMENTED!).
+
+    :param input_dim: the number of input dimensions
+    :type input_dim: int
+    :param variance: the variance :math:`\sigma^2`
+    :type variance: float
+    :param weight_variance: the vector of the variances of the prior over input weights in the neural network :math:`\sigma^2_w`
+    :type weight_variance: array or list of the appropriate size (or float if there is only one weight variance parameter)
+    :param bias_variance: the variance of the prior over bias parameters :math:`\sigma^2_b`
+    :param degree: the degree of the polynomial.
+    :type degree: int
+    :param ARD: Automatic Relevance Determination. If equal to "False", the kernel is isotropic (i.e. one weight variance parameter :math:`\sigma^2_w`), otherwise there is one weight variance parameter per dimension.
+    :type ARD: Boolean
+    :rtype: Kernpart object
+
+    """
+
+    def __init__(self, input_dim, variance=1., weight_variance=None, bias_variance=1., degree=2, ARD=False):
+        self.input_dim = input_dim
+        self.ARD = ARD
+        if not ARD:
+            self.num_params = 3
+            if weight_variance is not None:
+                weight_variance = np.asarray(weight_variance)
+                assert weight_variance.size == 1, "Only one weight variance needed for non-ARD kernel"
+            else:
+                weight_variance = 1.*np.ones(1)
+        else:
+            self.num_params = self.input_dim + 2
+            if weight_variance is not None:
+                weight_variance = np.asarray(weight_variance)
+                assert weight_variance.size == self.input_dim, "bad number of weight variances"
+            else:
+                weight_variance = np.ones(self.input_dim)
+            raise NotImplementedError
+        self.degree = degree
+        self.name = 'poly_deg' + str(self.degree)
+        self._set_params(np.hstack((variance, weight_variance.flatten(), bias_variance)))
+
+    def _get_params(self):
+        return np.hstack((self.variance, self.weight_variance.flatten(), self.bias_variance))
+
+    def _set_params(self, x):
+        assert x.size == (self.num_params)
+        self.variance = x[0]
+        self.weight_variance = x[1:-1]
+        self.weight_std = np.sqrt(self.weight_variance)
+        self.bias_variance = x[-1]
+
+    def _get_param_names(self):
+        if self.num_params == 3:
+            return ['variance', 'weight_variance', 'bias_variance']
+        else:
+            return ['variance'] + ['weight_variance_%i' % i for i in range(self.weight_variance.size)] + ['bias_variance']
+
+    def K(self, X, X2, target):
+        """Return covariance between X and X2."""
+        self._K_computations(X, X2)
+        target += self.variance*self._K_dvar
+
+    def Kdiag(self, X, target):
+        """Compute the diagonal of the covariance matrix for X."""
+        self._K_diag_computations(X)
+        target += self.variance*self._K_diag_dvar
+
+    def dK_dtheta(self, dL_dK, X, X2, target):
+        """Derivative of the covariance with respect to the parameters."""
+        self._K_computations(X, X2)
+        base = self.variance*self.degree*self._K_poly_arg**(self.degree-1)
+        base_cov_grad = base*dL_dK
+
+        target[0] += np.sum(self._K_dvar*dL_dK)
+        target[1] += (self._K_inner_prod*base_cov_grad).sum()
+        target[2] += base_cov_grad.sum()
+
+    def dK_dX(self, dL_dK, X, X2,
target): + """Derivative of the covariance matrix with respect to X""" + self._K_computations(X, X2) + arg = self._K_poly_arg + if X2 is None: + target += 2*self.weight_variance*self.degree*self.variance*(((X[None,:, :])) *(arg**(self.degree-1))[:, :, None]*dL_dK[:, :, None]).sum(1) + else: + target += self.weight_variance*self.degree*self.variance*(((X2[None,:, :])) *(arg**(self.degree-1))[:, :, None]*dL_dK[:, :, None]).sum(1) + + def dKdiag_dX(self, dL_dKdiag, X, target): + """Gradient of diagonal of covariance with respect to X""" + self._K_diag_computations(X) + arg = self._K_diag_poly_arg + target += 2.*self.weight_variance*self.degree*self.variance*X*dL_dKdiag[:, None]*(arg**(self.degree-1))[:, None] + + + def _K_computations(self, X, X2): + if self.ARD: + pass + else: + if X2 is None: + self._K_inner_prod = np.dot(X,X.T) + else: + self._K_inner_prod = np.dot(X,X2.T) + self._K_poly_arg = self._K_inner_prod*self.weight_variance + self.bias_variance + self._K_dvar = self._K_poly_arg**self.degree + + def _K_diag_computations(self, X): + if self.ARD: + pass + else: + self._K_diag_poly_arg = (X*X).sum(1)*self.weight_variance + self.bias_variance + self._K_diag_dvar = self._K_diag_poly_arg**self.degree + + + + diff --git a/GPy/kern/prod.py b/GPy/kern/parts/prod.py similarity index 68% rename from GPy/kern/prod.py rename to GPy/kern/parts/prod.py index 493129c6..f517262c 100644 --- a/GPy/kern/prod.py +++ b/GPy/kern/parts/prod.py @@ -2,10 +2,11 @@ # Licensed under the BSD 3-clause license (see LICENSE.txt) from kernpart import Kernpart +from coregionalize import Coregionalize import numpy as np import hashlib -class prod(Kernpart): +class Prod(Kernpart): """ Computes the product of 2 kernels @@ -18,7 +19,10 @@ class prod(Kernpart): """ def __init__(self,k1,k2,tensor=False): self.num_params = k1.num_params + k2.num_params - self.name = k1.name + '' + k2.name + if tensor: + self.name = '['+k1.name + '**' + k2.name +']' + else: + self.name = '['+k1.name + '*' + k2.name +']' self.k1 = k1 self.k2 = k2 if tensor: @@ -51,8 +55,18 @@ class prod(Kernpart): self._K_computations(X,X2) target += self._K1 * self._K2 + def K1(self,X, X2): + """Compute the part of the kernel associated with k1.""" + self._K_computations(X, X2) + return self._K1 + + def K2(self, X, X2): + """Compute the part of the kernel associated with k2.""" + self._K_computations(X, X2) + return self._K2 + def dK_dtheta(self,dL_dK,X,X2,target): - """derivative of the covariance matrix with respect to the parameters.""" + """Derivative of the covariance matrix with respect to the parameters.""" self._K_computations(X,X2) if X2 is None: self.k1.dK_dtheta(dL_dK*self._K2, X[:,self.slice1], None, target[:self.k1.num_params]) @@ -80,8 +94,18 @@ class prod(Kernpart): def dK_dX(self,dL_dK,X,X2,target): """derivative of the covariance matrix with respect to X.""" self._K_computations(X,X2) - self.k1.dK_dX(dL_dK*self._K2, X[:,self.slice1], X2[:,self.slice1], target) - self.k2.dK_dX(dL_dK*self._K1, X[:,self.slice2], X2[:,self.slice2], target) + if X2 is None: + if not isinstance(self.k1,Coregionalize) and not isinstance(self.k2,Coregionalize): + self.k1.dK_dX(dL_dK*self._K2, X[:,self.slice1], None, target[:,self.slice1]) + self.k2.dK_dX(dL_dK*self._K1, X[:,self.slice2], None, target[:,self.slice2]) + else:#if isinstance(self.k1,Coregionalize) or isinstance(self.k2,Coregionalize): + #NOTE The indices column in the inputs makes the ki.dK_dX fail when passing None instead of X[:,self.slicei] + X2 = X + self.k1.dK_dX(2.*dL_dK*self._K2, 
X[:,self.slice1], X2[:,self.slice1], target[:,self.slice1]) + self.k2.dK_dX(2.*dL_dK*self._K1, X[:,self.slice2], X2[:,self.slice2], target[:,self.slice2]) + else: + self.k1.dK_dX(dL_dK*self._K2, X[:,self.slice1], X2[:,self.slice1], target[:,self.slice1]) + self.k2.dK_dX(dL_dK*self._K1, X[:,self.slice2], X2[:,self.slice2], target[:,self.slice2]) def dKdiag_dX(self, dL_dKdiag, X, target): K1 = np.zeros(X.shape[0]) @@ -89,8 +113,8 @@ class prod(Kernpart): self.k1.Kdiag(X[:,self.slice1],K1) self.k2.Kdiag(X[:,self.slice2],K2) - self.k1.dK_dX(dL_dKdiag*K2, X[:,self.slice1], target) - self.k2.dK_dX(dL_dKdiag*K1, X[:,self.slice2], target) + self.k1.dK_dX(dL_dKdiag*K2, X[:,self.slice1], target[:,self.slice1]) + self.k2.dK_dX(dL_dKdiag*K1, X[:,self.slice2], target[:,self.slice2]) def _K_computations(self,X,X2): if not (np.array_equal(X,self._X) and np.array_equal(X2,self._X2) and np.array_equal(self._params , self._get_params())): @@ -109,3 +133,13 @@ class prod(Kernpart): self.k1.K(X[:,self.slice1],X2[:,self.slice1],self._K1) self.k2.K(X[:,self.slice2],X2[:,self.slice2],self._K2) + #def __getstate__(self): + #return [self.k1, self.k2, self.slice1, self.slice2, self.name, self.input_dim, self.num_params] + + #def __setstate__(self, state): + #self.k1, self.k2, self.slice1, self.slice2, self.name, self.input_dim, self.num_params = state + #self._X, self._X2, self._params = np.empty(shape=(3,1)) + + + + diff --git a/GPy/kern/prod_orthogonal.py b/GPy/kern/parts/prod_orthogonal.py similarity index 100% rename from GPy/kern/prod_orthogonal.py rename to GPy/kern/parts/prod_orthogonal.py diff --git a/GPy/kern/rational_quadratic.py b/GPy/kern/parts/rational_quadratic.py similarity index 81% rename from GPy/kern/rational_quadratic.py rename to GPy/kern/parts/rational_quadratic.py index d1e7a7e3..a75a5b11 100644 --- a/GPy/kern/rational_quadratic.py +++ b/GPy/kern/parts/rational_quadratic.py @@ -5,7 +5,7 @@ from kernpart import Kernpart import numpy as np -class rational_quadratic(Kernpart): +class RationalQuadratic(Kernpart): """ rational quadratic kernel @@ -57,7 +57,7 @@ class rational_quadratic(Kernpart): dist2 = np.square((X-X2.T)/self.lengthscale) dvar = (1 + dist2/2.)**(-self.power) - dl = self.power * self.variance * dist2 * self.lengthscale**(-3) * (1 + dist2/2./self.power)**(-self.power-1) + dl = self.power * self.variance * dist2 / self.lengthscale * (1 + dist2/2.)**(-self.power-1) dp = - self.variance * np.log(1 + dist2/2.) 
* (1 + dist2/2.)**(-self.power) target[0] += np.sum(dvar*dL_dK) @@ -70,10 +70,12 @@ class rational_quadratic(Kernpart): def dK_dX(self,dL_dK,X,X2,target): """derivative of the covariance matrix with respect to X.""" - if X2 is None: X2 = X - dist2 = np.square((X-X2.T)/self.lengthscale) - - dX = -self.variance*self.power * (X-X2.T)/self.lengthscale**2 * (1 + dist2/2./self.lengthscale)**(-self.power-1) + if X2 is None: + dist2 = np.square((X-X.T)/self.lengthscale) + dX = -2.*self.variance*self.power * (X-X.T)/self.lengthscale**2 * (1 + dist2/2./self.lengthscale)**(-self.power-1) + else: + dist2 = np.square((X-X2.T)/self.lengthscale) + dX = -self.variance*self.power * (X-X2.T)/self.lengthscale**2 * (1 + dist2/2./self.lengthscale)**(-self.power-1) target += np.sum(dL_dK*dX,1)[:,np.newaxis] def dKdiag_dX(self,dL_dKdiag,X,target): diff --git a/GPy/kern/rbf.py b/GPy/kern/parts/rbf.py similarity index 66% rename from GPy/kern/rbf.py rename to GPy/kern/parts/rbf.py index 03b37b01..dbc689d5 100644 --- a/GPy/kern/rbf.py +++ b/GPy/kern/parts/rbf.py @@ -4,11 +4,12 @@ from kernpart import Kernpart import numpy as np -import hashlib from scipy import weave -from ..util.linalg import tdot +from ...util.linalg import tdot +from ...util.misc import fast_array_equal +from ...util.config import * -class rbf(Kernpart): +class RBF(Kernpart): """ Radial Basis Function kernel, aka squared-exponential, exponentiated quadratic or Gaussian kernel: @@ -57,12 +58,27 @@ class rbf(Kernpart): self._X, self._X2, self._params = np.empty(shape=(3, 1)) # a set of optional args to pass to weave - self.weave_options = {'headers' : [''], - 'extra_compile_args': ['-fopenmp -O3'], # -march=native'], - 'extra_link_args' : ['-lgomp']} + weave_options_openmp = {'headers' : [''], + 'extra_compile_args': ['-fopenmp -O3'], + 'extra_link_args' : ['-lgomp'], + 'libraries': ['gomp']} + weave_options_noopenmp = {'extra_compile_args': ['-O3']} + if config.getboolean('parallel', 'openmp'): + self.weave_options = weave_options_openmp + self.weave_support_code = """ + #include + #include + """ + else: + self.weave_options = weave_options_noopenmp + self.weave_support_code = """ + #include + """ + + def _get_params(self): return np.hstack((self.variance, self.lengthscale)) @@ -110,8 +126,8 @@ class rbf(Kernpart): target(q+1) += var_len3(q)*tmp; } """ - num_data, num_inducing, input_dim = X.shape[0], X.shape[0], self.input_dim - weave.inline(code, arg_names=['num_data','num_inducing','input_dim','X','X2','target','dvardLdK','var_len3'], type_converters=weave.converters.blitz, **self.weave_options) + num_data, num_inducing, input_dim = int(X.shape[0]), int(X.shape[0]), int(self.input_dim) + weave.inline(code, arg_names=['num_data', 'num_inducing', 'input_dim', 'X', 'X2', 'target', 'dvardLdK', 'var_len3'], type_converters=weave.converters.blitz, **self.weave_options) else: code = """ int q,i,j; @@ -126,9 +142,9 @@ class rbf(Kernpart): target(q+1) += var_len3(q)*tmp; } """ - num_data, num_inducing, input_dim = X.shape[0], X2.shape[0], self.input_dim - #[np.add(target[1+q:2+q],var_len3[q]*np.sum(dvardLdK*np.square(X[:,q][:,None]-X2[:,q][None,:])),target[1+q:2+q]) for q in range(self.input_dim)] - weave.inline(code, arg_names=['num_data','num_inducing','input_dim','X','X2','target','dvardLdK','var_len3'], type_converters=weave.converters.blitz, **self.weave_options) + num_data, num_inducing, input_dim = int(X.shape[0]), int(X2.shape[0]), int(self.input_dim) + # 
[np.add(target[1+q:2+q],var_len3[q]*np.sum(dvardLdK*np.square(X[:,q][:,None]-X2[:,q][None,:])),target[1+q:2+q]) for q in range(self.input_dim)] + weave.inline(code, arg_names=['num_data', 'num_inducing', 'input_dim', 'X', 'X2', 'target', 'dvardLdK', 'var_len3'], type_converters=weave.converters.blitz, **self.weave_options) else: target[1] += (self.variance / self.lengthscale) * np.sum(self._K_dvar * self._K_dist2 * dL_dK) @@ -138,7 +154,10 @@ class rbf(Kernpart): def dK_dX(self, dL_dK, X, X2, target): self._K_computations(X, X2) - _K_dist = X[:, None, :] - X2[None, :, :] # don't cache this in _K_computations because it is high memory. If this function is being called, chances are we're not in the high memory arena. + if X2 is None: + _K_dist = 2*(X[:, None, :] - X[None, :, :]) + else: + _K_dist = X[:, None, :] - X2[None, :, :] # don't cache this in _K_computations because it is high memory. If this function is being called, chances are we're not in the high memory arena. dK_dX = (-self.variance / self.lengthscale2) * np.transpose(self._K_dvar[:, :, np.newaxis] * _K_dist, (1, 0, 2)) target += np.sum(dK_dX * dL_dK.T[:, :, None], 0) @@ -165,10 +184,9 @@ class rbf(Kernpart): def dpsi1_dtheta(self, dL_dpsi1, Z, mu, S, target): self._psi_computations(Z, mu, S) - denom_deriv = S[:, None, :] / (self.lengthscale ** 3 + self.lengthscale * S[:, None, :]) - d_length = self._psi1[:, :, None] * (self.lengthscale * np.square(self._psi1_dist / (self.lengthscale2 + S[:, None, :])) + denom_deriv) target[0] += np.sum(dL_dpsi1 * self._psi1 / self.variance) - dpsi1_dlength = d_length * dL_dpsi1[:, :, None] + d_length = self._psi1[:,:,None] * ((self._psi1_dist_sq - 1.)/(self.lengthscale*self._psi1_denom) +1./self.lengthscale) + dpsi1_dlength = d_length * np.atleast_3d(dL_dpsi1) if not self.ARD: target[1] += dpsi1_dlength.sum() else: @@ -190,12 +208,19 @@ class rbf(Kernpart): self._psi_computations(Z, mu, S) target += self._psi2 + def _crossterm_mu_S(self, Z, mu, S): + # compute the crossterm expectation for K as the other kernel: + Sigma = 1./self.lengthscale2[None,None,:] + 1./S[:,None,:] # is independent across M, + Sigma_tilde = (self.lengthscale2[None, :] + S) + M = (S*mu/Sigma_tilde)[:, None, :] + (self.lengthscale2[None,:]*Z)[None, :, :]/Sigma_tilde[:, None, :] + # make sure return is [N x M x Q] + return M, Sigma.repeat(Z.shape[0],1) + def dpsi2_dtheta(self, dL_dpsi2, Z, mu, S, target): """Shape N,num_inducing,num_inducing,Ntheta""" self._psi_computations(Z, mu, S) d_var = 2.*self._psi2 / self.variance d_length = 2.*self._psi2[:, :, :, None] * (self._psi2_Zdist_sq * self._psi2_denom + self._psi2_mudist_sq + S[:, None, None, :] / self.lengthscale2) / (self.lengthscale * self._psi2_denom) - target[0] += np.sum(dL_dpsi2 * d_var) dpsi2_dlength = d_length * dL_dpsi2[:, :, :, None] if not self.ARD: @@ -222,9 +247,10 @@ class rbf(Kernpart): #---------------------------------------# def _K_computations(self, X, X2): - if not (np.array_equal(X, self._X) and np.array_equal(X2, self._X2) and np.array_equal(self._params , self._get_params())): + params = self._get_params() + if not (fast_array_equal(X, self._X) and fast_array_equal(X2, self._X2) and fast_array_equal(self._params , params)): self._X = X.copy() - self._params == self._get_params().copy() + self._params = params.copy() if X2 is None: self._X2 = None X = X / self.lengthscale @@ -239,55 +265,61 @@ class rbf(Kernpart): def _psi_computations(self, Z, mu, S): # here are the "statistics" for psi1 and psi2 - if not np.array_equal(Z, self._Z): - #Z has 
changed, compute Z specific stuff - self._psi2_Zhat = 0.5*(Z[:,None,:] +Z[None,:,:]) # M,M,Q - self._psi2_Zdist = 0.5*(Z[:,None,:]-Z[None,:,:]) # M,M,Q - self._psi2_Zdist_sq = np.square(self._psi2_Zdist/self.lengthscale) # M,M,Q - self._Z = Z + Z_changed = not fast_array_equal(Z, self._Z) + if Z_changed: + # Z has changed, compute Z specific stuff + self._psi2_Zhat = 0.5 * (Z[:, None, :] + Z[None, :, :]) # M,M,Q + self._psi2_Zdist = 0.5 * (Z[:, None, :] - Z[None, :, :]) # M,M,Q + self._psi2_Zdist_sq = np.square(self._psi2_Zdist / self.lengthscale) # M,M,Q - if not (np.array_equal(Z, self._Z) and np.array_equal(mu, self._mu) and np.array_equal(S, self._S)): - #something's changed. recompute EVERYTHING + if Z_changed or not fast_array_equal(mu, self._mu) or not fast_array_equal(S, self._S): + # something's changed. recompute EVERYTHING - #psi1 - self._psi1_denom = S[:,None,:]/self.lengthscale2 + 1. - self._psi1_dist = Z[None,:,:]-mu[:,None,:] - self._psi1_dist_sq = np.square(self._psi1_dist)/self.lengthscale2/self._psi1_denom - self._psi1_exponent = -0.5*np.sum(self._psi1_dist_sq+np.log(self._psi1_denom),-1) - self._psi1 = self.variance*np.exp(self._psi1_exponent) + # psi1 + self._psi1_denom = S[:, None, :] / self.lengthscale2 + 1. + self._psi1_dist = Z[None, :, :] - mu[:, None, :] + self._psi1_dist_sq = np.square(self._psi1_dist) / self.lengthscale2 / self._psi1_denom + self._psi1_exponent = -0.5 * np.sum(self._psi1_dist_sq + np.log(self._psi1_denom), -1) + self._psi1 = self.variance * np.exp(self._psi1_exponent) - #psi2 - self._psi2_denom = 2.*S[:,None,None,:]/self.lengthscale2+1. # N,M,M,Q - self._psi2_mudist, self._psi2_mudist_sq, self._psi2_exponent, _ = self.weave_psi2(mu,self._psi2_Zhat) - #self._psi2_mudist = mu[:,None,None,:]-self._psi2_Zhat #N,M,M,Q - #self._psi2_mudist_sq = np.square(self._psi2_mudist)/(self.lengthscale2*self._psi2_denom) - #self._psi2_exponent = np.sum(-self._psi2_Zdist_sq -self._psi2_mudist_sq -0.5*np.log(self._psi2_denom),-1) #N,M,M,Q - self._psi2 = np.square(self.variance)*np.exp(self._psi2_exponent) # N,M,M,Q + # psi2 + self._psi2_denom = 2.*S[:, None, None, :] / self.lengthscale2 + 1. 
# N,M,M,Q + self._psi2_mudist, self._psi2_mudist_sq, self._psi2_exponent, _ = self.weave_psi2(mu, self._psi2_Zhat) + # self._psi2_mudist = mu[:,None,None,:]-self._psi2_Zhat #N,M,M,Q + # self._psi2_mudist_sq = np.square(self._psi2_mudist)/(self.lengthscale2*self._psi2_denom) + # self._psi2_exponent = np.sum(-self._psi2_Zdist_sq -self._psi2_mudist_sq -0.5*np.log(self._psi2_denom),-1) #N,M,M,Q + self._psi2 = np.square(self.variance) * np.exp(self._psi2_exponent) # N,M,M,Q - #store matrices for caching - self._Z, self._mu, self._S = Z, mu,S + # store matrices for caching + self._Z, self._mu, self._S = Z, mu, S - def weave_psi2(self,mu,Zhat): - N,input_dim = mu.shape + def weave_psi2(self, mu, Zhat): + N, input_dim = mu.shape num_inducing = Zhat.shape[0] - mudist = np.empty((N,num_inducing,num_inducing,input_dim)) - mudist_sq = np.empty((N,num_inducing,num_inducing,input_dim)) - psi2_exponent = np.zeros((N,num_inducing,num_inducing)) - psi2 = np.empty((N,num_inducing,num_inducing)) + mudist = np.empty((N, num_inducing, num_inducing, input_dim)) + mudist_sq = np.empty((N, num_inducing, num_inducing, input_dim)) + psi2_exponent = np.zeros((N, num_inducing, num_inducing)) + psi2 = np.empty((N, num_inducing, num_inducing)) psi2_Zdist_sq = self._psi2_Zdist_sq - _psi2_denom = self._psi2_denom.squeeze().reshape(N, self.input_dim) - half_log_psi2_denom = 0.5 * np.log(self._psi2_denom).squeeze().reshape(N, self.input_dim) + _psi2_denom = self._psi2_denom.squeeze().reshape(-1, input_dim) + half_log_psi2_denom = 0.5 * np.log(self._psi2_denom).squeeze().reshape(-1, input_dim) variance_sq = float(np.square(self.variance)) if self.ARD: lengthscale2 = self.lengthscale2 else: lengthscale2 = np.ones(input_dim) * self.lengthscale2 + + if config.getboolean('parallel', 'openmp'): + pragma_string = '#pragma omp parallel for private(tmp)' + else: + pragma_string = '' + code = """ double tmp; - #pragma omp parallel for private(tmp) + %s for (int n=0; n + %s #include - """ - weave.inline(code, support_code=support_code, libraries=['gomp'], - arg_names=['N','num_inducing','input_dim','mu','Zhat','mudist_sq','mudist','lengthscale2','_psi2_denom','psi2_Zdist_sq','psi2_exponent','half_log_psi2_denom','psi2','variance_sq'], + """ % pragma_string + + N, num_inducing, input_dim = int(N), int(num_inducing), int(input_dim) + weave.inline(code, support_code=support_code, + arg_names=['N', 'num_inducing', 'input_dim', 'mu', 'Zhat', 'mudist_sq', 'mudist', 'lengthscale2', '_psi2_denom', 'psi2_Zdist_sq', 'psi2_exponent', 'half_log_psi2_denom', 'psi2', 'variance_sq'], type_converters=weave.converters.blitz, **self.weave_options) return mudist, mudist_sq, psi2_exponent, psi2 diff --git a/GPy/kern/parts/rbf_inv.py b/GPy/kern/parts/rbf_inv.py new file mode 100644 index 00000000..1cc05aaa --- /dev/null +++ b/GPy/kern/parts/rbf_inv.py @@ -0,0 +1,341 @@ +# Copyright (c) 2012, GPy authors (see AUTHORS.txt). +# Licensed under the BSD 3-clause license (see LICENSE.txt) + + +from rbf import RBF +import numpy as np +import hashlib +from scipy import weave +from ...util.linalg import tdot +from ...util.config import * + + +class RBFInv(RBF): + """ + Radial Basis Function kernel, aka squared-exponential, exponentiated quadratic or Gaussian kernel. It only + differs from RBF in that here the parametrization is wrt the inverse lengthscale: + + .. 
math::
+
+        k(r) = \sigma^2 \exp \\bigg(- \\frac{1}{2} r^2 \\bigg) \ \ \ \ \  \\text{ where  } r^2 = \sum_{i=1}^d \\frac{(x_i-x^\prime_i)^2}{\ell_i^2}
+
+    where :math:`\ell_i` is the lengthscale, :math:`\sigma^2` the variance and :math:`d` the dimensionality of the input.
+
+    :param input_dim: the number of input dimensions
+    :type input_dim: int
+    :param variance: the variance of the kernel
+    :type variance: float
+    :param lengthscale: the vector of lengthscale of the kernel
+    :type lengthscale: array or list of the appropriate size (or float if there is only one lengthscale parameter)
+    :param ARD: Automatic Relevance Determination. If equal to "False", the kernel is isotropic (i.e. one single lengthscale parameter :math:`\ell`), otherwise there is one lengthscale parameter per dimension.
+    :type ARD: Boolean
+    :rtype: kernel object
+
+    .. Note: this object implements both the ARD and 'spherical' version of the function
+    """
+
+    def __init__(self, input_dim, variance=1., inv_lengthscale=None, ARD=False):
+        self.input_dim = input_dim
+        self.name = 'rbf_inv'
+        self.ARD = ARD
+        if not ARD:
+            self.num_params = 2
+            if inv_lengthscale is not None:
+                inv_lengthscale = np.asarray(inv_lengthscale)
+                assert inv_lengthscale.size == 1, "Only one lengthscale needed for non-ARD kernel"
+            else:
+                inv_lengthscale = np.ones(1)
+        else:
+            self.num_params = self.input_dim + 1
+            if inv_lengthscale is not None:
+                inv_lengthscale = np.asarray(inv_lengthscale)
+                assert inv_lengthscale.size == self.input_dim, "bad number of lengthscales"
+            else:
+                inv_lengthscale = np.ones(self.input_dim)
+
+        self._set_params(np.hstack((variance, inv_lengthscale.flatten())))
+
+        # initialize cache
+        self._Z, self._mu, self._S = np.empty(shape=(3, 1))
+        self._X, self._X2, self._params = np.empty(shape=(3, 1))
+
+        # a set of optional args to pass to weave
+        weave_options_openmp = {'headers'           : ['<omp.h>'],
+                                'extra_compile_args': ['-fopenmp -O3'],
+                                'extra_link_args'   : ['-lgomp'],
+                                'libraries': ['gomp']}
+        weave_options_noopenmp = {'extra_compile_args': ['-O3']}
+
+        if config.getboolean('parallel', 'openmp'):
+            self.weave_options = weave_options_openmp
+            self.weave_support_code = """
+            #include <omp.h>
+            #include <math.h>
+            """
+        else:
+            self.weave_options = weave_options_noopenmp
+            self.weave_support_code = """
+            #include <math.h>
+            """
+
+    def _get_params(self):
+        return np.hstack((self.variance, self.inv_lengthscale))
+
+    def _set_params(self, x):
+        assert x.size == (self.num_params)
+        self.variance = x[0]
+        self.inv_lengthscale = x[1:]
+        self.inv_lengthscale2 = np.square(self.inv_lengthscale)
+        # TODO: We can rewrite everything with inv_lengthscale and never need to do the below
+        self.lengthscale = 1. / self.inv_lengthscale
+        self.lengthscale2 = np.square(self.lengthscale)
+        # reset cached results
+        self._X, self._X2, self._params = np.empty(shape=(3, 1))
+        self._Z, self._mu, self._S = np.empty(shape=(3, 1)) # cached versions of Z,mu,S
+
+    def _get_param_names(self):
+        if self.num_params == 2:
+            return ['variance', 'inv_lengthscale']
+        else:
+            return ['variance'] + ['inv_lengthscale%i' % i for i in range(self.inv_lengthscale.size)]
+
+    # TODO: Rewrite computations so that lengthscale is not needed (but only inv.
lengthscale) + def dK_dtheta(self, dL_dK, X, X2, target): + self._K_computations(X, X2) + target[0] += np.sum(self._K_dvar * dL_dK) + if self.ARD: + dvardLdK = self._K_dvar * dL_dK + var_len3 = self.variance / np.power(self.lengthscale, 3) + len2 = self.lengthscale2 + if X2 is None: + # save computation for the symmetrical case + dvardLdK = dvardLdK + dvardLdK.T + code = """ + int q,i,j; + double tmp; + for(q=0; q=0.,1.,0.) -class spline(Kernpart): +class Spline(Kernpart): """ Spline kernel diff --git a/GPy/kern/symmetric.py b/GPy/kern/parts/symmetric.py similarity index 98% rename from GPy/kern/symmetric.py rename to GPy/kern/parts/symmetric.py index c7099a6f..d836763d 100644 --- a/GPy/kern/symmetric.py +++ b/GPy/kern/parts/symmetric.py @@ -4,7 +4,7 @@ from kernpart import Kernpart import numpy as np -class symmetric(Kernpart): +class Symmetric(Kernpart): """ Symmetrical kernels @@ -56,7 +56,7 @@ class symmetric(Kernpart): AX = np.dot(X,self.transform) if X2 is None: X2 = X - ZX2 = AX + AX2 = AX else: AX2 = np.dot(X2, self.transform) self.k.dK_dtheta(dL_dK,X,X2,target) diff --git a/GPy/kern/parts/sympy_helpers.cpp b/GPy/kern/parts/sympy_helpers.cpp new file mode 100644 index 00000000..56aa6f21 --- /dev/null +++ b/GPy/kern/parts/sympy_helpers.cpp @@ -0,0 +1,196 @@ +#include "Python.h" +#include +#include +#include +#include +#include +double DiracDelta(double x){ + // TODO: this doesn't seem to be a dirac delta ... should return infinity. Neil + if((x<0.000001) & (x>-0.000001))//go on, laugh at my c++ skills + return 1.0; + else + return 0.0; +}; +double DiracDelta(double x,int foo){ + return 0.0; +}; + +double sinc(double x){ + // compute the sinc function + if (x==0) + return 1.0; + else + return sin(x)/x; +} + +double sinc_grad(double x){ + // compute the gradient of the sinc function. + if (x==0) + return 0.0; + else + return (x*cos(x) - sin(x))/(x*x); +} +double erfcx(double x){ + // Based on code by Soren Hauberg 2010 for Octave. + // compute the scaled complex error function. + //return erfc(x)*exp(x*x); + double xneg=-sqrt(log(DBL_MAX/2)); + double xmax = 1/(sqrt(M_PI)*DBL_MIN); + xmax = DBL_MAXxmax) + return 0.0; + else + return y; +} + +double ln_diff_erf(double x0, double x1){ + // stably compute the log of difference between two erfs. + if (x1>x0){ + PyErr_SetString(PyExc_RuntimeError,"second argument must be smaller than or equal to first in ln_diff_erf"); + throw 1; + } + if (x0==x1){ + PyErr_WarnEx(PyExc_RuntimeWarning,"divide by zero encountered in log", 1); + return -INFINITY; + } + else if(x0<0 && x1>0 || x0>0 && x1<0) //x0 and x1 have opposite signs + return log(erf(x0)-erf(x1)); + else if(x0>0) //x0 positive, x1 non-negative + return log(erfcx(x1)-erfcx(x0)*exp(x1*x1- x0*x0))-x1*x1; + else //x0 and x1 non-positive + return log(erfcx(-x0)-erfcx(-x1)*exp(x0*x0 - x1*x1))-x0*x0; +} +// TODO: For all these computations of h things are very efficient at the moment. Need to recode sympykern to allow the precomputations to take place and all the gradients to be computed in one function. Not sure of best way forward for that yet. Neil +double h(double t, double tprime, double d_i, double d_j, double l){ + // Compute the h function for the sim covariance. 
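+    // Writing nu = 0.5*l*d_i, the quantity assembled below is
+    //   h(t,t') = exp(nu^2)/(d_i+d_j) * [ exp(-d_i*(t-t'))*(erf((t-t')/l - nu) + erf(t'/l + nu))
+    //                                   - exp(-d_i*t - d_j*t')*(erf(t/l - nu) + erf(nu)) ]
+    // with each erf difference evaluated in log space via ln_diff_erf to
+    // avoid cancellation when the two erf values are nearly equal.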
+ double half_l_di = 0.5*l*d_i; + double arg_1 = half_l_di + tprime/l; + double arg_2 = half_l_di - (t-tprime)/l; + double ln_part_1 = ln_diff_erf(arg_1, arg_2); + arg_2 = half_l_di - t/l; + double sign_val = 1.0; + if(t/l==0) + sign_val = 0.0; + else if (t/l < 0) + sign_val = -1.0; + arg_2 = half_l_di - t/l; + double ln_part_2 = ln_diff_erf(half_l_di, arg_2); + // if either ln_part_1 or ln_part_2 are -inf, don't bother computing rest of that term. + double part_1 = 0.0; + if(isfinite(ln_part_1)) + part_1 = sign_val*exp(half_l_di*half_l_di - d_i*(t-tprime) + ln_part_1 - log(d_i + d_j)); + double part_2 = 0.0; + if(isfinite(ln_part_2)) + part_2 = sign_val*exp(half_l_di*half_l_di - d_i*t - d_j*tprime + ln_part_2 - log(d_i + d_j)); + return part_1 - part_2; +} + + +double dh_dd_i(double t, double tprime, double d_i, double d_j, double l){ + double diff_t = (t-tprime); + double l2 = l*l; + double hv = h(t, tprime, d_i, d_j, l); + double half_l_di = 0.5*l*d_i; + double arg_1 = half_l_di + tprime/l; + double arg_2 = half_l_di - (t-tprime)/l; + double ln_part_1 = ln_diff_erf(arg_1, arg_2); + arg_1 = half_l_di; + arg_2 = half_l_di - t/l; + double sign_val = 1.0; + if(t/l==0) + sign_val = 0.0; + else if (t/l < 0) + sign_val = -1.0; + double ln_part_2 = ln_diff_erf(half_l_di, half_l_di - t/l); + double base = (0.5*d_i*l2*(d_i+d_j)-1)*hv; + if(isfinite(ln_part_1)) + base -= diff_t*sign_val*exp(half_l_di*half_l_di + -d_i*diff_t + +ln_part_1); + if(isfinite(ln_part_2)) + base += t*sign_val*exp(half_l_di*half_l_di + -d_i*t-d_j*tprime + +ln_part_2); + base += l/sqrt(M_PI)*(-exp(-diff_t*diff_t/l2) + +exp(-tprime*tprime/l2-d_i*t) + +exp(-t*t/l2-d_j*tprime) + -exp(-(d_i*t + d_j*tprime))); + return base/(d_i+d_j); + +} + +double dh_dd_j(double t, double tprime, double d_i, double d_j, double l){ + double half_l_di = 0.5*l*d_i; + double hv = h(t, tprime, d_i, d_j, l); + double sign_val = 1.0; + if(t/l==0) + sign_val = 0.0; + else if (t/l < 0) + sign_val = -1.0; + double ln_part_2 = ln_diff_erf(half_l_di, half_l_di - t/l); + double base = -hv; + if(isfinite(ln_part_2)) + base += tprime*sign_val*exp(half_l_di*half_l_di-(d_i*t+d_j*tprime)+ln_part_2); + return base/(d_i+d_j); +} + +double dh_dl(double t, double tprime, double d_i, double d_j, double l){ + // compute gradient of h function with respect to lengthscale for sim covariance + // TODO a lot of energy wasted recomputing things here, need to do this in a shared way somehow ... perhaps needs rewrite of sympykern. + double half_l_di = 0.5*l*d_i; + double arg_1 = half_l_di + tprime/l; + double arg_2 = half_l_di - (t-tprime)/l; + double ln_part_1 = ln_diff_erf(arg_1, arg_2); + arg_2 = half_l_di - t/l; + double ln_part_2 = ln_diff_erf(half_l_di, arg_2); + double diff_t = t - tprime; + double l2 = l*l; + double hv = h(t, tprime, d_i, d_j, l); + return 0.5*d_i*d_i*l*hv + 2/(sqrt(M_PI)*(d_i+d_j))*((-diff_t/l2-d_i/2)*exp(-diff_t*diff_t/l2)+(-tprime/l2+d_i/2)*exp(-tprime*tprime/l2-d_i*t)-(-t/l2-d_i/2)*exp(-t*t/l2-d_j*tprime)-d_i/2*exp(-(d_i*t+d_j*tprime))); +} + +double dh_dt(double t, double tprime, double d_i, double d_j, double l){ + // compute gradient of h function with respect to t. 
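+    // Like the other dh_* gradients above, this closed form can be
+    // sanity-checked against a central difference of h itself, e.g.
+    //   (h(t+e,tprime,d_i,d_j,l) - h(t-e,tprime,d_i,d_j,l)) / (2*e)
+    // for small e, away from t == 0 where sign_val switches.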
+ double diff_t = t - tprime; + double half_l_di = 0.5*l*d_i; + double arg_1 = half_l_di + tprime/l; + double arg_2 = half_l_di - diff_t/l; + double ln_part_1 = ln_diff_erf(arg_1, arg_2); + arg_2 = half_l_di - t/l; + double ln_part_2 = ln_diff_erf(half_l_di, arg_2); + + return (d_i*exp(ln_part_2-d_i*t - d_j*tprime) - d_i*exp(ln_part_1-d_i*diff_t) + 2*exp(-d_i*diff_t - pow(half_l_di - diff_t/l, 2))/(sqrt(M_PI)*l) - 2*exp(-d_i*t - d_j*tprime - pow(half_l_di - t/l,2))/(sqrt(M_PI)*l))*exp(half_l_di*half_l_di)/(d_i + d_j); +} + +double dh_dtprime(double t, double tprime, double d_i, double d_j, double l){ + // compute gradient of h function with respect to tprime. + double diff_t = t - tprime; + double half_l_di = 0.5*l*d_i; + double arg_1 = half_l_di + tprime/l; + double arg_2 = half_l_di - diff_t/l; + double ln_part_1 = ln_diff_erf(arg_1, arg_2); + arg_2 = half_l_di - t/l; + double ln_part_2 = ln_diff_erf(half_l_di, arg_2); + + return (d_i*exp(ln_part_1-d_i*diff_t) + d_j*exp(ln_part_2-d_i*t - d_j*tprime) + (-2*exp(-pow(half_l_di - diff_t/l,2)) + 2*exp(-pow(half_l_di + tprime/l,2)))*exp(-d_i*diff_t)/(sqrt(M_PI)*l))*exp(half_l_di*half_l_di)/(d_i + d_j); +} diff --git a/GPy/kern/parts/sympy_helpers.h b/GPy/kern/parts/sympy_helpers.h new file mode 100644 index 00000000..5e58d5d2 --- /dev/null +++ b/GPy/kern/parts/sympy_helpers.h @@ -0,0 +1,16 @@ +#include +double DiracDelta(double x); +double DiracDelta(double x, int foo); + +double sinc(double x); +double sinc_grad(double x); + +double erfcx(double x); +double ln_diff_erf(double x0, double x1); + +double h(double t, double tprime, double d_i, double d_j, double l); +double dh_dl(double t, double tprime, double d_i, double d_j, double l); +double dh_dd_i(double t, double tprime, double d_i, double d_j, double l); +double dh_dd_j(double t, double tprime, double d_i, double d_j, double l); +double dh_dt(double t, double tprime, double d_i, double d_j, double l); +double dh_dtprime(double t, double tprime, double d_i, double d_j, double l); diff --git a/GPy/kern/parts/sympy_helpers.py b/GPy/kern/parts/sympy_helpers.py new file mode 100644 index 00000000..125dac58 --- /dev/null +++ b/GPy/kern/parts/sympy_helpers.py @@ -0,0 +1,71 @@ +# Code for testing functions written in sympy_helpers.cpp +from scipy import weave +import tempfile +import os +import numpy as np +current_dir = os.path.dirname(os.path.abspath(os.path.dirname(__file__))) +extra_compile_args = [] + +weave_kwargs = { + 'support_code': "", + 'include_dirs':[tempfile.gettempdir(), current_dir], + 'headers':['"parts/sympy_helpers.h"'], + 'sources':[os.path.join(current_dir,"parts/sympy_helpers.cpp")], + 'extra_compile_args':extra_compile_args, + 'extra_link_args':['-lgomp'], + 'verbose':True} + +def erfcx(x): + code = """ + // Code for computing scaled complementary erf + int i; + int dim; + int elements = Ntarget[0]; + for (dim=1; dim 1: + self.input_dim += 1 + assert self.input_dim == input_dim + self.output_dim = output_dim + # extract parameter names + thetas = sorted([e for e in sp_vars if not (e.name[0:2]=='x_' or e.name[0:2]=='z_')],key=lambda e:e.name) + + + # Look for parameters with index. + if self.output_dim>1: + self._sp_theta_i = sorted([e for e in thetas if (e.name[-2:]=='_i')], key=lambda e:e.name) + self._sp_theta_j = sorted([e for e in thetas if (e.name[-2:]=='_j')], key=lambda e:e.name) + # Make sure parameter appears with both indices! 
+ assert len(self._sp_theta_i)==len(self._sp_theta_j) + assert all([theta_i.name[:-2]==theta_j.name[:-2] for theta_i, theta_j in zip(self._sp_theta_i, self._sp_theta_j)]) + + # Extract names of shared parameters + self._sp_theta = [theta for theta in thetas if theta not in self._sp_theta_i and theta not in self._sp_theta_j] + + self.num_split_params = len(self._sp_theta_i) + self._split_theta_names = ["%s"%theta.name[:-2] for theta in self._sp_theta_i] + for theta in self._split_theta_names: + setattr(self, theta, np.ones(self.output_dim)) + + self.num_shared_params = len(self._sp_theta) + self.num_params = self.num_shared_params+self.num_split_params*self.output_dim + + else: + self.num_split_params = 0 + self._split_theta_names = [] + self._sp_theta = thetas + self.num_shared_params = len(self._sp_theta) + self.num_params = self.num_shared_params + + for theta in self._sp_theta: + val = 1.0 + if param is not None: + if param.has_key(theta): + val = param[theta] + setattr(self, theta.name, val) + #deal with param + self._set_params(self._get_params()) + + #Differentiate! + self._sp_dk_dtheta = [sp.diff(k,theta).simplify() for theta in self._sp_theta] + if self.output_dim > 1: + self._sp_dk_dtheta_i = [sp.diff(k,theta).simplify() for theta in self._sp_theta_i] + + self._sp_dk_dx = [sp.diff(k,xi).simplify() for xi in self._sp_x] + + if False: + self.compute_psi_stats() + + self._gen_code() + + if False: + extra_compile_args = ['-ftree-vectorize', '-mssse3', '-ftree-vectorizer-verbose=5'] + else: + extra_compile_args = [] + + self.weave_kwargs = { + 'support_code':self._function_code, + 'include_dirs':[tempfile.gettempdir(), os.path.join(current_dir,'parts/')], + 'headers':['"sympy_helpers.h"'], + 'sources':[os.path.join(current_dir,"parts/sympy_helpers.cpp")], + 'extra_compile_args':extra_compile_args, + 'extra_link_args':[], + 'verbose':True} + if config.getboolean('parallel', 'openmp'): self.weave_kwargs.append('-lgomp') + + def __add__(self,other): + return spkern(self._sp_k+other._sp_k) + + def _gen_code(self): + """Generates the C functions necessary for computing the covariance function using the sympy objects as input.""" + #TODO: maybe generate one C function only to save compile time? Also easier to take that as a basis and hand craft other covariances?? + + #generate c functions from sympy objects + argument_sequence = self._sp_x+self._sp_z+self._sp_theta + code_list = [('k',self._sp_k)] + # gradients with respect to covariance input + code_list += [('dk_d%s'%x.name,dx) for x,dx in zip(self._sp_x,self._sp_dk_dx)] + # gradient with respect to parameters + code_list += [('dk_d%s'%theta.name,dtheta) for theta,dtheta in zip(self._sp_theta,self._sp_dk_dtheta)] + # gradient with respect to multiple output parameters + if self.output_dim > 1: + argument_sequence += self._sp_theta_i + self._sp_theta_j + code_list += [('dk_d%s'%theta.name,dtheta) for theta,dtheta in zip(self._sp_theta_i,self._sp_dk_dtheta_i)] + (foo_c,self._function_code), (foo_h,self._function_header) = \ + codegen(code_list, "C",'foobar',argument_sequence=argument_sequence) + #put the header file where we can find it + f = file(os.path.join(tempfile.gettempdir(),'foobar.h'),'w') + f.write(self._function_header) + f.close() + + # Substitute any known derivatives which sympy doesn't compute + self._function_code = re.sub('DiracDelta\(.+?,.+?\)','0.0',self._function_code) + + + ############################################################ + # This is the basic argument construction for the C code. 
# + ############################################################ + + arg_list = (["X2(i, %s)"%x.name[2:] for x in self._sp_x] + + ["Z2(j, %s)"%z.name[2:] for z in self._sp_z]) + + # for multiple outputs need to also provide these arguments reversed. + if self.output_dim>1: + reverse_arg_list = list(arg_list) + reverse_arg_list.reverse() + + # Add in any 'shared' parameters to the list. + param_arg_list = [shared_params.name for shared_params in self._sp_theta] + arg_list += param_arg_list + + precompute_list=[] + if self.output_dim > 1: + reverse_arg_list+=list(param_arg_list) + split_param_arg_list = ["%s1(%s)"%(theta.name[:-2].upper(),index) for index in ['ii', 'jj'] for theta in self._sp_theta_i] + split_param_reverse_arg_list = ["%s1(%s)"%(theta.name[:-2].upper(),index) for index in ['jj', 'ii'] for theta in self._sp_theta_i] + arg_list += split_param_arg_list + reverse_arg_list += split_param_reverse_arg_list + # Extract the right output indices from the inputs. + c_define_output_indices = [' '*16 + "int %s=(int)%s(%s, %i);"%(index, var, index2, self.input_dim-1) for index, var, index2 in zip(['ii', 'jj'], ['X2', 'Z2'], ['i', 'j'])] + precompute_list += c_define_output_indices + reverse_arg_string = ", ".join(reverse_arg_list) + arg_string = ", ".join(arg_list) + precompute_string = "\n".join(precompute_list) + + # Code to compute argments string needed when only X is provided. + X_arg_string = re.sub('Z','X',arg_string) + # Code to compute argument string when only diagonal is required. + diag_arg_string = re.sub('int jj','//int jj',X_arg_string) + diag_arg_string = re.sub('j','i',diag_arg_string) + if precompute_string == '': + # if it's not multioutput, the precompute strings are set to zero + diag_precompute_string = '' + diag_precompute_replace = '' + else: + # for multioutput we need to extract the index of the output form the input. + diag_precompute_string = precompute_list[0] + diag_precompute_replace = precompute_list[1] + + + # Here's the code to do the looping for K + self._K_code =\ + """ + // _K_code + // Code for computing the covariance function. + int i; + int j; + int N = target_array->dimensions[0]; + int num_inducing = target_array->dimensions[1]; + int input_dim = X_array->dimensions[1]; + //#pragma omp parallel for private(j) + for (i=0;idimensions[0]; + int num_inducing = target_array->dimensions[1]; + int input_dim = X_array->dimensions[1]; + //#pragma omp parallel for private(j) + for (i=0;idimensions[0]; + int input_dim = X_array->dimensions[1]; + //#pragma omp parallel for + for (i=0;i1: + grad_func_list += c_define_output_indices + grad_func_list += [' '*16 + 'TARGET1(%i+ii) += PARTIAL2(i, j)*dk_d%s(%s);'%(self.num_shared_params+i*self.output_dim, theta.name, arg_string) for i, theta in enumerate(self._sp_theta_i)] + grad_func_list += [' '*16 + 'TARGET1(%i+jj) += PARTIAL2(i, j)*dk_d%s(%s);'%(self.num_shared_params+i*self.output_dim, theta.name, reverse_arg_string) for i, theta in enumerate(self._sp_theta_i)] + grad_func_list += ([' '*16 + 'TARGET1(%i) += PARTIAL2(i, j)*dk_d%s(%s);'%(i,theta.name,arg_string) for i,theta in enumerate(self._sp_theta)]) + grad_func_string = '\n'.join(grad_func_list) + + self._dK_dtheta_code =\ + """ + // _dK_dtheta_code + // Code for computing gradient of covariance with respect to parameters. 
+ int i; + int j; + int N = partial_array->dimensions[0]; + int num_inducing = partial_array->dimensions[1]; + int input_dim = X_array->dimensions[1]; + //#pragma omp parallel for private(j) + for (i=0;idimensions[0]; + int input_dim = X_array->dimensions[1]; + for (i=0;i1: + gradX_func_list += c_define_output_indices + gradX_func_list += ["TARGET2(i, %i) += PARTIAL2(i, j)*dk_dx_%i(%s);"%(q,q,arg_string) for q in range(self._real_input_dim)] + gradX_func_string = "\n".join(gradX_func_list) + + self._dK_dX_code = \ + """ + // _dK_dX_code + // Code for computing gradient of covariance with respect to inputs. + int i; + int j; + int N = partial_array->dimensions[0]; + int num_inducing = partial_array->dimensions[1]; + int input_dim = X_array->dimensions[1]; + //#pragma omp parallel for private(j) + for (i=0;idimensions[0]; + int input_dim = X_array->dimensions[1]; + for (int i=0;i1: + arg_names += self._split_theta_names + arg_names += ['output_dim'] + return arg_names + + def _weave_inline(self, code, X, target, Z=None, partial=None): + output_dim = self.output_dim + for shared_params in self._sp_theta: + locals()[shared_params.name] = getattr(self, shared_params.name) + + # Need to extract parameters first + for split_params in self._split_theta_names: + locals()[split_params] = getattr(self, split_params) + arg_names = self._get_arg_names(Z, partial) + weave.inline(code=code, arg_names=arg_names,**self.weave_kwargs) + + def K(self,X,Z,target): + if Z is None: + self._weave_inline(self._K_code_X, X, target) + else: + self._weave_inline(self._K_code, X, target, Z) + + + def Kdiag(self,X,target): + self._weave_inline(self._Kdiag_code, X, target) + + def dK_dtheta(self,partial,X,Z,target): + if Z is None: + self._weave_inline(self._dK_dtheta_code_X, X, target, Z, partial) + else: + self._weave_inline(self._dK_dtheta_code, X, target, Z, partial) + + def dKdiag_dtheta(self,partial,X,target): + self._weave_inline(self._dKdiag_dtheta_code, X, target, Z=None, partial=partial) + + def dK_dX(self,partial,X,Z,target): + if Z is None: + self._weave_inline(self._dK_dX_code_X, X, target, Z, partial) + else: + self._weave_inline(self._dK_dX_code, X, target, Z, partial) + + def dKdiag_dX(self,partial,X,target): + self._weave_inline(self._dKdiag_dX_code, X, target, Z=None, partial=partial) + + def compute_psi_stats(self): + #define some normal distributions + mus = [sp.var('mu_%i'%i,real=True) for i in range(self.input_dim)] + Ss = [sp.var('S_%i'%i,positive=True) for i in range(self.input_dim)] + normals = [(2*sp.pi*Si)**(-0.5)*sp.exp(-0.5*(xi-mui)**2/Si) for xi, mui, Si in zip(self._sp_x, mus, Ss)] + + #do some integration! + #self._sp_psi0 = ?? + self._sp_psi1 = self._sp_k + for i in range(self.input_dim): + print 'perfoming integrals %i of %i'%(i+1,2*self.input_dim) + sys.stdout.flush() + self._sp_psi1 *= normals[i] + self._sp_psi1 = sp.integrate(self._sp_psi1,(self._sp_x[i],-sp.oo,sp.oo)) + clear_cache() + self._sp_psi1 = self._sp_psi1.simplify() + + #and here's psi2 (eek!) 
+        zprime = [sp.Symbol('zp%i'%i) for i in range(self.input_dim)]
+        self._sp_psi2 = self._sp_k.copy()*self._sp_k.copy().subs(zip(self._sp_z,zprime))
+        for i in range(self.input_dim):
+            print 'performing integrals %i of %i'%(self.input_dim+i+1,2*self.input_dim)
+            sys.stdout.flush()
+            self._sp_psi2 *= normals[i]
+            self._sp_psi2 = sp.integrate(self._sp_psi2,(self._sp_x[i],-sp.oo,sp.oo))
+            clear_cache()
+        self._sp_psi2 = self._sp_psi2.simplify()
+
+
+    def _set_params(self,param):
+        assert param.size == (self.num_params)
+        for i, shared_params in enumerate(self._sp_theta):
+            setattr(self, shared_params.name, param[i])
+
+        if self.output_dim>1:
+            for i, split_params in enumerate(self._split_theta_names):
+                start = self.num_shared_params + i*self.output_dim
+                end = self.num_shared_params + (i+1)*self.output_dim
+                setattr(self, split_params, param[start:end])
+
+
+    def _get_params(self):
+        params = np.zeros(0)
+        for shared_params in self._sp_theta:
+            params = np.hstack((params, getattr(self, shared_params.name)))
+        if self.output_dim>1:
+            for split_params in self._split_theta_names:
+                params = np.hstack((params, getattr(self, split_params).flatten()))
+        return params
+
+    def _get_param_names(self):
+        if self.output_dim>1:
+            return [x.name for x in self._sp_theta] + [x.name[:-2] + str(i) for x in self._sp_theta_i for i in range(self.output_dim)]
+        else:
+            return [x.name for x in self._sp_theta]
diff --git a/GPy/kern/white.py b/GPy/kern/parts/white.py
similarity index 92%
rename from GPy/kern/white.py
rename to GPy/kern/parts/white.py
index 41f075c3..49200bd6 100644
--- a/GPy/kern/white.py
+++ b/GPy/kern/parts/white.py
@@ -1,10 +1,10 @@
 # Copyright (c) 2012, GPy authors (see AUTHORS.txt).
 # Licensed under the BSD 3-clause license (see LICENSE.txt)
-
 from kernpart import Kernpart
 import numpy as np
-class white(Kernpart):
+
+class White(Kernpart):
     """
     White noise kernel.
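Context for the hunk below: a white-noise part contributes only to the diagonal of the symmetric covariance. A minimal behavioural sketch (plain NumPy; this reflects the usual white-noise convention for K/Kdiag, which are not shown in this hunk):

import numpy as np

def white_k(X, X2=None, variance=1.):
    # white noise appears only on the diagonal of K(X, X)
    if X2 is None:
        return variance*np.eye(X.shape[0])
    return np.zeros((X.shape[0], X2.shape[0]))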
@@ -44,17 +44,17 @@ class white(Kernpart): def dKdiag_dtheta(self,dL_dKdiag,X,target): target += np.sum(dL_dKdiag) - def dK_dX(self,dL_dK,X,X2,target): + def dK_dX(self,dL_dK,X,X2,target): pass def dKdiag_dX(self,dL_dKdiag,X,target): pass def psi0(self,Z,mu,S,target): - target += self.variance + pass # target += self.variance def dpsi0_dtheta(self,dL_dpsi0,Z,mu,S,target): - target += dL_dpsi0.sum() + pass # target += dL_dpsi0.sum() def dpsi0_dmuS(self,dL_dpsi0,Z,mu,S,target_mu,target_S): pass diff --git a/GPy/kern/sympy_helpers.cpp b/GPy/kern/sympy_helpers.cpp deleted file mode 100644 index 2af4737a..00000000 --- a/GPy/kern/sympy_helpers.cpp +++ /dev/null @@ -1,10 +0,0 @@ -#include -double DiracDelta(double x){ - if((x<0.000001) & (x>-0.000001))//go on, laught at my c++ skills - return 1.0; - else - return 0.0; -}; -double DiracDelta(double x,int foo){ - return 0.0; -}; diff --git a/GPy/kern/sympy_helpers.h b/GPy/kern/sympy_helpers.h deleted file mode 100644 index 29244eca..00000000 --- a/GPy/kern/sympy_helpers.h +++ /dev/null @@ -1,3 +0,0 @@ -#include -double DiracDelta(double x); -double DiracDelta(double x, int foo); diff --git a/GPy/kern/sympykern.py b/GPy/kern/sympykern.py deleted file mode 100644 index def1bc5f..00000000 --- a/GPy/kern/sympykern.py +++ /dev/null @@ -1,258 +0,0 @@ -import numpy as np -import sympy as sp -from sympy.utilities.codegen import codegen -from sympy.core.cache import clear_cache -from scipy import weave -import re -import os -import sys -current_dir = os.path.dirname(os.path.abspath(os.path.dirname(__file__))) -import tempfile -import pdb -from kernpart import Kernpart - -class spkern(Kernpart): - """ - A kernel object, where all the hard work in done by sympy. - - :param k: the covariance function - :type k: a positive definite sympy function of x1, z1, x2, z2... - - To construct a new sympy kernel, you'll need to define: - - a kernel function using a sympy object. Ensure that the kernel is of the form k(x,z). - - that's it! we'll extract the variables from the function k. - - Note: - - to handle multiple inputs, call them x1, z1, etc - - to handle multpile correlated outputs, you'll need to define each covariance function and 'cross' variance function. TODO - """ - def __init__(self,input_dim,k,param=None): - self.name='sympykern' - self._sp_k = k - sp_vars = [e for e in k.atoms() if e.is_Symbol] - self._sp_x= sorted([e for e in sp_vars if e.name[0]=='x'],key=lambda x:int(x.name[1:])) - self._sp_z= sorted([e for e in sp_vars if e.name[0]=='z'],key=lambda z:int(z.name[1:])) - assert all([x.name=='x%i'%i for i,x in enumerate(self._sp_x)]) - assert all([z.name=='z%i'%i for i,z in enumerate(self._sp_z)]) - assert len(self._sp_x)==len(self._sp_z) - self.input_dim = len(self._sp_x) - assert self.input_dim == input_dim - self._sp_theta = sorted([e for e in sp_vars if not (e.name[0]=='x' or e.name[0]=='z')],key=lambda e:e.name) - self.num_params = len(self._sp_theta) - - #deal with param - if param is None: - param = np.ones(self.num_params) - assert param.size==self.num_params - self._set_params(param) - - #Differentiate! 
- self._sp_dk_dtheta = [sp.diff(k,theta).simplify() for theta in self._sp_theta] - self._sp_dk_dx = [sp.diff(k,xi).simplify() for xi in self._sp_x] - #self._sp_dk_dz = [sp.diff(k,zi) for zi in self._sp_z] - - #self.compute_psi_stats() - self._gen_code() - - self.weave_kwargs = {\ - 'support_code':self._function_code,\ - 'include_dirs':[tempfile.gettempdir(), os.path.join(current_dir,'kern/')],\ - 'headers':['"sympy_helpers.h"'],\ - 'sources':[os.path.join(current_dir,"kern/sympy_helpers.cpp")],\ - #'extra_compile_args':['-ftree-vectorize', '-mssse3', '-ftree-vectorizer-verbose=5'],\ - 'extra_compile_args':[],\ - 'extra_link_args':['-lgomp'],\ - 'verbose':True} - - def __add__(self,other): - return spkern(self._sp_k+other._sp_k) - - def compute_psi_stats(self): - #define some normal distributions - mus = [sp.var('mu%i'%i,real=True) for i in range(self.input_dim)] - Ss = [sp.var('S%i'%i,positive=True) for i in range(self.input_dim)] - normals = [(2*sp.pi*Si)**(-0.5)*sp.exp(-0.5*(xi-mui)**2/Si) for xi, mui, Si in zip(self._sp_x, mus, Ss)] - - #do some integration! - #self._sp_psi0 = ?? - self._sp_psi1 = self._sp_k - for i in range(self.input_dim): - print 'perfoming integrals %i of %i'%(i+1,2*self.input_dim) - sys.stdout.flush() - self._sp_psi1 *= normals[i] - self._sp_psi1 = sp.integrate(self._sp_psi1,(self._sp_x[i],-sp.oo,sp.oo)) - clear_cache() - self._sp_psi1 = self._sp_psi1.simplify() - - #and here's psi2 (eek!) - zprime = [sp.Symbol('zp%i'%i) for i in range(self.input_dim)] - self._sp_psi2 = self._sp_k.copy()*self._sp_k.copy().subs(zip(self._sp_z,zprime)) - for i in range(self.input_dim): - print 'perfoming integrals %i of %i'%(self.input_dim+i+1,2*self.input_dim) - sys.stdout.flush() - self._sp_psi2 *= normals[i] - self._sp_psi2 = sp.integrate(self._sp_psi2,(self._sp_x[i],-sp.oo,sp.oo)) - clear_cache() - self._sp_psi2 = self._sp_psi2.simplify() - - - def _gen_code(self): - #generate c functions from sympy objects - (foo_c,self._function_code),(foo_h,self._function_header) = \ - codegen([('k',self._sp_k)] \ - + [('dk_d%s'%x.name,dx) for x,dx in zip(self._sp_x,self._sp_dk_dx)]\ - #+ [('dk_d%s'%z.name,dz) for z,dz in zip(self._sp_z,self._sp_dk_dz)]\ - + [('dk_d%s'%theta.name,dtheta) for theta,dtheta in zip(self._sp_theta,self._sp_dk_dtheta)]\ - ,"C",'foobar',argument_sequence=self._sp_x+self._sp_z+self._sp_theta) - #put the header file where we can find it - f = file(os.path.join(tempfile.gettempdir(),'foobar.h'),'w') - f.write(self._function_header) - f.close() - - #get rid of derivatives of DiracDelta - self._function_code = re.sub('DiracDelta\(.+?,.+?\)','0.0',self._function_code) - - #Here's some code to do the looping for K - arglist = ", ".join(["X[i*input_dim+%s]"%x.name[1:] for x in self._sp_x]\ - + ["Z[j*input_dim+%s]"%z.name[1:] for z in self._sp_z]\ - + ["param[%i]"%i for i in range(self.num_params)]) - - self._K_code =\ - """ - int i; - int j; - int N = target_array->dimensions[0]; - int num_inducing = target_array->dimensions[1]; - int input_dim = X_array->dimensions[1]; - //#pragma omp parallel for private(j) - for (i=0;idimensions[0]; - int input_dim = X_array->dimensions[1]; - //#pragma omp parallel for - for (i=0;idimensions[0]; - int num_inducing = partial_array->dimensions[1]; - int input_dim = X_array->dimensions[1]; - //#pragma omp parallel for private(j) - for (i=0;idimensions[0]; - int input_dim = X_array->dimensions[1]; - for (i=0;idimensions[0]; - int num_inducing = partial_array->dimensions[1]; - int input_dim = X_array->dimensions[1]; - //#pragma omp parallel 
for private(j) - for (i=0;idimensions[0]; - int num_inducing = 0; - int input_dim = X_array->dimensions[1]; - for (i=0;i self.epsilon or epsilon_np2 > self.epsilon: - update_order = np.random.permutation(self.N) + update_order = np.random.permutation(self.num_data) for i in update_order: #Cavity distribution parameters self.tau_[i] = 1./Sigma[i,i] - self.eta*self.tau_tilde[i] self.v_[i] = mu[i]/Sigma[i,i] - self.eta*self.v_tilde[i] #Marginal moments - self.Z_hat[i], mu_hat[i], sigma2_hat[i] = self.LikelihoodFunction.moments_match(self._transf_data[i],self.tau_[i],self.v_[i]) + self.Z_hat[i], mu_hat[i], sigma2_hat[i] = self.noise_model.moments_match(self.data[i],self.tau_[i],self.v_[i]) #Site parameters update Delta_tau = self.delta/self.eta*(1./sigma2_hat[i] - 1./Sigma[i,i]) Delta_v = self.delta/self.eta*(mu_hat[i]/sigma2_hat[i] - mu[i]/Sigma[i,i]) @@ -122,23 +161,32 @@ class EP(likelihood): self.iterations += 1 #Sigma recomptutation with Cholesky decompositon Sroot_tilde_K = np.sqrt(self.tau_tilde)[:,None]*K - B = np.eye(self.N) + np.sqrt(self.tau_tilde)[None,:]*Sroot_tilde_K + B = np.eye(self.num_data) + np.sqrt(self.tau_tilde)[None,:]*Sroot_tilde_K L = jitchol(B) V,info = dtrtrs(L,Sroot_tilde_K,lower=1) Sigma = K - np.dot(V.T,V) mu = np.dot(Sigma,self.v_tilde) - epsilon_np1 = sum((self.tau_tilde-self.np1[-1])**2)/self.N - epsilon_np2 = sum((self.v_tilde-self.np2[-1])**2)/self.N + epsilon_np1 = sum((self.tau_tilde-self.np1[-1])**2)/self.num_data + epsilon_np2 = sum((self.v_tilde-self.np2[-1])**2)/self.num_data self.np1.append(self.tau_tilde.copy()) self.np2.append(self.v_tilde.copy()) return self._compute_GP_variables() - def fit_DTC(self, Kmm, Kmn): + def fit_DTC(self, Kmm, Kmn, epsilon=1e-3,power_ep=[1.,1.]): """ The expectation-propagation algorithm with sparse pseudo-input. For nomenclature see ... 2013. + + :param epsilon: Convergence criterion, maximum squared difference allowed between mean updates to stop iterations (float) + :type epsilon: float + :param power_ep: Power EP parameters + :type power_ep: list of floats + """ + self.epsilon = epsilon + self.eta, self.delta = power_ep + num_inducing = Kmm.shape[0] #TODO: this doesn't work with uncertain inputs! 
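The loop above is the power-EP recipe: form a cavity by deleting a fraction eta of site i from the posterior, moment-match the cavity against the exact likelihood, then apply a fraction delta of the implied site change and fold it back in with a rank-one update. A minimal self-contained sketch of one such sweep for a full-rank probit model follows; the toy RBF kernel, the labels, and the single pass are illustrative assumptions, while the update equations mirror fit_full above and the Bernoulli moment match later in this patch.

import numpy as np
from scipy.stats import norm

rng = np.random.RandomState(0)
N = 20
X = np.linspace(-3., 3., N)[:, None]
y = np.where(X[:, 0] > 0., 1., -1.)                    # labels in {-1, 1}
K = np.exp(-0.5 * (X - X.T) ** 2) + 1e-6 * np.eye(N)   # toy RBF prior covariance
eta, delta = 1., 1.                                    # power-EP parameters
tau_tilde, v_tilde = np.zeros(N), np.zeros(N)          # site natural parameters
Sigma, mu = K.copy(), np.zeros(N)

for i in rng.permutation(N):
    # cavity: posterior marginal minus eta times site i
    tau_ = 1. / Sigma[i, i] - eta * tau_tilde[i]
    v_ = mu[i] / Sigma[i, i] - eta * v_tilde[i]
    # probit moment matching (cf. Bernoulli._moments_match_analytical below)
    denom = np.sqrt(tau_ ** 2 + tau_)
    z = y[i] * v_ / denom
    Z_hat, phi = norm.cdf(z), norm.pdf(z)
    mu_hat = v_ / tau_ + y[i] * phi / (Z_hat * denom)
    sigma2_hat = 1. / tau_ - phi / ((tau_ ** 2 + tau_) * Z_hat) * (z + phi / Z_hat)
    # damped site update, then a rank-one posterior update (the DSYR call above)
    d_tau = delta / eta * (1. / sigma2_hat - 1. / Sigma[i, i])
    d_v = delta / eta * (mu_hat / sigma2_hat - mu[i] / Sigma[i, i])
    tau_tilde[i] += d_tau
    v_tilde[i] += d_v
    si = Sigma[:, i].copy()
    Sigma -= (d_tau / (1. + d_tau * Sigma[i, i])) * np.outer(si, si)
    mu = np.dot(Sigma, v_tilde)

A real run repeats the sweep until the squared change in the site parameters drops below epsilon, exactly as the while loops in this file do.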
@@ -167,7 +215,7 @@ class EP(likelihood): Sigma = Diag + P*R.T*R*P.T + K mu = w + P*Gamma """ - mu = np.zeros(self.N) + mu = np.zeros(self.num_data) LLT = Kmm.copy() Sigma_diag = Qnn_diag.copy() @@ -177,15 +225,15 @@ class EP(likelihood): sigma_ = 1./tau_ mu_ = v_/tau_ """ - self.tau_ = np.empty(self.N,dtype=float) - self.v_ = np.empty(self.N,dtype=float) + self.tau_ = np.empty(self.num_data,dtype=float) + self.v_ = np.empty(self.num_data,dtype=float) #Initial values - Marginal moments - z = np.empty(self.N,dtype=float) - self.Z_hat = np.empty(self.N,dtype=float) - phi = np.empty(self.N,dtype=float) - mu_hat = np.empty(self.N,dtype=float) - sigma2_hat = np.empty(self.N,dtype=float) + z = np.empty(self.num_data,dtype=float) + self.Z_hat = np.empty(self.num_data,dtype=float) + phi = np.empty(self.num_data,dtype=float) + mu_hat = np.empty(self.num_data,dtype=float) + sigma2_hat = np.empty(self.num_data,dtype=float) #Approximation epsilon_np1 = 1 @@ -194,13 +242,13 @@ class EP(likelihood): np1 = [self.tau_tilde.copy()] np2 = [self.v_tilde.copy()] while epsilon_np1 > self.epsilon or epsilon_np2 > self.epsilon: - update_order = np.random.permutation(self.N) + update_order = np.random.permutation(self.num_data) for i in update_order: #Cavity distribution parameters self.tau_[i] = 1./Sigma_diag[i] - self.eta*self.tau_tilde[i] self.v_[i] = mu[i]/Sigma_diag[i] - self.eta*self.v_tilde[i] #Marginal moments - self.Z_hat[i], mu_hat[i], sigma2_hat[i] = self.LikelihoodFunction.moments_match(self._transf_data[i],self.tau_[i],self.v_[i]) + self.Z_hat[i], mu_hat[i], sigma2_hat[i] = self.noise_model.moments_match(self.data[i],self.tau_[i],self.v_[i]) #Site parameters update Delta_tau = self.delta/self.eta*(1./sigma2_hat[i] - 1./Sigma_diag[i]) Delta_v = self.delta/self.eta*(mu_hat[i]/sigma2_hat[i] - mu[i]/Sigma_diag[i]) @@ -223,18 +271,26 @@ class EP(likelihood): Sigma_diag = np.sum(V*V,-2) Knmv_tilde = np.dot(Kmn,self.v_tilde) mu = np.dot(V2.T,Knmv_tilde) - epsilon_np1 = sum((self.tau_tilde-np1[-1])**2)/self.N - epsilon_np2 = sum((self.v_tilde-np2[-1])**2)/self.N + epsilon_np1 = sum((self.tau_tilde-np1[-1])**2)/self.num_data + epsilon_np2 = sum((self.v_tilde-np2[-1])**2)/self.num_data np1.append(self.tau_tilde.copy()) np2.append(self.v_tilde.copy()) self._compute_GP_variables() - def fit_FITC(self, Kmm, Kmn, Knn_diag): + def fit_FITC(self, Kmm, Kmn, Knn_diag, epsilon=1e-3,power_ep=[1.,1.]): """ The expectation-propagation algorithm with sparse pseudo-input. For nomenclature see Naish-Guzman and Holden, 2008. 
+ + :param epsilon: Convergence criterion, maximum squared difference allowed between mean updates to stop iterations (float) + :type epsilon: float + :param power_ep: Power EP parameters + :type power_ep: list of floats """ + self.epsilon = epsilon + self.eta, self.delta = power_ep + num_inducing = Kmm.shape[0] """ @@ -257,9 +313,9 @@ class EP(likelihood): Sigma = Diag + P*R.T*R*P.T + K mu = w + P*Gamma """ - self.w = np.zeros(self.N) + self.w = np.zeros(self.num_data) self.Gamma = np.zeros(num_inducing) - mu = np.zeros(self.N) + mu = np.zeros(self.num_data) P = P0.copy() R = R0.copy() Diag = Diag0.copy() @@ -272,15 +328,15 @@ class EP(likelihood): sigma_ = 1./tau_ mu_ = v_/tau_ """ - self.tau_ = np.empty(self.N,dtype=float) - self.v_ = np.empty(self.N,dtype=float) + self.tau_ = np.empty(self.num_data,dtype=float) + self.v_ = np.empty(self.num_data,dtype=float) #Initial values - Marginal moments - z = np.empty(self.N,dtype=float) - self.Z_hat = np.empty(self.N,dtype=float) - phi = np.empty(self.N,dtype=float) - mu_hat = np.empty(self.N,dtype=float) - sigma2_hat = np.empty(self.N,dtype=float) + z = np.empty(self.num_data,dtype=float) + self.Z_hat = np.empty(self.num_data,dtype=float) + phi = np.empty(self.num_data,dtype=float) + mu_hat = np.empty(self.num_data,dtype=float) + sigma2_hat = np.empty(self.num_data,dtype=float) #Approximation epsilon_np1 = 1 @@ -289,13 +345,13 @@ class EP(likelihood): self.np1 = [self.tau_tilde.copy()] self.np2 = [self.v_tilde.copy()] while epsilon_np1 > self.epsilon or epsilon_np2 > self.epsilon: - update_order = np.random.permutation(self.N) + update_order = np.random.permutation(self.num_data) for i in update_order: #Cavity distribution parameters self.tau_[i] = 1./Sigma_diag[i] - self.eta*self.tau_tilde[i] self.v_[i] = mu[i]/Sigma_diag[i] - self.eta*self.v_tilde[i] #Marginal moments - self.Z_hat[i], mu_hat[i], sigma2_hat[i] = self.LikelihoodFunction.moments_match(self._transf_data[i],self.tau_[i],self.v_[i]) + self.Z_hat[i], mu_hat[i], sigma2_hat[i] = self.noise_model.moments_match(self.data[i],self.tau_[i],self.v_[i]) #Site parameters update Delta_tau = self.delta/self.eta*(1./sigma2_hat[i] - 1./Sigma_diag[i]) Delta_v = self.delta/self.eta*(mu_hat[i]/sigma2_hat[i] - mu[i]/Sigma_diag[i]) @@ -328,8 +384,8 @@ class EP(likelihood): self.w = Diag * self.v_tilde self.Gamma = np.dot(R.T, np.dot(RPT,self.v_tilde)) mu = self.w + np.dot(P,self.Gamma) - epsilon_np1 = sum((self.tau_tilde-self.np1[-1])**2)/self.N - epsilon_np2 = sum((self.v_tilde-self.np2[-1])**2)/self.N + epsilon_np1 = sum((self.tau_tilde-self.np1[-1])**2)/self.num_data + epsilon_np2 = sum((self.v_tilde-self.np2[-1])**2)/self.num_data self.np1.append(self.tau_tilde.copy()) self.np2.append(self.v_tilde.copy()) diff --git a/GPy/likelihoods/ep_mixed_noise.py b/GPy/likelihoods/ep_mixed_noise.py new file mode 100644 index 00000000..f5452512 --- /dev/null +++ b/GPy/likelihoods/ep_mixed_noise.py @@ -0,0 +1,385 @@ +# Copyright (c) 2013, Ricardo Andrade +# Licensed under the BSD 3-clause license (see LICENSE.txt) + + +import numpy as np +from scipy import stats +from ..util.linalg import pdinv,mdot,jitchol,chol_inv,DSYR,tdot,dtrtrs +from likelihood import likelihood + +class EP_Mixed_Noise(likelihood): + def __init__(self,data_list,noise_model_list,epsilon=1e-3,power_ep=[1.,1.]): + """ + Expectation Propagation + + Arguments + --------- + :param data_list: list of outputs + :param noise_model_list: a list of noise models + :param epsilon: Convergence criterion, maximum squared difference allowed between mean 
updates to stop iterations + :type epsilon: float + :param power_ep: list of power ep parameters + """ + assert len(data_list) == len(noise_model_list) + self.noise_model_list = noise_model_list + n_list = [data.size for data in data_list] + self.n_models = len(data_list) + self.n_params = [noise_model._get_params().size for noise_model in noise_model_list] + self.index = np.vstack([np.repeat(i,n)[:,None] for i,n in zip(range(self.n_models),n_list)]) + self.epsilon = epsilon + self.eta, self.delta = power_ep + self.data = np.vstack(data_list) + self.N, self.output_dim = self.data.shape + self.is_heteroscedastic = True + self.num_params = 0#FIXME + self._transf_data = np.vstack([noise_model._preprocess_values(data) for noise_model,data in zip(noise_model_list,data_list)]) + #TODO non-gaussian index + + #Initial values - Likelihood approximation parameters: + #p(y|f) = t(f|tau_tilde,v_tilde) + self.tau_tilde = np.zeros(self.N) + self.v_tilde = np.zeros(self.N) + + #initial values for the GP variables + self.Y = np.zeros((self.N,1)) + self.covariance_matrix = np.eye(self.N) + self.precision = np.ones(self.N)[:,None] + self.Z = 0 + self.YYT = None + self.V = self.precision * self.Y + self.VVT_factor = self.V + self.trYYT = 0. + + def restart(self): + self.tau_tilde = np.zeros(self.N) + self.v_tilde = np.zeros(self.N) + self.Y = np.zeros((self.N,1)) + self.covariance_matrix = np.eye(self.N) + self.precision = np.ones(self.N)[:,None] + self.Z = 0 + self.YYT = None + self.V = self.precision * self.Y + self.VVT_factor = self.V + self.trYYT = 0. + + def predictive_values(self,mu,var,full_cov,noise_model): + """ + Predicts the output given the GP + + :param mu: GP's mean + :param var: GP's variance + :param full_cov: whether to return the full covariance matrix, or just the diagonal + :type full_cov: False|True + :param noise_model: noise model to use + :type noise_model: integer + """ + if full_cov: + raise NotImplementedError, "Cannot make correlated predictions with an EP likelihood" + #_mu = [] + #_var = [] + #_q1 = [] + #_q2 = [] + #for m,v,o in zip(mu,var,output.flatten()): + # a,b,c,d = self.noise_model_list[int(o)].predictive_values(m,v) + # _mu.append(a) + # _var.append(b) + # _q1.append(c) + # _q2.append(d) + #return np.vstack(_mu),np.vstack(_var),np.vstack(_q1),np.vstack(_q2) + return self.noise_model_list[noise_model].predictive_values(mu,var) + + def _get_params(self): + return np.hstack([noise_model._get_params().flatten() for noise_model in self.noise_model_list]) + + def _get_param_names(self): + names = [] + for noise_model in self.noise_model_list: + names += noise_model._get_param_names() + return names + + def _set_params(self,p): + cs_params = np.cumsum([0]+self.n_params) + for i in range(len(self.n_params)): + self.noise_model_list[i]._set_params(p[cs_params[i]:cs_params[i+1]]) + + def _gradients(self,partial): + #NOTE this is not tested + return np.hstack([noise_model._gradients(partial) for noise_model in self.noise_model_list]) + + def _compute_GP_variables(self): + #Variables to be called from GP + mu_tilde = self.v_tilde/self.tau_tilde #When calling EP, this variable is used instead of Y in the GP model + sigma_sum = 1./self.tau_ + 1./self.tau_tilde + mu_diff_2 = (self.v_/self.tau_ - mu_tilde)**2 + self.Z = np.sum(np.log(self.Z_hat)) + 0.5*np.sum(np.log(sigma_sum)) + 0.5*np.sum(mu_diff_2/sigma_sum) #Normalization constant, aka Z_ep + + self.Y = mu_tilde[:,None] + self.YYT = np.dot(self.Y,self.Y.T) + self.covariance_matrix = np.diag(1./self.tau_tilde) + self.precision = 
self.tau_tilde[:,None] + self.V = self.precision * self.Y + self.VVT_factor = self.V + self.trYYT = np.trace(self.YYT) + + def fit_full(self,K): + """ + The expectation-propagation algorithm. + For nomenclature see Rasmussen & Williams 2006. + """ + #Initial values - Posterior distribution parameters: q(f|X,Y) = N(f|mu,Sigma) + mu = np.zeros(self.N) + Sigma = K.copy() + + """ + Initial values - Cavity distribution parameters: + q_(f|mu_,sigma2_) = Product{q_i(f|mu_i,sigma2_i)} + sigma_ = 1./tau_ + mu_ = v_/tau_ + """ + self.tau_ = np.empty(self.N,dtype=float) + self.v_ = np.empty(self.N,dtype=float) + + #Initial values - Marginal moments + z = np.empty(self.N,dtype=float) + self.Z_hat = np.empty(self.N,dtype=float) + phi = np.empty(self.N,dtype=float) + mu_hat = np.empty(self.N,dtype=float) + sigma2_hat = np.empty(self.N,dtype=float) + + #Approximation + epsilon_np1 = self.epsilon + 1. + epsilon_np2 = self.epsilon + 1. + self.iterations = 0 + self.np1 = [self.tau_tilde.copy()] + self.np2 = [self.v_tilde.copy()] + while epsilon_np1 > self.epsilon or epsilon_np2 > self.epsilon: + update_order = np.random.permutation(self.N) + for i in update_order: + #Cavity distribution parameters + self.tau_[i] = 1./Sigma[i,i] - self.eta*self.tau_tilde[i] + self.v_[i] = mu[i]/Sigma[i,i] - self.eta*self.v_tilde[i] + #Marginal moments + self.Z_hat[i], mu_hat[i], sigma2_hat[i] = self.noise_model_list[self.index[i]].moments_match(self._transf_data[i],self.tau_[i],self.v_[i]) + #Site parameters update + Delta_tau = self.delta/self.eta*(1./sigma2_hat[i] - 1./Sigma[i,i]) + Delta_v = self.delta/self.eta*(mu_hat[i]/sigma2_hat[i] - mu[i]/Sigma[i,i]) + self.tau_tilde[i] += Delta_tau + self.v_tilde[i] += Delta_v + #Posterior distribution parameters update + DSYR(Sigma,Sigma[:,i].copy(), -float(Delta_tau/(1.+ Delta_tau*Sigma[i,i]))) + mu = np.dot(Sigma,self.v_tilde) + self.iterations += 1 + #Sigma recomptutation with Cholesky decompositon + Sroot_tilde_K = np.sqrt(self.tau_tilde)[:,None]*K + B = np.eye(self.N) + np.sqrt(self.tau_tilde)[None,:]*Sroot_tilde_K + L = jitchol(B) + V,info = dtrtrs(L,Sroot_tilde_K,lower=1) + Sigma = K - np.dot(V.T,V) + mu = np.dot(Sigma,self.v_tilde) + epsilon_np1 = sum((self.tau_tilde-self.np1[-1])**2)/self.N + epsilon_np2 = sum((self.v_tilde-self.np2[-1])**2)/self.N + self.np1.append(self.tau_tilde.copy()) + self.np2.append(self.v_tilde.copy()) + + return self._compute_GP_variables() + + def fit_DTC(self, Kmm, Kmn): + """ + The expectation-propagation algorithm with sparse pseudo-input. + For nomenclature see ... 2013. + """ + num_inducing = Kmm.shape[0] + + #TODO: this doesn't work with uncertain inputs! 
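# Sketch: Qnn_diag computed just below is the diagonal of the Nystrom/DTC
# approximation Qnn = Knm Kmm^{-1} Kmn. A self-contained equivalent using
# SciPy's Cholesky helpers in place of GPy's jitchol/chol_inv (the jitter
# value here is an assumption):
import numpy as np
from scipy.linalg import cho_factor, cho_solve

def dtc_diag(Kmm, Kmn, jitter=1e-8):
    """diag(Kmn.T Kmm^{-1} Kmn) without ever forming the N x N matrix."""
    L = cho_factor(Kmm + jitter * np.eye(Kmm.shape[0]), lower=True)
    KmmiKmn = cho_solve(L, Kmn)            # Kmm^{-1} Kmn, shape (M, N)
    return np.sum(Kmn * KmmiKmn, axis=0)   # length-N vector of Qnn diagonals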
+ + """ + Prior approximation parameters: + q(f|X) = int_{df}{N(f|KfuKuu_invu,diag(Kff-Qff)*N(u|0,Kuu)} = N(f|0,Sigma0) + Sigma0 = Qnn = Knm*Kmmi*Kmn + """ + KmnKnm = np.dot(Kmn,Kmn.T) + Lm = jitchol(Kmm) + Lmi = chol_inv(Lm) + Kmmi = np.dot(Lmi.T,Lmi) + KmmiKmn = np.dot(Kmmi,Kmn) + Qnn_diag = np.sum(Kmn*KmmiKmn,-2) + LLT0 = Kmm.copy() + + #Kmmi, Lm, Lmi, Kmm_logdet = pdinv(Kmm) + #KmnKnm = np.dot(Kmn, Kmn.T) + #KmmiKmn = np.dot(Kmmi,Kmn) + #Qnn_diag = np.sum(Kmn*KmmiKmn,-2) + #LLT0 = Kmm.copy() + + """ + Posterior approximation: q(f|y) = N(f| mu, Sigma) + Sigma = Diag + P*R.T*R*P.T + K + mu = w + P*Gamma + """ + mu = np.zeros(self.N) + LLT = Kmm.copy() + Sigma_diag = Qnn_diag.copy() + + """ + Initial values - Cavity distribution parameters: + q_(g|mu_,sigma2_) = Product{q_i(g|mu_i,sigma2_i)} + sigma_ = 1./tau_ + mu_ = v_/tau_ + """ + self.tau_ = np.empty(self.N,dtype=float) + self.v_ = np.empty(self.N,dtype=float) + + #Initial values - Marginal moments + z = np.empty(self.N,dtype=float) + self.Z_hat = np.empty(self.N,dtype=float) + phi = np.empty(self.N,dtype=float) + mu_hat = np.empty(self.N,dtype=float) + sigma2_hat = np.empty(self.N,dtype=float) + + #Approximation + epsilon_np1 = 1 + epsilon_np2 = 1 + self.iterations = 0 + np1 = [self.tau_tilde.copy()] + np2 = [self.v_tilde.copy()] + while epsilon_np1 > self.epsilon or epsilon_np2 > self.epsilon: + update_order = np.random.permutation(self.N) + for i in update_order: + #Cavity distribution parameters + self.tau_[i] = 1./Sigma_diag[i] - self.eta*self.tau_tilde[i] + self.v_[i] = mu[i]/Sigma_diag[i] - self.eta*self.v_tilde[i] + #Marginal moments + self.Z_hat[i], mu_hat[i], sigma2_hat[i] = self.noise_model_list[self.index[i]].moments_match(self._transf_data[i],self.tau_[i],self.v_[i]) + #Site parameters update + Delta_tau = self.delta/self.eta*(1./sigma2_hat[i] - 1./Sigma_diag[i]) + Delta_v = self.delta/self.eta*(mu_hat[i]/sigma2_hat[i] - mu[i]/Sigma_diag[i]) + self.tau_tilde[i] += Delta_tau + self.v_tilde[i] += Delta_v + #Posterior distribution parameters update + DSYR(LLT,Kmn[:,i].copy(),Delta_tau) #LLT = LLT + np.outer(Kmn[:,i],Kmn[:,i])*Delta_tau + L = jitchol(LLT) + #cholUpdate(L,Kmn[:,i]*np.sqrt(Delta_tau)) + V,info = dtrtrs(L,Kmn,lower=1) + Sigma_diag = np.sum(V*V,-2) + si = np.sum(V.T*V[:,i],-1) + mu += (Delta_v-Delta_tau*mu[i])*si + self.iterations += 1 + #Sigma recomputation with Cholesky decompositon + LLT = LLT0 + np.dot(Kmn*self.tau_tilde[None,:],Kmn.T) + L = jitchol(LLT) + V,info = dtrtrs(L,Kmn,lower=1) + V2,info = dtrtrs(L.T,V,lower=0) + Sigma_diag = np.sum(V*V,-2) + Knmv_tilde = np.dot(Kmn,self.v_tilde) + mu = np.dot(V2.T,Knmv_tilde) + epsilon_np1 = sum((self.tau_tilde-np1[-1])**2)/self.N + epsilon_np2 = sum((self.v_tilde-np2[-1])**2)/self.N + np1.append(self.tau_tilde.copy()) + np2.append(self.v_tilde.copy()) + + self._compute_GP_variables() + + def fit_FITC(self, Kmm, Kmn, Knn_diag): + """ + The expectation-propagation algorithm with sparse pseudo-input. + For nomenclature see Naish-Guzman and Holden, 2008. 
+ """ + num_inducing = Kmm.shape[0] + + """ + Prior approximation parameters: + q(f|X) = int_{df}{N(f|KfuKuu_invu,diag(Kff-Qff)*N(u|0,Kuu)} = N(f|0,Sigma0) + Sigma0 = diag(Knn-Qnn) + Qnn, Qnn = Knm*Kmmi*Kmn + """ + Lm = jitchol(Kmm) + Lmi = chol_inv(Lm) + Kmmi = np.dot(Lmi.T,Lmi) + P0 = Kmn.T + KmnKnm = np.dot(P0.T, P0) + KmmiKmn = np.dot(Kmmi,P0.T) + Qnn_diag = np.sum(P0.T*KmmiKmn,-2) + Diag0 = Knn_diag - Qnn_diag + R0 = jitchol(Kmmi).T + + """ + Posterior approximation: q(f|y) = N(f| mu, Sigma) + Sigma = Diag + P*R.T*R*P.T + K + mu = w + P*Gamma + """ + self.w = np.zeros(self.N) + self.Gamma = np.zeros(num_inducing) + mu = np.zeros(self.N) + P = P0.copy() + R = R0.copy() + Diag = Diag0.copy() + Sigma_diag = Knn_diag + RPT0 = np.dot(R0,P0.T) + + """ + Initial values - Cavity distribution parameters: + q_(g|mu_,sigma2_) = Product{q_i(g|mu_i,sigma2_i)} + sigma_ = 1./tau_ + mu_ = v_/tau_ + """ + self.tau_ = np.empty(self.N,dtype=float) + self.v_ = np.empty(self.N,dtype=float) + + #Initial values - Marginal moments + z = np.empty(self.N,dtype=float) + self.Z_hat = np.empty(self.N,dtype=float) + phi = np.empty(self.N,dtype=float) + mu_hat = np.empty(self.N,dtype=float) + sigma2_hat = np.empty(self.N,dtype=float) + + #Approximation + epsilon_np1 = 1 + epsilon_np2 = 1 + self.iterations = 0 + self.np1 = [self.tau_tilde.copy()] + self.np2 = [self.v_tilde.copy()] + while epsilon_np1 > self.epsilon or epsilon_np2 > self.epsilon: + update_order = np.random.permutation(self.N) + for i in update_order: + #Cavity distribution parameters + self.tau_[i] = 1./Sigma_diag[i] - self.eta*self.tau_tilde[i] + self.v_[i] = mu[i]/Sigma_diag[i] - self.eta*self.v_tilde[i] + #Marginal moments + self.Z_hat[i], mu_hat[i], sigma2_hat[i] = self.noise_model_list[self.index[i]].moments_match(self._transf_data[i],self.tau_[i],self.v_[i]) + #Site parameters update + Delta_tau = self.delta/self.eta*(1./sigma2_hat[i] - 1./Sigma_diag[i]) + Delta_v = self.delta/self.eta*(mu_hat[i]/sigma2_hat[i] - mu[i]/Sigma_diag[i]) + self.tau_tilde[i] += Delta_tau + self.v_tilde[i] += Delta_v + #Posterior distribution parameters update + dtd1 = Delta_tau*Diag[i] + 1. + dii = Diag[i] + Diag[i] = dii - (Delta_tau * dii**2.)/dtd1 + pi_ = P[i,:].reshape(1,num_inducing) + P[i,:] = pi_ - (Delta_tau*dii)/dtd1 * pi_ + Rp_i = np.dot(R,pi_.T) + RTR = np.dot(R.T,np.dot(np.eye(num_inducing) - Delta_tau/(1.+Delta_tau*Sigma_diag[i]) * np.dot(Rp_i,Rp_i.T),R)) + R = jitchol(RTR).T + self.w[i] += (Delta_v - Delta_tau*self.w[i])*dii/dtd1 + self.Gamma += (Delta_v - Delta_tau*mu[i])*np.dot(RTR,P[i,:].T) + RPT = np.dot(R,P.T) + Sigma_diag = Diag + np.sum(RPT.T*RPT.T,-1) + mu = self.w + np.dot(P,self.Gamma) + self.iterations += 1 + #Sigma recomptutation with Cholesky decompositon + Iplus_Dprod_i = 1./(1.+ Diag0 * self.tau_tilde) + Diag = Diag0 * Iplus_Dprod_i + P = Iplus_Dprod_i[:,None] * P0 + safe_diag = np.where(Diag0 < self.tau_tilde, self.tau_tilde/(1.+Diag0*self.tau_tilde), (1. 
- Iplus_Dprod_i)/Diag0) + L = jitchol(np.eye(num_inducing) + np.dot(RPT0,safe_diag[:,None]*RPT0.T)) + R,info = dtrtrs(L,R0,lower=1) + RPT = np.dot(R,P.T) + Sigma_diag = Diag + np.sum(RPT.T*RPT.T,-1) + self.w = Diag * self.v_tilde + self.Gamma = np.dot(R.T, np.dot(RPT,self.v_tilde)) + mu = self.w + np.dot(P,self.Gamma) + epsilon_np1 = sum((self.tau_tilde-self.np1[-1])**2)/self.N + epsilon_np2 = sum((self.v_tilde-self.np2[-1])**2)/self.N + self.np1.append(self.tau_tilde.copy()) + self.np2.append(self.v_tilde.copy()) + + return self._compute_GP_variables() diff --git a/GPy/likelihoods/gaussian.py b/GPy/likelihoods/gaussian.py index 59d8fe86..c12d8e6d 100644 --- a/GPy/likelihoods/gaussian.py +++ b/GPy/likelihoods/gaussian.py @@ -1,19 +1,21 @@ import numpy as np from likelihood import likelihood +from ..util.linalg import jitchol + class Gaussian(likelihood): """ Likelihood class for doing Expectation propagation - :param Y: observed output (Nx1 numpy.darray) - ..Note:: Y values allowed depend on the likelihood_function used - :param variance : + :param data: observed output + :type data: Nx1 numpy.darray + :param variance: noise parameter :param normalize: whether to normalize the data before computing (predictions will be in original scales) :type normalize: False|True """ def __init__(self, data, variance=1., normalize=False): self.is_heteroscedastic = False - self.Nparams = 1 + self.num_params = 1 self.Z = 0. # a correction factor which accounts for the approximation made N, self.output_dim = data.shape @@ -32,6 +34,8 @@ class Gaussian(likelihood): self._variance = np.asarray(variance) + 1. self._set_params(np.asarray(variance)) + super(Gaussian, self).__init__() + def set_data(self, data): self.data = data self.N, D = data.shape @@ -40,9 +44,11 @@ class Gaussian(likelihood): if D > self.N: self.YYT = np.dot(self.Y, self.Y.T) self.trYYT = np.trace(self.YYT) + self.YYT_factor = jitchol(self.YYT) else: self.YYT = None self.trYYT = np.sum(np.square(self.Y)) + self.YYT_factor = self.Y def _get_params(self): return np.asarray(self._variance) @@ -53,16 +59,17 @@ class Gaussian(likelihood): def _set_params(self, x): x = np.float64(x) if np.all(self._variance != x): - if x == 0.: + if x == 0.:#special case of zero noise self.precision = np.inf self.V = None else: self.precision = 1. / x self.V = (self.precision) * self.Y + self.VVT_factor = self.precision * self.YYT_factor self.covariance_matrix = np.eye(self.N) * x self._variance = x - def predictive_values(self, mu, var, full_cov): + def predictive_values(self, mu, var, full_cov, **likelihood_args): """ Un-normalize the prediction and add the likelihood variance, then return the 5%, 95% interval """ @@ -83,11 +90,25 @@ class Gaussian(likelihood): _95pc = mean + 2.*np.sqrt(true_var) return mean, true_var, _5pc, _95pc - def fit_full(self): + def log_predictive_density(self, y_test, mu_star, var_star): """ - No approximations needed + Calculation of the log predictive density + + .. math: + p(y_{*}|D) = p(y_{*}|f_{*})p(f_{*}|\mu_{*}\\sigma^{2}_{*}) + + :param y_test: test observations (y_{*}) + :type y_test: (Nx1) array + :param mu_star: predictive mean of gaussian p(f_{*}|mu_{*}, var_{*}) + :type mu_star: (Nx1) array + :param var_star: predictive variance of gaussian p(f_{*}|mu_{*}, var_{*}) + :type var_star: (Nx1) array + + .. Note: + Works as if each test point was provided individually, i.e. 
not full_cov """ - pass + y_rescaled = (y_test - self._offset)/self._scale + return -0.5*np.log(2*np.pi) -0.5*np.log(var_star + self._variance) -0.5*(np.square(y_rescaled - mu_star))/(var_star + self._variance) def _gradients(self, partial): return np.sum(partial) diff --git a/GPy/likelihoods/gaussian_mixed_noise.py b/GPy/likelihoods/gaussian_mixed_noise.py new file mode 100644 index 00000000..696867c0 --- /dev/null +++ b/GPy/likelihoods/gaussian_mixed_noise.py @@ -0,0 +1,108 @@ +# Copyright (c) 2013, Ricardo Andrade +# Licensed under the BSD 3-clause license (see LICENSE.txt) + + +import numpy as np +from scipy import stats +from ..util.linalg import pdinv,mdot,jitchol,chol_inv,DSYR,tdot,dtrtrs +from likelihood import likelihood +from . import Gaussian + + +class Gaussian_Mixed_Noise(likelihood): + """ + Gaussian Likelihood for multiple outputs + + This is a wrapper around likelihood.Gaussian class + + :param data_list: data observations + :type data_list: list of numpy arrays (num_data_output_i x 1), one array per output + :param noise_params: noise parameters of each output + :type noise_params: list of floats, one per output + :param normalize: whether to normalize the data before computing (predictions will be in original scales) + :type normalize: False|True + """ + def __init__(self, data_list, noise_params=None, normalize=True): + self.num_params = len(data_list) + self.n_list = [data.size for data in data_list] + self.index = np.vstack([np.repeat(i,n)[:,None] for i,n in zip(range(self.num_params),self.n_list)]) + + if noise_params is None: + noise_params = [1.] * self.num_params + else: + assert self.num_params == len(noise_params), 'Number of noise parameters does not match the number of noise models.' + + self.noise_model_list = [Gaussian(Y,variance=v,normalize = normalize) for Y,v in zip(data_list,noise_params)] + self.n_params = [noise_model._get_params().size for noise_model in self.noise_model_list] + self.data = np.vstack(data_list) + self.N, self.output_dim = self.data.shape + self._offset = np.zeros((1, self.output_dim)) + self._scale = np.ones((1, self.output_dim)) + + self.is_heteroscedastic = True + self.Z = 0. 
# a correction factor which accounts for the approximation made + + self.set_data(data_list) + self._set_params(np.asarray(noise_params)) + + super(Gaussian_Mixed_Noise, self).__init__() + + def set_data(self, data_list): + self.data = np.vstack(data_list) + self.N, D = self.data.shape + assert D == self.output_dim + self.Y = (self.data - self._offset) / self._scale + if D > self.N: + raise NotImplementedError + #self.YYT = np.dot(self.Y, self.Y.T) + #self.trYYT = np.trace(self.YYT) + #self.YYT_factor = jitchol(self.YYT) + else: + self.YYT = None + self.trYYT = np.sum(np.square(self.Y)) + self.YYT_factor = self.Y + + def predictive_values(self,mu,var,full_cov,noise_model): + """ + Predicts the output given the GP + + :param mu: GP's mean + :param var: GP's variance + :param full_cov: whether to return the full covariance matrix, or just the diagonal + :type full_cov: False|True + :param noise_model: noise model to use + :type noise_model: integer + """ + if full_cov: + raise NotImplementedError, "Cannot make correlated predictions with an EP likelihood" + return self.noise_model_list[noise_model].predictive_values(mu,var,full_cov) + + def _get_params(self): + return np.hstack([noise_model._get_params().flatten() for noise_model in self.noise_model_list]) + + def _get_param_names(self): + if len(self.noise_model_list) == 1: + names = self.noise_model_list[0]._get_param_names() + else: + names = [] + for noise_model,i in zip(self.noise_model_list,range(len(self.n_list))): + names.append(''.join(noise_model._get_param_names() + ['_%s' %i])) + return names + + def _set_params(self,p): + cs_params = np.cumsum([0]+self.n_params) + + for i in range(len(self.n_params)): + self.noise_model_list[i]._set_params(p[cs_params[i]:cs_params[i+1]]) + self.precision = np.hstack([np.repeat(noise_model.precision,n) for noise_model,n in zip(self.noise_model_list,self.n_list)])[:,None] + + self.V = self.precision * self.Y + self.VVT_factor = self.precision * self.YYT_factor + self.covariance_matrix = np.eye(self.N) * 1./self.precision + + def _gradients(self,partial): + gradients = [] + aux = np.cumsum([0]+self.n_list) + for ai,af,noise_model in zip(aux[:-1],aux[1:],self.noise_model_list): + gradients += [noise_model._gradients(partial[ai:af])] + return np.hstack(gradients) diff --git a/GPy/likelihoods/laplace.py b/GPy/likelihoods/laplace.py new file mode 100644 index 00000000..0def0c8b --- /dev/null +++ b/GPy/likelihoods/laplace.py @@ -0,0 +1,403 @@ +# Copyright (c) 2013, GPy authors (see AUTHORS.txt). +# Licensed under the BSD 3-clause license (see LICENSE.txt) +# +#Parts of this file were influenced by the Matlab GPML framework written by +#Carl Edward Rasmussen & Hannes Nickisch, however all bugs are our own. +# +#The GPML code is released under the FreeBSD License. +#Copyright (c) 2005-2013 Carl Edward Rasmussen & Hannes Nickisch. All rights reserved. +# +#The code and associated documentation is available from +#http://gaussianprocess.org/gpml/code. + +import numpy as np +import scipy as sp +from likelihood import likelihood +from ..util.linalg import mdot, jitchol, pddet, dpotrs +from functools import partial as partial_func +import warnings + +class Laplace(likelihood): + """Laplace approximation to a posterior""" + + def __init__(self, data, noise_model, extra_data=None): + """ + Laplace Approximation + + Find the moments \hat{f} and the hessian at this point + (using Newton-Raphson) of the unnormalised posterior + + Compute the GP variables (i.e. 
generate some Y^{squiggle} and + z^{squiggle} which makes a gaussian the same as the laplace + approximation to the posterior, but normalised + + Arguments + --------- + + :param data: array of data the likelihood function is approximating + :type data: NxD + :param noise_model: likelihood function - subclass of noise_model + :type noise_model: noise_model + :param extra_data: additional data used by some likelihood functions, + """ + self.data = data + self.noise_model = noise_model + self.extra_data = extra_data + + #Inital values + self.N, self.D = self.data.shape + self.is_heteroscedastic = True + self.Nparams = 0 + self.NORMAL_CONST = ((0.5 * self.N) * np.log(2 * np.pi)) + + self.restart() + likelihood.__init__(self) + + def restart(self): + """ + Reset likelihood variables to their defaults + """ + #Initial values for the GP variables + self.Y = np.zeros((self.N, 1)) + self.covariance_matrix = np.eye(self.N) + self.precision = np.ones(self.N)[:, None] + self.Z = 0 + self.YYT = None + + self.old_Ki_f = None + self.bad_fhat = False + + def predictive_values(self,mu,var,full_cov,**noise_args): + if full_cov: + raise NotImplementedError, "Cannot make correlated predictions with an EP likelihood" + return self.noise_model.predictive_values(mu,var,**noise_args) + + def log_predictive_density(self, y_test, mu_star, var_star): + """ + Calculation of the log predictive density + + .. math: + p(y_{*}|D) = p(y_{*}|f_{*})p(f_{*}|\mu_{*}\\sigma^{2}_{*}) + + :param y_test: test observations (y_{*}) + :type y_test: (Nx1) array + :param mu_star: predictive mean of gaussian p(f_{*}|mu_{*}, var_{*}) + :type mu_star: (Nx1) array + :param var_star: predictive variance of gaussian p(f_{*}|mu_{*}, var_{*}) + :type var_star: (Nx1) array + """ + return self.noise_model.log_predictive_density(y_test, mu_star, var_star) + + def _get_params(self): + return np.asarray(self.noise_model._get_params()) + + def _get_param_names(self): + return self.noise_model._get_param_names() + + def _set_params(self, p): + return self.noise_model._set_params(p) + + def _shared_gradients_components(self): + d3lik_d3fhat = self.noise_model.d3logpdf_df3(self.f_hat, self.data, extra_data=self.extra_data) + dL_dfhat = 0.5*(np.diag(self.Ki_W_i)[:, None]*d3lik_d3fhat).T #why isn't this -0.5? 
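        # Sign note: W = -d2logpdf_df2, so dW/df_hat = -d3logpdf_df3, and the
        # objective carries -0.5*ln|K^{-1} + W|. Its derivative w.r.t. f_hat_i
        # is -0.5*[(K^{-1}+W)^{-1}]_ii * dW_ii/df_hat_i; the two minus signs
        # cancel, so the +0.5*diag(Ki_W_i)*d3lik_d3fhat above is correct.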
+ I_KW_i = np.eye(self.N) - np.dot(self.K, self.Wi_K_i) + return dL_dfhat, I_KW_i + + def _Kgradients(self): + """ + Gradients with respect to prior kernel parameters dL_dK to be chained + with dK_dthetaK to give dL_dthetaK + :returns: dL_dK matrix + :rtype: Matrix (1 x num_kernel_params) + """ + dL_dfhat, I_KW_i = self._shared_gradients_components() + dlp = self.noise_model.dlogpdf_df(self.f_hat, self.data, extra_data=self.extra_data) + + #Explicit + #expl_a = np.dot(self.Ki_f, self.Ki_f.T) + #expl_b = self.Wi_K_i + #expl = 0.5*expl_a - 0.5*expl_b + #dL_dthetaK_exp = dK_dthetaK(expl, X) + + #Implicit + impl = mdot(dlp, dL_dfhat, I_KW_i) + + #No longer required as we are computing these in the gp already + #otherwise we would take them away and add them back + #dL_dthetaK_imp = dK_dthetaK(impl, X) + #dL_dthetaK = dL_dthetaK_exp + dL_dthetaK_imp + #dL_dK = expl + impl + + #No need to compute explicit as we are computing dZ_dK to account + #for the difference between the K gradients of a normal GP, + #and the K gradients including the implicit part + dL_dK = impl + return dL_dK + + def _gradients(self, partial): + """ + Gradients with respect to likelihood parameters (dL_dthetaL) + + :param partial: Not needed by this likelihood + :type partial: lambda function + :rtype: array of derivatives (1 x num_likelihood_params) + """ + dL_dfhat, I_KW_i = self._shared_gradients_components() + dlik_dthetaL, dlik_grad_dthetaL, dlik_hess_dthetaL = self.noise_model._laplace_gradients(self.f_hat, self.data, extra_data=self.extra_data) + + #len(dlik_dthetaL) + num_params = len(self._get_param_names()) + # make space for one derivative for each likelihood parameter + dL_dthetaL = np.zeros(num_params) + for thetaL_i in range(num_params): + #Explicit + dL_dthetaL_exp = ( np.sum(dlik_dthetaL[:, thetaL_i]) + #- 0.5*np.trace(mdot(self.Ki_W_i, (self.K, np.diagflat(dlik_hess_dthetaL[thetaL_i])))) + + np.dot(0.5*np.diag(self.Ki_W_i)[:,None].T, dlik_hess_dthetaL[:, thetaL_i]) + ) + + #Implicit + dfhat_dthetaL = mdot(I_KW_i, self.K, dlik_grad_dthetaL[:, thetaL_i]) + dL_dthetaL_imp = np.dot(dL_dfhat, dfhat_dthetaL) + dL_dthetaL[thetaL_i] = dL_dthetaL_exp + dL_dthetaL_imp + + return dL_dthetaL + + def _compute_GP_variables(self): + """ + Generate data Y which would give the normal distribution identical + to the laplace approximation to the posterior, but normalised + + GPy expects a likelihood to be gaussian, so need to caluclate + the data Y^{\tilde} that makes the posterior match that found + by a laplace approximation to a non-gaussian likelihood but with + a gaussian likelihood + + Firstly, + The hessian of the unormalised posterior distribution is (K^{-1} + W)^{-1}, + i.e. z*N(f|f^{\hat}, (K^{-1} + W)^{-1}) but this assumes a non-gaussian likelihood, + we wish to find the hessian \Sigma^{\tilde} + that has the same curvature but using our new simulated data Y^{\tilde} + i.e. 
we do N(Y^{\tilde}|f^{\hat}, \Sigma^{\tilde})N(f|0, K) = z*N(f|f^{\hat}, (K^{-1} + W)^{-1}) + and we wish to find what Y^{\tilde} and \Sigma^{\tilde} + We find that Y^{\tilde} = W^{-1}(K^{-1} + W)f^{\hat} and \Sigma^{tilde} = W^{-1} + + Secondly, + GPy optimizes the log marginal log p(y) = -0.5*ln|K+\Sigma^{\tilde}| - 0.5*Y^{\tilde}^{T}(K^{-1} + \Sigma^{tilde})^{-1}Y + lik.Z + So we can suck up any differences between that and our log marginal likelihood approximation + p^{\squiggle}(y) = -0.5*f^{\hat}K^{-1}f^{\hat} + log p(y|f^{\hat}) - 0.5*log |K||K^{-1} + W| + which we want to optimize instead, by equating them and rearranging, the difference is added onto + the log p(y) that GPy optimizes by default + + Thirdly, + Since we have gradients that depend on how we move f^{\hat}, we have implicit components + aswell as the explicit dL_dK, we hold these differences in dZ_dK and add them to dL_dK in the + gp.py code + """ + Wi = 1.0/self.W + self.Sigma_tilde = np.diagflat(Wi) + + Y_tilde = Wi*self.Ki_f + self.f_hat + + self.Wi_K_i = self.W12BiW12 + ln_det_Wi_K = pddet(self.Sigma_tilde + self.K) + lik = self.noise_model.logpdf(self.f_hat, self.data, extra_data=self.extra_data) + y_Wi_K_i_y = mdot(Y_tilde.T, self.Wi_K_i, Y_tilde) + + Z_tilde = (+ lik + - 0.5*self.ln_B_det + + 0.5*ln_det_Wi_K + - 0.5*self.f_Ki_f + + 0.5*y_Wi_K_i_y + + self.NORMAL_CONST + ) + + #Convert to float as its (1, 1) and Z must be a scalar + self.Z = np.float64(Z_tilde) + self.Y = Y_tilde + self.YYT = np.dot(self.Y, self.Y.T) + self.covariance_matrix = self.Sigma_tilde + self.precision = 1.0 / np.diag(self.covariance_matrix)[:, None] + + #Compute dZ_dK which is how the approximated distributions gradients differ from the dL_dK computed for other likelihoods + self.dZ_dK = self._Kgradients() + #+ 0.5*self.Wi_K_i - 0.5*np.dot(self.Ki_f, self.Ki_f.T) #since we are not adding the K gradients explicit part theres no need to compute this again + + def fit_full(self, K): + """ + The laplace approximation algorithm, find K and expand hessian + For nomenclature see Rasmussen & Williams 2006 - modified for numerical stability + + :param K: Prior covariance matrix evaluated at locations X + :type K: NxN matrix + """ + self.K = K.copy() + + #Find mode + self.f_hat = self.rasm_mode(self.K) + + #Compute hessian and other variables at mode + self._compute_likelihood_variables() + + #Compute fake variables replicating laplace approximation to posterior + self._compute_GP_variables() + + def _compute_likelihood_variables(self): + """ + Compute the variables required to compute gaussian Y variables + """ + #At this point get the hessian matrix (or vector as W is diagonal) + self.W = -self.noise_model.d2logpdf_df2(self.f_hat, self.data, extra_data=self.extra_data) + + if not self.noise_model.log_concave: + #print "Under 1e-10: {}".format(np.sum(self.W < 1e-6)) + self.W[self.W < 1e-6] = 1e-6 # FIXME-HACK: This is a hack since GPy can't handle negative variances which can occur + + self.W12BiW12, self.ln_B_det = self._compute_B_statistics(self.K, self.W, np.eye(self.N)) + + self.Ki_f = self.Ki_f + self.f_Ki_f = np.dot(self.f_hat.T, self.Ki_f) + self.Ki_W_i = self.K - mdot(self.K, self.W12BiW12, self.K) + + def _compute_B_statistics(self, K, W, a): + """ + Rasmussen suggests the use of a numerically stable positive definite matrix B + Which has a positive diagonal element and can be easyily inverted + + :param K: Prior Covariance matrix evaluated at locations X + :type K: NxN matrix + :param W: Negative hessian at a point (diagonal matrix) + 
:type W: Vector of diagonal values of hessian (1xN) + :param a: Matrix to calculate W12BiW12a + :type a: Matrix NxN + :returns: (W12BiW12, ln_B_det) + """ + if not self.noise_model.log_concave: + #print "Under 1e-10: {}".format(np.sum(W < 1e-6)) + W[W < 1e-6] = 1e-6 # FIXME-HACK: This is a hack since GPy can't handle negative variances which can occur + # If the likelihood is non-log-concave. We wan't to say that there is a negative variance + # To cause the posterior to become less certain than the prior and likelihood, + # This is a property only held by non-log-concave likelihoods + + + #W is diagonal so its sqrt is just the sqrt of the diagonal elements + W_12 = np.sqrt(W) + B = np.eye(self.N) + W_12*K*W_12.T + L = jitchol(B) + + W12BiW12a = W_12*dpotrs(L, np.asfortranarray(W_12*a), lower=1)[0] + ln_B_det = 2*np.sum(np.log(np.diag(L))) + return W12BiW12a, ln_B_det + + def rasm_mode(self, K, MAX_ITER=40): + """ + Rasmussen's numerically stable mode finding + For nomenclature see Rasmussen & Williams 2006 + Influenced by GPML (BSD) code, all errors are our own + + :param K: Covariance matrix evaluated at locations X + :type K: NxD matrix + :param MAX_ITER: Maximum number of iterations of newton-raphson before forcing finish of optimisation + :type MAX_ITER: scalar + :returns: f_hat, mode on which to make laplace approxmiation + :rtype: NxD matrix + """ + #old_Ki_f = np.zeros((self.N, 1)) + + #Start f's at zero originally of if we have gone off track, try restarting + if self.old_Ki_f is None or self.bad_fhat: + old_Ki_f = np.random.rand(self.N, 1)/50.0 + #old_Ki_f = self.Y + f = np.dot(K, old_Ki_f) + else: + #Start at the old best point + old_Ki_f = self.old_Ki_f.copy() + f = self.f_hat.copy() + + new_obj = -np.inf + old_obj = np.inf + + def obj(Ki_f, f): + return -0.5*np.dot(Ki_f.T, f) + self.noise_model.logpdf(f, self.data, extra_data=self.extra_data) + + difference = np.inf + epsilon = 1e-7 + #step_size = 1 + #rs = 0 + i = 0 + + while difference > epsilon and i < MAX_ITER: + W = -self.noise_model.d2logpdf_df2(f, self.data, extra_data=self.extra_data) + + W_f = W*f + grad = self.noise_model.dlogpdf_df(f, self.data, extra_data=self.extra_data) + + b = W_f + grad + W12BiW12Kb, _ = self._compute_B_statistics(K, W.copy(), np.dot(K, b)) + + #Work out the DIRECTION that we want to move in, but don't choose the stepsize yet + full_step_Ki_f = b - W12BiW12Kb + dKi_f = full_step_Ki_f - old_Ki_f + + f_old = f.copy() + def inner_obj(step_size, old_Ki_f, dKi_f, K): + Ki_f = old_Ki_f + step_size*dKi_f + f = np.dot(K, Ki_f) + # This is nasty, need to set something within an optimization though + self.tmp_Ki_f = Ki_f.copy() + self.tmp_f = f.copy() + return -obj(Ki_f, f) + + i_o = partial_func(inner_obj, old_Ki_f=old_Ki_f, dKi_f=dKi_f, K=K) + #Find the stepsize that minimizes the objective function using a brent line search + #The tolerance and maxiter matter for speed! 
Seems to be best to keep them low and make more full + #steps than get this exact then make a step, if B was bigger it might be the other way around though + #new_obj = sp.optimize.minimize_scalar(i_o, method='brent', tol=1e-4, options={'maxiter':5}).fun + new_obj = sp.optimize.brent(i_o, tol=1e-4, maxiter=10) + f = self.tmp_f.copy() + Ki_f = self.tmp_Ki_f.copy() + + #Optimize without linesearch + #f_old = f.copy() + #update_passed = False + #while not update_passed: + #Ki_f = old_Ki_f + step_size*dKi_f + #f = np.dot(K, Ki_f) + + #old_obj = new_obj + #new_obj = obj(Ki_f, f) + #difference = new_obj - old_obj + ##print "difference: ",difference + #if difference < 0: + ##print "Objective function rose", np.float(difference) + ##If the objective function isn't rising, restart optimization + #step_size *= 0.8 + ##print "Reducing step-size to {ss:.3} and restarting optimization".format(ss=step_size) + ##objective function isn't increasing, try reducing step size + #f = f_old.copy() #it's actually faster not to go back to old location and just zigzag across the mode + #old_obj = new_obj + #rs += 1 + #else: + #update_passed = True + + #old_Ki_f = self.Ki_f.copy() + + #difference = abs(new_obj - old_obj) + #old_obj = new_obj.copy() + difference = np.abs(np.sum(f - f_old)) + np.abs(np.sum(Ki_f - old_Ki_f)) + #difference = np.abs(np.sum(Ki_f - old_Ki_f))/np.float(self.N) + old_Ki_f = Ki_f.copy() + i += 1 + + self.old_Ki_f = old_Ki_f.copy() + + #Warn of bad fits + if difference > epsilon: + self.bad_fhat = True + warnings.warn("Not perfect f_hat fit difference: {}".format(difference)) + elif self.bad_fhat: + self.bad_fhat = False + warnings.warn("f_hat now perfect again") + + self.Ki_f = Ki_f + return f diff --git a/GPy/likelihoods/likelihood.py b/GPy/likelihoods/likelihood.py index d073ba6e..5e7c8c68 100644 --- a/GPy/likelihoods/likelihood.py +++ b/GPy/likelihoods/likelihood.py @@ -1,7 +1,8 @@ import numpy as np import copy +from ..core.parameterized import Parameterized -class likelihood: +class likelihood(Parameterized): """ The atom for a likelihood class @@ -9,17 +10,20 @@ class likelihood: (Gaussian) inherits directly from this, as does the EP algorithm Some things must be defined for this to work properly: - self.Y : the effective Gaussian target of the GP - self.N, self.D : Y.shape - self.covariance_matrix : the effective (noise) covariance of the GP targets - self.Z : a factor which gets added to the likelihood (0 for a Gaussian, Z_EP for EP) - self.is_heteroscedastic : enables significant computational savings in GP - self.precision : a scalar or vector representation of the effective target precision - self.YYT : (optional) = np.dot(self.Y, self.Y.T) enables computational savings for D>N - self.V : self.precision * self.Y + + - self.Y : the effective Gaussian target of the GP + - self.N, self.D : Y.shape + - self.covariance_matrix : the effective (noise) covariance of the GP targets + - self.Z : a factor which gets added to the likelihood (0 for a Gaussian, Z_EP for EP) + - self.is_heteroscedastic : enables significant computational savings in GP + - self.precision : a scalar or vector representation of the effective target precision + - self.YYT : (optional) = np.dot(self.Y, self.Y.T) enables computational savings for D>N + - self.V : self.precision * self.Y + """ - def __init__(self,data): - raise ValueError, "this class is not to be instantiated" + def __init__(self): + Parameterized.__init__(self) + self.dZ_dK = 0 def _get_params(self): raise NotImplementedError @@ -30,8 +34,17 @@ class 
likelihood: def _set_params(self, x): raise NotImplementedError - def fit(self): - raise NotImplementedError + def fit_full(self, K): + """ + No approximations needed by default + """ + pass + + def restart(self): + """ + No need to restart if not an approximation + """ + pass def _gradients(self, partial): raise NotImplementedError @@ -39,6 +52,18 @@ class likelihood: def predictive_values(self, mu, var): raise NotImplementedError - def copy(self): - """ Returns a (deep) copy of the current likelihood """ - return copy.deepcopy(self) + def log_predictive_density(self, y_test, mu_star, var_star): + """ + Calculation of the predictive density + + .. math: + p(y_{*}|D) = p(y_{*}|f_{*})p(f_{*}|\mu_{*}\\sigma^{2}_{*}) + + :param y_test: test observations (y_{*}) + :type y_test: (Nx1) array + :param mu_star: predictive mean of gaussian p(f_{*}|mu_{*}, var_{*}) + :type mu_star: (Nx1) array + :param var_star: predictive variance of gaussian p(f_{*}|mu_{*}, var_{*}) + :type var_star: (Nx1) array + """ + raise NotImplementedError diff --git a/GPy/likelihoods/likelihood_functions.py b/GPy/likelihoods/likelihood_functions.py deleted file mode 100644 index 7b9b8982..00000000 --- a/GPy/likelihoods/likelihood_functions.py +++ /dev/null @@ -1,166 +0,0 @@ -# Copyright (c) 2012, 2013 Ricardo Andrade -# Licensed under the BSD 3-clause license (see LICENSE.txt) - - -import numpy as np -from scipy import stats -import scipy as sp -import pylab as pb -from ..util.plot import gpplot -from ..util.univariate_Gaussian import std_norm_pdf,std_norm_cdf -import link_functions - -class LikelihoodFunction(object): - """ - Likelihood class for doing Expectation propagation - - :param Y: observed output (Nx1 numpy.darray) - ..Note:: Y values allowed depend on the LikelihoodFunction used - """ - def __init__(self,link): - if link == self._analytical: - self.moments_match = self._moments_match_analytical - else: - assert isinstance(link,link_functions.LinkFunction) - self.link = link - self.moments_match = self._moments_match_numerical - - def _preprocess_values(self,Y): - return Y - - def _product(self,gp,obs,mu,sigma): - return stats.norm.pdf(gp,loc=mu,scale=sigma) * self._distribution(gp,obs) - - def _nlog_product(self,gp,obs,mu,sigma): - return -(-.5*(gp-mu)**2/sigma**2 + self._log_distribution(gp,obs)) - - def _locate(self,obs,mu,sigma): - """ - Golden Search to find the mode in the _product function (cavity x exact likelihood) and define a grid around it for numerical integration - """ - golden_A = -1 if obs == 0 else np.array([np.log(obs),mu]).min() #Lower limit - golden_B = np.array([np.log(obs),mu]).max() #Upper limit - return sp.optimize.golden(self._nlog_product, args=(obs,mu,sigma), brack=(golden_A,golden_B)) #Better to work with _nlog_product than with _product - - def _moments_match_numerical(self,obs,tau,v): - """ - Simpson's Rule is used to calculate the moments mumerically, it needs a grid of points as input. 
- """ - mu = v/tau - sigma = np.sqrt(1./tau) - opt = self._locate(obs,mu,sigma) - width = 3./np.log(max(obs,2)) - A = opt - width #Grid's lower limit - B = opt + width #Grid's Upper limit - K = 10*int(np.log(max(obs,150))) #Number of points in the grid - h = (B-A)/K # length of the intervals - grid_x = np.hstack([np.linspace(opt-width,opt,K/2+1)[1:-1], np.linspace(opt,opt+width,K/2+1)]) # grid of points (X axis) - x = np.hstack([A,B,grid_x[range(1,K,2)],grid_x[range(2,K-1,2)]]) # grid_x rearranged, just to make Simpson's algorithm easier - _aux1 = self._product(A,obs,mu,sigma) - _aux2 = self._product(B,obs,mu,sigma) - _aux3 = 4*self._product(grid_x[range(1,K,2)],obs,mu,sigma) - _aux4 = 2*self._product(grid_x[range(2,K-1,2)],obs,mu,sigma) - zeroth = np.hstack((_aux1,_aux2,_aux3,_aux4)) # grid of points (Y axis) rearranged - first = zeroth*x - second = first*x - Z_hat = sum(zeroth)*h/3 # Zero-th moment - mu_hat = sum(first)*h/(3*Z_hat) # First moment - m2 = sum(second)*h/(3*Z_hat) # Second moment - sigma2_hat = m2 - mu_hat**2 # Second central moment - return float(Z_hat), float(mu_hat), float(sigma2_hat) - -class Binomial(LikelihoodFunction): - """ - Probit likelihood - Y is expected to take values in {-1,1} - ----- - $$ - L(x) = \\Phi (Y_i*f_i) - $$ - """ - def __init__(self,link=None): - self._analytical = link_functions.Probit - if not link: - link = self._analytical - super(Binomial, self).__init__(link) - - def _distribution(self,gp,obs): - pass - - def _log_distribution(self,gp,obs): - pass - - def _preprocess_values(self,Y): - """ - Check if the values of the observations correspond to the values - assumed by the likelihood function. - - ..Note:: Binary classification algorithm works better with classes {-1,1} - """ - Y_prep = Y.copy() - Y1 = Y[Y.flatten()==1].size - Y2 = Y[Y.flatten()==0].size - assert Y1 + Y2 == Y.size, 'Binomial likelihood is meant to be used only with outputs in {0,1}.' - Y_prep[Y.flatten() == 0] = -1 - return Y_prep - - def _moments_match_analytical(self,data_i,tau_i,v_i): - """ - Moments match of the marginal approximation in EP algorithm - - :param i: number of observation (int) - :param tau_i: precision of the cavity distribution (float) - :param v_i: mean/variance of the cavity distribution (float) - """ - z = data_i*v_i/np.sqrt(tau_i**2 + tau_i) - Z_hat = std_norm_cdf(z) - phi = std_norm_pdf(z) - mu_hat = v_i/tau_i + data_i*phi/(Z_hat*np.sqrt(tau_i**2 + tau_i)) - sigma2_hat = 1./tau_i - (phi/((tau_i**2+tau_i)*Z_hat))*(z+phi/Z_hat) - return Z_hat, mu_hat, sigma2_hat - - def predictive_values(self,mu,var): - """ - Compute mean, variance and conficence interval (percentiles 5 and 95) of the prediction - :param mu: mean of the latent variable - :param var: variance of the latent variable - """ - mu = mu.flatten() - var = var.flatten() - mean = stats.norm.cdf(mu/np.sqrt(1+var)) - norm_025 = [stats.norm.ppf(.025,m,v) for m,v in zip(mu,var)] - norm_975 = [stats.norm.ppf(.975,m,v) for m,v in zip(mu,var)] - p_025 = stats.norm.cdf(norm_025/np.sqrt(1+var)) - p_975 = stats.norm.cdf(norm_975/np.sqrt(1+var)) - return mean[:,None], np.nan*var, p_025[:,None], p_975[:,None] # TODO: var - -class Poisson(LikelihoodFunction): - """ - Poisson likelihood - Y is expected to take values in {0,1,2,...} - ----- - $$ - L(x) = \exp(\lambda) * \lambda**Y_i / Y_i! 
- $$ - """ - def __init__(self,link=None): - self._analytical = None - if not link: - link = link_functions.Log() - super(Poisson, self).__init__(link) - - def _distribution(self,gp,obs): - return stats.poisson.pmf(obs,self.link.inv_transf(gp)) - - def _log_distribution(self,gp,obs): - return - self.link.inv_transf(gp) + obs * self.link.log_inv_transf(gp) - - def predictive_values(self,mu,var): - """ - Compute mean, and conficence interval (percentiles 5 and 95) of the prediction - """ - mean = self.link.transf(mu)#np.exp(mu*self.scale + self.location) - tmp = stats.poisson.ppf(np.array([.025,.975]),mean) - p_025 = tmp[:,0] - p_975 = tmp[:,1] - return mean,np.nan*mean,p_025,p_975 # better variance here TODO diff --git a/GPy/likelihoods/link_functions.py b/GPy/likelihoods/link_functions.py deleted file mode 100644 index 3b9a55b2..00000000 --- a/GPy/likelihoods/link_functions.py +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright (c) 2012, 2013 Ricardo Andrade -# Licensed under the BSD 3-clause license (see LICENSE.txt) - - -import numpy as np -from scipy import stats -import scipy as sp -import pylab as pb -from ..util.plot import gpplot -from ..util.univariate_Gaussian import std_norm_pdf,std_norm_cdf - -class LinkFunction(object): - """ - Link function class for doing non-Gaussian likelihoods approximation - - :param Y: observed output (Nx1 numpy.darray) - ..Note:: Y values allowed depend on the likelihood_function used - """ - def __init__(self): - pass - -class Probit(LinkFunction): - """ - Probit link function: Squashes a likelihood between 0 and 1 - """ - def transf(self,mu): - pass - - def inv_transf(self,f): - pass - - def log_inv_transf(self,f): - pass diff --git a/GPy/likelihoods/noise_model_constructors.py b/GPy/likelihoods/noise_model_constructors.py new file mode 100644 index 00000000..e626c6a3 --- /dev/null +++ b/GPy/likelihoods/noise_model_constructors.py @@ -0,0 +1,121 @@ +# Copyright (c) 2013, GPy authors (see AUTHORS.txt). +# Licensed under the BSD 3-clause license (see LICENSE.txt) + +import numpy as np +import noise_models + +def bernoulli(gp_link=None): + """ + Construct a bernoulli likelihood + + :param gp_link: a GPy gp_link function + """ + if gp_link is None: + gp_link = noise_models.gp_transformations.Probit() + #else: + # assert isinstance(gp_link,noise_models.gp_transformations.GPTransformation), 'gp_link function is not valid.' + + if isinstance(gp_link,noise_models.gp_transformations.Probit): + analytical_mean = True + analytical_variance = False + + elif isinstance(gp_link,noise_models.gp_transformations.Heaviside): + analytical_mean = True + analytical_variance = True + + else: + analytical_mean = False + analytical_variance = False + + return noise_models.bernoulli_noise.Bernoulli(gp_link,analytical_mean,analytical_variance) + +def exponential(gp_link=None): + + """ + Construct a exponential likelihood + + :param gp_link: a GPy gp_link function + """ + if gp_link is None: + gp_link = noise_models.gp_transformations.Log_ex_1() + + analytical_mean = False + analytical_variance = False + return noise_models.exponential_noise.Exponential(gp_link,analytical_mean,analytical_variance) + +def gaussian_ep(gp_link=None,variance=1.): + """ + Construct a gaussian likelihood + + :param gp_link: a GPy gp_link function + :param variance: scalar + """ + if gp_link is None: + gp_link = noise_models.gp_transformations.Identity() + #else: + # assert isinstance(gp_link,noise_models.gp_transformations.GPTransformation), 'gp_link function is not valid.' 
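# Usage sketch for these constructors (the import path below is assumed from
# this file's location and is not exercised anywhere in this patch):
from GPy.likelihoods import noise_model_constructors as nmc

bern = nmc.bernoulli()                    # defaults to a Probit gp_link
gauss_ep = nmc.gaussian_ep(variance=0.1)  # Identity gp_link, EP-style Gaussian
stu_t = nmc.student_t(deg_free=4, sigma2=1.)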
+ + analytical_mean = False + analytical_variance = False + return noise_models.gaussian_noise.Gaussian(gp_link,analytical_mean,analytical_variance,variance) + +def poisson(gp_link=None): + """ + Construct a Poisson likelihood + + :param gp_link: a GPy gp_link function + """ + if gp_link is None: + gp_link = noise_models.gp_transformations.Log_ex_1() + #else: + # assert isinstance(gp_link,noise_models.gp_transformations.GPTransformation), 'gp_link function is not valid.' + analytical_mean = False + analytical_variance = False + return noise_models.poisson_noise.Poisson(gp_link,analytical_mean,analytical_variance) + +def gamma(gp_link=None,beta=1.): + """ + Construct a Gamma likelihood + + :param gp_link: a GPy gp_link function + :param beta: scalar + """ + if gp_link is None: + gp_link = noise_models.gp_transformations.Log_ex_1() + analytical_mean = False + analytical_variance = False + return noise_models.gamma_noise.Gamma(gp_link,analytical_mean,analytical_variance,beta) + +def gaussian(gp_link=None, variance=2, D=None, N=None): + """ + Construct a Gaussian likelihood + + :param gp_link: a GPy gp_link function + :param variance: variance + :type variance: scalar + :returns: Gaussian noise model: + """ + if gp_link is None: + gp_link = noise_models.gp_transformations.Identity() + analytical_mean = True + analytical_variance = True # ? + return noise_models.gaussian_noise.Gaussian(gp_link, analytical_mean, + analytical_variance, variance=variance, D=D, N=N) + +def student_t(gp_link=None, deg_free=5, sigma2=2): + """ + Construct a Student t likelihood + + :param gp_link: a GPy gp_link function + :param deg_free: degrees of freedom of student-t + :type deg_free: scalar + :param sigma2: variance + :type sigma2: scalar + :returns: Student-T noise model + """ + if gp_link is None: + gp_link = noise_models.gp_transformations.Identity() + analytical_mean = True + analytical_variance = True + return noise_models.student_t_noise.StudentT(gp_link, analytical_mean, + analytical_variance,deg_free, sigma2) diff --git a/GPy/likelihoods/noise_models/__init__.py b/GPy/likelihoods/noise_models/__init__.py new file mode 100644 index 00000000..d1d134dc --- /dev/null +++ b/GPy/likelihoods/noise_models/__init__.py @@ -0,0 +1,8 @@ +import noise_distributions +import bernoulli_noise +import exponential_noise +import gaussian_noise +import gamma_noise +import poisson_noise +import student_t_noise +import gp_transformations diff --git a/GPy/likelihoods/noise_models/bernoulli_noise.py b/GPy/likelihoods/noise_models/bernoulli_noise.py new file mode 100644 index 00000000..14f4adc8 --- /dev/null +++ b/GPy/likelihoods/noise_models/bernoulli_noise.py @@ -0,0 +1,222 @@ +# Copyright (c) 2012, 2013 Ricardo Andrade +# Licensed under the BSD 3-clause license (see LICENSE.txt) + + +import numpy as np +from scipy import stats,special +import scipy as sp +from GPy.util.univariate_Gaussian import std_norm_pdf,std_norm_cdf +import gp_transformations +from noise_distributions import NoiseDistribution + +class Bernoulli(NoiseDistribution): + """ + Bernoulli likelihood + + .. math:: + p(y_{i}|\\lambda(f_{i})) = \\lambda(f_{i})^{y_{i}}(1-f_{i})^{1-y_{i}} + + .. 
Note::
+        Y is expected to take values in {-1,1}
+        Probit likelihood usually used
+    """
+    def __init__(self,gp_link=None,analytical_mean=False,analytical_variance=False):
+        super(Bernoulli, self).__init__(gp_link,analytical_mean,analytical_variance)
+        if isinstance(gp_link, (gp_transformations.Heaviside, gp_transformations.Probit)):
+            self.log_concave = True
+
+    def _preprocess_values(self,Y):
+        """
+        Check if the values of the observations correspond to the values
+        assumed by the likelihood function.
+
+        .. Note:: Binary classification algorithm works better with classes {-1,1}
+        """
+        Y_prep = Y.copy()
+        Y1 = Y[Y.flatten()==1].size
+        Y2 = Y[Y.flatten()==0].size
+        assert Y1 + Y2 == Y.size, 'Bernoulli likelihood is meant to be used only with outputs in {0,1}.'
+        Y_prep[Y.flatten() == 0] = -1
+        return Y_prep
+
+    def _moments_match_analytical(self,data_i,tau_i,v_i):
+        """
+        Moments match of the marginal approximation in EP algorithm
+
+        :param data_i: observed output value (float)
+        :param tau_i: precision of the cavity distribution (float)
+        :param v_i: mean/variance of the cavity distribution (float)
+        """
+        if data_i == 1:
+            sign = 1.
+        elif data_i == 0:
+            sign = -1.
+        else:
+            raise ValueError("bad value for Bernoulli observation (0,1)")
+        if isinstance(self.gp_link,gp_transformations.Probit):
+            z = sign*v_i/np.sqrt(tau_i**2 + tau_i)
+            Z_hat = std_norm_cdf(z)
+            phi = std_norm_pdf(z)
+            mu_hat = v_i/tau_i + sign*phi/(Z_hat*np.sqrt(tau_i**2 + tau_i))
+            sigma2_hat = 1./tau_i - (phi/((tau_i**2+tau_i)*Z_hat))*(z+phi/Z_hat)
+
+        elif isinstance(self.gp_link,gp_transformations.Heaviside):
+            a = sign*v_i/np.sqrt(tau_i)
+            Z_hat = std_norm_cdf(a)
+            N = std_norm_pdf(a)
+            mu_hat = v_i/tau_i + sign*N/Z_hat/np.sqrt(tau_i)
+            sigma2_hat = (1. - a*N/Z_hat - np.square(N/Z_hat))/tau_i
+            if np.any(np.isnan([Z_hat, mu_hat, sigma2_hat])):
+                raise ValueError("Heaviside moment matching produced NaN values")
+        else:
+            raise ValueError("Exact moment matching not available for link {}".format(self.gp_link.__class__.__name__))
+
+        return Z_hat, mu_hat, sigma2_hat
+
+    def _predictive_mean_analytical(self,mu,variance):
+
+        if isinstance(self.gp_link,gp_transformations.Probit):
+            return stats.norm.cdf(mu/np.sqrt(1+variance))
+
+        elif isinstance(self.gp_link,gp_transformations.Heaviside):
+            return stats.norm.cdf(mu/np.sqrt(variance))
+
+        else:
+            raise NotImplementedError
+
+    def _predictive_variance_analytical(self,mu,variance, pred_mean):
+
+        if isinstance(self.gp_link,gp_transformations.Heaviside):
+            return 0.
+        else:
+            raise NotImplementedError
+
+    def pdf_link(self, link_f, y, extra_data=None):
+        """
+        Likelihood function given link(f)
+
+        .. math::
+            p(y_{i}|\\lambda(f_{i})) = \\lambda(f_{i})^{y_{i}}(1-\\lambda(f_{i}))^{1-y_{i}}
+
+        :param link_f: latent variables link(f)
+        :type link_f: Nx1 array
+        :param y: data
+        :type y: Nx1 array
+        :param extra_data: extra_data not used in bernoulli
+        :returns: likelihood evaluated for this point
+        :rtype: float
+
+        .. Note::
+            Each y_i must be in {0,1}
+        """
+        assert np.atleast_1d(link_f).shape == np.atleast_1d(y).shape
+        objective = (link_f**y) * ((1.-link_f)**(1.-y))
+        return np.exp(np.sum(np.log(objective)))
+
+    def logpdf_link(self, link_f, y, extra_data=None):
+        """
+        Log Likelihood function given link(f)
+
+        .. math::
+            \\ln p(y_{i}|\\lambda(f_{i})) = y_{i}\\log\\lambda(f_{i}) + (1-y_{i})\\log (1-\\lambda(f_{i}))
+
+        :param link_f: latent variables link(f)
+        :type link_f: Nx1 array
+        :param y: data
+        :type y: Nx1 array
+        :param extra_data: extra_data not used in bernoulli
+        :returns: log likelihood evaluated at points link(f)
+        :rtype: float
+        """
+        assert np.atleast_1d(link_f).shape == np.atleast_1d(y).shape
+        #objective = y*np.log(link_f) + (1.-y)*np.log(1.-link_f)
+        objective = np.where(y==1, np.log(link_f), np.log(1-link_f))
+        return np.sum(objective)
+
+    def dlogpdf_dlink(self, link_f, y, extra_data=None):
+        """
+        Gradient of the pdf at y, given link(f) w.r.t link(f)
+
+        .. math::
+            \\frac{d\\ln p(y_{i}|\\lambda(f_{i}))}{d\\lambda(f)} = \\frac{y_{i}}{\\lambda(f_{i})} - \\frac{(1 - y_{i})}{(1 - \\lambda(f_{i}))}
+
+        :param link_f: latent variables link(f)
+        :type link_f: Nx1 array
+        :param y: data
+        :type y: Nx1 array
+        :param extra_data: extra_data not used in bernoulli
+        :returns: gradient of log likelihood evaluated at points link(f)
+        :rtype: Nx1 array
+        """
+        assert np.atleast_1d(link_f).shape == np.atleast_1d(y).shape
+        grad = (y/link_f) - (1.-y)/(1-link_f)
+        return grad
+
+    def d2logpdf_dlink2(self, link_f, y, extra_data=None):
+        """
+        Hessian at y, given link_f, w.r.t link_f. The hessian will be 0 unless i == j,
+        i.e. second derivative logpdf at y given link(f_i) link(f_j) w.r.t link(f_i) and link(f_j)
+
+        .. math::
+            \\frac{d^{2}\\ln p(y_{i}|\\lambda(f_{i}))}{d\\lambda(f)^{2}} = \\frac{-y_{i}}{\\lambda(f)^{2}} - \\frac{(1-y_{i})}{(1-\\lambda(f))^{2}}
+
+        :param link_f: latent variables link(f)
+        :type link_f: Nx1 array
+        :param y: data
+        :type y: Nx1 array
+        :param extra_data: extra_data not used in bernoulli
+        :returns: Diagonal of log hessian matrix (second derivative of log likelihood evaluated at points link(f))
+        :rtype: Nx1 array
+
+        .. Note::
+            Will return diagonal of hessian, since everywhere else it is 0, as the likelihood factorizes over cases
+            (the distribution for y_i depends only on link(f_i), not on link(f_(j!=i)))
+        """
+        assert np.atleast_1d(link_f).shape == np.atleast_1d(y).shape
+        d2logpdf_dlink2 = -y/(link_f**2) - (1-y)/((1-link_f)**2)
+        return d2logpdf_dlink2
+
+    def d3logpdf_dlink3(self, link_f, y, extra_data=None):
+        """
+        Third order derivative log-likelihood function at y given link(f) w.r.t link(f)
+
+        .. math::
+            \\frac{d^{3} \\ln p(y_{i}|\\lambda(f_{i}))}{d^{3}\\lambda(f)} = \\frac{2y_{i}}{\\lambda(f)^{3}} - \\frac{2(1-y_{i})}{(1-\\lambda(f))^{3}}
+
+        :param link_f: latent variables link(f)
+        :type link_f: Nx1 array
+        :param y: data
+        :type y: Nx1 array
+        :param extra_data: extra_data not used in bernoulli
+        :returns: third derivative of log likelihood evaluated at points link(f)
+        :rtype: Nx1 array
+        """
+        assert np.atleast_1d(link_f).shape == np.atleast_1d(y).shape
+        d3logpdf_dlink3 = 2*(y/(link_f**3) - (1-y)/((1-link_f)**3))
+        return d3logpdf_dlink3
+
+    def _mean(self,gp):
+        """
+        Expected value of y under p(y|f), i.e. the success probability
+        """
+        return self.gp_link.transf(gp)
+
+    def _variance(self,gp):
+        """
+        Variance of y under p(y|f)
+        """
+        p = self.gp_link.transf(gp)
+        return p*(1.-p)
+
+    def samples(self, gp):
+        """
+        Returns a set of samples of observations based on a given value of the latent variable.
+
+        :param gp: latent variable
+        """
+        orig_shape = gp.shape
+        gp = gp.flatten()
+        ns = np.ones_like(gp, dtype=int)
+        Ysim = np.random.binomial(ns, self.gp_link.transf(gp))
+        return Ysim.reshape(orig_shape)
diff --git a/GPy/likelihoods/noise_models/exponential_noise.py b/GPy/likelihoods/noise_models/exponential_noise.py
new file mode 100644
index 00000000..602ccea5
--- /dev/null
+++ b/GPy/likelihoods/noise_models/exponential_noise.py
@@ -0,0 +1,156 @@
+# Copyright (c) 2012, 2013 Ricardo Andrade
+# Licensed under the BSD 3-clause license (see LICENSE.txt)
+
+
+import numpy as np
+from scipy import stats,special
+import scipy as sp
+from GPy.util.univariate_Gaussian import std_norm_pdf,std_norm_cdf
+import gp_transformations
+from noise_distributions import NoiseDistribution
+
+class Exponential(NoiseDistribution):
+    """
+    Exponential likelihood
+
+    .. math::
+        p(y_{i}|\\lambda(f_{i})) = \\lambda(f_{i})\\exp (-y_{i}\\lambda(f_{i}))
+
+    .. Note::
+        Y is expected to take non-negative real values
+    """
+    def __init__(self,gp_link=None,analytical_mean=False,analytical_variance=False):
+        super(Exponential, self).__init__(gp_link,analytical_mean,analytical_variance)
+
+    def _preprocess_values(self,Y):
+        return Y
+
+    def pdf_link(self, link_f, y, extra_data=None):
+        """
+        Likelihood function given link(f)
+
+        .. math::
+            p(y_{i}|\\lambda(f_{i})) = \\lambda(f_{i})\\exp (-y\\lambda(f_{i}))
+
+        :param link_f: latent variables link(f)
+        :type link_f: Nx1 array
+        :param y: data
+        :type y: Nx1 array
+        :param extra_data: extra_data which is not used in exponential distribution
+        :returns: likelihood evaluated for this point
+        :rtype: float
+        """
+        assert np.atleast_1d(link_f).shape == np.atleast_1d(y).shape
+        objective = link_f*np.exp(-y*link_f)
+        return np.exp(np.sum(np.log(objective)))
+        #return np.exp(np.sum(-y/link_f - np.log(link_f) ))
+
+    def logpdf_link(self, link_f, y, extra_data=None):
+        """
+        Log Likelihood Function given link(f)
+
+        .. math::
+            \\ln p(y_{i}|\\lambda(f_{i})) = \\ln \\lambda(f_{i}) - y_{i}\\lambda(f_{i})
+
+        :param link_f: latent variables (link(f))
+        :type link_f: Nx1 array
+        :param y: data
+        :type y: Nx1 array
+        :param extra_data: extra_data which is not used in exponential distribution
+        :returns: likelihood evaluated for this point
+        :rtype: float
+
+        """
+        assert np.atleast_1d(link_f).shape == np.atleast_1d(y).shape
+        log_objective = np.log(link_f) - y*link_f
+        #logpdf_link = np.sum(-np.log(link_f) - y/link_f)
+        return np.sum(log_objective)
+
+    def dlogpdf_dlink(self, link_f, y, extra_data=None):
+        """
+        Gradient of the log likelihood function at y, given link(f) w.r.t link(f)
+
+        .. math::
+            \\frac{d \\ln p(y_{i}|\\lambda(f_{i}))}{d\\lambda(f)} = \\frac{1}{\\lambda(f)} - y_{i}
+
+        :param link_f: latent variables (f)
+        :type link_f: Nx1 array
+        :param y: data
+        :type y: Nx1 array
+        :param extra_data: extra_data which is not used in exponential distribution
+        :returns: gradient of likelihood evaluated at points
+        :rtype: Nx1 array
+
+        """
+        assert np.atleast_1d(link_f).shape == np.atleast_1d(y).shape
+        grad = 1./link_f - y
+        #grad = y/(link_f**2) - 1./link_f
+        return grad
+
+    def d2logpdf_dlink2(self, link_f, y, extra_data=None):
+        """
+        Hessian at y, given link(f), w.r.t link(f)
+        i.e. second derivative logpdf at y given link(f_i) and link(f_j) w.r.t link(f_i) and link(f_j)
+        The hessian will be 0 unless i == j
+
+        ..
math:: + \\frac{d^{2} \\ln p(y_{i}|\lambda(f_{i}))}{d^{2}\\lambda(f)} = -\\frac{1}{\\lambda(f_{i})^{2}} + + :param link_f: latent variables link(f) + :type link_f: Nx1 array + :param y: data + :type y: Nx1 array + :param extra_data: extra_data which is not used in exponential distribution + :returns: Diagonal of hessian matrix (second derivative of likelihood evaluated at points f) + :rtype: Nx1 array + + .. Note:: + Will return diagonal of hessian, since every where else it is 0, as the likelihood factorizes over cases + (the distribution for y_i depends only on link(f_i) not on link(f_(j!=i)) + """ + assert np.atleast_1d(link_f).shape == np.atleast_1d(y).shape + hess = -1./(link_f**2) + #hess = -2*y/(link_f**3) + 1/(link_f**2) + return hess + + def d3logpdf_dlink3(self, link_f, y, extra_data=None): + """ + Third order derivative log-likelihood function at y given link(f) w.r.t link(f) + + .. math:: + \\frac{d^{3} \\ln p(y_{i}|\lambda(f_{i}))}{d^{3}\\lambda(f)} = \\frac{2}{\\lambda(f_{i})^{3}} + + :param link_f: latent variables link(f) + :type link_f: Nx1 array + :param y: data + :type y: Nx1 array + :param extra_data: extra_data which is not used in exponential distribution + :returns: third derivative of likelihood evaluated at points f + :rtype: Nx1 array + """ + assert np.atleast_1d(link_f).shape == np.atleast_1d(y).shape + d3lik_dlink3 = 2./(link_f**3) + #d3lik_dlink3 = 6*y/(link_f**4) - 2./(link_f**3) + return d3lik_dlink3 + + def _mean(self,gp): + """ + Mass (or density) function + """ + return self.gp_link.transf(gp) + + def _variance(self,gp): + """ + Mass (or density) function + """ + return self.gp_link.transf(gp)**2 + + def samples(self, gp): + """ + Returns a set of samples of observations based on a given value of the latent variable. + + :param gp: latent variable + """ + orig_shape = gp.shape + gp = gp.flatten() + Ysim = np.random.exponential(1.0/self.gp_link.transf(gp)) + return Ysim.reshape(orig_shape) diff --git a/GPy/likelihoods/noise_models/gamma_noise.py b/GPy/likelihoods/noise_models/gamma_noise.py new file mode 100644 index 00000000..2be3106a --- /dev/null +++ b/GPy/likelihoods/noise_models/gamma_noise.py @@ -0,0 +1,155 @@ +# Copyright (c) 2012, 2013 Ricardo Andrade +# Licensed under the BSD 3-clause license (see LICENSE.txt) + + +import numpy as np +from scipy import stats,special +import scipy as sp +from GPy.util.univariate_Gaussian import std_norm_pdf,std_norm_cdf +import gp_transformations +from noise_distributions import NoiseDistribution + +class Gamma(NoiseDistribution): + """ + Gamma likelihood + + .. math:: + p(y_{i}|\\lambda(f_{i})) = \\frac{\\beta^{\\alpha_{i}}}{\\Gamma(\\alpha_{i})}y_{i}^{\\alpha_{i}-1}e^{-\\beta y_{i}}\\\\ + \\alpha_{i} = \\beta y_{i} + + """ + def __init__(self,gp_link=None,analytical_mean=False,analytical_variance=False,beta=1.): + self.beta = beta + super(Gamma, self).__init__(gp_link,analytical_mean,analytical_variance) + + def _preprocess_values(self,Y): + return Y + + def pdf_link(self, link_f, y, extra_data=None): + """ + Likelihood function given link(f) + + .. 
math::
+            p(y_{i}|\\lambda(f_{i})) = \\frac{\\beta^{\\alpha_{i}}}{\\Gamma(\\alpha_{i})}y_{i}^{\\alpha_{i}-1}e^{-\\beta y_{i}}\\\\
+            \\alpha_{i} = \\beta y_{i}
+
+        :param link_f: latent variables link(f)
+        :type link_f: Nx1 array
+        :param y: data
+        :type y: Nx1 array
+        :param extra_data: extra_data which is not used in gamma distribution
+        :returns: likelihood evaluated for this point
+        :rtype: float
+        """
+        assert np.atleast_1d(link_f).shape == np.atleast_1d(y).shape
+        #return stats.gamma.pdf(obs,a = self.gp_link.transf(gp)/self.variance,scale=self.variance)
+        alpha = link_f*self.beta
+        objective = (y**(alpha - 1.) * np.exp(-self.beta*y) * self.beta**alpha)/ special.gamma(alpha)
+        return np.exp(np.sum(np.log(objective)))
+
+    def logpdf_link(self, link_f, y, extra_data=None):
+        """
+        Log Likelihood Function given link(f)
+
+        .. math::
+            \\ln p(y_{i}|\\lambda(f_{i})) = \\alpha_{i}\\log \\beta - \\log \\Gamma(\\alpha_{i}) + (\\alpha_{i} - 1)\\log y_{i} - \\beta y_{i}\\\\
+            \\alpha_{i} = \\beta y_{i}
+
+        :param link_f: latent variables (link(f))
+        :type link_f: Nx1 array
+        :param y: data
+        :type y: Nx1 array
+        :param extra_data: extra_data which is not used in gamma distribution
+        :returns: likelihood evaluated for this point
+        :rtype: float
+
+        """
+        assert np.atleast_1d(link_f).shape == np.atleast_1d(y).shape
+        #alpha = self.gp_link.transf(gp)*self.beta
+        #return (1. - alpha)*np.log(obs) + self.beta*obs - alpha * np.log(self.beta) + np.log(special.gamma(alpha))
+        alpha = link_f*self.beta
+        log_objective = alpha*np.log(self.beta) - np.log(special.gamma(alpha)) + (alpha - 1)*np.log(y) - self.beta*y
+        return np.sum(log_objective)
+
+    def dlogpdf_dlink(self, link_f, y, extra_data=None):
+        """
+        Gradient of the log likelihood function at y, given link(f) w.r.t link(f)
+
+        .. math::
+            \\frac{d \\ln p(y_{i}|\\lambda(f_{i}))}{d\\lambda(f)} = \\beta (\\log \\beta y_{i}) - \\Psi(\\alpha_{i})\\beta\\\\
+            \\alpha_{i} = \\beta y_{i}
+
+        :param link_f: latent variables (f)
+        :type link_f: Nx1 array
+        :param y: data
+        :type y: Nx1 array
+        :param extra_data: extra_data which is not used in gamma distribution
+        :returns: gradient of likelihood evaluated at points
+        :rtype: Nx1 array
+
+        """
+        assert np.atleast_1d(link_f).shape == np.atleast_1d(y).shape
+        grad = self.beta*np.log(self.beta*y) - special.psi(self.beta*link_f)*self.beta
+        #old
+        #return -self.gp_link.dtransf_df(gp)*self.beta*np.log(obs) + special.psi(self.gp_link.transf(gp)*self.beta) * self.gp_link.dtransf_df(gp)*self.beta
+        return grad
+
+    def d2logpdf_dlink2(self, link_f, y, extra_data=None):
+        """
+        Hessian at y, given link(f), w.r.t link(f)
+        i.e. second derivative logpdf at y given link(f_i) and link(f_j) w.r.t link(f_i) and link(f_j)
+        The hessian will be 0 unless i == j
+
+        .. math::
+            \\frac{d^{2} \\ln p(y_{i}|\\lambda(f_{i}))}{d^{2}\\lambda(f)} = -\\beta^{2}\\frac{d\\Psi(\\alpha_{i})}{d\\alpha_{i}}\\\\
+            \\alpha_{i} = \\beta y_{i}
+
+        :param link_f: latent variables link(f)
+        :type link_f: Nx1 array
+        :param y: data
+        :type y: Nx1 array
+        :param extra_data: extra_data which is not used in gamma distribution
+        :returns: Diagonal of hessian matrix (second derivative of likelihood evaluated at points f)
+        :rtype: Nx1 array
+
+        ..
Note:: + Will return diagonal of hessian, since every where else it is 0, as the likelihood factorizes over cases + (the distribution for y_i depends only on link(f_i) not on link(f_(j!=i)) + """ + assert np.atleast_1d(link_f).shape == np.atleast_1d(y).shape + hess = -special.polygamma(1, self.beta*link_f)*(self.beta**2) + #old + #return -self.gp_link.d2transf_df2(gp)*self.beta*np.log(obs) + special.polygamma(1,self.gp_link.transf(gp)*self.beta)*(self.gp_link.dtransf_df(gp)*self.beta)**2 + special.psi(self.gp_link.transf(gp)*self.beta)*self.gp_link.d2transf_df2(gp)*self.beta + return hess + + def d3logpdf_dlink3(self, link_f, y, extra_data=None): + """ + Third order derivative log-likelihood function at y given link(f) w.r.t link(f) + + .. math:: + \\frac{d^{3} \\ln p(y_{i}|\lambda(f_{i}))}{d^{3}\\lambda(f)} = -\\beta^{3}\\frac{d^{2}\\Psi(\\alpha_{i})}{d\\alpha_{i}}\\\\ + \\alpha_{i} = \\beta y_{i} + + :param link_f: latent variables link(f) + :type link_f: Nx1 array + :param y: data + :type y: Nx1 array + :param extra_data: extra_data which is not used in gamma distribution + :returns: third derivative of likelihood evaluated at points f + :rtype: Nx1 array + """ + assert np.atleast_1d(link_f).shape == np.atleast_1d(y).shape + d3lik_dlink3 = -special.polygamma(2, self.beta*link_f)*(self.beta**3) + return d3lik_dlink3 + + def _mean(self,gp): + """ + Mass (or density) function + """ + return self.gp_link.transf(gp) + + def _variance(self,gp): + """ + Mass (or density) function + """ + return self.gp_link.transf(gp)/self.beta diff --git a/GPy/likelihoods/noise_models/gaussian_noise.py b/GPy/likelihoods/noise_models/gaussian_noise.py new file mode 100644 index 00000000..3da6bcc8 --- /dev/null +++ b/GPy/likelihoods/noise_models/gaussian_noise.py @@ -0,0 +1,300 @@ +# Copyright (c) 2012, 2013 Ricardo Andrade +# Licensed under the BSD 3-clause license (see LICENSE.txt) + +import numpy as np +from scipy import stats,special +import scipy as sp +from GPy.util.univariate_Gaussian import std_norm_pdf,std_norm_cdf +import gp_transformations +from noise_distributions import NoiseDistribution + +class Gaussian(NoiseDistribution): + """ + Gaussian likelihood + + .. math:: + \\ln p(y_{i}|\\lambda(f_{i})) = -\\frac{N \\ln 2\\pi}{2} - \\frac{\\ln |K|}{2} - \\frac{(y_{i} - \\lambda(f_{i}))^{T}\\sigma^{-2}(y_{i} - \\lambda(f_{i}))}{2} + + :param variance: variance value of the Gaussian distribution + :param N: Number of data points + :type N: int + """ + def __init__(self,gp_link=None,analytical_mean=False,analytical_variance=False,variance=1., D=None, N=None): + self.variance = variance + self.N = N + self._set_params(np.asarray(variance)) + super(Gaussian, self).__init__(gp_link,analytical_mean,analytical_variance) + if isinstance(gp_link , gp_transformations.Identity): + self.log_concave = True + + def _get_params(self): + return np.array([self.variance]) + + def _get_param_names(self): + return ['noise_model_variance'] + + def _set_params(self, p): + self.variance = float(p) + self.I = np.eye(self.N) + self.covariance_matrix = self.I * self.variance + self.Ki = self.I*(1.0 / self.variance) + #self.ln_det_K = np.sum(np.log(np.diag(self.covariance_matrix))) + self.ln_det_K = self.N*np.log(self.variance) + + def _gradients(self,partial): + return np.zeros(1) + #return np.sum(partial) + + def _preprocess_values(self,Y): + """ + Check if the values of the observations correspond to the values + assumed by the likelihood function. 
+ """ + return Y + + def _moments_match_analytical(self,data_i,tau_i,v_i): + """ + Moments match of the marginal approximation in EP algorithm + + :param i: number of observation (int) + :param tau_i: precision of the cavity distribution (float) + :param v_i: mean/variance of the cavity distribution (float) + """ + sigma2_hat = 1./(1./self.variance + tau_i) + mu_hat = sigma2_hat*(data_i/self.variance + v_i) + sum_var = self.variance + 1./tau_i + Z_hat = 1./np.sqrt(2.*np.pi*sum_var)*np.exp(-.5*(data_i - v_i/tau_i)**2./sum_var) + return Z_hat, mu_hat, sigma2_hat + + def _predictive_mean_analytical(self,mu,sigma): + new_sigma2 = self.predictive_variance(mu,sigma) + return new_sigma2*(mu/sigma**2 + self.gp_link.transf(mu)/self.variance) + + def _predictive_variance_analytical(self,mu,sigma,predictive_mean=None): + return 1./(1./self.variance + 1./sigma**2) + + def _mass(self, link_f, y, extra_data=None): + NotImplementedError("Deprecated, now doing chain in noise_model.py for link function evaluation\ + Please negate your function and use pdf in noise_model.py, if implementing a likelihood\ + rederivate the derivative without doing the chain and put in logpdf, dlogpdf_dlink or\ + its derivatives") + def _nlog_mass(self, link_f, y, extra_data=None): + NotImplementedError("Deprecated, now doing chain in noise_model.py for link function evaluation\ + Please negate your function and use logpdf in noise_model.py, if implementing a likelihood\ + rederivate the derivative without doing the chain and put in logpdf, dlogpdf_dlink or\ + its derivatives") + + def _dnlog_mass_dgp(self, link_f, y, extra_data=None): + NotImplementedError("Deprecated, now doing chain in noise_model.py for link function evaluation\ + Please negate your function and use dlogpdf_df in noise_model.py, if implementing a likelihood\ + rederivate the derivative without doing the chain and put in logpdf, dlogpdf_dlink or\ + its derivatives") + + def _d2nlog_mass_dgp2(self, link_f, y, extra_data=None): + NotImplementedError("Deprecated, now doing chain in noise_model.py for link function evaluation\ + Please negate your function and use d2logpdf_df2 in noise_model.py, if implementing a likelihood\ + rederivate the derivative without doing the chain and put in logpdf, dlogpdf_dlink or\ + its derivatives") + + def pdf_link(self, link_f, y, extra_data=None): + """ + Likelihood function given link(f) + + .. math:: + \\ln p(y_{i}|\\lambda(f_{i})) = -\\frac{N \\ln 2\\pi}{2} - \\frac{\\ln |K|}{2} - \\frac{(y_{i} - \\lambda(f_{i}))^{T}\\sigma^{-2}(y_{i} - \\lambda(f_{i}))}{2} + + :param link_f: latent variables link(f) + :type link_f: Nx1 array + :param y: data + :type y: Nx1 array + :param extra_data: extra_data not used in gaussian + :returns: likelihood evaluated for this point + :rtype: float + """ + #Assumes no covariance, exp, sum, log for numerical stability + return np.exp(np.sum(np.log(stats.norm.pdf(y, link_f, np.sqrt(self.variance))))) + + def logpdf_link(self, link_f, y, extra_data=None): + """ + Log likelihood function given link(f) + + .. 
math:: + \\ln p(y_{i}|\\lambda(f_{i})) = -\\frac{N \\ln 2\\pi}{2} - \\frac{\\ln |K|}{2} - \\frac{(y_{i} - \\lambda(f_{i}))^{T}\\sigma^{-2}(y_{i} - \\lambda(f_{i}))}{2} + + :param link_f: latent variables link(f) + :type link_f: Nx1 array + :param y: data + :type y: Nx1 array + :param extra_data: extra_data not used in gaussian + :returns: log likelihood evaluated for this point + :rtype: float + """ + assert np.asarray(link_f).shape == np.asarray(y).shape + return -0.5*(np.sum((y-link_f)**2/self.variance) + self.ln_det_K + self.N*np.log(2.*np.pi)) + + def dlogpdf_dlink(self, link_f, y, extra_data=None): + """ + Gradient of the pdf at y, given link(f) w.r.t link(f) + + .. math:: + \\frac{d \\ln p(y_{i}|\\lambda(f_{i}))}{d\\lambda(f)} = \\frac{1}{\\sigma^{2}}(y_{i} - \\lambda(f_{i})) + + :param link_f: latent variables link(f) + :type link_f: Nx1 array + :param y: data + :type y: Nx1 array + :param extra_data: extra_data not used in gaussian + :returns: gradient of log likelihood evaluated at points link(f) + :rtype: Nx1 array + """ + assert np.asarray(link_f).shape == np.asarray(y).shape + s2_i = (1.0/self.variance) + grad = s2_i*y - s2_i*link_f + return grad + + def d2logpdf_dlink2(self, link_f, y, extra_data=None): + """ + Hessian at y, given link_f, w.r.t link_f. + i.e. second derivative logpdf at y given link(f_i) link(f_j) w.r.t link(f_i) and link(f_j) + + The hessian will be 0 unless i == j + + .. math:: + \\frac{d^{2} \\ln p(y_{i}|\\lambda(f_{i}))}{d^{2}f} = -\\frac{1}{\\sigma^{2}} + + :param link_f: latent variables link(f) + :type link_f: Nx1 array + :param y: data + :type y: Nx1 array + :param extra_data: extra_data not used in gaussian + :returns: Diagonal of log hessian matrix (second derivative of log likelihood evaluated at points link(f)) + :rtype: Nx1 array + + .. Note:: + Will return diagonal of hessian, since every where else it is 0, as the likelihood factorizes over cases + (the distribution for y_i depends only on link(f_i) not on link(f_(j!=i)) + """ + assert np.asarray(link_f).shape == np.asarray(y).shape + hess = -(1.0/self.variance)*np.ones((self.N, 1)) + return hess + + def d3logpdf_dlink3(self, link_f, y, extra_data=None): + """ + Third order derivative log-likelihood function at y given link(f) w.r.t link(f) + + .. math:: + \\frac{d^{3} \\ln p(y_{i}|\\lambda(f_{i}))}{d^{3}\\lambda(f)} = 0 + + :param link_f: latent variables link(f) + :type link_f: Nx1 array + :param y: data + :type y: Nx1 array + :param extra_data: extra_data not used in gaussian + :returns: third derivative of log likelihood evaluated at points link(f) + :rtype: Nx1 array + """ + assert np.asarray(link_f).shape == np.asarray(y).shape + d3logpdf_dlink3 = np.diagonal(0*self.I)[:, None] + return d3logpdf_dlink3 + + def dlogpdf_link_dvar(self, link_f, y, extra_data=None): + """ + Gradient of the log-likelihood function at y given link(f), w.r.t variance parameter (noise_variance) + + .. 
math:: + \\frac{d \\ln p(y_{i}|\\lambda(f_{i}))}{d\\sigma^{2}} = -\\frac{N}{2\\sigma^{2}} + \\frac{(y_{i} - \\lambda(f_{i}))^{2}}{2\\sigma^{4}} + + :param link_f: latent variables link(f) + :type link_f: Nx1 array + :param y: data + :type y: Nx1 array + :param extra_data: extra_data not used in gaussian + :returns: derivative of log likelihood evaluated at points link(f) w.r.t variance parameter + :rtype: float + """ + assert np.asarray(link_f).shape == np.asarray(y).shape + e = y - link_f + s_4 = 1.0/(self.variance**2) + dlik_dsigma = -0.5*self.N/self.variance + 0.5*s_4*np.sum(np.square(e)) + return np.sum(dlik_dsigma) # Sure about this sum? + + def dlogpdf_dlink_dvar(self, link_f, y, extra_data=None): + """ + Derivative of the dlogpdf_dlink w.r.t variance parameter (noise_variance) + + .. math:: + \\frac{d}{d\\sigma^{2}}(\\frac{d \\ln p(y_{i}|\\lambda(f_{i}))}{d\\lambda(f)}) = \\frac{1}{\\sigma^{4}}(-y_{i} + \\lambda(f_{i})) + + :param link_f: latent variables link(f) + :type link_f: Nx1 array + :param y: data + :type y: Nx1 array + :param extra_data: extra_data not used in gaussian + :returns: derivative of log likelihood evaluated at points link(f) w.r.t variance parameter + :rtype: Nx1 array + """ + assert np.asarray(link_f).shape == np.asarray(y).shape + s_4 = 1.0/(self.variance**2) + dlik_grad_dsigma = -s_4*y + s_4*link_f + return dlik_grad_dsigma + + def d2logpdf_dlink2_dvar(self, link_f, y, extra_data=None): + """ + Gradient of the hessian (d2logpdf_dlink2) w.r.t variance parameter (noise_variance) + + .. math:: + \\frac{d}{d\\sigma^{2}}(\\frac{d^{2} \\ln p(y_{i}|\\lambda(f_{i}))}{d^{2}\\lambda(f)}) = \\frac{1}{\\sigma^{4}} + + :param link_f: latent variables link(f) + :type link_f: Nx1 array + :param y: data + :type y: Nx1 array + :param extra_data: extra_data not used in gaussian + :returns: derivative of log hessian evaluated at points link(f_i) and link(f_j) w.r.t variance parameter + :rtype: Nx1 array + """ + assert np.asarray(link_f).shape == np.asarray(y).shape + s_4 = 1.0/(self.variance**2) + d2logpdf_dlink2_dvar = np.diag(s_4*self.I)[:, None] + return d2logpdf_dlink2_dvar + + def dlogpdf_link_dtheta(self, f, y, extra_data=None): + dlogpdf_dvar = self.dlogpdf_link_dvar(f, y, extra_data=extra_data) + return np.asarray([[dlogpdf_dvar]]) + + def dlogpdf_dlink_dtheta(self, f, y, extra_data=None): + dlogpdf_dlink_dvar = self.dlogpdf_dlink_dvar(f, y, extra_data=extra_data) + return dlogpdf_dlink_dvar + + def d2logpdf_dlink2_dtheta(self, f, y, extra_data=None): + d2logpdf_dlink2_dvar = self.d2logpdf_dlink2_dvar(f, y, extra_data=extra_data) + return d2logpdf_dlink2_dvar + + def _mean(self,gp): + """ + Expected value of y under the Mass (or density) function p(y|f) + + .. math:: + E_{p(y|f)}[y] + """ + return self.gp_link.transf(gp) + + def _variance(self,gp): + """ + Variance of y under the Mass (or density) function p(y|f) + + .. math:: + Var_{p(y|f)}[y] + """ + return self.variance + + def samples(self, gp): + """ + Returns a set of samples of observations based on a given value of the latent variable. 
+ + :param gp: latent variable + """ + orig_shape = gp.shape + gp = gp.flatten() + Ysim = np.array([np.random.normal(self.gp_link.transf(gpj), scale=np.sqrt(self.variance), size=1) for gpj in gp]) + return Ysim.reshape(orig_shape) diff --git a/GPy/likelihoods/noise_models/gp_transformations.py b/GPy/likelihoods/noise_models/gp_transformations.py new file mode 100644 index 00000000..5155a69d --- /dev/null +++ b/GPy/likelihoods/noise_models/gp_transformations.py @@ -0,0 +1,159 @@ +# Copyright (c) 2012, 2013 Ricardo Andrade +# Licensed under the BSD 3-clause license (see LICENSE.txt) + + +import numpy as np +from scipy import stats +import scipy as sp +import pylab as pb +from GPy.util.univariate_Gaussian import std_norm_pdf,std_norm_cdf,inv_std_norm_cdf + +class GPTransformation(object): + """ + Link function class for doing non-Gaussian likelihoods approximation + + :param Y: observed output (Nx1 numpy.darray) + + .. note:: Y values allowed depend on the likelihood_function used + + """ + def __init__(self): + pass + + def transf(self,f): + """ + Gaussian process tranformation function, latent space -> output space + """ + raise NotImplementedError + + def dtransf_df(self,f): + """ + derivative of transf(f) w.r.t. f + """ + raise NotImplementedError + + def d2transf_df2(self,f): + """ + second derivative of transf(f) w.r.t. f + """ + raise NotImplementedError + + def d3transf_df3(self,f): + """ + third derivative of transf(f) w.r.t. f + """ + raise NotImplementedError + +class Identity(GPTransformation): + """ + .. math:: + + g(f) = f + + """ + def transf(self,f): + return f + + def dtransf_df(self,f): + return np.ones_like(f) + + def d2transf_df2(self,f): + return np.zeros_like(f) + + def d3transf_df3(self,f): + return np.zeros_like(f) + + +class Probit(GPTransformation): + """ + .. math:: + + g(f) = \\Phi^{-1} (mu) + + """ + def transf(self,f): + return std_norm_cdf(f) + + def dtransf_df(self,f): + return std_norm_pdf(f) + + def d2transf_df2(self,f): + #FIXME + return -f * std_norm_pdf(f) + + def d3transf_df3(self,f): + #FIXME + f2 = f**2 + return -(1/(np.sqrt(2*np.pi)))*np.exp(-0.5*(f2))*(1-f2) + +class Log(GPTransformation): + """ + .. math:: + + g(f) = \\log(\\mu) + + """ + def transf(self,f): + return np.exp(f) + + def dtransf_df(self,f): + return np.exp(f) + + def d2transf_df2(self,f): + return np.exp(f) + + def d3transf_df3(self,f): + return np.exp(f) + +class Log_ex_1(GPTransformation): + """ + .. math:: + + g(f) = \\log(\\exp(\\mu) - 1) + + """ + def transf(self,f): + return np.log(1.+np.exp(f)) + + def dtransf_df(self,f): + return np.exp(f)/(1.+np.exp(f)) + + def d2transf_df2(self,f): + aux = np.exp(f)/(1.+np.exp(f)) + return aux*(1.-aux) + + def d3transf_df3(self,f): + aux = np.exp(f)/(1.+np.exp(f)) + daux_df = aux*(1.-aux) + return daux_df - (2.*aux*daux_df) + +class Reciprocal(GPTransformation): + def transf(self,f): + return 1./f + + def dtransf_df(self,f): + return -1./(f**2) + + def d2transf_df2(self,f): + return 2./(f**3) + + def d3transf_df3(self,f): + return -6./(f**4) + +class Heaviside(GPTransformation): + """ + + .. math:: + + g(f) = I_{x \\in A} + + """ + def transf(self,f): + #transformation goes here + return np.where(f>0, 1, 0) + + def dtransf_df(self,f): + raise NotImplementedError, "This function is not differentiable!" + + def d2transf_df2(self,f): + raise NotImplementedError, "This function is not differentiable!" 
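Every transformation above implements the same contract: transf maps the latent function f into the likelihood's parameter space, and dtransf_df, d2transf_df2 and d3transf_df3 supply the terms consumed by the chain rules in noise_distributions.py below. A cheap way to sanity-check a GPTransformation implementation is to compare dtransf_df against central finite differences of transf. The following sketch is illustrative only and not part of this patch: LogEx1, check_dtransf_df and the eps/tol values are hypothetical names chosen here, with LogEx1 re-implementing Log_ex_1 standalone so the snippet runs without importing GPy.

    import numpy as np

    class LogEx1(object):
        # Standalone stand-in for Log_ex_1: transf(f) = log(1 + exp(f))
        def transf(self, f):
            return np.log(1. + np.exp(f))
        def dtransf_df(self, f):
            return np.exp(f) / (1. + np.exp(f))

    def check_dtransf_df(link, f, eps=1e-6, tol=1e-7):
        # Central finite difference of transf, compared with the analytic derivative
        numerical = (link.transf(f + eps) - link.transf(f - eps)) / (2. * eps)
        return np.allclose(link.dtransf_df(f), numerical, atol=tol)

    f = np.linspace(-3., 3., 11)
    print check_dtransf_df(LogEx1(), f)   # expected: True

Applying the same check to each derivative order in turn catches sign and scaling mistakes before they surface downstream as EP or Laplace convergence failures.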
diff --git a/GPy/likelihoods/noise_models/noise_distributions.py b/GPy/likelihoods/noise_models/noise_distributions.py
new file mode 100644
index 00000000..a67d8792
--- /dev/null
+++ b/GPy/likelihoods/noise_models/noise_distributions.py
@@ -0,0 +1,433 @@
+# Copyright (c) 2012, 2013 Ricardo Andrade
+# Licensed under the BSD 3-clause license (see LICENSE.txt)
+
+
+import numpy as np
+from scipy import stats,special
+import scipy as sp
+import pylab as pb
+from GPy.util.plot import gpplot
+from GPy.util.univariate_Gaussian import std_norm_pdf,std_norm_cdf
+import gp_transformations
+from GPy.util.misc import chain_1, chain_2, chain_3
+from scipy.integrate import quad
+import warnings
+
+class NoiseDistribution(object):
+    """
+    Likelihood class for doing approximations
+    """
+    def __init__(self,gp_link,analytical_mean=False,analytical_variance=False):
+        assert isinstance(gp_link,gp_transformations.GPTransformation), "gp_link is not a valid GPTransformation."
+        self.gp_link = gp_link
+        self.analytical_mean = analytical_mean
+        self.analytical_variance = analytical_variance
+        if self.analytical_mean:
+            self.moments_match = self._moments_match_analytical
+            self.predictive_mean = self._predictive_mean_analytical
+        else:
+            self.moments_match = self._moments_match_numerical
+            self.predictive_mean = self._predictive_mean_numerical
+        if self.analytical_variance:
+            self.predictive_variance = self._predictive_variance_analytical
+        else:
+            self.predictive_variance = self._predictive_variance_numerical
+
+        self.log_concave = False
+
+    def _get_params(self):
+        return np.zeros(0)
+
+    def _get_param_names(self):
+        return []
+
+    def _set_params(self,p):
+        pass
+
+    def _gradients(self,partial):
+        return np.zeros(0)
+
+    def _preprocess_values(self,Y):
+        """
+        In case it is needed, this function assesses the output values or performs any pertinent transformation on them.
+
+        :param Y: observed output
+        :type Y: Nx1 numpy.darray
+
+        """
+        return Y
+
+    def _moments_match_analytical(self,obs,tau,v):
+        """
+        If available, this function computes the moments analytically.
+        """
+        raise NotImplementedError
+
+    def log_predictive_density(self, y_test, mu_star, var_star):
+        """
+        Calculation of the log predictive density
+
+        .. math::
+            p(y_{*}|D) = \\int p(y_{*}|f_{*})p(f_{*}|\\mu_{*},\\sigma^{2}_{*}) df_{*}
+
+        :param y_test: test observations (y_{*})
+        :type y_test: (Nx1) array
+        :param mu_star: predictive mean of gaussian p(f_{*}|mu_{*}, var_{*})
+        :type mu_star: (Nx1) array
+        :param var_star: predictive variance of gaussian p(f_{*}|mu_{*}, var_{*})
+        :type var_star: (Nx1) array
+        """
+        assert y_test.shape==mu_star.shape
+        assert y_test.shape==var_star.shape
+        assert y_test.shape[1] == 1
+        def integral_generator(y, m, v):
+            """Generate a function which can be integrated to give p(Y*|Y) = int p(Y*|f*)p(f*|Y) df*"""
+            def f(f_star):
+                return self.pdf(f_star, y)*np.exp(-(1./(2*v))*np.square(m-f_star))
+            return f
+
+        scaled_p_ystar, accuracy = zip(*[quad(integral_generator(y, m, v), -np.inf, np.inf) for y, m, v in zip(y_test.flatten(), mu_star.flatten(), var_star.flatten())])
+        scaled_p_ystar = np.array(scaled_p_ystar).reshape(-1,1)
+        p_ystar = scaled_p_ystar/np.sqrt(2*np.pi*var_star)
+        return np.log(p_ystar)
+
+    def _moments_match_numerical(self,obs,tau,v):
+        """
+        Calculation of moments using quadrature
+
+        :param obs: observed output
+        :param tau: cavity distribution 1st natural parameter (precision)
+        :param v: cavity distribution 2nd natural parameter (mu*precision)
+        """
+        #Compute first integral for zeroth moment.
+ #NOTE constant np.sqrt(2*pi/tau) added at the end of the function + mu = v/tau + def int_1(f): + return self.pdf(f, obs)*np.exp(-0.5*tau*np.square(mu-f)) + z_scaled, accuracy = quad(int_1, -np.inf, np.inf) + + #Compute second integral for first moment + def int_2(f): + return f*self.pdf(f, obs)*np.exp(-0.5*tau*np.square(mu-f)) + mean, accuracy = quad(int_2, -np.inf, np.inf) + mean /= z_scaled + + #Compute integral for variance + def int_3(f): + return (f**2)*self.pdf(f, obs)*np.exp(-0.5*tau*np.square(mu-f)) + Ef2, accuracy = quad(int_3, -np.inf, np.inf) + Ef2 /= z_scaled + variance = Ef2 - mean**2 + + #Add constant to the zeroth moment + #NOTE: this constant is not needed in the other moments because it cancells out. + z = z_scaled/np.sqrt(2*np.pi/tau) + + return z, mean, variance + + def _predictive_mean_analytical(self,mu,sigma): + """ + Predictive mean + .. math:: + E(Y^{*}|Y) = E( E(Y^{*}|f^{*}, Y) ) + + If available, this function computes the predictive mean analytically. + """ + raise NotImplementedError + + def _predictive_variance_analytical(self,mu,sigma): + """ + Predictive variance + .. math:: + V(Y^{*}| Y) = E( V(Y^{*}|f^{*}, Y) ) + V( E(Y^{*}|f^{*}, Y) ) + + If available, this function computes the predictive variance analytically. + """ + raise NotImplementedError + + def _predictive_mean_numerical(self,mu,variance): + """ + Quadrature calculation of the predictive mean: E(Y_star|Y) = E( E(Y_star|f_star, Y) ) + + :param mu: mean of posterior + :param sigma: standard deviation of posterior + + """ + def int_mean(f,m,v): + return self._mean(f)*np.exp(-(0.5/v)*np.square(f - m)) + scaled_mean = [quad(int_mean, -np.inf, np.inf,args=(mj,s2j))[0] for mj,s2j in zip(mu,variance)] + mean = np.array(scaled_mean)[:,None] / np.sqrt(2*np.pi*(variance)) + + return mean + + def _predictive_variance_numerical(self,mu,variance,predictive_mean=None): + """ + Numerical approximation to the predictive variance: V(Y_star) + + The following variance decomposition is used: + V(Y_star) = E( V(Y_star|f_star) ) + V( E(Y_star|f_star) ) + + :param mu: mean of posterior + :param sigma: standard deviation of posterior + :predictive_mean: output's predictive mean, if None _predictive_mean function will be called. 
+ + """ + #sigma2 = sigma**2 + normalizer = np.sqrt(2*np.pi*variance) + + # E( V(Y_star|f_star) ) + def int_var(f,m,v): + return self._variance(f)*np.exp(-(0.5/v)*np.square(f - m)) + scaled_exp_variance = [quad(int_var, -np.inf, np.inf,args=(mj,s2j))[0] for mj,s2j in zip(mu,variance)] + exp_var = np.array(scaled_exp_variance)[:,None] / normalizer + + #V( E(Y_star|f_star) ) = E( E(Y_star|f_star)**2 ) - E( E(Y_star|f_star) )**2 + + #E( E(Y_star|f_star) )**2 + if predictive_mean is None: + predictive_mean = self.predictive_mean(mu,variance) + predictive_mean_sq = predictive_mean**2 + + #E( E(Y_star|f_star)**2 ) + def int_pred_mean_sq(f,m,v,predictive_mean_sq): + return self._mean(f)**2*np.exp(-(0.5/v)*np.square(f - m)) + scaled_exp_exp2 = [quad(int_pred_mean_sq, -np.inf, np.inf,args=(mj,s2j,pm2j))[0] for mj,s2j,pm2j in zip(mu,variance,predictive_mean_sq)] + exp_exp2 = np.array(scaled_exp_exp2)[:,None] / normalizer + + var_exp = exp_exp2 - predictive_mean_sq + + # V(Y_star) = E( V(Y_star|f_star) ) + V( E(Y_star|f_star) ) + return exp_var + var_exp + + def pdf_link(self, link_f, y, extra_data=None): + raise NotImplementedError + + def logpdf_link(self, link_f, y, extra_data=None): + raise NotImplementedError + + def dlogpdf_dlink(self, link_f, y, extra_data=None): + raise NotImplementedError + + def d2logpdf_dlink2(self, link_f, y, extra_data=None): + raise NotImplementedError + + def d3logpdf_dlink3(self, link_f, y, extra_data=None): + raise NotImplementedError + + def dlogpdf_link_dtheta(self, link_f, y, extra_data=None): + raise NotImplementedError + + def dlogpdf_dlink_dtheta(self, link_f, y, extra_data=None): + raise NotImplementedError + + def d2logpdf_dlink2_dtheta(self, link_f, y, extra_data=None): + raise NotImplementedError + + def pdf(self, f, y, extra_data=None): + """ + Evaluates the link function link(f) then computes the likelihood (pdf) using it + + .. math: + p(y|\\lambda(f)) + + :param f: latent variables f + :type f: Nx1 array + :param y: data + :type y: Nx1 array + :param extra_data: extra_data which is not used in student t distribution - not used + :returns: likelihood evaluated for this point + :rtype: float + """ + link_f = self.gp_link.transf(f) + return self.pdf_link(link_f, y, extra_data=extra_data) + + def logpdf(self, f, y, extra_data=None): + """ + Evaluates the link function link(f) then computes the log likelihood (log pdf) using it + + .. math: + \\log p(y|\\lambda(f)) + + :param f: latent variables f + :type f: Nx1 array + :param y: data + :type y: Nx1 array + :param extra_data: extra_data which is not used in student t distribution - not used + :returns: log likelihood evaluated for this point + :rtype: float + """ + link_f = self.gp_link.transf(f) + return self.logpdf_link(link_f, y, extra_data=extra_data) + + def dlogpdf_df(self, f, y, extra_data=None): + """ + Evaluates the link function link(f) then computes the derivative of log likelihood using it + Uses the Faa di Bruno's formula for the chain rule + + .. 
math:: + \\frac{d\\log p(y|\\lambda(f))}{df} = \\frac{d\\log p(y|\\lambda(f))}{d\\lambda(f)}\\frac{d\\lambda(f)}{df} + + :param f: latent variables f + :type f: Nx1 array + :param y: data + :type y: Nx1 array + :param extra_data: extra_data which is not used in student t distribution - not used + :returns: derivative of log likelihood evaluated for this point + :rtype: 1xN array + """ + link_f = self.gp_link.transf(f) + dlogpdf_dlink = self.dlogpdf_dlink(link_f, y, extra_data=extra_data) + dlink_df = self.gp_link.dtransf_df(f) + return chain_1(dlogpdf_dlink, dlink_df) + + def d2logpdf_df2(self, f, y, extra_data=None): + """ + Evaluates the link function link(f) then computes the second derivative of log likelihood using it + Uses the Faa di Bruno's formula for the chain rule + + .. math:: + \\frac{d^{2}\\log p(y|\\lambda(f))}{df^{2}} = \\frac{d^{2}\\log p(y|\\lambda(f))}{d^{2}\\lambda(f)}\\left(\\frac{d\\lambda(f)}{df}\\right)^{2} + \\frac{d\\log p(y|\\lambda(f))}{d\\lambda(f)}\\frac{d^{2}\\lambda(f)}{df^{2}} + + :param f: latent variables f + :type f: Nx1 array + :param y: data + :type y: Nx1 array + :param extra_data: extra_data which is not used in student t distribution - not used + :returns: second derivative of log likelihood evaluated for this point (diagonal only) + :rtype: 1xN array + """ + link_f = self.gp_link.transf(f) + d2logpdf_dlink2 = self.d2logpdf_dlink2(link_f, y, extra_data=extra_data) + dlink_df = self.gp_link.dtransf_df(f) + dlogpdf_dlink = self.dlogpdf_dlink(link_f, y, extra_data=extra_data) + d2link_df2 = self.gp_link.d2transf_df2(f) + return chain_2(d2logpdf_dlink2, dlink_df, dlogpdf_dlink, d2link_df2) + + def d3logpdf_df3(self, f, y, extra_data=None): + """ + Evaluates the link function link(f) then computes the third derivative of log likelihood using it + Uses the Faa di Bruno's formula for the chain rule + + .. 
math::
+            \\frac{d^{3}\\log p(y|\\lambda(f))}{df^{3}} = \\frac{d^{3}\\log p(y|\\lambda(f))}{d\\lambda(f)^{3}}\\left(\\frac{d\\lambda(f)}{df}\\right)^{3} + 3\\frac{d^{2}\\log p(y|\\lambda(f))}{d\\lambda(f)^{2}}\\frac{d\\lambda(f)}{df}\\frac{d^{2}\\lambda(f)}{df^{2}} + \\frac{d\\log p(y|\\lambda(f))}{d\\lambda(f)}\\frac{d^{3}\\lambda(f)}{df^{3}}
+
+        :param f: latent variables f
+        :type f: Nx1 array
+        :param y: data
+        :type y: Nx1 array
+        :param extra_data: extra_data which is not used in student t distribution - not used
+        :returns: third derivative of log likelihood evaluated for this point
+        :rtype: float
+        """
+        link_f = self.gp_link.transf(f)
+        d3logpdf_dlink3 = self.d3logpdf_dlink3(link_f, y, extra_data=extra_data)
+        dlink_df = self.gp_link.dtransf_df(f)
+        d2logpdf_dlink2 = self.d2logpdf_dlink2(link_f, y, extra_data=extra_data)
+        d2link_df2 = self.gp_link.d2transf_df2(f)
+        dlogpdf_dlink = self.dlogpdf_dlink(link_f, y, extra_data=extra_data)
+        d3link_df3 = self.gp_link.d3transf_df3(f)
+        return chain_3(d3logpdf_dlink3, dlink_df, d2logpdf_dlink2, d2link_df2, dlogpdf_dlink, d3link_df3)
+
+    def dlogpdf_dtheta(self, f, y, extra_data=None):
+        """
+        TODO: Doc strings
+        """
+        if len(self._get_param_names()) > 0:
+            link_f = self.gp_link.transf(f)
+            return self.dlogpdf_link_dtheta(link_f, y, extra_data=extra_data)
+        else:
+            #No parameters, so return an empty array for the derivatives
+            return np.empty([1, 0])
+
+    def dlogpdf_df_dtheta(self, f, y, extra_data=None):
+        """
+        TODO: Doc strings
+        """
+        if len(self._get_param_names()) > 0:
+            link_f = self.gp_link.transf(f)
+            dlink_df = self.gp_link.dtransf_df(f)
+            dlogpdf_dlink_dtheta = self.dlogpdf_dlink_dtheta(link_f, y, extra_data=extra_data)
+            return chain_1(dlogpdf_dlink_dtheta, dlink_df)
+        else:
+            #No parameters, so return an empty array for the derivatives
+            return np.empty([f.shape[0], 0])
+
+    def d2logpdf_df2_dtheta(self, f, y, extra_data=None):
+        """
+        TODO: Doc strings
+        """
+        if len(self._get_param_names()) > 0:
+            link_f = self.gp_link.transf(f)
+            dlink_df = self.gp_link.dtransf_df(f)
+            d2link_df2 = self.gp_link.d2transf_df2(f)
+            d2logpdf_dlink2_dtheta = self.d2logpdf_dlink2_dtheta(link_f, y, extra_data=extra_data)
+            dlogpdf_dlink_dtheta = self.dlogpdf_dlink_dtheta(link_f, y, extra_data=extra_data)
+            return chain_2(d2logpdf_dlink2_dtheta, dlink_df, dlogpdf_dlink_dtheta, d2link_df2)
+        else:
+            #No parameters, so return an empty array for the derivatives
+            return np.empty([f.shape[0], 0])
+
+    def _laplace_gradients(self, f, y, extra_data=None):
+        dlogpdf_dtheta = self.dlogpdf_dtheta(f, y, extra_data=extra_data)
+        dlogpdf_df_dtheta = self.dlogpdf_df_dtheta(f, y, extra_data=extra_data)
+        d2logpdf_df2_dtheta = self.d2logpdf_df2_dtheta(f, y, extra_data=extra_data)
+
+        #Parameters are stacked vertically. Must be listed in same order as 'get_param_names'
+        # ensure we have gradients for every parameter we want to optimize
+        assert dlogpdf_dtheta.shape[1] == len(self._get_param_names())
+        assert dlogpdf_df_dtheta.shape[1] == len(self._get_param_names())
+        assert d2logpdf_df2_dtheta.shape[1] == len(self._get_param_names())
+        return dlogpdf_dtheta, dlogpdf_df_dtheta, d2logpdf_df2_dtheta
+
+    def predictive_values(self, mu, var, full_cov=False, sampling=False, num_samples=10000):
+        """
+        Compute mean, variance and confidence interval (percentiles 2.5 and 97.5) of the prediction.
+ + :param mu: mean of the latent variable, f, of posterior + :param var: variance of the latent variable, f, of posterior + :param full_cov: whether to use the full covariance or just the diagonal + :type full_cov: Boolean + :param num_samples: number of samples to use in computing quantiles and + possibly mean variance + :type num_samples: integer + :param sampling: Whether to use samples for mean and variances anyway + :type sampling: Boolean + + """ + + if sampling: + #Get gp_samples f* using posterior mean and variance + if not full_cov: + gp_samples = np.random.multivariate_normal(mu.flatten(), np.diag(var.flatten()), + size=num_samples).T + else: + gp_samples = np.random.multivariate_normal(mu.flatten(), var, + size=num_samples).T + #Push gp samples (f*) through likelihood to give p(y*|f*) + samples = self.samples(gp_samples) + axis=-1 + + #Calculate mean, variance and precentiles from samples + print "WARNING: Using sampling to calculate mean, variance and predictive quantiles." + pred_mean = np.mean(samples, axis=axis)[:,None] + pred_var = np.var(samples, axis=axis)[:,None] + q1 = np.percentile(samples, 2.5, axis=axis)[:,None] + q3 = np.percentile(samples, 97.5, axis=axis)[:,None] + + else: + + pred_mean = self.predictive_mean(mu, var) + pred_var = self.predictive_variance(mu, var, pred_mean) + print "WARNING: Predictive quantiles are only computed when sampling." + q1 = np.repeat(np.nan,pred_mean.size)[:,None] + q3 = q1.copy() + + return pred_mean, pred_var, q1, q3 + + def samples(self, gp): + """ + Returns a set of samples of observations based on a given value of the latent variable. + + :param gp: latent variable + """ + raise NotImplementedError diff --git a/GPy/likelihoods/noise_models/poisson_noise.py b/GPy/likelihoods/noise_models/poisson_noise.py new file mode 100644 index 00000000..b0300704 --- /dev/null +++ b/GPy/likelihoods/noise_models/poisson_noise.py @@ -0,0 +1,152 @@ +from __future__ import division +# Copyright (c) 2012, 2013 Ricardo Andrade +# Licensed under the BSD 3-clause license (see LICENSE.txt) + +import numpy as np +from scipy import stats,special +import scipy as sp +from GPy.util.univariate_Gaussian import std_norm_pdf,std_norm_cdf +import gp_transformations +from noise_distributions import NoiseDistribution + +class Poisson(NoiseDistribution): + """ + Poisson likelihood + + .. math:: + p(y_{i}|\\lambda(f_{i})) = \\frac{\\lambda(f_{i})^{y_{i}}}{y_{i}!}e^{-\\lambda(f_{i})} + + .. Note:: + Y is expected to take values in {0,1,2,...} + """ + def __init__(self,gp_link=None,analytical_mean=False,analytical_variance=False): + super(Poisson, self).__init__(gp_link,analytical_mean,analytical_variance) + + def _preprocess_values(self,Y): #TODO + return Y + + def pdf_link(self, link_f, y, extra_data=None): + """ + Likelihood function given link(f) + + .. math:: + p(y_{i}|\\lambda(f_{i})) = \\frac{\\lambda(f_{i})^{y_{i}}}{y_{i}!}e^{-\\lambda(f_{i})} + + :param link_f: latent variables link(f) + :type link_f: Nx1 array + :param y: data + :type y: Nx1 array + :param extra_data: extra_data which is not used in poisson distribution + :returns: likelihood evaluated for this point + :rtype: float + """ + assert np.atleast_1d(link_f).shape == np.atleast_1d(y).shape + return np.prod(stats.poisson.pmf(y,link_f)) + + def logpdf_link(self, link_f, y, extra_data=None): + """ + Log Likelihood Function given link(f) + + .. math:: + \\ln p(y_{i}|\lambda(f_{i})) = -\\lambda(f_{i}) + y_{i}\\log \\lambda(f_{i}) - \\log y_{i}! 
+ + :param link_f: latent variables (link(f)) + :type link_f: Nx1 array + :param y: data + :type y: Nx1 array + :param extra_data: extra_data which is not used in poisson distribution + :returns: likelihood evaluated for this point + :rtype: float + + """ + assert np.atleast_1d(link_f).shape == np.atleast_1d(y).shape + return np.sum(-link_f + y*np.log(link_f) - special.gammaln(y+1)) + + def dlogpdf_dlink(self, link_f, y, extra_data=None): + """ + Gradient of the log likelihood function at y, given link(f) w.r.t link(f) + + .. math:: + \\frac{d \\ln p(y_{i}|\lambda(f_{i}))}{d\\lambda(f)} = \\frac{y_{i}}{\\lambda(f_{i})} - 1 + + :param link_f: latent variables (f) + :type link_f: Nx1 array + :param y: data + :type y: Nx1 array + :param extra_data: extra_data which is not used in poisson distribution + :returns: gradient of likelihood evaluated at points + :rtype: Nx1 array + + """ + assert np.atleast_1d(link_f).shape == np.atleast_1d(y).shape + return y/link_f - 1 + + def d2logpdf_dlink2(self, link_f, y, extra_data=None): + """ + Hessian at y, given link(f), w.r.t link(f) + i.e. second derivative logpdf at y given link(f_i) and link(f_j) w.r.t link(f_i) and link(f_j) + The hessian will be 0 unless i == j + + .. math:: + \\frac{d^{2} \\ln p(y_{i}|\lambda(f_{i}))}{d^{2}\\lambda(f)} = \\frac{-y_{i}}{\\lambda(f_{i})^{2}} + + :param link_f: latent variables link(f) + :type link_f: Nx1 array + :param y: data + :type y: Nx1 array + :param extra_data: extra_data which is not used in poisson distribution + :returns: Diagonal of hessian matrix (second derivative of likelihood evaluated at points f) + :rtype: Nx1 array + + .. Note:: + Will return diagonal of hessian, since every where else it is 0, as the likelihood factorizes over cases + (the distribution for y_i depends only on link(f_i) not on link(f_(j!=i)) + """ + assert np.atleast_1d(link_f).shape == np.atleast_1d(y).shape + hess = -y/(link_f**2) + return hess + #d2_df = self.gp_link.d2transf_df2(gp) + #transf = self.gp_link.transf(gp) + #return obs * ((self.gp_link.dtransf_df(gp)/transf)**2 - d2_df/transf) + d2_df + + def d3logpdf_dlink3(self, link_f, y, extra_data=None): + """ + Third order derivative log-likelihood function at y given link(f) w.r.t link(f) + + .. math:: + \\frac{d^{3} \\ln p(y_{i}|\lambda(f_{i}))}{d^{3}\\lambda(f)} = \\frac{2y_{i}}{\\lambda(f_{i})^{3}} + + :param link_f: latent variables link(f) + :type link_f: Nx1 array + :param y: data + :type y: Nx1 array + :param extra_data: extra_data which is not used in poisson distribution + :returns: third derivative of likelihood evaluated at points f + :rtype: Nx1 array + """ + assert np.atleast_1d(link_f).shape == np.atleast_1d(y).shape + d3lik_dlink3 = 2*y/(link_f)**3 + return d3lik_dlink3 + + def _mean(self,gp): + """ + Mass (or density) function + """ + return self.gp_link.transf(gp) + + def _variance(self,gp): + """ + Mass (or density) function + """ + return self.gp_link.transf(gp) + + def samples(self, gp): + """ + Returns a set of samples of observations based on a given value of the latent variable. 
+
+        :param gp: latent variable
+        """
+        orig_shape = gp.shape
+        gp = gp.flatten()
+        Ysim = np.random.poisson(self.gp_link.transf(gp))
+        return Ysim.reshape(orig_shape)
diff --git a/GPy/likelihoods/noise_models/student_t_noise.py b/GPy/likelihoods/noise_models/student_t_noise.py
new file mode 100644
index 00000000..daad7186
--- /dev/null
+++ b/GPy/likelihoods/noise_models/student_t_noise.py
@@ -0,0 +1,277 @@
+# Copyright (c) 2012, 2013 Ricardo Andrade
+# Licensed under the BSD 3-clause license (see LICENSE.txt)
+
+import numpy as np
+from scipy import stats, special
+import scipy as sp
+import gp_transformations
+from noise_distributions import NoiseDistribution
+from scipy import stats, integrate
+from scipy.special import gammaln, gamma
+
+class StudentT(NoiseDistribution):
+    """
+    Student T likelihood
+
+    For nomenclature see Bayesian Data Analysis 2003 p576
+
+    .. math::
+        p(y_{i}|\\lambda(f_{i})) = \\frac{\\Gamma\\left(\\frac{v+1}{2}\\right)}{\\Gamma\\left(\\frac{v}{2}\\right)\\sqrt{v\\pi\\sigma^{2}}}\\left(1 + \\frac{1}{v}\\left(\\frac{(y_{i} - f_{i})^{2}}{\\sigma^{2}}\\right)\\right)^{-\\frac{v+1}{2}}
+
+    """
+    def __init__(self,gp_link=None,analytical_mean=True,analytical_variance=True, deg_free=5, sigma2=2):
+        self.v = deg_free
+        self.sigma2 = sigma2
+
+        self._set_params(np.asarray(sigma2))
+        super(StudentT, self).__init__(gp_link,analytical_mean,analytical_variance)
+        self.log_concave = False
+
+    def _get_params(self):
+        return np.asarray(self.sigma2)
+
+    def _get_param_names(self):
+        return ["t_noise_std2"]
+
+    def _set_params(self, x):
+        self.sigma2 = float(x)
+
+    @property
+    def variance(self):
+        return (self.v / float(self.v - 2)) * self.sigma2
+
+    def pdf_link(self, link_f, y, extra_data=None):
+        """
+        Likelihood function given link(f)
+
+        .. math::
+            p(y_{i}|\\lambda(f_{i})) = \\frac{\\Gamma\\left(\\frac{v+1}{2}\\right)}{\\Gamma\\left(\\frac{v}{2}\\right)\\sqrt{v\\pi\\sigma^{2}}}\\left(1 + \\frac{1}{v}\\left(\\frac{(y_{i} - \\lambda(f_{i}))^{2}}{\\sigma^{2}}\\right)\\right)^{-\\frac{v+1}{2}}
+
+        :param link_f: latent variables link(f)
+        :type link_f: Nx1 array
+        :param y: data
+        :type y: Nx1 array
+        :param extra_data: extra_data which is not used in student t distribution
+        :returns: likelihood evaluated for this point
+        :rtype: float
+        """
+        assert np.atleast_1d(link_f).shape == np.atleast_1d(y).shape
+        e = y - link_f
+        #Careful gamma(big_number) is infinity!
+        objective = ((np.exp(gammaln((self.v + 1)*0.5) - gammaln(self.v * 0.5))
+                      / (np.sqrt(self.v * np.pi * self.sigma2)))
+                     * ((1 + (1./float(self.v))*((e**2)/float(self.sigma2)))**(-0.5*(self.v + 1)))
+                    )
+        return np.prod(objective)
+
+    def logpdf_link(self, link_f, y, extra_data=None):
+        """
+        Log Likelihood Function given link(f)
+
+        ..
math:: + \\ln p(y_{i}|\lambda(f_{i})) = \\ln \\Gamma\\left(\\frac{v+1}{2}\\right) - \\ln \\Gamma\\left(\\frac{v}{2}\\right) - \\ln \\sqrt{v \\pi\\sigma^{2}} - \\frac{v+1}{2}\\ln \\left(1 + \\frac{1}{v}\\left(\\frac{(y_{i} - \lambda(f_{i}))^{2}}{\\sigma^{2}}\\right)\\right) + + :param link_f: latent variables (link(f)) + :type link_f: Nx1 array + :param y: data + :type y: Nx1 array + :param extra_data: extra_data which is not used in student t distribution + :returns: likelihood evaluated for this point + :rtype: float + + """ + assert np.atleast_1d(link_f).shape == np.atleast_1d(y).shape + e = y - link_f + objective = (+ gammaln((self.v + 1) * 0.5) + - gammaln(self.v * 0.5) + - 0.5*np.log(self.sigma2 * self.v * np.pi) + - 0.5*(self.v + 1)*np.log(1 + (1/np.float(self.v))*((e**2)/self.sigma2)) + ) + return np.sum(objective) + + def dlogpdf_dlink(self, link_f, y, extra_data=None): + """ + Gradient of the log likelihood function at y, given link(f) w.r.t link(f) + + .. math:: + \\frac{d \\ln p(y_{i}|\lambda(f_{i}))}{d\\lambda(f)} = \\frac{(v+1)(y_{i}-\lambda(f_{i}))}{(y_{i}-\lambda(f_{i}))^{2} + \\sigma^{2}v} + + :param link_f: latent variables (f) + :type link_f: Nx1 array + :param y: data + :type y: Nx1 array + :param extra_data: extra_data which is not used in student t distribution + :returns: gradient of likelihood evaluated at points + :rtype: Nx1 array + + """ + assert np.atleast_1d(link_f).shape == np.atleast_1d(y).shape + e = y - link_f + grad = ((self.v + 1) * e) / (self.v * self.sigma2 + (e**2)) + return grad + + def d2logpdf_dlink2(self, link_f, y, extra_data=None): + """ + Hessian at y, given link(f), w.r.t link(f) + i.e. second derivative logpdf at y given link(f_i) and link(f_j) w.r.t link(f_i) and link(f_j) + The hessian will be 0 unless i == j + + .. math:: + \\frac{d^{2} \\ln p(y_{i}|\lambda(f_{i}))}{d^{2}\\lambda(f)} = \\frac{(v+1)((y_{i}-\lambda(f_{i}))^{2} - \\sigma^{2}v)}{((y_{i}-\lambda(f_{i}))^{2} + \\sigma^{2}v)^{2}} + + :param link_f: latent variables link(f) + :type link_f: Nx1 array + :param y: data + :type y: Nx1 array + :param extra_data: extra_data which is not used in student t distribution + :returns: Diagonal of hessian matrix (second derivative of likelihood evaluated at points f) + :rtype: Nx1 array + + .. Note:: + Will return diagonal of hessian, since every where else it is 0, as the likelihood factorizes over cases + (the distribution for y_i depends only on link(f_i) not on link(f_(j!=i)) + """ + assert np.atleast_1d(link_f).shape == np.atleast_1d(y).shape + e = y - link_f + hess = ((self.v + 1)*(e**2 - self.v*self.sigma2)) / ((self.sigma2*self.v + e**2)**2) + return hess + + def d3logpdf_dlink3(self, link_f, y, extra_data=None): + """ + Third order derivative log-likelihood function at y given link(f) w.r.t link(f) + + .. 
math:: + \\frac{d^{3} \\ln p(y_{i}|\lambda(f_{i}))}{d^{3}\\lambda(f)} = \\frac{-2(v+1)((y_{i} - \lambda(f_{i}))^3 - 3(y_{i} - \lambda(f_{i})) \\sigma^{2} v))}{((y_{i} - \lambda(f_{i})) + \\sigma^{2} v)^3} + + :param link_f: latent variables link(f) + :type link_f: Nx1 array + :param y: data + :type y: Nx1 array + :param extra_data: extra_data which is not used in student t distribution + :returns: third derivative of likelihood evaluated at points f + :rtype: Nx1 array + """ + assert np.atleast_1d(link_f).shape == np.atleast_1d(y).shape + e = y - link_f + d3lik_dlink3 = ( -(2*(self.v + 1)*(-e)*(e**2 - 3*self.v*self.sigma2)) / + ((e**2 + self.sigma2*self.v)**3) + ) + return d3lik_dlink3 + + def dlogpdf_link_dvar(self, link_f, y, extra_data=None): + """ + Gradient of the log-likelihood function at y given f, w.r.t variance parameter (t_noise) + + .. math:: + \\frac{d \\ln p(y_{i}|\lambda(f_{i}))}{d\\sigma^{2}} = \\frac{v((y_{i} - \lambda(f_{i}))^{2} - \\sigma^{2})}{2\\sigma^{2}(\\sigma^{2}v + (y_{i} - \lambda(f_{i}))^{2})} + + :param link_f: latent variables link(f) + :type link_f: Nx1 array + :param y: data + :type y: Nx1 array + :param extra_data: extra_data which is not used in student t distribution + :returns: derivative of likelihood evaluated at points f w.r.t variance parameter + :rtype: float + """ + assert np.atleast_1d(link_f).shape == np.atleast_1d(y).shape + e = y - link_f + dlogpdf_dvar = self.v*(e**2 - self.sigma2)/(2*self.sigma2*(self.sigma2*self.v + e**2)) + return np.sum(dlogpdf_dvar) + + def dlogpdf_dlink_dvar(self, link_f, y, extra_data=None): + """ + Derivative of the dlogpdf_dlink w.r.t variance parameter (t_noise) + + .. math:: + \\frac{d}{d\\sigma^{2}}(\\frac{d \\ln p(y_{i}|\lambda(f_{i}))}{df}) = \\frac{-2\\sigma v(v + 1)(y_{i}-\lambda(f_{i}))}{(y_{i}-\lambda(f_{i}))^2 + \\sigma^2 v)^2} + + :param link_f: latent variables link_f + :type link_f: Nx1 array + :param y: data + :type y: Nx1 array + :param extra_data: extra_data which is not used in student t distribution + :returns: derivative of likelihood evaluated at points f w.r.t variance parameter + :rtype: Nx1 array + """ + assert np.atleast_1d(link_f).shape == np.atleast_1d(y).shape + e = y - link_f + dlogpdf_dlink_dvar = (self.v*(self.v+1)*(-e))/((self.sigma2*self.v + e**2)**2) + return dlogpdf_dlink_dvar + + def d2logpdf_dlink2_dvar(self, link_f, y, extra_data=None): + """ + Gradient of the hessian (d2logpdf_dlink2) w.r.t variance parameter (t_noise) + + .. 
+            \\frac{d}{d\\sigma^{2}}\\left(\\frac{d^{2} \\ln p(y_{i}|\\lambda(f_{i}))}{d^{2}\\lambda(f)}\\right) = \\frac{v(v+1)(\\sigma^{2}v - 3(y_{i} - \\lambda(f_{i}))^{2})}{(\\sigma^{2}v + (y_{i} - \\lambda(f_{i}))^{2})^{3}}
+
+        :param link_f: latent variables link(f)
+        :type link_f: Nx1 array
+        :param y: data
+        :type y: Nx1 array
+        :param extra_data: extra_data, not used in the student t distribution
+        :returns: derivative of the hessian evaluated at the points f, w.r.t the variance parameter
+        :rtype: Nx1 array
+        """
+        assert np.atleast_1d(link_f).shape == np.atleast_1d(y).shape
+        e = y - link_f
+        d2logpdf_dlink2_dvar = ( (self.v*(self.v+1)*(self.sigma2*self.v - 3*(e**2)))
+                                 / ((self.sigma2*self.v + (e**2))**3)
+                               )
+        return d2logpdf_dlink2_dvar
+
+    def dlogpdf_link_dtheta(self, f, y, extra_data=None):
+        dlogpdf_dvar = self.dlogpdf_link_dvar(f, y, extra_data=extra_data)
+        return np.asarray([[dlogpdf_dvar]])
+
+    def dlogpdf_dlink_dtheta(self, f, y, extra_data=None):
+        dlogpdf_dlink_dvar = self.dlogpdf_dlink_dvar(f, y, extra_data=extra_data)
+        return dlogpdf_dlink_dvar
+
+    def d2logpdf_dlink2_dtheta(self, f, y, extra_data=None):
+        d2logpdf_dlink2_dvar = self.d2logpdf_dlink2_dvar(f, y, extra_data=extra_data)
+        return d2logpdf_dlink2_dvar
+
+    def _predictive_variance_analytical(self, mu, sigma, predictive_mean=None):
+        """
+        Compute the predictive variance of student_t*normal p(y*|f*)p(f*)
+
+        We need the variance at the latent points for a student_t*normal p(y*|f*)p(f*):
+        (((g((v+1)/2))/(g(v/2)*s*sqrt(v*pi)))*(1+(1/v)*((y-f)/s)^2)^(-(v+1)/2))
+        *((1/(s*sqrt(2*pi)))*exp(-(1/(2*(s^2)))*((y-f)^2)))
+        """
+
+        #FIXME: Not correct
+        #We want the variance around test points y, which comes from int p(y*|f*)p(f*) df*
+        #Var(y*) = Var(E[y*|f*]) + E[Var(y*|f*)]
+        #Since we are given f* (mu), which is our mean (expected) value of y*|f*, the variance is the variance around this,
+        #which was also given to us (as var).
+        #We also need the expected variance of y* around samples f*; this is the variance of the student t distribution.
+        #However, the variance of the student t distribution does not depend on f, only on sigma and the degrees of freedom.
+        true_var = 1/(1/sigma**2 + 1/self.variance)
+
+        return true_var
+
+    def _predictive_mean_analytical(self, mu, sigma):
+        """
+        Compute the mean of the prediction
+        """
+        #FIXME: Not correct
+        return mu
+
+    def samples(self, gp):
+        """
+        Returns a set of samples of observations based on a given value of the latent variable.
+
+        :param gp: latent variable
+        """
+        orig_shape = gp.shape
+        gp = gp.flatten()
+        #FIXME: Very slow, as we are computing a new random variable per input!
+        #Can't get it to sample all at the same time
+        #student_t_samples = np.array([stats.t.rvs(self.v, self.gp_link.transf(gpj),scale=np.sqrt(self.sigma2), size=1) for gpj in gp])
+        dfs = np.ones_like(gp)*self.v
+        scales = np.ones_like(gp)*np.sqrt(self.sigma2)
+        student_t_samples = stats.t.rvs(dfs, loc=self.gp_link.transf(gp),
+                                        scale=scales)
+        return student_t_samples.reshape(orig_shape)
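+
+# A minimal usage sketch (illustrative only, not part of the test suite):
+# assuming the default identity link, the summed log-likelihood of some toy
+# data under this noise model could be evaluated as
+#
+#     import numpy as np
+#     noise = StudentT(deg_free=5, sigma2=2.0)
+#     y = np.random.standard_t(5, size=(10, 1))
+#     f = np.zeros((10, 1))
+#     print noise.logpdf_link(f, y)
diff --git a/GPy/mappings/__init__.py b/GPy/mappings/__init__.py
new file mode 100644
index 00000000..97573aba
--- /dev/null
+++ b/GPy/mappings/__init__.py
@@ -0,0 +1,7 @@
+# Copyright (c) 2013, GPy authors (see AUTHORS.txt).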
+# Licensed under the BSD 3-clause license (see LICENSE.txt)
+
+from kernel import Kernel
+from linear import Linear
+from mlp import MLP
+#from rbf import RBF
diff --git a/GPy/mappings/kernel.py b/GPy/mappings/kernel.py
new file mode 100644
index 00000000..5c802c13
--- /dev/null
+++ b/GPy/mappings/kernel.py
@@ -0,0 +1,60 @@
+# Copyright (c) 2013, GPy authors (see AUTHORS.txt).
+# Licensed under the BSD 3-clause license (see LICENSE.txt)
+
+import numpy as np
+from ..core.mapping import Mapping
+import GPy
+
+class Kernel(Mapping):
+    """
+    Mapping based on a kernel/covariance function.
+
+    .. math::
+
+       f(\mathbf{x}^*) = \mathbf{A}\mathbf{k}(\mathbf{X}, \mathbf{x}^*) + \mathbf{b}
+
+    :param X: input observations containing :math:`\mathbf{X}`
+    :type X: ndarray
+    :param output_dim: dimension of output.
+    :type output_dim: int
+    :param kernel: a GPy kernel, defaults to GPy.kern.rbf
+    :type kernel: GPy.kern.kern
+
+    """
+
+    def __init__(self, X, output_dim=1, kernel=None):
+        Mapping.__init__(self, input_dim=X.shape[1], output_dim=output_dim)
+        if kernel is None:
+            kernel = GPy.kern.rbf(self.input_dim)
+        self.kern = kernel
+        self.X = X
+        self.num_data = X.shape[0]
+        self.num_params = self.output_dim*(self.num_data + 1)
+        # allocate the parameter arrays; sensible values are drawn in randomize()
+        self.A = np.empty((self.num_data, self.output_dim))
+        self.bias = np.empty(self.output_dim)
+        self.randomize()
+        self.name = 'kernel'
+
+    def _get_param_names(self):
+        return sum([['A_%i_%i' % (n, d) for d in range(self.output_dim)] for n in range(self.num_data)], []) + ['bias_%i' % d for d in range(self.output_dim)]
+
+    def _get_params(self):
+        return np.hstack((self.A.flatten(), self.bias))
+
+    def _set_params(self, x):
+        self.A = x[:self.num_data * self.output_dim].reshape(self.num_data, self.output_dim).copy()
+        self.bias = x[self.num_data*self.output_dim:].copy()
+
+    def randomize(self):
+        self.A = np.random.randn(self.num_data, self.output_dim)/np.sqrt(self.num_data+1)
+        self.bias = np.random.randn(self.output_dim)/np.sqrt(self.num_data+1)
+
+    def f(self, X):
+        return np.dot(self.kern.K(X, self.X), self.A) + self.bias
+
+    def df_dtheta(self, dL_df, X):
+        self._df_dA = (dL_df[:, :, None]*self.kern.K(X, self.X)[:, None, :]).sum(0).T
+        self._df_dbias = (dL_df.sum(0))
+        return np.hstack((self._df_dA.flatten(), self._df_dbias))
+
+    def df_dX(self, dL_df, X):
+        return self.kern.dK_dX((dL_df[:, None, :]*self.A[None, :, :]).sum(2), X, self.X)
diff --git a/GPy/mappings/linear.py b/GPy/mappings/linear.py
new file mode 100644
index 00000000..5846903d
--- /dev/null
+++ b/GPy/mappings/linear.py
@@ -0,0 +1,53 @@
+# Copyright (c) 2013, GPy authors (see AUTHORS.txt).
+# Licensed under the BSD 3-clause license (see LICENSE.txt)
+
+import numpy as np
+from ..core.mapping import Mapping
+
+class Linear(Mapping):
+    """
+    Mapping based on a linear model.
+
+    .. math::
+
+       f(\mathbf{x}^*) = \mathbf{W}\mathbf{x}^* + \mathbf{b}
+
+    :param input_dim: dimension of input.
+    :type input_dim: int
+    :param output_dim: dimension of output.
+    :type output_dim: int
+
+    """
+
+    def __init__(self, input_dim=1, output_dim=1):
+        self.name = 'linear'
+        Mapping.__init__(self, input_dim=input_dim, output_dim=output_dim)
+        self.num_params = self.output_dim*(self.input_dim + 1)
+        # allocate the parameter arrays; sensible values are drawn in randomize()
+        self.W = np.empty((self.input_dim, self.output_dim))
+        self.bias = np.empty(self.output_dim)
+        self.randomize()
+
+    def _get_param_names(self):
+        return sum([['W_%i_%i' % (n, d) for d in range(self.output_dim)] for n in range(self.input_dim)], []) + ['bias_%i' % d for d in range(self.output_dim)]
+
+    def _get_params(self):
+        return np.hstack((self.W.flatten(), self.bias))
+
+    def _set_params(self, x):
+        self.W = x[:self.input_dim * self.output_dim].reshape(self.input_dim, self.output_dim).copy()
+        self.bias = x[self.input_dim*self.output_dim:].copy()
+
+    def randomize(self):
+        self.W = np.random.randn(self.input_dim, self.output_dim)/np.sqrt(self.input_dim + 1)
+        self.bias = np.random.randn(self.output_dim)/np.sqrt(self.input_dim + 1)
+
+    def f(self, X):
+        return np.dot(X, self.W) + self.bias
+
+    def df_dtheta(self, dL_df, X):
+        self._df_dW = (dL_df[:, :, None]*X[:, None, :]).sum(0).T
+        self._df_dbias = (dL_df.sum(0))
+        return np.hstack((self._df_dW.flatten(), self._df_dbias))
+
+    def df_dX(self, dL_df, X):
+        return (dL_df[:, None, :]*self.W[None, :, :]).sum(2)
+
diff --git a/GPy/mappings/mlp.py b/GPy/mappings/mlp.py
new file mode 100644
index 00000000..46dbc2a9
--- /dev/null
+++ b/GPy/mappings/mlp.py
@@ -0,0 +1,130 @@
+# Copyright (c) 2013, GPy authors (see AUTHORS.txt).
+# Licensed under the BSD 3-clause license (see LICENSE.txt)
+
+import numpy as np
+from ..core.mapping import Mapping
+
+class MLP(Mapping):
+    """
+    Mapping based on a multi-layer perceptron neural network model.
+
+    .. math::
+
+       f(\\mathbf{x}^*) = \\mathbf{W}^0\\boldsymbol{\\phi}(\\mathbf{W}^1\\mathbf{x}^*+\\mathbf{b}^1) + \\mathbf{b}^0
+
+    where
+
+    .. math::
+
+       \\phi(\\cdot) = \\text{tanh}(\\cdot)
+
+    :param input_dim: dimension of input.
+    :type input_dim: int
+    :param output_dim: dimension of output.
+    :type output_dim: int
+    :param hidden_dim: dimension of the hidden layer(s). If it is an int, there is one hidden layer of the given dimension. If it is a list of ints, there are as many hidden layers as the length of the list, each with the given number of hidden nodes in it.
+    :type hidden_dim: int or list of ints.
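+
+    A minimal usage sketch (illustrative only; the shapes and random data
+    below are made up for the example)::
+
+        import numpy as np
+        import GPy
+
+        # map 2-d inputs to 1-d outputs through two tanh layers of 5 units each
+        mapping = GPy.mappings.MLP(input_dim=2, output_dim=1, hidden_dim=[5, 5])
+        X = np.random.randn(10, 2)
+        F = mapping.f(X)  # (10, 1) array of mapped outputs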
+ + """ + + def __init__(self, input_dim=1, output_dim=1, hidden_dim=3): + Mapping.__init__(self, input_dim=input_dim, output_dim=output_dim) + self.name = 'mlp' + if isinstance(hidden_dim, int): + hidden_dim = [hidden_dim] + self.hidden_dim = hidden_dim + self.activation = [None]*len(self.hidden_dim) + self.W = [] + self._dL_dW = [] + self.bias = [] + self._dL_dbias = [] + self.W.append(np.zeros((self.input_dim, self.hidden_dim[0]))) + self._dL_dW.append(np.zeros((self.input_dim, self.hidden_dim[0]))) + self.bias.append(np.zeros(self.hidden_dim[0])) + self._dL_dbias.append(np.zeros(self.hidden_dim[0])) + self.num_params = self.hidden_dim[0]*(self.input_dim+1) + for h1, h0 in zip(hidden_dim[1:], hidden_dim[0:-1]): + self.W.append(np.zeros((h0, h1))) + self._dL_dW.append(np.zeros((h0, h1))) + self.bias.append(np.zeros(h1)) + self._dL_dbias.append(np.zeros(h1)) + self.num_params += h1*(h0+1) + self.W.append(np.zeros((self.hidden_dim[-1], self.output_dim))) + self._dL_dW.append(np.zeros((self.hidden_dim[-1], self.output_dim))) + self.bias.append(np.zeros(self.output_dim)) + self._dL_dbias.append(np.zeros(self.output_dim)) + self.num_params += self.output_dim*(self.hidden_dim[-1]+1) + self.randomize() + + def _get_param_names(self): + return sum([['W%i_%i_%i' % (i, n, d) for n in range(self.W[i].shape[0]) for d in range(self.W[i].shape[1])] + ['bias%i_%i' % (i, d) for d in range(self.W[i].shape[1])] for i in range(len(self.W))], []) + + def _get_params(self): + param = np.array([]) + for W, bias in zip(self.W, self.bias): + param = np.hstack((param, W.flatten(), bias)) + return param + + def _set_params(self, x): + start = 0 + for W, bias in zip(self.W, self.bias): + end = W.shape[0]*W.shape[1]+start + W[:] = x[start:end].reshape(W.shape[0], W.shape[1]).copy() + start = end + end = W.shape[1]+end + bias[:] = x[start:end].copy() + start = end + + def randomize(self): + for W, bias in zip(self.W, self.bias): + W[:] = np.random.randn(W.shape[0], W.shape[1])/np.sqrt(W.shape[0]+1) + bias[:] = np.random.randn(W.shape[1])/np.sqrt(W.shape[0]+1) + + def f(self, X): + self._f_computations(X) + return np.dot(np.tanh(self.activation[-1]), self.W[-1]) + self.bias[-1] + + def _f_computations(self, X): + W = self.W[0] + bias = self.bias[0] + self.activation[0] = np.dot(X,W) + bias + for W, bias, index in zip(self.W[1:-1], self.bias[1:-1], range(1, len(self.activation))): + self.activation[index] = np.dot(np.tanh(self.activation[index-1]), W)+bias + + def df_dtheta(self, dL_df, X): + self._df_computations(dL_df, X) + g = np.array([]) + for gW, gbias in zip(self._dL_dW, self._dL_dbias): + g = np.hstack((g, gW.flatten(), gbias)) + return g + + def _df_computations(self, dL_df, X): + self._f_computations(X) + a0 = self.activation[-1] + W = self.W[-1] + self._dL_dW[-1] = (dL_df[:, :, None]*np.tanh(a0[:, None, :])).sum(0).T + dL_dta=(dL_df[:, None, :]*W[None, :, :]).sum(2) + self._dL_dbias[-1] = (dL_df.sum(0)) + for dL_dW, dL_dbias, W, bias, a0, a1 in zip(self._dL_dW[-2:0:-1], + self._dL_dbias[-2:0:-1], + self.W[-2:0:-1], + self.bias[-2:0:-1], + self.activation[-2::-1], + self.activation[-1:0:-1]): + ta = np.tanh(a1) + dL_da = dL_dta*(1-ta*ta) + dL_dW[:] = (dL_da[:, :, None]*np.tanh(a0[:, None, :])).sum(0).T + dL_dbias[:] = (dL_da.sum(0)) + dL_dta = (dL_da[:, None, :]*W[None, :, :]).sum(2) + ta = np.tanh(self.activation[0]) + dL_da = dL_dta*(1-ta*ta) + W = self.W[0] + self._dL_dW[0] = (dL_da[:, :, None]*X[:, None, :]).sum(0).T + self._dL_dbias[0] = (dL_da.sum(0)) + self._dL_dX = (dL_da[:, None, :]*W[None, :, 
:]).sum(2)
+
+
+    def df_dX(self, dL_df, X):
+        self._df_computations(dL_df, X)
+        return self._dL_dX
+
diff --git a/GPy/models.py b/GPy/models.py
new file mode 100644
index 00000000..3b2683ea
--- /dev/null
+++ b/GPy/models.py
@@ -0,0 +1,31 @@
+'''
+GPy Models
+==========
+
+Implementations for common models used in GP regression and classification.
+The different models can be viewed in :mod:`GPy.models_modules`, which holds
+detailed explanations for the different models.
+
+:warning: This module is a convenience module for end users. For developers,
+see :mod:`GPy.models_modules`, which holds the implementations for each model.
+'''
+
+__updated__ = '2013-11-28'
+
+from models_modules.bayesian_gplvm import BayesianGPLVM
+from models_modules.gp_regression import GPRegression
+from models_modules.gp_classification import GPClassification#; _gp_classification = gp_classification ; del gp_classification
+from models_modules.sparse_gp_regression import SparseGPRegression#; _sparse_gp_regression = sparse_gp_regression ; del sparse_gp_regression
+from models_modules.svigp_regression import SVIGPRegression#; _svigp_regression = svigp_regression ; del svigp_regression
+from models_modules.sparse_gp_classification import SparseGPClassification#; _sparse_gp_classification = sparse_gp_classification ; del sparse_gp_classification
+from models_modules.fitc_classification import FITCClassification#; _fitc_classification = fitc_classification ; del fitc_classification
+from models_modules.gplvm import GPLVM#; _gplvm = gplvm ; del gplvm
+from models_modules.bcgplvm import BCGPLVM#; _bcgplvm = bcgplvm; del bcgplvm
+from models_modules.sparse_gplvm import SparseGPLVM#; _sparse_gplvm = sparse_gplvm ; del sparse_gplvm
+from models_modules.warped_gp import WarpedGP#; _warped_gp = warped_gp ; del warped_gp
+from models_modules.bayesian_gplvm import BayesianGPLVM#; _bayesian_gplvm = bayesian_gplvm ; del bayesian_gplvm
+from models_modules.mrd import MRD#; _mrd = mrd; del mrd
+from models_modules.gradient_checker import GradientChecker#; _gradient_checker = gradient_checker ; del gradient_checker
+from models_modules.gp_multioutput_regression import GPMultioutputRegression#; _gp_multioutput_regression = gp_multioutput_regression ; del gp_multioutput_regression
+from models_modules.sparse_gp_multioutput_regression import SparseGPMultioutputRegression#; _sparse_gp_multioutput_regression = sparse_gp_multioutput_regression ; del sparse_gp_multioutput_regression
+from models_modules.gradient_checker import GradientChecker
\ No newline at end of file
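
Note: a minimal sketch of how the flattened namespace above is intended to be used; the toy data is made up for illustration, and GPRegression resolves through models_modules.gp_regression:

    import numpy as np
    import GPy

    X = np.random.randn(20, 1)
    Y = np.sin(X) + 0.05 * np.random.randn(20, 1)
    m = GPy.models.GPRegression(X, Y)

diff --git a/GPy/models/__init__.py b/GPy/models/__init__.py
deleted file mode 100644
index 885372a1..00000000
--- a/GPy/models/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright (c) 2012, GPy authors (see AUTHORS.txt).
-# Licensed under the BSD 3-clause license (see LICENSE.txt)
-
-from gp_regression import GPRegression
-from gp_classification import GPClassification
-from sparse_gp_regression import SparseGPRegression
-from svigp_regression import SVIGPRegression
-from sparse_gp_classification import SparseGPClassification
-from fitc_classification import FITCClassification
-from gplvm import GPLVM
-from warped_gp import WarpedGP
-from bayesian_gplvm import BayesianGPLVM
-from mrd import MRD
diff --git a/GPy/models/bayesian_gplvm.py b/GPy/models/bayesian_gplvm.py
deleted file mode 100644
index c401f788..00000000
--- a/GPy/models/bayesian_gplvm.py
+++ /dev/null
@@ -1,583 +0,0 @@
-# Copyright (c) 2012, GPy authors (see AUTHORS.txt).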
-# Licensed under the BSD 3-clause license (see LICENSE.txt) - -import numpy as np -from ..core import SparseGP -from ..likelihoods import Gaussian -from .. import kern -import itertools -from matplotlib.colors import colorConverter -from GPy.inference.optimization import SCG -from GPy.util import plot_latent -from GPy.models.gplvm import GPLVM - -class BayesianGPLVM(SparseGP, GPLVM): - """ - Bayesian Gaussian Process Latent Variable Model - - :param Y: observed data (np.ndarray) or GPy.likelihood - :type Y: np.ndarray| GPy.likelihood instance - :param input_dim: latent dimensionality - :type input_dim: int - :param init: initialisation method for the latent space - :type init: 'PCA'|'random' - - """ - def __init__(self, likelihood_or_Y, input_dim, X=None, X_variance=None, init='PCA', num_inducing=10, - Z=None, kernel=None, oldpsave=10, _debug=False, - **kwargs): - if type(likelihood_or_Y) is np.ndarray: - likelihood = Gaussian(likelihood_or_Y) - else: - likelihood = likelihood_or_Y - - if X == None: - X = self.initialise_latent(init, input_dim, likelihood.Y) - self.init = init - - if X_variance is None: - X_variance = np.clip((np.ones_like(X) * 0.5) + .01 * np.random.randn(*X.shape), 0.001, 1) - - if Z is None: - Z = np.random.permutation(X.copy())[:num_inducing] - assert Z.shape[1] == X.shape[1] - - if kernel is None: - kernel = kern.rbf(input_dim) + kern.white(input_dim) - - self.oldpsave = oldpsave - self._oldps = [] - self._debug = _debug - - if self._debug: - self.f_call = 0 - self._count = itertools.count() - self._savedklll = [] - self._savedparams = [] - self._savedgradients = [] - self._savederrors = [] - self._savedpsiKmm = [] - self._savedABCD = [] - - SparseGP.__init__(self, X, likelihood, kernel, Z=Z, X_variance=X_variance, **kwargs) - self.ensure_default_constraints() - - @property - def oldps(self): - return self._oldps - @oldps.setter - def oldps(self, p): - if len(self._oldps) == (self.oldpsave + 1): - self._oldps.pop() - # if len(self._oldps) == 0 or not np.any([np.any(np.abs(p - op) > 1e-5) for op in self._oldps]): - self._oldps.insert(0, p.copy()) - - def _get_param_names(self): - X_names = sum([['X_%i_%i' % (n, q) for q in range(self.input_dim)] for n in range(self.num_data)], []) - S_names = sum([['X_variance_%i_%i' % (n, q) for q in range(self.input_dim)] for n in range(self.num_data)], []) - return (X_names + S_names + SparseGP._get_param_names(self)) - - def _get_params(self): - """ - Horizontally stacks the parameters in order to present them to the optimizer. 
- The resulting 1-input_dim array has this structure: - - =============================================================== - | mu | S | Z | theta | beta | - =============================================================== - - """ - x = np.hstack((self.X.flatten(), self.X_variance.flatten(), SparseGP._get_params(self))) - return x - - def _clipped(self, x): - return x # np.clip(x, -1e300, 1e300) - - def _set_params(self, x, save_old=True, save_count=0): -# try: - x = self._clipped(x) - N, input_dim = self.num_data, self.input_dim - self.X = x[:self.X.size].reshape(N, input_dim).copy() - self.X_variance = x[(N * input_dim):(2 * N * input_dim)].reshape(N, input_dim).copy() - SparseGP._set_params(self, x[(2 * N * input_dim):]) -# self.oldps = x -# except (LinAlgError, FloatingPointError, ZeroDivisionError): -# print "\rWARNING: Caught LinAlgError, continueing without setting " -# if self._debug: -# self._savederrors.append(self.f_call) -# if save_count > 10: -# raise -# self._set_params(self.oldps[-1], save_old=False, save_count=save_count + 1) - - def dKL_dmuS(self): - dKL_dS = (1. - (1. / (self.X_variance))) * 0.5 - dKL_dmu = self.X - return dKL_dmu, dKL_dS - - def dL_dmuS(self): - dL_dmu_psi0, dL_dS_psi0 = self.kern.dpsi0_dmuS(self.dL_dpsi0, self.Z, self.X, self.X_variance) - dL_dmu_psi1, dL_dS_psi1 = self.kern.dpsi1_dmuS(self.dL_dpsi1, self.Z, self.X, self.X_variance) - dL_dmu_psi2, dL_dS_psi2 = self.kern.dpsi2_dmuS(self.dL_dpsi2, self.Z, self.X, self.X_variance) - dL_dmu = dL_dmu_psi0 + dL_dmu_psi1 + dL_dmu_psi2 - dL_dS = dL_dS_psi0 + dL_dS_psi1 + dL_dS_psi2 - - return dL_dmu, dL_dS - - def KL_divergence(self): - var_mean = np.square(self.X).sum() - var_S = np.sum(self.X_variance - np.log(self.X_variance)) - return 0.5 * (var_mean + var_S) - 0.5 * self.input_dim * self.num_data - - def log_likelihood(self): - ll = SparseGP.log_likelihood(self) - kl = self.KL_divergence() - -# if ll < -2E4: -# ll = -2E4 + np.random.randn() -# if kl > 5E4: -# kl = 5E4 + np.random.randn() - - if self._debug: - self.f_call = self._count.next() - if self.f_call % 1 == 0: - self._savedklll.append([self.f_call, ll, kl]) - self._savedparams.append([self.f_call, self._get_params()]) - self._savedgradients.append([self.f_call, self._log_likelihood_gradients()]) - self._savedpsiKmm.append([self.f_call, [self.Kmm, self.dL_dKmm]]) -# sf2 = self.scale_factor ** 2 - if self.likelihood.is_heteroscedastic: - A = -0.5 * self.num_data * self.input_dim * np.log(2.*np.pi) + 0.5 * np.sum(np.log(self.likelihood.precision)) - 0.5 * np.sum(self.V * self.likelihood.Y) -# B = -0.5 * self.input_dim * (np.sum(self.likelihood.precision.flatten() * self.psi0) - np.trace(self.A) * sf2) - B = -0.5 * self.input_dim * (np.sum(self.likelihood.precision.flatten() * self.psi0) - np.trace(self.A)) - else: - A = -0.5 * self.num_data * self.input_dim * (np.log(2.*np.pi) + np.log(self.likelihood._variance)) - 0.5 * self.likelihood.precision * self.likelihood.trYYT -# B = -0.5 * self.input_dim * (np.sum(self.likelihood.precision * self.psi0) - np.trace(self.A) * sf2) - B = -0.5 * self.input_dim * (np.sum(self.likelihood.precision * self.psi0) - np.trace(self.A)) - C = -self.input_dim * (np.sum(np.log(np.diag(self.LB)))) # + 0.5 * self.num_inducing * np.log(sf2)) - D = 0.5 * np.sum(np.square(self._LBi_Lmi_psi1V)) - self._savedABCD.append([self.f_call, A, B, C, D]) - - # print "\nkl:", kl, "ll:", ll - return ll - kl - - def _log_likelihood_gradients(self): - dKL_dmu, dKL_dS = self.dKL_dmuS() - dL_dmu, dL_dS = self.dL_dmuS() - # TODO: find way to make 
faster - - d_dmu = (dL_dmu - dKL_dmu).flatten() - d_dS = (dL_dS - dKL_dS).flatten() - # TEST KL: ==================== - # d_dmu = (dKL_dmu).flatten() - # d_dS = (dKL_dS).flatten() - # ======================== - # TEST L: ==================== -# d_dmu = (dL_dmu).flatten() -# d_dS = (dL_dS).flatten() - # ======================== - self.dbound_dmuS = np.hstack((d_dmu, d_dS)) - self.dbound_dZtheta = SparseGP._log_likelihood_gradients(self) - return self._clipped(np.hstack((self.dbound_dmuS.flatten(), self.dbound_dZtheta))) - - def plot_latent(self, *args, **kwargs): - return plot_latent.plot_latent_indices(self, *args, **kwargs) - - def do_test_latents(self, Y): - """ - Compute the latent representation for a set of new points Y - - Notes: - This will only work with a univariate Gaussian likelihood (for now) - """ - assert not self.likelihood.is_heteroscedastic - N_test = Y.shape[0] - input_dim = self.Z.shape[1] - means = np.zeros((N_test, input_dim)) - covars = np.zeros((N_test, input_dim)) - - dpsi0 = -0.5 * self.input_dim * self.likelihood.precision - dpsi2 = self.dL_dpsi2[0][None, :, :] # TODO: this may change if we ignore het. likelihoods - V = self.likelihood.precision * Y - dpsi1 = np.dot(self.Cpsi1V, V.T) - - start = np.zeros(self.input_dim * 2) - - for n, dpsi1_n in enumerate(dpsi1.T[:, :, None]): - args = (self.kern, self.Z, dpsi0, dpsi1_n, dpsi2) - xopt, fopt, neval, status = SCG(f=latent_cost, gradf=latent_grad, x=start, optargs=args, display=False) - - mu, log_S = xopt.reshape(2, 1, -1) - means[n] = mu[0].copy() - covars[n] = np.exp(log_S[0]).copy() - - return means, covars - - - def plot_X_1d(self, fignum=None, ax=None, colors=None): - """ - Plot latent space X in 1D: - - -if fig is given, create input_dim subplots in fig and plot in these - -if ax is given plot input_dim 1D latent space plots of X into each `axis` - -if neither fig nor ax is given create a figure with fignum and plot in there - - colors: - colors of different latent space dimensions input_dim - """ - import pylab - if ax is None: - fig = pylab.figure(num=fignum, figsize=(8, min(12, (2 * self.X.shape[1])))) - if colors is None: - colors = pylab.gca()._get_lines.color_cycle - pylab.clf() - else: - colors = iter(colors) - plots = [] - x = np.arange(self.X.shape[0]) - for i in range(self.X.shape[1]): - if ax is None: - a = fig.add_subplot(self.X.shape[1], 1, i + 1) - elif isinstance(ax, (tuple, list)): - a = ax[i] - else: - raise ValueError("Need one ax per latent dimnesion input_dim") - a.plot(self.X, c='k', alpha=.3) - plots.extend(a.plot(x, self.X.T[i], c=colors.next(), label=r"$\mathbf{{X_{{{}}}}}$".format(i))) - a.fill_between(x, - self.X.T[i] - 2 * np.sqrt(self.X_variance.T[i]), - self.X.T[i] + 2 * np.sqrt(self.X_variance.T[i]), - facecolor=plots[-1].get_color(), - alpha=.3) - a.legend(borderaxespad=0.) 
- a.set_xlim(x.min(), x.max()) - if i < self.X.shape[1] - 1: - a.set_xticklabels('') - pylab.draw() - fig.tight_layout(h_pad=.01) # , rect=(0, 0, 1, .95)) - return fig - - def __getstate__(self): - return (self.likelihood, self.input_dim, self.X, self.X_variance, - self.init, self.num_inducing, self.Z, self.kern, - self.oldpsave, self._debug) - - def __setstate__(self, state): - self.__init__(*state) - - def _debug_filter_params(self, x): - start, end = 0, self.X.size, - X = x[start:end].reshape(self.num_data, self.input_dim) - start, end = end, end + self.X_variance.size - X_v = x[start:end].reshape(self.num_data, self.input_dim) - start, end = end, end + (self.num_inducing * self.input_dim) - Z = x[start:end].reshape(self.num_inducing, self.input_dim) - start, end = end, end + self.input_dim - theta = x[start:] - return X, X_v, Z, theta - - - def _debug_get_axis(self, figs): - if figs[-1].axes: - ax1 = figs[-1].axes[0] - ax1.cla() - else: - ax1 = figs[-1].add_subplot(111) - return ax1 - - def _debug_plot(self): - assert self._debug, "must enable _debug, to debug-plot" - import pylab -# from mpl_toolkits.mplot3d import Axes3D - figs = [pylab.figure('BGPLVM DEBUG', figsize=(12, 4))] -# fig.clf() - - # log like -# splotshape = (6, 4) -# ax1 = pylab.subplot2grid(splotshape, (0, 0), 1, 4) - ax1 = self._debug_get_axis(figs) - ax1.text(.5, .5, "Optimization", alpha=.3, transform=ax1.transAxes, - ha='center', va='center') - kllls = np.array(self._savedklll) - LL, = ax1.plot(kllls[:, 0], kllls[:, 1] - kllls[:, 2], '-', label=r'$\log p(\mathbf{Y})$', mew=1.5) - KL, = ax1.plot(kllls[:, 0], kllls[:, 2], '-', label=r'$\mathcal{KL}(p||q)$', mew=1.5) - L, = ax1.plot(kllls[:, 0], kllls[:, 1], '-', label=r'$L$', mew=1.5) # \mathds{E}_{q(\mathbf{X})}[p(\mathbf{Y|X})\frac{p(\mathbf{X})}{q(\mathbf{X})}] - - param_dict = dict(self._savedparams) - gradient_dict = dict(self._savedgradients) -# kmm_dict = dict(self._savedpsiKmm) - iters = np.array(param_dict.keys()) - ABCD_dict = np.array(self._savedABCD) - self.showing = 0 - -# ax2 = pylab.subplot2grid(splotshape, (1, 0), 2, 4) - figs.append(pylab.figure("BGPLVM DEBUG X", figsize=(12, 4))) - ax2 = self._debug_get_axis(figs) - ax2.text(.5, .5, r"$\mathbf{X}$", alpha=.5, transform=ax2.transAxes, - ha='center', va='center') - figs[-1].canvas.draw() - figs[-1].tight_layout(rect=(0, 0, 1, .86)) -# ax3 = pylab.subplot2grid(splotshape, (3, 0), 2, 4, sharex=ax2) - figs.append(pylab.figure("BGPLVM DEBUG S", figsize=(12, 4))) - ax3 = self._debug_get_axis(figs) - ax3.text(.5, .5, r"$\mathbf{S}$", alpha=.5, transform=ax3.transAxes, - ha='center', va='center') - figs[-1].canvas.draw() - figs[-1].tight_layout(rect=(0, 0, 1, .86)) -# ax4 = pylab.subplot2grid(splotshape, (5, 0), 2, 2) - figs.append(pylab.figure("BGPLVM DEBUG Z", figsize=(6, 4))) - ax4 = self._debug_get_axis(figs) - ax4.text(.5, .5, r"$\mathbf{Z}$", alpha=.5, transform=ax4.transAxes, - ha='center', va='center') - figs[-1].canvas.draw() - figs[-1].tight_layout(rect=(0, 0, 1, .86)) -# ax5 = pylab.subplot2grid(splotshape, (5, 2), 2, 2) - figs.append(pylab.figure("BGPLVM DEBUG theta", figsize=(6, 4))) - ax5 = self._debug_get_axis(figs) - ax5.text(.5, .5, r"${\theta}$", alpha=.5, transform=ax5.transAxes, - ha='center', va='center') - figs[-1].canvas.draw() - figs[-1].tight_layout(rect=(.15, 0, 1, .86)) -# figs.append(pylab.figure("BGPLVM DEBUG Kmm", figsize=(12, 6))) -# fig = figs[-1] -# ax6 = fig.add_subplot(121) -# ax6.text(.5, .5, r"${\mathbf{K}_{mm}}$", color='magenta', alpha=.5, transform=ax6.transAxes, -# 
ha='center', va='center') -# ax7 = fig.add_subplot(122) -# ax7.text(.5, .5, r"${\frac{dL}{dK_{mm}}}$", color='magenta', alpha=.5, transform=ax7.transAxes, -# ha='center', va='center') - figs.append(pylab.figure("BGPLVM DEBUG Kmm", figsize=(12, 6))) - fig = figs[-1] - ax8 = fig.add_subplot(121) - ax8.text(.5, .5, r"${\mathbf{A,B,C,input_dim}}$", color='k', alpha=.5, transform=ax8.transAxes, - ha='center', va='center') - ax8.plot(ABCD_dict[:, 0], ABCD_dict[:, 1], label='A') - ax8.plot(ABCD_dict[:, 0], ABCD_dict[:, 2], label='B') - ax8.plot(ABCD_dict[:, 0], ABCD_dict[:, 3], label='C') - ax8.plot(ABCD_dict[:, 0], ABCD_dict[:, 4], label='input_dim') - ax8.legend() - figs[-1].canvas.draw() - figs[-1].tight_layout(rect=(.15, 0, 1, .86)) - - X, S, Z, theta = self._debug_filter_params(param_dict[self.showing]) - Xg, Sg, Zg, thetag = self._debug_filter_params(gradient_dict[self.showing]) -# Xg, Sg, Zg, thetag = -Xg, -Sg, -Zg, -thetag - - quiver_units = 'xy' - quiver_scale = 1 - quiver_scale_units = 'xy' - Xlatentplts = ax2.plot(X, ls="-", marker="x") - colors = colorConverter.to_rgba_array([p.get_color() for p in Xlatentplts], .4) - Ulatent = np.zeros_like(X) - xlatent = np.tile(np.arange(0, X.shape[0])[:, None], X.shape[1]) - Xlatentgrads = ax2.quiver(xlatent, X, Ulatent, Xg, color=colors, - units=quiver_units, scale_units=quiver_scale_units, - scale=quiver_scale) - - Slatentplts = ax3.plot(S, ls="-", marker="x") - Slatentgrads = ax3.quiver(xlatent, S, Ulatent, Sg, color=colors, - units=quiver_units, scale_units=quiver_scale_units, - scale=quiver_scale) - ax3.set_ylim(0, 1.) - - xZ = np.tile(np.arange(0, Z.shape[0])[:, None], Z.shape[1]) - UZ = np.zeros_like(Z) - Zplts = ax4.plot(Z, ls="-", marker="x") - Zgrads = ax4.quiver(xZ, Z, UZ, Zg, color=colors, - units=quiver_units, scale_units=quiver_scale_units, - scale=quiver_scale) - - xtheta = np.arange(len(theta)) - Utheta = np.zeros_like(theta) - thetaplts = ax5.bar(xtheta - .4, theta, color=colors) - thetagrads = ax5.quiver(xtheta, theta, Utheta, thetag, color=colors, - units=quiver_units, scale_units=quiver_scale_units, - scale=quiver_scale, - edgecolors=('k',), linewidths=[1]) - pylab.setp(thetaplts, zorder=0) - pylab.setp(thetagrads, zorder=10) - ax5.set_xticks(np.arange(len(theta))) - ax5.set_xticklabels(self._get_param_names()[-len(theta):], rotation=17) - -# imkmm = ax6.imshow(kmm_dict[self.showing][0]) -# from mpl_toolkits.axes_grid1 import make_axes_locatable -# divider = make_axes_locatable(ax6) -# caxkmm = divider.append_axes("right", "5%", pad="1%") -# cbarkmm = pylab.colorbar(imkmm, cax=caxkmm) -# -# imkmmdl = ax7.imshow(kmm_dict[self.showing][1]) -# divider = make_axes_locatable(ax7) -# caxkmmdl = divider.append_axes("right", "5%", pad="1%") -# cbarkmmdl = pylab.colorbar(imkmmdl, cax=caxkmmdl) - -# input_dimleg = ax1.legend(Xlatentplts, [r"$input_dim_{}$".format(i + 1) for i in range(self.input_dim)], -# loc=3, ncol=self.input_dim, bbox_to_anchor=(0, 1.15, 1, 1.15), -# borderaxespad=0, mode="expand") - ax2.legend(Xlatentplts, [r"$input_dim_{}$".format(i + 1) for i in range(self.input_dim)], - loc=3, ncol=self.input_dim, bbox_to_anchor=(0, 1.1, 1, 1.1), - borderaxespad=0, mode="expand") - ax3.legend(Xlatentplts, [r"$input_dim_{}$".format(i + 1) for i in range(self.input_dim)], - loc=3, ncol=self.input_dim, bbox_to_anchor=(0, 1.1, 1, 1.1), - borderaxespad=0, mode="expand") - ax4.legend(Xlatentplts, [r"$input_dim_{}$".format(i + 1) for i in range(self.input_dim)], - loc=3, ncol=self.input_dim, bbox_to_anchor=(0, 1.1, 1, 1.1), - 
borderaxespad=0, mode="expand") - ax5.legend(Xlatentplts, [r"$input_dim_{}$".format(i + 1) for i in range(self.input_dim)], - loc=3, ncol=self.input_dim, bbox_to_anchor=(0, 1.1, 1, 1.1), - borderaxespad=0, mode="expand") - Lleg = ax1.legend() - Lleg.draggable() -# ax1.add_artist(input_dimleg) - - indicatorKL, = ax1.plot(kllls[self.showing, 0], kllls[self.showing, 2], 'o', c=KL.get_color()) - indicatorLL, = ax1.plot(kllls[self.showing, 0], kllls[self.showing, 1] - kllls[self.showing, 2], 'o', c=LL.get_color()) - indicatorL, = ax1.plot(kllls[self.showing, 0], kllls[self.showing, 1], 'o', c=L.get_color()) -# for err in self._savederrors: -# if err < kllls.shape[0]: -# ax1.scatter(kllls[err, 0], kllls[err, 2], s=50, marker=(5, 2), c=KL.get_color()) -# ax1.scatter(kllls[err, 0], kllls[err, 1] - kllls[err, 2], s=50, marker=(5, 2), c=LL.get_color()) -# ax1.scatter(kllls[err, 0], kllls[err, 1], s=50, marker=(5, 2), c=L.get_color()) - -# try: -# for f in figs: -# f.canvas.draw() -# f.tight_layout(box=(0, .15, 1, .9)) -# # pylab.draw() -# # pylab.tight_layout(box=(0, .1, 1, .9)) -# except: -# pass - - # parameter changes - # ax2 = pylab.subplot2grid((4, 1), (1, 0), 3, 1, projection='3d') - button_options = [0, 0] # [0]: clicked -- [1]: dragged - - def update_plots(event): - if button_options[0] and not button_options[1]: -# event.button, event.x, event.y, event.xdata, event.ydata) - tmp = np.abs(iters - event.xdata) - closest_hit = iters[tmp == tmp.min()][0] - - if closest_hit != self.showing: - self.showing = closest_hit - # print closest_hit, iters, event.xdata - - indicatorLL.set_data(self.showing, kllls[self.showing, 1] - kllls[self.showing, 2]) - indicatorKL.set_data(self.showing, kllls[self.showing, 2]) - indicatorL.set_data(self.showing, kllls[self.showing, 1]) - - X, S, Z, theta = self._debug_filter_params(param_dict[self.showing]) - Xg, Sg, Zg, thetag = self._debug_filter_params(gradient_dict[self.showing]) -# Xg, Sg, Zg, thetag = -Xg, -Sg, -Zg, -thetag - - for i, Xlatent in enumerate(Xlatentplts): - Xlatent.set_ydata(X[:, i]) - Xlatentgrads.set_offsets(np.array([xlatent.ravel(), X.ravel()]).T) - Xlatentgrads.set_UVC(Ulatent, Xg) - - for i, Slatent in enumerate(Slatentplts): - Slatent.set_ydata(S[:, i]) - Slatentgrads.set_offsets(np.array([xlatent.ravel(), S.ravel()]).T) - Slatentgrads.set_UVC(Ulatent, Sg) - - for i, Zlatent in enumerate(Zplts): - Zlatent.set_ydata(Z[:, i]) - Zgrads.set_offsets(np.array([xZ.ravel(), Z.ravel()]).T) - Zgrads.set_UVC(UZ, Zg) - - for p, t in zip(thetaplts, theta): - p.set_height(t) - thetagrads.set_offsets(np.array([xtheta.ravel(), theta.ravel()]).T) - thetagrads.set_UVC(Utheta, thetag) - -# imkmm.set_data(kmm_dict[self.showing][0]) -# imkmm.autoscale() -# cbarkmm.update_normal(imkmm) -# -# imkmmdl.set_data(kmm_dict[self.showing][1]) -# imkmmdl.autoscale() -# cbarkmmdl.update_normal(imkmmdl) - - ax2.relim() - # ax3.relim() - ax4.relim() - ax5.relim() - ax2.autoscale() - # ax3.autoscale() - ax4.autoscale() - ax5.autoscale() - - [fig.canvas.draw() for fig in figs] - button_options[0] = 0 - button_options[1] = 0 - - def onclick(event): - if event.inaxes is ax1 and event.button == 1: - button_options[0] = 1 - def motion(event): - if button_options[0]: - button_options[1] = 1 - - cidr = figs[0].canvas.mpl_connect('button_release_event', update_plots) - cidp = figs[0].canvas.mpl_connect('button_press_event', onclick) - cidd = figs[0].canvas.mpl_connect('motion_notify_event', motion) - - return ax1, ax2, ax3, ax4, ax5 # , ax6, ax7 - - - - -def 
latent_cost_and_grad(mu_S, kern, Z, dL_dpsi0, dL_dpsi1, dL_dpsi2): - """ - objective function for fitting the latent variables for test points - (negative log-likelihood: should be minimised!) - """ - mu, log_S = mu_S.reshape(2, 1, -1) - S = np.exp(log_S) - - psi0 = kern.psi0(Z, mu, S) - psi1 = kern.psi1(Z, mu, S) - psi2 = kern.psi2(Z, mu, S) - - lik = dL_dpsi0 * psi0 + np.dot(dL_dpsi1.flatten(), psi1.flatten()) + np.dot(dL_dpsi2.flatten(), psi2.flatten()) - 0.5 * np.sum(np.square(mu) + S) + 0.5 * np.sum(log_S) - - mu0, S0 = kern.dpsi0_dmuS(dL_dpsi0, Z, mu, S) - mu1, S1 = kern.dpsi1_dmuS(dL_dpsi1, Z, mu, S) - mu2, S2 = kern.dpsi2_dmuS(dL_dpsi2, Z, mu, S) - - dmu = mu0 + mu1 + mu2 - mu - # dS = S0 + S1 + S2 -0.5 + .5/S - dlnS = S * (S0 + S1 + S2 - 0.5) + .5 - return -lik, -np.hstack((dmu.flatten(), dlnS.flatten())) - -def latent_cost(mu_S, kern, Z, dL_dpsi0, dL_dpsi1, dL_dpsi2): - """ - objective function for fitting the latent variables (negative log-likelihood: should be minimised!) - This is the same as latent_cost_and_grad but only for the objective - """ - mu, log_S = mu_S.reshape(2, 1, -1) - S = np.exp(log_S) - - psi0 = kern.psi0(Z, mu, S) - psi1 = kern.psi1(Z, mu, S) - psi2 = kern.psi2(Z, mu, S) - - lik = dL_dpsi0 * psi0 + np.dot(dL_dpsi1.flatten(), psi1.flatten()) + np.dot(dL_dpsi2.flatten(), psi2.flatten()) - 0.5 * np.sum(np.square(mu) + S) + 0.5 * np.sum(log_S) - return -float(lik) - -def latent_grad(mu_S, kern, Z, dL_dpsi0, dL_dpsi1, dL_dpsi2): - """ - This is the same as latent_cost_and_grad but only for the grad - """ - mu, log_S = mu_S.reshape(2, 1, -1) - S = np.exp(log_S) - - mu0, S0 = kern.dpsi0_dmuS(dL_dpsi0, Z, mu, S) - mu1, S1 = kern.dpsi1_dmuS(dL_dpsi1, Z, mu, S) - mu2, S2 = kern.dpsi2_dmuS(dL_dpsi2, Z, mu, S) - - dmu = mu0 + mu1 + mu2 - mu - # dS = S0 + S1 + S2 -0.5 + .5/S - dlnS = S * (S0 + S1 + S2 - 0.5) + .5 - - return -np.hstack((dmu.flatten(), dlnS.flatten())) - - diff --git a/GPy/models/gplvm.py b/GPy/models/gplvm.py deleted file mode 100644 index 44a9d2ce..00000000 --- a/GPy/models/gplvm.py +++ /dev/null @@ -1,67 +0,0 @@ -### Copyright (c) 2012, GPy authors (see AUTHORS.txt). -# Licensed under the BSD 3-clause license (see LICENSE.txt) - - -import numpy as np -import pylab as pb -import sys, pdb -from .. import kern -from ..core import Model -from ..util.linalg import pdinv, PCA -from ..core import GP -from ..likelihoods import Gaussian -from .. 
import util -from GPy.util import plot_latent - - -class GPLVM(GP): - """ - Gaussian Process Latent Variable Model - - :param Y: observed data - :type Y: np.ndarray - :param input_dim: latent dimensionality - :type input_dim: int - :param init: initialisation method for the latent space - :type init: 'PCA'|'random' - - """ - def __init__(self, Y, input_dim, init='PCA', X = None, kernel=None, normalize_Y=False): - if X is None: - X = self.initialise_latent(init, input_dim, Y) - if kernel is None: - kernel = kern.rbf(input_dim, ARD=input_dim>1) + kern.bias(input_dim, np.exp(-2)) + kern.white(input_dim, np.exp(-2)) - likelihood = Gaussian(Y, normalize=normalize_Y) - GP.__init__(self, X, likelihood, kernel, normalize_X=False) - self.ensure_default_constraints() - - def initialise_latent(self, init, input_dim, Y): - if init == 'PCA': - return PCA(Y, input_dim)[0] - else: - return np.random.randn(Y.shape[0], input_dim) - - def _get_param_names(self): - return sum([['X_%i_%i'%(n,q) for q in range(self.input_dim)] for n in range(self.num_data)],[]) + GP._get_param_names(self) - - def _get_params(self): - return np.hstack((self.X.flatten(), GP._get_params(self))) - - def _set_params(self,x): - self.X = x[:self.num_data*self.input_dim].reshape(self.num_data,self.input_dim).copy() - GP._set_params(self, x[self.X.size:]) - - def _log_likelihood_gradients(self): - dL_dX = 2.*self.kern.dK_dX(self.dL_dK,self.X) - - return np.hstack((dL_dX.flatten(),GP._log_likelihood_gradients(self))) - - def plot(self): - assert self.likelihood.Y.shape[1]==2 - pb.scatter(self.likelihood.Y[:,0],self.likelihood.Y[:,1],40,self.X[:,0].copy(),linewidth=0,cmap=pb.cm.jet) - Xnew = np.linspace(self.X.min(),self.X.max(),200)[:,None] - mu, var, upper, lower = self.predict(Xnew) - pb.plot(mu[:,0], mu[:,1],'k',linewidth=1.5) - - def plot_latent(self, *args, **kwargs): - return util.plot_latent.plot_latent(self, *args, **kwargs) diff --git a/GPy/models_modules/__init__.py b/GPy/models_modules/__init__.py new file mode 100644 index 00000000..6fc93631 --- /dev/null +++ b/GPy/models_modules/__init__.py @@ -0,0 +1,19 @@ +# Copyright (c) 2012, GPy authors (see AUTHORS.txt). 
+# Licensed under the BSD 3-clause license (see LICENSE.txt) + +# from gp_regression import GPRegression; _gp_regression = gp_regression ; del gp_regression +# from gp_classification import GPClassification; _gp_classification = gp_classification ; del gp_classification +# from sparse_gp_regression import SparseGPRegression; _sparse_gp_regression = sparse_gp_regression ; del sparse_gp_regression +# from svigp_regression import SVIGPRegression; _svigp_regression = svigp_regression ; del svigp_regression +# from sparse_gp_classification import SparseGPClassification; _sparse_gp_classification = sparse_gp_classification ; del sparse_gp_classification +# from fitc_classification import FITCClassification; _fitc_classification = fitc_classification ; del fitc_classification +# from gplvm import GPLVM; _gplvm = gplvm ; del gplvm +# from bcgplvm import BCGPLVM; _bcgplvm = bcgplvm; del bcgplvm +# from sparse_gplvm import SparseGPLVM; _sparse_gplvm = sparse_gplvm ; del sparse_gplvm +# from warped_gp import WarpedGP; _warped_gp = warped_gp ; del warped_gp +# from bayesian_gplvm import BayesianGPLVM; _bayesian_gplvm = bayesian_gplvm ; del bayesian_gplvm +# from mrd import MRD; _mrd = mrd ; del mrd +# from gradient_checker import GradientChecker; _gradient_checker = gradient_checker ; del gradient_checker +# from gp_multioutput_regression import GPMultioutputRegression; _gp_multioutput_regression = gp_multioutput_regression ; del gp_multioutput_regression +# from sparse_gp_multioutput_regression import SparseGPMultioutputRegression; _sparse_gp_multioutput_regression = sparse_gp_multioutput_regression ; del sparse_gp_multioutput_regression + diff --git a/GPy/models_modules/bayesian_gplvm.py b/GPy/models_modules/bayesian_gplvm.py new file mode 100644 index 00000000..90e54111 --- /dev/null +++ b/GPy/models_modules/bayesian_gplvm.py @@ -0,0 +1,396 @@ +# Copyright (c) 2012, GPy authors (see AUTHORS.txt). +# Licensed under the BSD 3-clause license (see LICENSE.txt) + +import numpy as np +from ..core.sparse_gp import SparseGP +from ..likelihoods import Gaussian +from .. 
import kern
+import itertools
+from matplotlib.colors import colorConverter
+from GPy.inference.optimization import SCG
+from GPy.util import plot_latent, linalg
+from .gplvm import GPLVM
+from GPy.util.plot_latent import most_significant_input_dimensions
+from matplotlib import pyplot
+from GPy.core.model import Model
+
+class BayesianGPLVM(SparseGP, GPLVM):
+    """
+    Bayesian Gaussian Process Latent Variable Model
+
+    :param Y: observed data (np.ndarray) or GPy.likelihood
+    :type Y: np.ndarray | GPy.likelihood instance
+    :param input_dim: latent dimensionality
+    :type input_dim: int
+    :param init: initialisation method for the latent space
+    :type init: 'PCA'|'random'
+
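+    A minimal usage sketch (illustrative only; the random data is made up)::
+
+        import numpy as np
+        import GPy
+
+        Y = np.random.randn(40, 5)   # 40 observations, 5 output dimensions
+        m = GPy.models.BayesianGPLVM(Y, input_dim=2, num_inducing=10)
+        m.optimize()
+
+    """
+    def __init__(self, likelihood_or_Y, input_dim, X=None, X_variance=None, init='PCA', num_inducing=10,
+                 Z=None, kernel=None, **kwargs):
+        if type(likelihood_or_Y) is np.ndarray:
+            likelihood = Gaussian(likelihood_or_Y)
+        else:
+            likelihood = likelihood_or_Y
+
+        if X is None:
+            X = self.initialise_latent(init, input_dim, likelihood.Y)
+        self.init = init
+
+        if X_variance is None:
+            X_variance = np.clip((np.ones_like(X) * 0.5) + .01 * np.random.randn(*X.shape), 0.001, 1)
+
+        if Z is None:
+            Z = np.random.permutation(X.copy())[:num_inducing]
+        assert Z.shape[1] == X.shape[1]
+
+        if kernel is None:
+            kernel = kern.rbf(input_dim)  # + kern.white(input_dim)
+
+        SparseGP.__init__(self, X, likelihood, kernel, Z=Z, X_variance=X_variance, **kwargs)
+        self.ensure_default_constraints()
+
+    def _get_param_names(self):
+        X_names = sum([['X_%i_%i' % (n, q) for q in range(self.input_dim)] for n in range(self.num_data)], [])
+        S_names = sum([['X_variance_%i_%i' % (n, q) for q in range(self.input_dim)] for n in range(self.num_data)], [])
+        return (X_names + S_names + SparseGP._get_param_names(self))
+
+    #def _get_print_names(self):
+    #    return SparseGP._get_print_names(self)
+
+    def _get_params(self):
+        """
+        Horizontally stacks the parameters in order to present them to the optimizer.
+        The resulting 1-D array has this structure:
+
+        ===============================================================
+        |    mu    |    S    |    Z    |    theta    |    beta    |
+        ===============================================================
+
+        """
+        x = np.hstack((self.X.flatten(), self.X_variance.flatten(), SparseGP._get_params(self)))
+        return x
+
+    def _set_params(self, x, save_old=True, save_count=0):
+        N, input_dim = self.num_data, self.input_dim
+        self.X = x[:self.X.size].reshape(N, input_dim).copy()
+        self.X_variance = x[(N * input_dim):(2 * N * input_dim)].reshape(N, input_dim).copy()
+        SparseGP._set_params(self, x[(2 * N * input_dim):])
+
+    def dKL_dmuS(self):
+        dKL_dS = (1. - (1. 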
/ (self.X_variance))) * 0.5 + dKL_dmu = self.X + return dKL_dmu, dKL_dS + + def dL_dmuS(self): + dL_dmu_psi0, dL_dS_psi0 = self.kern.dpsi0_dmuS(self.dL_dpsi0, self.Z, self.X, self.X_variance) + dL_dmu_psi1, dL_dS_psi1 = self.kern.dpsi1_dmuS(self.dL_dpsi1, self.Z, self.X, self.X_variance) + dL_dmu_psi2, dL_dS_psi2 = self.kern.dpsi2_dmuS(self.dL_dpsi2, self.Z, self.X, self.X_variance) + dL_dmu = dL_dmu_psi0 + dL_dmu_psi1 + dL_dmu_psi2 + dL_dS = dL_dS_psi0 + dL_dS_psi1 + dL_dS_psi2 + + return dL_dmu, dL_dS + + def KL_divergence(self): + var_mean = np.square(self.X).sum() + var_S = np.sum(self.X_variance - np.log(self.X_variance)) + return 0.5 * (var_mean + var_S) - 0.5 * self.input_dim * self.num_data + + def log_likelihood(self): + ll = SparseGP.log_likelihood(self) + kl = self.KL_divergence() + return ll - kl + + def _log_likelihood_gradients(self): + dKL_dmu, dKL_dS = self.dKL_dmuS() + dL_dmu, dL_dS = self.dL_dmuS() + d_dmu = (dL_dmu - dKL_dmu).flatten() + d_dS = (dL_dS - dKL_dS).flatten() + self.dbound_dmuS = np.hstack((d_dmu, d_dS)) + self.dbound_dZtheta = SparseGP._log_likelihood_gradients(self) + return np.hstack((self.dbound_dmuS.flatten(), self.dbound_dZtheta)) + + def plot_latent(self, plot_inducing=True, *args, **kwargs): + return plot_latent.plot_latent(self, plot_inducing=plot_inducing, *args, **kwargs) + + def do_test_latents(self, Y): + """ + Compute the latent representation for a set of new points Y + + Notes: + This will only work with a univariate Gaussian likelihood (for now) + """ + assert not self.likelihood.is_heteroscedastic + N_test = Y.shape[0] + input_dim = self.Z.shape[1] + means = np.zeros((N_test, input_dim)) + covars = np.zeros((N_test, input_dim)) + + dpsi0 = -0.5 * self.input_dim * self.likelihood.precision + dpsi2 = self.dL_dpsi2[0][None, :, :] # TODO: this may change if we ignore het. likelihoods + V = self.likelihood.precision * Y + + #compute CPsi1V + if self.Cpsi1V is None: + psi1V = np.dot(self.psi1.T, self.likelihood.V) + tmp, _ = linalg.dtrtrs(self._Lm, np.asfortranarray(psi1V), lower=1, trans=0) + tmp, _ = linalg.dpotrs(self.LB, tmp, lower=1) + self.Cpsi1V, _ = linalg.dtrtrs(self._Lm, tmp, lower=1, trans=1) + + dpsi1 = np.dot(self.Cpsi1V, V.T) + + start = np.zeros(self.input_dim * 2) + + for n, dpsi1_n in enumerate(dpsi1.T[:, :, None]): + args = (self.kern, self.Z, dpsi0, dpsi1_n.T, dpsi2) + xopt, fopt, neval, status = SCG(f=latent_cost, gradf=latent_grad, x=start, optargs=args, display=False) + + mu, log_S = xopt.reshape(2, 1, -1) + means[n] = mu[0].copy() + covars[n] = np.exp(log_S[0]).copy() + + return means, covars + + def dmu_dX(self, Xnew): + """ + Calculate the gradient of the prediction at Xnew w.r.t Xnew. + """ + dmu_dX = np.zeros_like(Xnew) + for i in range(self.Z.shape[0]): + dmu_dX += self.kern.dK_dX(self.Cpsi1Vf[i:i + 1, :], Xnew, self.Z[i:i + 1, :]) + return dmu_dX + + def dmu_dXnew(self, Xnew): + """ + Individual gradient of prediction at Xnew w.r.t. 
each sample in Xnew
+        """
+        dK_dX = np.zeros((Xnew.shape[0], self.num_inducing))
+        ones = np.ones((1, 1))
+        for i in range(self.Z.shape[0]):
+            dK_dX[:, i] = self.kern.dK_dX(ones, Xnew, self.Z[i:i + 1, :]).sum(-1)
+        return np.dot(dK_dX, self.Cpsi1Vf)
+
+    def plot_steepest_gradient_map(self, fignum=None, ax=None, which_indices=None, labels=None, data_labels=None, data_marker='o', data_s=40, resolution=20, aspect='auto', updates=False, **kwargs):
+        input_1, input_2 = significant_dims = most_significant_input_dimensions(self, which_indices)
+
+        X = np.zeros((resolution ** 2, self.input_dim))
+        indices = np.r_[:X.shape[0]]
+        if labels is None:
+            labels = range(self.output_dim)
+
+        def plot_function(x):
+            X[:, significant_dims] = x
+            dmu_dX = self.dmu_dXnew(X)
+            argmax = np.argmax(dmu_dX, 1)
+            return dmu_dX[indices, argmax], np.array(labels)[argmax]
+
+        if ax is None:
+            fig = pyplot.figure(num=fignum)
+            ax = fig.add_subplot(111)
+
+        if data_labels is None:
+            data_labels = np.ones(self.num_data)
+        ulabels = []
+        for lab in data_labels:
+            if lab not in ulabels:
+                ulabels.append(lab)
+        marker = itertools.cycle(list(data_marker))
+        from GPy.util import Tango
+        for i, ul in enumerate(ulabels):
+            if type(ul) is np.string_:
+                this_label = ul
+            elif type(ul) is np.int64:
+                this_label = 'class %i' % ul
+            else:
+                this_label = 'class %i' % i
+            m = marker.next()
+            index = np.nonzero(data_labels == ul)[0]
+            x = self.X[index, input_1]
+            y = self.X[index, input_2]
+            ax.scatter(x, y, marker=m, s=data_s, color=Tango.nextMedium(), label=this_label)
+
+        ax.set_xlabel('latent dimension %i' % input_1)
+        ax.set_ylabel('latent dimension %i' % input_2)
+
+        from matplotlib.cm import get_cmap
+        from GPy.util.latent_space_visualizations.controllers.imshow_controller import ImAnnotateController
+        controller = ImAnnotateController(ax,
+                                          plot_function,
+                                          tuple(self.X.min(0)[:, significant_dims]) + tuple(self.X.max(0)[:, significant_dims]),
+                                          resolution=resolution,
+                                          aspect=aspect,
+                                          cmap=get_cmap('jet'),
+                                          **kwargs)
+        ax.legend()
+        ax.figure.tight_layout()
+        if updates:
+            pyplot.show()
+            clear = raw_input('Enter to continue')
+            if clear.lower() in 'yes' or clear == '':
+                controller.deactivate()
+        return controller.view
+
+    def plot_X_1d(self, fignum=None, ax=None, colors=None):
+        """
+        Plot latent space X in 1D:
+
+        - if fig is given, create input_dim subplots in fig and plot in these
+        - if ax is given, plot input_dim 1D latent space plots of X into each given `axis`
+        - if neither fig nor ax is given, create a figure with fignum and plot in there
+
+        colors:
+            colors for the different latent space dimensions (input_dim of them)
+
+        """
+        import pylab
+        if ax is None:
+            fig = pylab.figure(num=fignum, figsize=(8, min(12, (2 * self.X.shape[1]))))
+            if colors is None:
+                colors = pylab.gca()._get_lines.color_cycle
+                pylab.clf()
+        else:
+            colors = iter(colors)
+        plots = []
+        x = np.arange(self.X.shape[0])
+        for i in range(self.X.shape[1]):
+            if ax is None:
+                a = fig.add_subplot(self.X.shape[1], 1, i + 1)
+            elif isinstance(ax, (tuple, list)):
+                a = ax[i]
+            else:
+                raise ValueError("Need one ax per latent dimension input_dim")
+            a.plot(self.X, c='k', alpha=.3)
+            plots.extend(a.plot(x, self.X.T[i], c=colors.next(), label=r"$\mathbf{{X_{{{}}}}}$".format(i)))
+            a.fill_between(x,
+                           self.X.T[i] - 2 * np.sqrt(self.X_variance.T[i]),
+                           self.X.T[i] + 2 * np.sqrt(self.X_variance.T[i]),
+                           facecolor=plots[-1].get_color(),
+                           alpha=.3)
+            a.legend(borderaxespad=0.)
+            a.set_xlim(x.min(), x.max())
+            if i < self.X.shape[1] - 1:
+                a.set_xticklabels('')
+        pylab.draw()
+        fig.tight_layout(h_pad=.01)  # , rect=(0, 0, 1, .95))
+        return fig
+
+    def getstate(self):
+        """
+        Get the current state of the class,
+        here just all the indices; the rest can get recomputed
+        """
+        return SparseGP.getstate(self) + [self.init]
+
+    def setstate(self, state):
+        self._const_jitter = None
+        self.init = state.pop()
+        SparseGP.setstate(self, state)
+
+class BayesianGPLVMWithMissingData(Model):
+    """
+    Bayesian Gaussian Process Latent Variable Model with missing data support.
+    NOTE: Missing data is assumed to be missing at random!
+
+    This extension comes with a large memory and run-time overhead.
+    Use it only if the fraction of data missing at random is higher than 60%;
+    otherwise, try filtering the data before resorting to this extension.
+
+    Y can hold missing data as given by `missing`; the default is :class:`~numpy.nan`.
+
+    If a likelihood is given for Y, that likelihood object will be discarded, but
+    its parameters will be kept, and every effort will be made to construct an
+    equivalent likelihood.
+
+    :param likelihood_or_Y: observed data (np.ndarray) or GPy.likelihood
+    :type likelihood_or_Y: :class:`~numpy.ndarray` | :class:`~GPy.likelihoods.likelihood.likelihood` instance
+    :param int input_dim: latent dimensionality
+    :param init: initialisation method for the latent space
+    :type init: 'PCA' | 'random'
+    """
+    def __init__(self, likelihood_or_Y, input_dim, X=None, X_variance=None, init='PCA', num_inducing=10,
+                 Z=None, kernel=None, missing=np.nan, **kwargs):
+        if type(likelihood_or_Y) is np.ndarray:
+            likelihood = Gaussian(likelihood_or_Y)
+        else:
+            likelihood = likelihood_or_Y
+
+        if X is None:
+            X = self.initialise_latent(init, input_dim, likelihood.Y)
+        self.init = init
+
+        if X_variance is None:
+            X_variance = np.clip((np.ones_like(X) * 0.5) + .01 * np.random.randn(*X.shape), 0.001, 1)
+
+        if Z is None:
+            Z = np.random.permutation(X.copy())[:num_inducing]
+        assert Z.shape[1] == X.shape[1]
+
+        if kernel is None:
+            kernel = kern.rbf(input_dim)  # + kern.white(input_dim)
+
+        SparseGP.__init__(self, X, likelihood, kernel, Z=Z, X_variance=X_variance, **kwargs)
+        self.ensure_default_constraints()
+
+    def _get_param_names(self):
+        X_names = sum([['X_%i_%i' % (n, q) for q in range(self.input_dim)] for n in range(self.num_data)], [])
+        S_names = sum([['X_variance_%i_%i' % (n, q) for q in range(self.input_dim)] for n in range(self.num_data)], [])
+        return (X_names + S_names + SparseGP._get_param_names(self))
+
+    pass
+
+def latent_cost_and_grad(mu_S, kern, Z, dL_dpsi0, dL_dpsi1, dL_dpsi2):
+    """
+    objective function for fitting the latent variables for test points
+    (negative log-likelihood: should be minimised!)
+ """ + mu, log_S = mu_S.reshape(2, 1, -1) + S = np.exp(log_S) + + psi0 = kern.psi0(Z, mu, S) + psi1 = kern.psi1(Z, mu, S) + psi2 = kern.psi2(Z, mu, S) + + lik = dL_dpsi0 * psi0 + np.dot(dL_dpsi1.flatten(), psi1.flatten()) + np.dot(dL_dpsi2.flatten(), psi2.flatten()) - 0.5 * np.sum(np.square(mu) + S) + 0.5 * np.sum(log_S) + + mu0, S0 = kern.dpsi0_dmuS(dL_dpsi0, Z, mu, S) + mu1, S1 = kern.dpsi1_dmuS(dL_dpsi1, Z, mu, S) + mu2, S2 = kern.dpsi2_dmuS(dL_dpsi2, Z, mu, S) + + dmu = mu0 + mu1 + mu2 - mu + # dS = S0 + S1 + S2 -0.5 + .5/S + dlnS = S * (S0 + S1 + S2 - 0.5) + .5 + return -lik, -np.hstack((dmu.flatten(), dlnS.flatten())) + +def latent_cost(mu_S, kern, Z, dL_dpsi0, dL_dpsi1, dL_dpsi2): + """ + objective function for fitting the latent variables (negative log-likelihood: should be minimised!) + This is the same as latent_cost_and_grad but only for the objective + """ + mu, log_S = mu_S.reshape(2, 1, -1) + S = np.exp(log_S) + + psi0 = kern.psi0(Z, mu, S) + psi1 = kern.psi1(Z, mu, S) + psi2 = kern.psi2(Z, mu, S) + + lik = dL_dpsi0 * psi0 + np.dot(dL_dpsi1.flatten(), psi1.flatten()) + np.dot(dL_dpsi2.flatten(), psi2.flatten()) - 0.5 * np.sum(np.square(mu) + S) + 0.5 * np.sum(log_S) + return -float(lik) + +def latent_grad(mu_S, kern, Z, dL_dpsi0, dL_dpsi1, dL_dpsi2): + """ + This is the same as latent_cost_and_grad but only for the grad + """ + mu, log_S = mu_S.reshape(2, 1, -1) + S = np.exp(log_S) + + mu0, S0 = kern.dpsi0_dmuS(dL_dpsi0, Z, mu, S) + mu1, S1 = kern.dpsi1_dmuS(dL_dpsi1, Z, mu, S) + mu2, S2 = kern.dpsi2_dmuS(dL_dpsi2, Z, mu, S) + + dmu = mu0 + mu1 + mu2 - mu + # dS = S0 + S1 + S2 -0.5 + .5/S + dlnS = S * (S0 + S1 + S2 - 0.5) + .5 + + return -np.hstack((dmu.flatten(), dlnS.flatten())) + + diff --git a/GPy/models_modules/bcgplvm.py b/GPy/models_modules/bcgplvm.py new file mode 100644 index 00000000..92db6953 --- /dev/null +++ b/GPy/models_modules/bcgplvm.py @@ -0,0 +1,50 @@ +# ## Copyright (c) 2012, GPy authors (see AUTHORS.txt). 
+# Licensed under the BSD 3-clause license (see LICENSE.txt) + + +import numpy as np +import pylab as pb +import sys, pdb +from ..core import GP +from ..models import GPLVM +from ..mappings import Kernel + + +class BCGPLVM(GPLVM): + """ + Back constrained Gaussian Process Latent Variable Model + + :param Y: observed data + :type Y: np.ndarray + :param input_dim: latent dimensionality + :type input_dim: int + :param init: initialisation method for the latent space + :type init: 'PCA'|'random' + :param mapping: mapping for back constraint + :type mapping: GPy.core.Mapping object + + """ + def __init__(self, Y, input_dim, init='PCA', X=None, kernel=None, normalize_Y=False, mapping=None): + + if mapping is None: + mapping = Kernel(X=Y, output_dim=input_dim) + self.mapping = mapping + GPLVM.__init__(self, Y, input_dim, init, X, kernel, normalize_Y) + self.X = self.mapping.f(self.likelihood.Y) + + def _get_param_names(self): + return self.mapping._get_param_names() + GP._get_param_names(self) + + def _get_params(self): + return np.hstack((self.mapping._get_params(), GP._get_params(self))) + + def _set_params(self, x): + self.mapping._set_params(x[:self.mapping.num_params]) + self.X = self.mapping.f(self.likelihood.Y) + GP._set_params(self, x[self.mapping.num_params:]) + + def _log_likelihood_gradients(self): + dL_df = self.kern.dK_dX(self.dL_dK, self.X) + dL_dtheta = self.mapping.df_dtheta(dL_df, self.likelihood.Y) + return np.hstack((dL_dtheta.flatten(), GP._log_likelihood_gradients(self))) + diff --git a/GPy/models/fitc_classification.py b/GPy/models_modules/fitc_classification.py similarity index 87% rename from GPy/models/fitc_classification.py rename to GPy/models_modules/fitc_classification.py index f4cf4e8d..0aa21db9 100644 --- a/GPy/models/fitc_classification.py +++ b/GPy/models_modules/fitc_classification.py @@ -16,7 +16,7 @@ class FITCClassification(FITC): :param X: input observations :param Y: observed values - :param likelihood: a GPy likelihood, defaults to Binomial with probit link function + :param likelihood: a GPy likelihood, defaults to Bernoulli with probit link function :param kernel: a GPy kernel, defaults to rbf+white :param normalize_X: whether to normalize the input data before computing (predictions will be in original scales) :type normalize_X: False|True @@ -31,8 +31,8 @@ class FITCClassification(FITC): kernel = kern.rbf(X.shape[1]) + kern.white(X.shape[1],1e-3) if likelihood is None: - distribution = likelihoods.likelihood_functions.Binomial() - likelihood = likelihoods.EP(Y, distribution) + noise_model = likelihoods.bernoulli() + likelihood = likelihoods.EP(Y, noise_model) elif Y is not None: if not all(Y.flatten() == likelihood.data.flatten()): raise Warning, 'likelihood.data and Y are different.' 
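Note: a minimal sketch of how the reworked classification defaults above would be used (a sketch only; the EP workflow call is taken from the usual GPy usage pattern and is not verified against this exact revision):

    import numpy as np
    import GPy

    X = np.random.rand(40, 1)
    Y = np.where(np.sin(X * 6.) > 0, 1, 0)

    # with likelihood=None, GPClassification now builds
    # likelihoods.EP(Y, likelihoods.bernoulli()) internally
    m = GPy.models.GPClassification(X, Y)
    m.update_likelihood_approximation()  # fit the EP approximation
    m.optimize()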
diff --git a/GPy/models/gp_classification.py b/GPy/models_modules/gp_classification.py
similarity index 83%
rename from GPy/models/gp_classification.py
rename to GPy/models_modules/gp_classification.py
index c6012988..7fc61bb7 100644
--- a/GPy/models/gp_classification.py
+++ b/GPy/models_modules/gp_classification.py
@@ -14,8 +14,8 @@ class GPClassification(GP):
     This is a thin wrapper around the models.GP class, with a set of sensible defaults

     :param X: input observations
-    :param Y: observed values
-    :param likelihood: a GPy likelihood, defaults to Binomial with probit link_function
+    :param Y: observed values, can be None if likelihood is not None
+    :param likelihood: a GPy likelihood, defaults to Bernoulli with probit link_function
     :param kernel: a GPy kernel, defaults to rbf
     :param normalize_X: whether to normalize the input data before computing (predictions will be in original scales)
     :type normalize_X: False|True
@@ -31,8 +31,8 @@ class GPClassification(GP):
             kernel = kern.rbf(X.shape[1])

         if likelihood is None:
-            distribution = likelihoods.likelihood_functions.Binomial()
-            likelihood = likelihoods.EP(Y, distribution)
+            noise_model = likelihoods.bernoulli()
+            likelihood = likelihoods.EP(Y, noise_model)
         elif Y is not None:
             if not all(Y.flatten() == likelihood.data.flatten()):
                 raise Warning, 'likelihood.data and Y are different.'
diff --git a/GPy/models_modules/gp_multioutput_regression.py b/GPy/models_modules/gp_multioutput_regression.py
new file mode 100644
index 00000000..4ce3dfbc
--- /dev/null
+++ b/GPy/models_modules/gp_multioutput_regression.py
@@ -0,0 +1,58 @@
+# Copyright (c) 2013, Ricardo Andrade
+# Licensed under the BSD 3-clause license (see LICENSE.txt)
+
+
+import numpy as np
+from ..core import GP
+from .. import likelihoods
+from .. import kern
+
+class GPMultioutputRegression(GP):
+    """
+    Multiple output Gaussian process with Gaussian noise
+
+    This is a wrapper around the models.GP class, with a set of sensible defaults
+
+    :param X_list: input observations
+    :type X_list: list of numpy arrays (num_data_output_i x input_dim), one array per output
+    :param Y_list: observed values
+    :type Y_list: list of numpy arrays (num_data_output_i x 1), one array per output
+    :param kernel_list: GPy kernels, defaults to rbf
+    :type kernel_list: list of GPy kernels
+    :param noise_variance_list: noise parameters per output, defaults to 1.0 for every output
+    :type noise_variance_list: list of floats
+    :param normalize_X: whether to normalize the input data before computing (predictions will be in original scales)
+    :type normalize_X: False|True
+    :param normalize_Y: whether to normalize the output data before computing (predictions will be in original scales)
+    :type normalize_Y: False|True
+    :param rank: rank of the coregionalization matrix 'coregion_W' (see the coregionalize kernel documentation)
+    :type rank: integer
+    """
+
+    def __init__(self,X_list,Y_list,kernel_list=None,noise_variance_list=None,normalize_X=False,normalize_Y=False,rank=1):
+
+        self.output_dim = len(Y_list)
+        assert len(X_list) == self.output_dim, 'Number of outputs does not match length of inputs list.'
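+
+        # All outputs are folded into one design matrix: the X's are stacked
+        # and an extra final column holds the index of the output each row
+        # belongs to, which the coregionalization kernel keys on.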
+ + #Inputs indexing + i = 0 + index = [] + for x,y in zip(X_list,Y_list): + assert x.shape[0] == y.shape[0] + index.append(np.repeat(i,x.size)[:,None]) + i += 1 + index = np.vstack(index) + X = np.hstack([np.vstack(X_list),index]) + original_dim = X.shape[1] - 1 + + #Mixed noise likelihood definition + likelihood = likelihoods.Gaussian_Mixed_Noise(Y_list,noise_params=noise_variance_list,normalize=normalize_Y) + + #Coregionalization kernel definition + if kernel_list is None: + kernel_list = [kern.rbf(original_dim)] + mkernel = kern.build_lcm(input_dim=original_dim, output_dim=self.output_dim, kernel_list = kernel_list, rank=rank) + + self.multioutput = True + GP.__init__(self, X, likelihood, mkernel, normalize_X=normalize_X) + self.ensure_default_constraints() diff --git a/GPy/models/gp_regression.py b/GPy/models_modules/gp_regression.py similarity index 75% rename from GPy/models/gp_regression.py rename to GPy/models_modules/gp_regression.py index db5d21b2..8b44c1ba 100644 --- a/GPy/models/gp_regression.py +++ b/GPy/models_modules/gp_regression.py @@ -2,7 +2,6 @@ # Licensed under the BSD 3-clause license (see LICENSE.txt) -import numpy as np from ..core import GP from .. import likelihoods from .. import kern @@ -25,11 +24,18 @@ class GPRegression(GP): """ - def __init__(self,X,Y,kernel=None,normalize_X=False,normalize_Y=False): + def __init__(self, X, Y, kernel=None, normalize_X=False, normalize_Y=False, likelihood=None): if kernel is None: kernel = kern.rbf(X.shape[1]) - likelihood = likelihoods.Gaussian(Y,normalize=normalize_Y) + if likelihood is None: + likelihood = likelihoods.Gaussian(Y, normalize=normalize_Y) GP.__init__(self, X, likelihood, kernel, normalize_X=normalize_X) self.ensure_default_constraints() + + def getstate(self): + return GP.getstate(self) + + def setstate(self, state): + return GP.setstate(self, state) diff --git a/GPy/models_modules/gplvm.py b/GPy/models_modules/gplvm.py new file mode 100644 index 00000000..f27f861c --- /dev/null +++ b/GPy/models_modules/gplvm.py @@ -0,0 +1,92 @@ +# ## Copyright (c) 2012, GPy authors (see AUTHORS.txt). +# Licensed under the BSD 3-clause license (see LICENSE.txt) + + +import numpy as np +import pylab as pb +from .. import kern +from ..core import priors +from ..core import GP +from ..likelihoods import Gaussian +from .. 
import util + + +class GPLVM(GP): + """ + Gaussian Process Latent Variable Model + + :param Y: observed data + :type Y: np.ndarray + :param input_dim: latent dimensionality + :type input_dim: int + :param init: initialisation method for the latent space + :type init: 'PCA'|'random' + + """ + def __init__(self, Y, input_dim, init='PCA', X=None, kernel=None, normalize_Y=False): + if X is None: + X = self.initialise_latent(init, input_dim, Y) + if kernel is None: + kernel = kern.rbf(input_dim, ARD=input_dim > 1) + kern.bias(input_dim, np.exp(-2)) + likelihood = Gaussian(Y, normalize=normalize_Y, variance=np.exp(-2.)) + GP.__init__(self, X, likelihood, kernel, normalize_X=False) + self.set_prior('.*X', priors.Gaussian(0, 1)) + self.ensure_default_constraints() + + def initialise_latent(self, init, input_dim, Y): + Xr = np.random.randn(Y.shape[0], input_dim) + if init == 'PCA': + from ..util.linalg import PCA + PC = PCA(Y, input_dim)[0] + Xr[:PC.shape[0], :PC.shape[1]] = PC + return Xr + + def _get_param_names(self): + return sum([['X_%i_%i' % (n, q) for q in range(self.input_dim)] for n in range(self.num_data)], []) + GP._get_param_names(self) + + def _get_params(self): + return np.hstack((self.X.flatten(), GP._get_params(self))) + + def _set_params(self, x): + self.X = x[:self.num_data * self.input_dim].reshape(self.num_data, self.input_dim).copy() + GP._set_params(self, x[self.X.size:]) + + def _log_likelihood_gradients(self): + dL_dX = self.kern.dK_dX(self.dL_dK, self.X) + + return np.hstack((dL_dX.flatten(), GP._log_likelihood_gradients(self))) + + def jacobian(self,X): + target = np.zeros((X.shape[0],X.shape[1],self.output_dim)) + for i in range(self.output_dim): + target[:,:,i] = self.kern.dK_dX(np.dot(self.Ki,self.likelihood.Y[:,i])[None, :],X,self.X) + return target + + def magnification(self,X): + target=np.zeros(X.shape[0]) + J = np.zeros((X.shape[0],X.shape[1],self.output_dim)) + J=self.jacobian(X) + for i in range(X.shape[0]): + target[i]=np.sqrt(pb.det(np.dot(J[i,:,:],np.transpose(J[i,:,:])))) + return target + + def plot(self): + assert self.likelihood.Y.shape[1] == 2 + pb.scatter(self.likelihood.Y[:, 0], self.likelihood.Y[:, 1], 40, self.X[:, 0].copy(), linewidth=0, cmap=pb.cm.jet) + Xnew = np.linspace(self.X.min(), self.X.max(), 200)[:, None] + mu, var, upper, lower = self.predict(Xnew) + pb.plot(mu[:, 0], mu[:, 1], 'k', linewidth=1.5) + + def plot_latent(self, *args, **kwargs): + return util.plot_latent.plot_latent(self, *args, **kwargs) + + def plot_magnification(self, *args, **kwargs): + return util.plot_latent.plot_magnification(self, *args, **kwargs) + + def getstate(self): + return GP.getstate(self) + + def setstate(self, state): + GP.setstate(self, state) + + diff --git a/GPy/models_modules/gradient_checker.py b/GPy/models_modules/gradient_checker.py new file mode 100644 index 00000000..dfd0640f --- /dev/null +++ b/GPy/models_modules/gradient_checker.py @@ -0,0 +1,113 @@ +''' +Created on 17 Jul 2013 + +@author: maxz +''' +from GPy.core.model import Model +import itertools +import numpy + +def get_shape(x): + if isinstance(x, numpy.ndarray): + return x.shape + return () + +def at_least_one_element(x): + if isinstance(x, (list, tuple)): + return x + return [x] + +def flatten_if_needed(x): + return numpy.atleast_1d(x).flatten() + +class GradientChecker(Model): + + def __init__(self, f, df, x0, names=None, *args, **kwargs): + """ + :param f: Function to check gradient for + :param df: Gradient of function to check + :param x0: + Initial guess for inputs x (if it has a shape 
(a,b) this will be reflected in the parameter names). + Can be a list of arrays, if f takes a list of arrays. This list will be passed + to f and df in the same order as given here. + If f takes only one argument, make sure not to pass a list for x0!!! + :type x0: [array-like] | array-like | float | int + :param list names: + Names to print, when performing gradcheck. If a list was passed to x0 + a list of names with the same length is expected. + :param args kwargs: Arguments passed as f(x, *args, **kwargs) and df(x, *args, **kwargs) + + Examples: + --------- + from GPy.models import GradientChecker + N, M, Q = 10, 5, 3 + + Sinusoid: + + X = numpy.random.rand(N, Q) + grad = GradientChecker(numpy.sin,numpy.cos,X,'sin_in') + grad.checkgrad(verbose=1) + + Using GPy: + + X, Z = numpy.random.randn(N,Q), numpy.random.randn(M,Q) + kern = GPy.kern.linear(Q, ARD=True) + GPy.kern.rbf(Q, ARD=True) + grad = GradientChecker(kern.K, + lambda x: kern.dK_dX(numpy.ones((1,1)), x), + x0 = X.copy(), + names=['X_input']) + grad.checkgrad(verbose=1) + grad.randomize() + grad.checkgrad(verbose=1) + """ + Model.__init__(self) + if isinstance(x0, (list, tuple)) and names is None: + self.shapes = [get_shape(xi) for xi in x0] + self.names = ['X{i}'.format(i=i) for i in range(len(x0))] + elif isinstance(x0, (list, tuple)) and names is not None: + self.shapes = [get_shape(xi) for xi in x0] + self.names = names + elif names is None: + self.names = ['X'] + self.shapes = [get_shape(x0)] + else: + self.names = names + self.shapes = [get_shape(x0)] + for name, xi in zip(self.names, at_least_one_element(x0)): + self.__setattr__(name, numpy.float_(xi)) +# self._param_names = [] +# for name, shape in zip(self.names, self.shapes): +# self._param_names.extend(map(lambda nameshape: ('_'.join(nameshape)).strip('_'), itertools.izip(itertools.repeat(name), itertools.imap(lambda t: '_'.join(map(str, t)), itertools.product(*map(lambda xi: range(xi), shape)))))) + self.args = args + self.kwargs = kwargs + self._f = f + self._df = df + + def _get_x(self): + if len(self.names) > 1: + return [self.__getattribute__(name) for name in self.names] + list(self.args) + return [self.__getattribute__(self.names[0])] + list(self.args) + + def log_likelihood(self): + return float(numpy.sum(self._f(*self._get_x(), **self.kwargs))) + + def _log_likelihood_gradients(self): + return numpy.atleast_1d(self._df(*self._get_x(), **self.kwargs)).flatten() + + + def _get_params(self): + return numpy.atleast_1d(numpy.hstack(map(lambda name: flatten_if_needed(self.__getattribute__(name)), self.names))) + + + def _set_params(self, x): + current_index = 0 + for name, shape in zip(self.names, self.shapes): + current_size = numpy.prod(shape) + self.__setattr__(name, x[current_index:current_index + current_size].reshape(shape)) + current_index += current_size + + def _get_param_names(self): + _param_names = [] + for name, shape in zip(self.names, self.shapes): + _param_names.extend(map(lambda nameshape: ('_'.join(nameshape)).strip('_'), itertools.izip(itertools.repeat(name), itertools.imap(lambda t: '_'.join(map(str, t)), itertools.product(*map(lambda xi: range(xi), shape)))))) + return _param_names diff --git a/GPy/models/mrd.py b/GPy/models_modules/mrd.py similarity index 72% rename from GPy/models/mrd.py rename to GPy/models_modules/mrd.py index 1d521e5d..b9c99a64 100644 --- a/GPy/models/mrd.py +++ b/GPy/models_modules/mrd.py @@ -9,8 +9,8 @@ from GPy.util.linalg import PCA import numpy import itertools import pylab -from GPy.kern.kern import kern -from 
GPy.models.bayesian_gplvm import BayesianGPLVM +from ..kern import kern +from bayesian_gplvm import BayesianGPLVM class MRD(Model): """ @@ -18,37 +18,34 @@ class MRD(Model): All Ys in likelihood_list are in [N x Dn], where Dn can be different per Yn, N must be shared across datasets though. - :param likelihood_list...: likelihoods of observed datasets - :type likelihood_list: [GPy.likelihood] | [Y1..Yy] + :param likelihood_list: list of observed datasets (:py:class:`~GPy.likelihoods.gaussian.Gaussian` if not supplied directly) + :type likelihood_list: [:py:class:`~GPy.likelihoods.likelihood.likelihood` | :py:class:`ndarray`] :param names: names for different gplvm models :type names: [str] - :param input_dim: latent dimensionality (will raise + :param input_dim: latent dimensionality :type input_dim: int - :param initx: initialisation method for the latent space - :type initx: 'PCA'|'random' + :param initx: initialisation method for the latent space : + + * 'concat' - PCA on concatenation of all datasets + * 'single' - Concatenation of PCA on datasets, respectively + * 'random' - Random draw from a normal + + :type initx: ['concat'|'single'|'random'] :param initz: initialisation method for inducing inputs :type initz: 'permute'|'random' - :param X: - Initial latent space - :param X_variance: - Initial latent space variance - :param init: [cooncat|single|random] - initialization method to use: - *concat: PCA on concatenated outputs - *single: PCA on each output - *random: random - :param num_inducing: - number of inducing inputs to use - :param Z: - initial inducing inputs + :param X: Initial latent space + :param X_variance: Initial latent space variance + :param Z: initial inducing inputs + :param num_inducing: number of inducing inputs to use :param kernels: list of kernels or kernel shared for all BGPLVMS :type kernels: [GPy.kern.kern] | GPy.kern.kern | None (default) + """ def __init__(self, likelihood_or_Y_list, input_dim, num_inducing=10, names=None, kernels=None, initx='PCA', initz='permute', _debug=False, **kw): if names is None: - self.names = ["{}".format(i + 1) for i in range(len(likelihood_or_Y_list))] + self.names = ["{}".format(i) for i in range(len(likelihood_or_Y_list))] # sort out the kernels if kernels is None: @@ -61,12 +58,14 @@ class MRD(Model): assert not ('kernel' in kw), "pass kernels through `kernels` argument" self.input_dim = input_dim - self.num_inducing = num_inducing self._debug = _debug + self.num_inducing = num_inducing self._init = True X = self._init_X(initx, likelihood_or_Y_list) Z = self._init_Z(initz, X) + self.num_inducing = Z.shape[0] # ensure M==N if M>N + self.bgplvms = [BayesianGPLVM(l, input_dim=input_dim, kernel=k, X=X, Z=Z, num_inducing=self.num_inducing, **kw) for l, k in zip(likelihood_or_Y_list, kernels)] del self._init @@ -75,6 +74,7 @@ class MRD(Model): self.nparams = nparams.cumsum() self.num_data = self.gref.num_data + self.NQ = self.num_data * self.input_dim self.MQ = self.num_inducing * self.input_dim @@ -141,17 +141,31 @@ class MRD(Model): self._init_X(initx, self.likelihood_list) self._init_Z(initz, self.X) + #def _get_latent_param_names(self): def _get_param_names(self): - # X_names = sum([['X_%i_%i' % (n, q) for q in range(self.input_dim)] for n in range(self.num_data)], []) - # S_names = sum([['X_variance_%i_%i' % (n, q) for q in range(self.input_dim)] for n in range(self.num_data)], []) n1 = self.gref._get_param_names() n1var = n1[:self.NQ * 2 + self.MQ] + # return n1var + # + #def _get_kernel_names(self): map_names = lambda ns, 
name: map(lambda x: "{1}_{0}".format(*x), itertools.izip(ns, itertools.repeat(name))) return list(itertools.chain(n1var, *(map_names(\ SparseGP._get_param_names(g)[self.MQ:], n) \ for g, n in zip(self.bgplvms, self.names)))) + # kernel_names = (map_names(SparseGP._get_param_names(g)[self.MQ:], n) for g, n in zip(self.bgplvms, self.names)) + # return kernel_names + + #def _get_param_names(self): + # X_names = sum([['X_%i_%i' % (n, q) for q in range(self.input_dim)] for n in range(self.num_data)], []) + # S_names = sum([['X_variance_%i_%i' % (n, q) for q in range(self.input_dim)] for n in range(self.num_data)], []) + # n1var = self._get_latent_param_names() + # kernel_names = self._get_kernel_names() + # return list(itertools.chain(n1var, *kernel_names)) + + #def _get_print_names(self): + # return list(itertools.chain(*self._get_kernel_names())) def _get_params(self): """ @@ -174,8 +188,8 @@ class MRD(Model): # g.Z = Z.reshape(self.num_inducing, self.input_dim) # # def _set_kern_params(self, g, p): -# g.kern._set_params(p[:g.kern.Nparam]) -# g.likelihood._set_params(p[g.kern.Nparam:]) +# g.kern._set_params(p[:g.kern.num_params]) +# g.likelihood._set_params(p[g.kern.num_params:]) def _set_params(self, x): start = 0; end = self.NQ @@ -255,17 +269,30 @@ class MRD(Model): self.Z = Z return Z - def _handle_plotting(self, fignum, axes, plotf): + def _handle_plotting(self, fignum, axes, plotf, sharex=False, sharey=False): if axes is None: - fig = pylab.figure(num=fignum, figsize=(4 * len(self.bgplvms), 3)) + fig = pylab.figure(num=fignum) + sharex_ax = None + sharey_ax = None for i, g in enumerate(self.bgplvms): + try: + if sharex: + sharex_ax = ax # @UndefinedVariable + sharex = False # dont set twice + if sharey: + sharey_ax = ax # @UndefinedVariable + sharey = False # dont set twice + except: + pass if axes is None: - ax = fig.add_subplot(1, len(self.bgplvms), i + 1) + ax = fig.add_subplot(1, len(self.bgplvms), i + 1, sharex=sharex_ax, sharey=sharey_ax) elif isinstance(axes, (tuple, list)): ax = axes[i] else: raise ValueError("Need one axes per latent dimension input_dim") plotf(i, g, ax) + if sharey_ax is not None: + pylab.setp(ax.get_yticklabels(), visible=False) pylab.draw() if axes is None: fig.tight_layout() @@ -280,16 +307,32 @@ class MRD(Model): fig = self._handle_plotting(fignum, ax, lambda i, g, ax: ax.imshow(g.X)) return fig - def plot_predict(self, fignum=None, ax=None, **kwargs): - fig = self._handle_plotting(fignum, ax, lambda i, g, ax: ax.imshow(g. predict(g.X)[0], **kwargs)) + def plot_predict(self, fignum=None, ax=None, sharex=False, sharey=False, **kwargs): + fig = self._handle_plotting(fignum, + ax, + lambda i, g, ax: ax.imshow(g. 
predict(g.X)[0], **kwargs), + sharex=sharex, sharey=sharey) return fig - def plot_scales(self, fignum=None, ax=None, *args, **kwargs): - fig = self._handle_plotting(fignum, ax, lambda i, g, ax: g.kern.plot_ARD(ax=ax, *args, **kwargs)) + def plot_scales(self, fignum=None, ax=None, titles=None, sharex=False, sharey=True, *args, **kwargs): + """ + + TODO: Explain other parameters + + :param titles: titles for axes of datasets + + """ + if titles is None: + titles = [r'${}$'.format(name) for name in self.names] + ymax = reduce(max, [numpy.ceil(max(g.input_sensitivity())) for g in self.bgplvms]) + def plotf(i, g, ax): + ax.set_ylim([0,ymax]) + g.kern.plot_ARD(ax=ax, title=titles[i], *args, **kwargs) + fig = self._handle_plotting(fignum, ax, plotf, sharex=sharex, sharey=sharey) return fig def plot_latent(self, fignum=None, ax=None, *args, **kwargs): - fig = self._handle_plotting(fignum, ax, lambda i, g, ax: g.plot_latent(ax=ax, *args, **kwargs)) + fig = self.gref.plot_latent(fignum=fignum, ax=ax, *args, **kwargs) # self._handle_plotting(fignum, ax, lambda i, g, ax: g.plot_latent(ax=ax, *args, **kwargs)) return fig def _debug_plot(self): @@ -305,13 +348,28 @@ class MRD(Model): pylab.draw() fig.tight_layout() - def _debug_optimize(self, opt='scg', maxiters=5000, itersteps=10): - iters = 0 - optstep = lambda: self.optimize(opt, messages=1, max_f_eval=itersteps) - self._debug_plot() - raw_input("enter to start debug") - while iters < maxiters: - optstep() - self._debug_plot() - iters += itersteps + def getstate(self): + return Model.getstate(self) + [self.names, + self.bgplvms, + self.gref, + self.nparams, + self.input_dim, + self.num_inducing, + self.num_data, + self.NQ, + self.MQ] + + def setstate(self, state): + self.MQ = state.pop() + self.NQ = state.pop() + self.num_data = state.pop() + self.num_inducing = state.pop() + self.input_dim = state.pop() + self.nparams = state.pop() + self.gref = state.pop() + self.bgplvms = state.pop() + self.names = state.pop() + Model.setstate(self, state) + + diff --git a/GPy/models/sparse_gp_classification.py b/GPy/models_modules/sparse_gp_classification.py similarity index 76% rename from GPy/models/sparse_gp_classification.py rename to GPy/models_modules/sparse_gp_classification.py index 9228fb89..9274aacc 100644 --- a/GPy/models/sparse_gp_classification.py +++ b/GPy/models_modules/sparse_gp_classification.py @@ -16,7 +16,7 @@ class SparseGPClassification(SparseGP): :param X: input observations :param Y: observed values - :param likelihood: a GPy likelihood, defaults to Binomial with probit link_function + :param likelihood: a GPy likelihood, defaults to Bernoulli with probit link_function :param kernel: a GPy kernel, defaults to rbf+white :param normalize_X: whether to normalize the input data before computing (predictions will be in original scales) :type normalize_X: False|True @@ -28,11 +28,11 @@ class SparseGPClassification(SparseGP): def __init__(self, X, Y=None, likelihood=None, kernel=None, normalize_X=False, normalize_Y=False, Z=None, num_inducing=10): if kernel is None: - kernel = kern.rbf(X.shape[1]) + kern.white(X.shape[1],1e-3) + kernel = kern.rbf(X.shape[1])# + kern.white(X.shape[1],1e-3) if likelihood is None: - distribution = likelihoods.likelihood_functions.Binomial() - likelihood = likelihoods.EP(Y, distribution) + noise_model = likelihoods.bernoulli() + likelihood = likelihoods.EP(Y, noise_model) elif Y is not None: if not all(Y.flatten() == likelihood.data.flatten()): raise Warning, 'likelihood.data and Y are different.' 
@@ -41,7 +41,16 @@ class SparseGPClassification(SparseGP):
             i = np.random.permutation(X.shape[0])[:num_inducing]
             Z = X[i].copy()
         else:
-            assert Z.shape[1]==X.shape[1]
+            assert Z.shape[1] == X.shape[1]

         SparseGP.__init__(self, X, likelihood, kernel, Z=Z, normalize_X=normalize_X)
         self.ensure_default_constraints()
+
+    def getstate(self):
+        return SparseGP.getstate(self)
+
+
+    def setstate(self, state):
+        return SparseGP.setstate(self, state)
+
+    pass
diff --git a/GPy/models_modules/sparse_gp_multioutput_regression.py b/GPy/models_modules/sparse_gp_multioutput_regression.py
new file mode 100644
index 00000000..d809610b
--- /dev/null
+++ b/GPy/models_modules/sparse_gp_multioutput_regression.py
@@ -0,0 +1,80 @@
+# Copyright (c) 2013, Ricardo Andrade
+# Licensed under the BSD 3-clause license (see LICENSE.txt)
+
+
+import numpy as np
+from ..core import SparseGP
+from .. import likelihoods
+from .. import kern
+from ..util import multioutput
+
+class SparseGPMultioutputRegression(SparseGP):
+    """
+    Sparse multiple output Gaussian process with Gaussian noise
+
+    This is a wrapper around the models.SparseGP class, with a set of sensible defaults
+
+    :param X_list: input observations
+    :type X_list: list of numpy arrays (num_data_output_i x input_dim), one array per output
+    :param Y_list: observed values
+    :type Y_list: list of numpy arrays (num_data_output_i x 1), one array per output
+    :param kernel_list: GPy kernels, defaults to rbf
+    :type kernel_list: list of GPy kernels
+    :param noise_variance_list: noise parameters per output, defaults to 1.0 for every output
+    :type noise_variance_list: list of floats
+    :param normalize_X: whether to normalize the input data before computing (predictions will be in original scales)
+    :type normalize_X: False|True
+    :param normalize_Y: whether to normalize the output data before computing (predictions will be in original scales)
+    :type normalize_Y: False|True
+    :param Z_list: inducing inputs (optional)
+    :type Z_list: list of numpy arrays (num_inducing_output_i x input_dim), one array per output | None
+    :param num_inducing: number of inducing inputs per output, defaults to 10 (ignored if Z_list is given)
+    :type num_inducing: integer
+    :param rank: rank of the coregionalization matrix 'coregion_W' (see the coregionalize kernel documentation)
+    :type rank: integer
+    """
+    #NOTE not tested with uncertain inputs
+    def __init__(self,X_list,Y_list,kernel_list=None,noise_variance_list=None,normalize_X=False,normalize_Y=False,Z_list=None,num_inducing=10,rank=1):
+
+        self.output_dim = len(Y_list)
+        assert len(X_list) == self.output_dim, 'Number of outputs does not match length of inputs list.'
+
+        #Inducing inputs list
+        if Z_list:
+            assert len(Z_list) == self.output_dim, 'Number of outputs does not match length of inducing inputs list.'
+        else:
+            Z_list = [] # avoid a mutable default argument; the list is built here
+            if isinstance(num_inducing,np.int):
+                num_inducing = [num_inducing] * self.output_dim
+            num_inducing = np.asarray(num_inducing)
+            assert num_inducing.size == self.output_dim, 'Number of outputs does not match length of num_inducing.'
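+            # Each output gets its own random subset of its training inputs
+            # as initial inducing points (ni points for output i).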
+ for ni,X in zip(num_inducing,X_list): + i = np.random.permutation(X.shape[0])[:ni] + Z_list.append(X[i].copy()) + + #Inputs and inducing inputs indexing + i = 0 + index = [] + index_z = [] + for x,y,z in zip(X_list,Y_list,Z_list): + assert x.shape[0] == y.shape[0] + index.append(np.repeat(i,x.size)[:,None]) + index_z.append(np.repeat(i,z.size)[:,None]) + i += 1 + index = np.vstack(index) + index_z = np.vstack(index_z) + X = np.hstack([np.vstack(X_list),index]) + Z = np.hstack([np.vstack(Z_list),index_z]) + original_dim = X.shape[1] - 1 + + #Mixed noise likelihood definition + likelihood = likelihoods.Gaussian_Mixed_Noise(Y_list,noise_params=noise_variance_list,normalize=normalize_Y) + + #Coregionalization kernel definition + if kernel_list is None: + kernel_list = [kern.rbf(original_dim)] + mkernel = kern.build_lcm(input_dim=original_dim, output_dim=self.output_dim, kernel_list = kernel_list, rank=rank) + + self.multioutput = True + SparseGP.__init__(self, X, likelihood, mkernel, Z=Z, normalize_X=normalize_X) + self.constrain_fixed('.*iip_\d+_1') + self.ensure_default_constraints() diff --git a/GPy/models/sparse_gp_regression.py b/GPy/models_modules/sparse_gp_regression.py similarity index 76% rename from GPy/models/sparse_gp_regression.py rename to GPy/models_modules/sparse_gp_regression.py index 0dcef3e0..d2e23887 100644 --- a/GPy/models/sparse_gp_regression.py +++ b/GPy/models_modules/sparse_gp_regression.py @@ -20,7 +20,11 @@ class SparseGPRegression(SparseGP): :type normalize_X: False|True :param normalize_Y: whether to normalize the input data before computing (predictions will be in original scales) :type normalize_Y: False|True + :param Z: inducing inputs (optional, see note) + :type Z: np.ndarray (num_inducing x input_dim) | None :rtype: model object + :param X_variance: The uncertainty in the measurements of X (Gaussian variance) + :type X_variance: np.ndarray (num_data x input_dim) | None .. Note:: Multiple independent outputs are allowed using columns of Y @@ -29,7 +33,7 @@ class SparseGPRegression(SparseGP): def __init__(self, X, Y, kernel=None, normalize_X=False, normalize_Y=False, Z=None, num_inducing=10, X_variance=None): # kern defaults to rbf (plus white for stability) if kernel is None: - kernel = kern.rbf(X.shape[1]) + kern.white(X.shape[1], 1e-3) + kernel = kern.rbf(X.shape[1]) # + kern.white(X.shape[1], 1e-3) # Z defaults to a subset of the data if Z is None: @@ -43,3 +47,13 @@ class SparseGPRegression(SparseGP): SparseGP.__init__(self, X, likelihood, kernel, Z=Z, normalize_X=normalize_X, X_variance=X_variance) self.ensure_default_constraints() + pass + + def getstate(self): + return SparseGP.getstate(self) + + + def setstate(self, state): + return SparseGP.setstate(self, state) + + pass diff --git a/GPy/models/sparse_gplvm.py b/GPy/models_modules/sparse_gplvm.py similarity index 85% rename from GPy/models/sparse_gplvm.py rename to GPy/models_modules/sparse_gplvm.py index d6f4adb9..4e401ee3 100644 --- a/GPy/models/sparse_gplvm.py +++ b/GPy/models_modules/sparse_gplvm.py @@ -5,8 +5,8 @@ import numpy as np import pylab as pb import sys, pdb -from GPy.models.sparse_gp_regression import SparseGPRegression -from GPy.models.gplvm import GPLVM +from sparse_gp_regression import SparseGPRegression +from gplvm import GPLVM # from .. 
import kern # from ..core import model # from ..util.linalg import pdinv, PCA @@ -28,6 +28,14 @@ class SparseGPLVM(SparseGPRegression, GPLVM): SparseGPRegression.__init__(self, X, Y, kernel=kernel, num_inducing=num_inducing) self.ensure_default_constraints() + def getstate(self): + return SparseGPRegression.getstate(self) + + + def setstate(self, state): + return SparseGPRegression.setstate(self, state) + + def _get_param_names(self): return (sum([['X_%i_%i' % (n, q) for q in range(self.input_dim)] for n in range(self.num_data)], []) + SparseGPRegression._get_param_names(self)) @@ -58,5 +66,5 @@ class SparseGPLVM(SparseGPRegression, GPLVM): pb.plot(mu[:, 0] , mu[:, 1], 'ko') def plot_latent(self, *args, **kwargs): - input_1, input_2 = GPLVM.plot_latent(*args, **kwargs) - pb.plot(m.Z[:, input_1], m.Z[:, input_2], '^w') + GPLVM.plot_latent(self, *args, **kwargs) + #pb.plot(self.Z[:, input_1], self.Z[:, input_2], '^w') diff --git a/GPy/models/svigp_regression.py b/GPy/models_modules/svigp_regression.py similarity index 85% rename from GPy/models/svigp_regression.py rename to GPy/models_modules/svigp_regression.py index 8448bf37..e826bf35 100644 --- a/GPy/models/svigp_regression.py +++ b/GPy/models_modules/svigp_regression.py @@ -25,7 +25,7 @@ class SVIGPRegression(SVIGP): """ - def __init__(self, X, Y, kernel=None, Z=None, num_inducing=10, q_u=None, batchsize=10): + def __init__(self, X, Y, kernel=None, Z=None, num_inducing=10, q_u=None, batchsize=10, normalize_Y=False): # kern defaults to rbf (plus white for stability) if kernel is None: kernel = kern.rbf(X.shape[1], variance=1., lengthscale=4.) + kern.white(X.shape[1], 1e-3) @@ -38,7 +38,15 @@ class SVIGPRegression(SVIGP): assert Z.shape[1] == X.shape[1] # likelihood defaults to Gaussian - likelihood = likelihoods.Gaussian(Y, normalize=False) + likelihood = likelihoods.Gaussian(Y, normalize=normalize_Y) SVIGP.__init__(self, X, likelihood, kernel, Z, q_u=q_u, batchsize=batchsize) self.load_batch() + + def getstate(self): + return GPBase.getstate(self) + + + def setstate(self, state): + return GPBase.setstate(self, state) + diff --git a/GPy/models/warped_gp.py b/GPy/models_modules/warped_gp.py similarity index 80% rename from GPy/models/warped_gp.py rename to GPy/models_modules/warped_gp.py index fcef66c6..260139a6 100644 --- a/GPy/models/warped_gp.py +++ b/GPy/models_modules/warped_gp.py @@ -19,7 +19,9 @@ class WarpedGP(GP): self.warping_function = TanhWarpingFunction_d(warping_terms) self.warping_params = (np.random.randn(self.warping_function.n_terms * 3 + 1,) * 1) - Y = self._scale_data(Y) + self.scale_data = False + if self.scale_data: + Y = self._scale_data(Y) self.has_uncertain_inputs = False self.Y_untransformed = Y.copy() self.predict_in_warped_space = False @@ -28,6 +30,14 @@ class WarpedGP(GP): GP.__init__(self, X, likelihood, kernel, normalize_X=normalize_X) self._set_params(self._get_params()) + def getstate(self): + return GP.getstate(self) + + + def setstate(self, state): + return GP.setstate(self, state) + + def _scale_data(self, Y): self._Ymax = Y.max() self._Ymin = Y.min() @@ -79,11 +89,19 @@ class WarpedGP(GP): def plot_warping(self): self.warping_function.plot(self.warping_params, self.Y_untransformed.min(), self.Y_untransformed.max()) - def _raw_predict(self, *args, **kwargs): - mu, var = GP._raw_predict(self, *args, **kwargs) + def predict(self, Xnew, which_parts='all', full_cov=False, pred_init=None): + # normalize X values + Xnew = (Xnew.copy() - self._Xoffset) / self._Xscale + mu, var = GP._raw_predict(self, Xnew, 
full_cov=full_cov, which_parts=which_parts)
+
+        # now push through likelihood
+        mean, var, _025pm, _975pm = self.likelihood.predictive_values(mu, var, full_cov)

         if self.predict_in_warped_space:
-            mu = self.warping_function.f_inv(mu, self.warping_params)
+            mean = self.warping_function.f_inv(mean, self.warping_params, y=pred_init)
             var = self.warping_function.f_inv(var, self.warping_params)
-            mu = self._unscale_data(mu)
-        return mu, var
+
+        if self.scale_data:
+            mean = self._unscale_data(mean)
+
+        return mean, var, _025pm, _975pm
diff --git a/GPy/notes.txt b/GPy/notes.txt
new file mode 100644
index 00000000..768701f2
--- /dev/null
+++ b/GPy/notes.txt
@@ -0,0 +1,80 @@
+Prod.py kernel could also take a list of kernels rather than two arguments for kernels.
+transformations.py should have limits on what is fed into exp(), particularly for the negative log logistic (done -neil).
+
+Load in a model with mlp kernel, plot it, change a parameter, plot it again. It doesn't update the plot.
+
+Tests for kernels which work directly on the kernel implementation (not through GP).
+
+Should stationary covariances have their own kernpart type? I think so, and inner product kernels too. That way the caching so carefully constructed for RBF or linear could be shared.
+
+Where do we declare default kernel parameters? In constructors.py or in the definition file for the kernel?
+
+When printing to stdout, can we check that our approach is also working nicely for the ipython notebook? I like the way our optimization ticks over, but at the moment this doesn't seem to work in the ipython notebook; it would be nice if it did. My problems may be due to using ipython 0.12, I've had a poke around at fixing this and I can't do it for 0.12.
+
+When we print a model should we also include information such as number of inputs and number of outputs?
+
+Let's not use N for giving the number of data in the model. When it pops up as a help tip it's not as clear as num_samples or num_data. Prefer the second, but oddly I've been using the first.
+
+Loving the fact that the * has been overloaded on the kernels (oddly never thought to check this before). Although naming can be a bit confusing. Can we think how to deal with the names in a clearer way when we use a kernel like this one:
+kern = GPy.kern.rbf(30)*(GPy.kern.mlp(30)+GPy.kern.poly(30, degree=5)) + GPy.kern.bias(30). There seems to be some tying of parameters going on ... should there be? (you can try it as the kernel for the robot wireless model).
+
+Can we comment up some of the list incomprehensions in hierarchical.py??
+
+Need to tidy up classification.py,
+many examples include help that doesn't apply
+(it is suggested that you can try different approximation types)
+
+Shall we overload the ** operator to have tensor products? (I've done this now we can see if we like it)
+
+People aren't filling the doc strings in as they go; *everyone* needs to get in the habit of this (and of modifying them as they edit, or correcting them when there is a problem).
+
+Need some nice way of explaining how to compile documentation and run the unit tests, could this be in a readme or FAQ somewhere? Maybe it's there already somewhere and I've missed it.
+
+Shouldn't EP be in the inference package (not likelihoods)?
+
+When using bfgs in ipython notebook, text appears in the original console, not in the notebook.
+
+In sparse GPs wouldn't it be clearer to call Z inducing?
+
+In the coregionalisation matrix, setting W to all ones will (surely?) ensure that symmetry isn't broken.
Also, by allowing it to scale like that, the output variance increases as the rank is increased (and if the user sets the rank to more than the output dim they could get very different results).
+
+We are inconsistent about our use of -ise and -ize, e.g. optimize and normalize_X, but coregionalise; we should choose one and stick to it. Suggest -ize. Neil- I'm imposing the US spellings to keep things consistent, so -ize it is.
+
+Exceptions: we need to provide a list of exceptions we throw and specify what is thrown where.
+
+Why is it get_params() but getstate()? Should be get_state(). Why is it get_gradient instead of get_gradients? Need to be consistent!! Doesn't matter which way we choose as long as it's consistent.
+
+In likelihood Nparams should be num_params
+
+In likelihood N should be num_data
+
+The Gaussian target in likelihood should be F. What is V doing here?
+
+Need to check for nan values in likelihoods. These should be treated as missing values. If the likelihood can't handle the missing value an error should be thrown.
+
+
+Sometimes you want to print kernpart objects, for diagnosis etc. This isn't possible currently.
+
+Why do likelihoods still have YYT everywhere, didn't we agree to set observed data to Y and latent function to F?
+
+For some reason a stub of _get_param_names(self) wasn't available in the Parameterized base class. Have put it in (is this right?)
+
+Is there a quick FAQ or something on how to build the documentation? I did it once, but can't remember! Have started a FAQ.txt file where we can add this type of information.
+
+Similar for the nosetests ... even ran them last week but can't remember the command!
+
+Now added Gaussian priors to GPLVM latent variables by default. When running the GPy.examples.dimensionality_reduction.stick() example the printout from print model has the same value for the prior+likelihood as for the prior.
+
+For the back constrained GP-LVM we need priors to be on the Xs, not on the model parameters (because they aren't parameters, they are constraints). Need to work out how to do this, perhaps by creating the full GP-LVM model and then constraining around it, rather than overriding inside the GP-LVM model.
+
+
+This code fails:
+
+kern = GPy.kern.rbf(2)
+GPy.kern.Kern_check_dK_dX(kern, X=np.random.randn(10, 2), X2=None).checkgrad(verbose=True)
+
+because X2 is now equal to X, so there is a factor of 2 missing. Does this ever come up? Yes, in the GP-LVM (gplvm.py, line 64), where it is called with a corrective factor of 2! And on line 241 of sparse_gp, where it is also called with a corrective factor of 2! The original matlab GPLVM didn't allow gradients with respect to X alone; it multiplied by 2 in the base code, but then added the diagonal across those elements. This is missing in the new code.
+
+
+In white.py, line 41, we need to check whether X and X2 refer to the same object too ... because up the pipeline somewhere someone may have set X2=X when X2 originally arrived equal to None.
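+
+A possible shape for that check (a sketch only; the kernpart signature is assumed, not checked against the real white.py):
+
+def K(self, X, X2, target):
+    if X2 is None or X2 is X:
+        # treat an aliased X2 exactly like the symmetric X2=None case
+        X2 = None
+    ...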
+ diff --git a/GPy/testing/bcgplvm_tests.py b/GPy/testing/bcgplvm_tests.py new file mode 100644 index 00000000..94282a0b --- /dev/null +++ b/GPy/testing/bcgplvm_tests.py @@ -0,0 +1,50 @@ +# Copyright (c) 2013, GPy authors (see AUTHORS.txt) +# Licensed under the BSD 3-clause license (see LICENSE.txt) + +import unittest +import numpy as np +import GPy + +class BCGPLVMTests(unittest.TestCase): + def test_kernel_backconstraint(self): + num_data, num_inducing, input_dim, output_dim = 10, 3, 2, 4 + X = np.random.rand(num_data, input_dim) + k = GPy.kern.rbf(input_dim) + GPy.kern.white(input_dim, 0.00001) + K = k.K(X) + Y = np.random.multivariate_normal(np.zeros(num_data),K,output_dim).T + k = GPy.kern.mlp(input_dim) + GPy.kern.bias(input_dim) + bk = GPy.kern.rbf(output_dim) + mapping = GPy.mappings.Kernel(output_dim=input_dim, X=Y, kernel=bk) + m = GPy.models.BCGPLVM(Y, input_dim, kernel = k, mapping=mapping) + m.randomize() + self.assertTrue(m.checkgrad()) + + def test_linear_backconstraint(self): + num_data, num_inducing, input_dim, output_dim = 10, 3, 2, 4 + X = np.random.rand(num_data, input_dim) + k = GPy.kern.rbf(input_dim) + GPy.kern.white(input_dim, 0.00001) + K = k.K(X) + Y = np.random.multivariate_normal(np.zeros(num_data),K,output_dim).T + k = GPy.kern.mlp(input_dim) + GPy.kern.bias(input_dim) + bk = GPy.kern.rbf(output_dim) + mapping = GPy.mappings.Linear(output_dim=input_dim, input_dim=output_dim) + m = GPy.models.BCGPLVM(Y, input_dim, kernel = k, mapping=mapping) + m.randomize() + self.assertTrue(m.checkgrad()) + + def test_mlp_backconstraint(self): + num_data, num_inducing, input_dim, output_dim = 10, 3, 2, 4 + X = np.random.rand(num_data, input_dim) + k = GPy.kern.rbf(input_dim) + GPy.kern.white(input_dim, 0.00001) + K = k.K(X) + Y = np.random.multivariate_normal(np.zeros(num_data),K,output_dim).T + k = GPy.kern.mlp(input_dim) + GPy.kern.bias(input_dim) + bk = GPy.kern.rbf(output_dim) + mapping = GPy.mappings.MLP(output_dim=input_dim, input_dim=output_dim, hidden_dim=[5, 4, 7]) + m = GPy.models.BCGPLVM(Y, input_dim, kernel = k, mapping=mapping) + m.randomize() + self.assertTrue(m.checkgrad()) + +if __name__ == "__main__": + print "Running unit tests, please be (very) patient..." 
+ unittest.main() diff --git a/GPy/testing/bgplvm_tests.py b/GPy/testing/bgplvm_tests.py index 6b91d999..1192448a 100644 --- a/GPy/testing/bgplvm_tests.py +++ b/GPy/testing/bgplvm_tests.py @@ -4,7 +4,7 @@ import unittest import numpy as np import GPy -from GPy.models.bayesian_gplvm import BayesianGPLVM +from ..models import BayesianGPLVM class BGPLVMTests(unittest.TestCase): def test_bias_kern(self): @@ -55,7 +55,18 @@ class BGPLVMTests(unittest.TestCase): m.randomize() self.assertTrue(m.checkgrad()) - #@unittest.skip('psi2 cross terms are NotImplemented for this combination') + def test_rbf_line_kern(self): + N, num_inducing, input_dim, D = 10, 3, 2, 4 + X = np.random.rand(N, input_dim) + k = GPy.kern.rbf(input_dim) + GPy.kern.linear(input_dim) + GPy.kern.white(input_dim, 0.00001) + K = k.K(X) + Y = np.random.multivariate_normal(np.zeros(N),K,input_dim).T + Y -= Y.mean(axis=0) + k = GPy.kern.rbf(input_dim) + GPy.kern.bias(input_dim) + GPy.kern.white(input_dim, 0.00001) + m = BayesianGPLVM(Y, input_dim, kernel=k, num_inducing=num_inducing) + m.randomize() + self.assertTrue(m.checkgrad()) + def test_linear_bias_kern(self): N, num_inducing, input_dim, D = 30, 5, 4, 30 X = np.random.rand(N, input_dim) diff --git a/GPy/testing/cgd_tests.py b/GPy/testing/cgd_tests.py index d999c6fc..82041c9f 100644 --- a/GPy/testing/cgd_tests.py +++ b/GPy/testing/cgd_tests.py @@ -7,7 +7,6 @@ import unittest import numpy from GPy.inference.conjugate_gradient_descent import CGD, RUNNING import pylab -import time from scipy.optimize.optimize import rosen, rosen_der from GPy.inference.gradient_descent_update_rules import PolakRibiere diff --git a/GPy/testing/examples_tests.py b/GPy/testing/examples_tests.py index ec030055..9998590a 100644 --- a/GPy/testing/examples_tests.py +++ b/GPy/testing/examples_tests.py @@ -10,6 +10,7 @@ import os import random from nose.tools import nottest import sys +import itertools class ExamplesTests(unittest.TestCase): def _checkgrad(self, Model): @@ -19,14 +20,14 @@ class ExamplesTests(unittest.TestCase): self.assertTrue(isinstance(Model, GPy.models)) """ -def model_instance_generator(Model): +def model_instance_generator(model): def check_model_returned(self): - self._model_instance(Model) + self._model_instance(model) return check_model_returned -def checkgrads_generator(Model): +def checkgrads_generator(model): def model_checkgrads(self): - self._checkgrad(Model) + self._checkgrad(model) return model_checkgrads """ @@ -37,10 +38,21 @@ def model_checkgrads(model): def model_instance(model): #assert isinstance(model, GPy.core.model) - return isinstance(model, GPy.core.Model) + return isinstance(model, GPy.core.model.Model) -@nottest +def flatten_nested(lst): + result = [] + for element in lst: + if hasattr(element, '__iter__'): + result.extend(flatten_nested(element)) + else: + result.append(element) + return result + +#@nottest def test_models(): + optimize=False + plot=True examples_path = os.path.dirname(GPy.examples.__file__) # Load modules failing_models = {} @@ -54,29 +66,34 @@ def test_models(): print "After" print functions for example in functions: - if example[0] in ['oil', 'silhouette', 'GPLVM_oil_100']: - print "SKIPPING" - continue + #if example[0] in ['oil', 'silhouette', 'GPLVM_oil_100', 'brendan_faces']: + #print "SKIPPING" + #continue print "Testing example: ", example[0] # Generate model + try: - model = example[1]() + models = [ example[1](optimize=optimize, plot=plot) ] + #If more than one model returned, flatten them + models = flatten_nested(models) except 
Exception as e: failing_models[example[0]] = "Cannot make model: \n{e}".format(e=e) else: - print model + print models model_checkgrads.description = 'test_checkgrads_%s' % example[0] try: - if not model_checkgrads(model): - failing_models[model_checkgrads.description] = False + for model in models: + if not model_checkgrads(model): + failing_models[model_checkgrads.description] = False except Exception as e: failing_models[model_checkgrads.description] = e model_instance.description = 'test_instance_%s' % example[0] try: - if not model_instance(model): - failing_models[model_instance.description] = False + for model in models: + if not model_instance(model): + failing_models[model_instance.description] = False except Exception as e: failing_models[model_instance.description] = e diff --git a/GPy/testing/gp_transformation_tests.py b/GPy/testing/gp_transformation_tests.py new file mode 100644 index 00000000..42c0414b --- /dev/null +++ b/GPy/testing/gp_transformation_tests.py @@ -0,0 +1,61 @@ +from nose.tools import with_setup +from GPy.models import GradientChecker +from GPy.likelihoods.noise_models import gp_transformations +import inspect +import unittest +import numpy as np + +class TestTransformations(object): + """ + Generic transformations checker + """ + def setUp(self): + N = 30 + self.fs = [np.random.rand(N, 1), float(np.random.rand(1))] + + + def tearDown(self): + self.fs = None + + def test_transformations(self): + self.setUp() + transformations = [gp_transformations.Identity(), + gp_transformations.Log(), + gp_transformations.Probit(), + gp_transformations.Log_ex_1(), + gp_transformations.Reciprocal(), + ] + + for transformation in transformations: + for f in self.fs: + yield self.t_dtransf_df, transformation, f + yield self.t_d2transf_df2, transformation, f + yield self.t_d3transf_df3, transformation, f + + @with_setup(setUp, tearDown) + def t_dtransf_df(self, transformation, f): + print "\n{}".format(inspect.stack()[0][3]) + grad = GradientChecker(transformation.transf, transformation.dtransf_df, f, 'f') + grad.randomize() + grad.checkgrad(verbose=1) + assert grad.checkgrad() + + @with_setup(setUp, tearDown) + def t_d2transf_df2(self, transformation, f): + print "\n{}".format(inspect.stack()[0][3]) + grad = GradientChecker(transformation.dtransf_df, transformation.d2transf_df2, f, 'f') + grad.randomize() + grad.checkgrad(verbose=1) + assert grad.checkgrad() + + @with_setup(setUp, tearDown) + def t_d3transf_df3(self, transformation, f): + print "\n{}".format(inspect.stack()[0][3]) + grad = GradientChecker(transformation.d2transf_df2, transformation.d3transf_df3, f, 'f') + grad.randomize() + grad.checkgrad(verbose=1) + assert grad.checkgrad() + +#if __name__ == "__main__": + #print "Running unit tests" + #unittest.main() diff --git a/GPy/testing/gplvm_tests.py b/GPy/testing/gplvm_tests.py index ebb5c4e5..6223d833 100644 --- a/GPy/testing/gplvm_tests.py +++ b/GPy/testing/gplvm_tests.py @@ -7,33 +7,33 @@ import GPy class GPLVMTests(unittest.TestCase): def test_bias_kern(self): - N, num_inducing, input_dim, D = 10, 3, 2, 4 - X = np.random.rand(N, input_dim) + num_data, num_inducing, input_dim, output_dim = 10, 3, 2, 4 + X = np.random.rand(num_data, input_dim) k = GPy.kern.rbf(input_dim) + GPy.kern.white(input_dim, 0.00001) K = k.K(X) - Y = np.random.multivariate_normal(np.zeros(N),K,input_dim).T + Y = np.random.multivariate_normal(np.zeros(num_data),K,output_dim).T k = GPy.kern.bias(input_dim) + GPy.kern.white(input_dim, 0.00001) m = GPy.models.GPLVM(Y, input_dim, kernel = k) 
m.randomize() self.assertTrue(m.checkgrad()) def test_linear_kern(self): - N, num_inducing, input_dim, D = 10, 3, 2, 4 - X = np.random.rand(N, input_dim) + num_data, num_inducing, input_dim, output_dim = 10, 3, 2, 4 + X = np.random.rand(num_data, input_dim) k = GPy.kern.rbf(input_dim) + GPy.kern.white(input_dim, 0.00001) K = k.K(X) - Y = np.random.multivariate_normal(np.zeros(N),K,input_dim).T + Y = np.random.multivariate_normal(np.zeros(num_data),K,output_dim).T k = GPy.kern.linear(input_dim) + GPy.kern.white(input_dim, 0.00001) m = GPy.models.GPLVM(Y, input_dim, kernel = k) m.randomize() self.assertTrue(m.checkgrad()) def test_rbf_kern(self): - N, num_inducing, input_dim, D = 10, 3, 2, 4 - X = np.random.rand(N, input_dim) + num_data, num_inducing, input_dim, output_dim = 10, 3, 2, 4 + X = np.random.rand(num_data, input_dim) k = GPy.kern.rbf(input_dim) + GPy.kern.white(input_dim, 0.00001) K = k.K(X) - Y = np.random.multivariate_normal(np.zeros(N),K,input_dim).T + Y = np.random.multivariate_normal(np.zeros(num_data),K,output_dim).T k = GPy.kern.rbf(input_dim) + GPy.kern.white(input_dim, 0.00001) m = GPy.models.GPLVM(Y, input_dim, kernel = k) m.randomize() diff --git a/GPy/testing/kernel_tests.py b/GPy/testing/kernel_tests.py index 98c75827..0fceac60 100644 --- a/GPy/testing/kernel_tests.py +++ b/GPy/testing/kernel_tests.py @@ -1,10 +1,19 @@ -# Copyright (c) 2012, GPy authors (see AUTHORS.txt). +# Copyright (c) 2012, 2013 GPy authors (see AUTHORS.txt). # Licensed under the BSD 3-clause license (see LICENSE.txt) import unittest import numpy as np import GPy +verbose = False + +try: + import sympy + SYMPY_AVAILABLE=True +except ImportError: + SYMPY_AVAILABLE=False + + class KernelTests(unittest.TestCase): def test_kerneltie(self): K = GPy.kern.rbf(5, ARD=True) @@ -15,33 +24,96 @@ class KernelTests(unittest.TestCase): m = GPy.models.GPRegression(X,Y,K) self.assertTrue(m.checkgrad()) + def test_rbfkernel(self): + kern = GPy.kern.rbf(5) + self.assertTrue(GPy.kern.kern_test(kern, verbose=verbose)) + + def test_rbf_sympykernel(self): + if SYMPY_AVAILABLE: + kern = GPy.kern.rbf_sympy(5) + self.assertTrue(GPy.kern.kern_test(kern, verbose=verbose)) + + def test_eq_sympykernel(self): + if SYMPY_AVAILABLE: + kern = GPy.kern.eq_sympy(5, 3) + self.assertTrue(GPy.kern.kern_test(kern, output_ind=4, verbose=verbose)) + + def test_ode1_eqkernel(self): + if SYMPY_AVAILABLE: + kern = GPy.kern.ode1_eq(3) + self.assertTrue(GPy.kern.kern_test(kern, output_ind=1, verbose=verbose, X_positive=True)) + + def test_rbf_invkernel(self): + kern = GPy.kern.rbf_inv(5) + self.assertTrue(GPy.kern.kern_test(kern, verbose=verbose)) + + def test_Matern32kernel(self): + kern = GPy.kern.Matern32(5) + self.assertTrue(GPy.kern.kern_test(kern, verbose=verbose)) + + def test_Matern52kernel(self): + kern = GPy.kern.Matern52(5) + self.assertTrue(GPy.kern.kern_test(kern, verbose=verbose)) + + def test_linearkernel(self): + kern = GPy.kern.linear(5) + self.assertTrue(GPy.kern.kern_test(kern, verbose=verbose)) + + def test_periodic_exponentialkernel(self): + kern = GPy.kern.periodic_exponential(1) + self.assertTrue(GPy.kern.kern_test(kern, verbose=verbose)) + + def test_periodic_Matern32kernel(self): + kern = GPy.kern.periodic_Matern32(1) + self.assertTrue(GPy.kern.kern_test(kern, verbose=verbose)) + + def test_periodic_Matern52kernel(self): + kern = GPy.kern.periodic_Matern52(1) + self.assertTrue(GPy.kern.kern_test(kern, verbose=verbose)) + + def test_rational_quadratickernel(self): + kern = GPy.kern.rational_quadratic(1) + 
self.assertTrue(GPy.kern.kern_test(kern, verbose=verbose))
+
+    def test_gibbskernel(self):
+        kern = GPy.kern.gibbs(5, mapping=GPy.mappings.Linear(5, 1))
+        self.assertTrue(GPy.kern.kern_test(kern, verbose=verbose))
+
+    def test_heterokernel(self):
+        kern = GPy.kern.hetero(5, mapping=GPy.mappings.Linear(5, 1), transform=GPy.core.transformations.logexp())
+        self.assertTrue(GPy.kern.kern_test(kern, verbose=verbose))
+
+    def test_mlpkernel(self):
+        kern = GPy.kern.mlp(5)
+        self.assertTrue(GPy.kern.kern_test(kern, verbose=verbose))
+
+    def test_polykernel(self):
+        kern = GPy.kern.poly(5, degree=4)
+        self.assertTrue(GPy.kern.kern_test(kern, verbose=verbose))
+
     def test_fixedkernel(self):
         """
         Fixed effect kernel test
         """
         X = np.random.rand(30, 4)
         K = np.dot(X, X.T)
-        kernel = GPy.kern.Fixed(4, K)
-        Y = np.ones((30,1))
-        m = GPy.models.GPRegression(X,Y,kernel=kernel)
-        self.assertTrue(m.checkgrad())
-
-    def test_coregionalisation(self):
-        X1 = np.random.rand(50,1)*8
-        X2 = np.random.rand(30,1)*5
-        index = np.vstack((np.zeros_like(X1),np.ones_like(X2)))
-        X = np.hstack((np.vstack((X1,X2)),index))
-        Y1 = np.sin(X1) + np.random.randn(*X1.shape)*0.05
-        Y2 = np.sin(X2) + np.random.randn(*X2.shape)*0.05 + 2.
-        Y = np.vstack((Y1,Y2))
-
-        k1 = GPy.kern.rbf(1) + GPy.kern.bias(1)
-        k2 = GPy.kern.Coregionalise(2,1)
-        k = k1.prod(k2,tensor=True)
-        m = GPy.models.GPRegression(X,Y,kernel=k)
-        self.assertTrue(m.checkgrad())
+        kern = GPy.kern.fixed(4, K)
+        self.assertTrue(GPy.kern.kern_test(kern, verbose=verbose))

+    # def test_coregionalization(self):
+    #     X1 = np.random.rand(50,1)*8
+    #     X2 = np.random.rand(30,1)*5
+    #     index = np.vstack((np.zeros_like(X1),np.ones_like(X2)))
+    #     X = np.hstack((np.vstack((X1,X2)),index))
+    #     Y1 = np.sin(X1) + np.random.randn(*X1.shape)*0.05
+    #     Y2 = np.sin(X2) + np.random.randn(*X2.shape)*0.05 + 2.
+    #     Y = np.vstack((Y1,Y2))
+    #     k1 = GPy.kern.rbf(1) + GPy.kern.bias(1)
+    #     k2 = GPy.kern.coregionalize(2,1)
+    #     kern = k1**k2
+    #     self.assertTrue(GPy.kern.kern_test(kern, verbose=verbose))

 if __name__ == "__main__":
diff --git a/GPy/testing/likelihoods_tests.py b/GPy/testing/likelihoods_tests.py
new file mode 100644
index 00000000..58c9a64b
--- /dev/null
+++ b/GPy/testing/likelihoods_tests.py
@@ -0,0 +1,687 @@
+import numpy as np
+import unittest
+import GPy
+from GPy.models import GradientChecker
+import functools
+import inspect
+from GPy.likelihoods.noise_models import gp_transformations
+from functools import partial
+#np.random.seed(300)
+np.random.seed(7)
+
+def dparam_partial(inst_func, *args):
+    """
+    If we have an instance method that needs to be called, but that doesn't
+    take the parameter we wish to vary for checkgrad, then this function
+    will change the variable using _set_params.
+
+    inst_func: should be an instance function of an object that we would like
+               to change
+    param: the param that will be given to set_params
+    args: anything else that needs to be given to the function (for example
+          the f or Y that are being used in the function whilst we tweak the
+          param)
+    """
+    def param_func(param, inst_func, args):
+        inst_func.im_self._set_params(param)
+        return inst_func(*args)
+    return functools.partial(param_func, inst_func=inst_func, args=args)
+
+def dparam_checkgrad(func, dfunc, params, args, constraints=None, randomize=False, verbose=False):
+    """
+    checkgrad expects an f: R^N -> R^1 and df: R^N -> R^N.
+    However, if we are holding other parameters fixed and moving something else,
+    we need to check the gradient of each of the fixed parameters
+    (f and y for example) separately, whilst moving another parameter.
+    Otherwise f gives back R^N and df gives back R^NxM, where M is
+    the number of parameters and N is the number of data.
+    We need to take a slice out from f and a slice out of df.
+    """
+    #print "\n{} likelihood: {} vs {}".format(func.im_self.__class__.__name__,
+    #func.__name__, dfunc.__name__)
+    partial_f = dparam_partial(func, *args)
+    partial_df = dparam_partial(dfunc, *args)
+    gradchecking = True
+    for param in params:
+        fnum = np.atleast_1d(partial_f(param)).shape[0]
+        dfnum = np.atleast_1d(partial_df(param)).shape[0]
+        for fixed_val in range(dfnum):
+            #dlik and dlik_dvar give back 1 value for each
+            f_ind = min(fnum, fixed_val+1) - 1
+            print "fnum: {} dfnum: {} f_ind: {} fixed_val: {}".format(fnum, dfnum, f_ind, fixed_val)
+            #Make grad checker with this param moving, note that set_params is NOT being called
+            #The parameter is being set directly with __setattr__
+            grad = GradientChecker(lambda x: np.atleast_1d(partial_f(x))[f_ind],
+                                   lambda x: np.atleast_1d(partial_df(x))[fixed_val],
+                                   param, 'p')
+            #This is not general for more than one param...
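+            #e.g. with fnum == 1 and dfnum == 3, f_ind stays at 0 while fixed_val
+            #walks the three gradient slices, so the single objective value is
+            #checked against each derivative component in turn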
+
+            if constraints is not None:
+                for constraint in constraints:
+                    constraint('p', grad)
+            if randomize:
+                grad.randomize()
+            if verbose:
+                print grad
+                grad.checkgrad(verbose=1)
+            if not grad.checkgrad():
+                gradchecking = False
+
+    return gradchecking
+
+
+from nose.tools import with_setup
+class TestNoiseModels(object):
+    """
+    Generic model checker
+    """
+    def setUp(self):
+        self.N = 5
+        self.D = 3
+        self.X = np.random.rand(self.N, self.D)*10
+
+        self.real_std = 0.1
+        noise = np.random.randn(*self.X[:, 0].shape)*self.real_std
+        self.Y = (np.sin(self.X[:, 0]*2*np.pi) + noise)[:, None]
+        self.f = np.random.rand(self.N, 1)
+        self.binary_Y = np.asarray(np.random.rand(self.N) > 0.5, dtype=np.int)[:, None]
+        self.positive_Y = np.exp(self.Y.copy())
+        tmp = np.round(self.X[:, 0]*3-3)[:, None] + np.random.randint(0,3, self.X.shape[0])[:, None]
+        self.integer_Y = np.where(tmp > 0, tmp, 0)
+
+        self.var = 0.2
+
+        self.var = np.random.rand(1)
+
+        #Make a bigger step as lower bound can be quite curved
+        self.step = 1e-3
+
+    def tearDown(self):
+        self.Y = None
+        self.f = None
+        self.X = None
+
+    def test_noise_models(self):
+        self.setUp()
+
+        ####################################################
+        # Constraint wrappers so we can just list them off #
+        ####################################################
+        def constrain_negative(regex, model):
+            model.constrain_negative(regex)
+
+        def constrain_positive(regex, model):
+            model.constrain_positive(regex)
+
+        def constrain_bounded(regex, model, lower, upper):
+            """
+            Used like: partial(constrain_bounded, lower=0, upper=1)
+            """
+            model.constrain_bounded(regex, lower, upper)
+
+        """
+        Dictionary where we nest models we would like to check
+            Name: {
+                "model": model_instance,
+                "grad_params": {
+                    "names": [names_of_params_we_want, to_grad_check],
+                    "vals": [values_of_params, to_start_at],
+                    "constrain": [constraint_wrappers, listed_here]
+                },
+                "laplace": boolean_of_whether_model_should_work_for_laplace,
+                "ep": boolean_of_whether_model_should_work_for_ep,
+                "link_f_constraints": [constraint_wrappers, listed_here]
+            }
+        """
+        noise_models = {"Student_t_default": {
+                            "model": GPy.likelihoods.student_t(deg_free=5, sigma2=self.var),
+                            "grad_params": {
+                                "names": ["t_noise"],
+                                "vals": [self.var],
+                                "constraints": [constrain_positive]
+                            },
+                            "laplace": True
+                        },
+                        "Student_t_1_var": {
+                            "model": GPy.likelihoods.student_t(deg_free=5, sigma2=self.var),
+                            "grad_params": {
+                                "names": ["t_noise"],
+                                "vals": [1.0],
+                                "constraints": [constrain_positive]
+                            },
+                            "laplace": True
+                        },
+                        "Student_t_small_var": {
+                            "model": GPy.likelihoods.student_t(deg_free=5, sigma2=self.var),
+                            "grad_params": {
+                                "names": ["t_noise"],
+                                "vals": [0.01],
+                                "constraints": [constrain_positive]
+                            },
+                            "laplace": True
+                        },
+                        "Student_t_large_var": {
+                            "model": GPy.likelihoods.student_t(deg_free=5, sigma2=self.var),
+                            "grad_params": {
+                                "names": ["t_noise"],
+                                "vals": [10.0],
+                                "constraints": [constrain_positive]
+                            },
+                            "laplace": True
+                        },
+                        "Student_t_approx_gauss": {
+                            "model": GPy.likelihoods.student_t(deg_free=1000, sigma2=self.var),
+                            "grad_params": {
+                                "names": ["t_noise"],
+                                "vals": [self.var],
+                                "constraints": [constrain_positive]
+                            },
+                            "laplace": True
+                        },
+                        "Student_t_log": {
+                            "model": GPy.likelihoods.student_t(gp_link=gp_transformations.Log(), deg_free=5, sigma2=self.var),
+                            "grad_params": {
+                                "names": ["t_noise"],
+                                "vals": [self.var],
+                                "constraints": [constrain_positive]
+                            },
+                            "laplace": True
+                        },
+                        "Gaussian_default": {
+                            "model": GPy.likelihoods.gaussian(variance=self.var, D=self.D, N=self.N),
+                            "grad_params": {
+                                "names": ["noise_model_variance"],
+                                "vals": [self.var],
+                                "constraints": [constrain_positive]
+                            },
+                            "laplace": True,
+                            "ep": True
+                        },
+                        #"Gaussian_log": {
+                            #"model": GPy.likelihoods.gaussian(gp_link=gp_transformations.Log(), variance=self.var, D=self.D, N=self.N),
+                            #"grad_params": {
+                                #"names": ["noise_model_variance"],
+                                #"vals": [self.var],
+                                #"constraints": [constrain_positive]
+                            #},
+                            #"laplace": True
+                        #},
+                        #"Gaussian_probit": {
+                            #"model": GPy.likelihoods.gaussian(gp_link=gp_transformations.Probit(), variance=self.var, D=self.D, N=self.N),
+                            #"grad_params": {
+                                #"names": ["noise_model_variance"],
+                                #"vals": [self.var],
+                                #"constraints": [constrain_positive]
+                            #},
+                            #"laplace": True
+                        #},
+                        #"Gaussian_log_ex": {
+                            #"model": GPy.likelihoods.gaussian(gp_link=gp_transformations.Log_ex_1(), variance=self.var, D=self.D, N=self.N),
+                            #"grad_params": {
+                                #"names": ["noise_model_variance"],
+                                #"vals": [self.var],
+                                #"constraints": [constrain_positive]
+                            #},
+                            #"laplace": True
+                        #},
+                        "Bernoulli_default": {
+                            "model": GPy.likelihoods.bernoulli(),
+                            "link_f_constraints": [partial(constrain_bounded, lower=0, upper=1)],
+                            "laplace": True,
+                            "Y": self.binary_Y,
+                            "ep": True
+                        },
+                        "Exponential_default": {
+                            "model": GPy.likelihoods.exponential(),
+                            "link_f_constraints": [constrain_positive],
+                            "Y": self.positive_Y,
+                            "laplace": True,
+                        },
+                        "Poisson_default": {
+                            "model": GPy.likelihoods.poisson(),
+                            "link_f_constraints": [constrain_positive],
+                            "Y": self.integer_Y,
+                            "laplace": True,
+                            "ep": False #Should work though...
+                        },
+                        "Gamma_default": {
+                            "model": GPy.likelihoods.gamma(),
+                            "link_f_constraints": [constrain_positive],
+                            "Y": self.positive_Y,
+                            "laplace": True
+                        }
+                    }
+
+        for name, attributes in noise_models.iteritems():
+            model = attributes["model"]
+            if "grad_params" in attributes:
+                params = attributes["grad_params"]
+                param_vals = params["vals"]
+                param_names = params["names"]
+                param_constraints = params["constraints"]
+            else:
+                params = []
+                param_vals = []
+                param_names = []
+                constrain_positive = []
+                param_constraints = [] # ??? TODO: Saul to Fix.
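+                #models without "grad_params" fall through with empty lists here,
+                #so the dparam_* checks below have nothing to move and only the
+                #logpdf/link derivative tests really exercise them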
+
+            if "link_f_constraints" in attributes:
+                link_f_constraints = attributes["link_f_constraints"]
+            else:
+                link_f_constraints = []
+            if "Y" in attributes:
+                Y = attributes["Y"].copy()
+            else:
+                Y = self.Y.copy()
+            if "f" in attributes:
+                f = attributes["f"].copy()
+            else:
+                f = self.f.copy()
+            if "laplace" in attributes:
+                laplace = attributes["laplace"]
+            else:
+                laplace = False
+            if "ep" in attributes:
+                ep = attributes["ep"]
+            else:
+                ep = False
+
+            if len(param_vals) > 1:
+                raise NotImplementedError("Cannot support multiple params in likelihood yet!")
+
+            #Required by all
+            #Normal derivatives
+            yield self.t_logpdf, model, Y, f
+            yield self.t_dlogpdf_df, model, Y, f
+            yield self.t_d2logpdf_df2, model, Y, f
+            #Link derivatives
+            yield self.t_dlogpdf_dlink, model, Y, f, link_f_constraints
+            yield self.t_d2logpdf_dlink2, model, Y, f, link_f_constraints
+            if laplace:
+                #Laplace only derivatives
+                yield self.t_d3logpdf_df3, model, Y, f
+                yield self.t_d3logpdf_dlink3, model, Y, f, link_f_constraints
+                #Params
+                yield self.t_dlogpdf_dparams, model, Y, f, param_vals, param_constraints
+                yield self.t_dlogpdf_df_dparams, model, Y, f, param_vals, param_constraints
+                yield self.t_d2logpdf2_df2_dparams, model, Y, f, param_vals, param_constraints
+                #Link params
+                yield self.t_dlogpdf_link_dparams, model, Y, f, param_vals, param_constraints
+                yield self.t_dlogpdf_dlink_dparams, model, Y, f, param_vals, param_constraints
+                yield self.t_d2logpdf2_dlink2_dparams, model, Y, f, param_vals, param_constraints
+
+                #laplace likelihood gradcheck
+                yield self.t_laplace_fit_rbf_white, model, self.X, Y, f, self.step, param_vals, param_names, param_constraints
+            if ep:
+                #ep likelihood gradcheck
+                yield self.t_ep_fit_rbf_white, model, self.X, Y, f, self.step, param_vals, param_names, param_constraints
+
+
+        self.tearDown()
+
+    #############
+    # dpdf_df's #
+    #############
+    @with_setup(setUp, tearDown)
+    def t_logpdf(self, model, Y, f):
+        print "\n{}".format(inspect.stack()[0][3])
+        print model
+        print model._get_params()
+        np.testing.assert_almost_equal(
+                model.pdf(f.copy(), Y.copy()),
+                np.exp(model.logpdf(f.copy(), Y.copy()))
+                )
+
+    @with_setup(setUp, tearDown)
+    def t_dlogpdf_df(self, model, Y, f):
+        print "\n{}".format(inspect.stack()[0][3])
+        self.description = "\n{}".format(inspect.stack()[0][3])
+        logpdf = functools.partial(model.logpdf, y=Y)
+        dlogpdf_df = functools.partial(model.dlogpdf_df, y=Y)
+        grad = GradientChecker(logpdf, dlogpdf_df, f.copy(), 'g')
+        grad.randomize()
+        grad.checkgrad(verbose=1)
+        print model
+        assert grad.checkgrad()
+
+    @with_setup(setUp, tearDown)
+    def t_d2logpdf_df2(self, model, Y, f):
+        print "\n{}".format(inspect.stack()[0][3])
+        dlogpdf_df = functools.partial(model.dlogpdf_df, y=Y)
+        d2logpdf_df2 = functools.partial(model.d2logpdf_df2, y=Y)
+        grad = GradientChecker(dlogpdf_df, d2logpdf_df2, f.copy(), 'g')
+        grad.randomize()
+        grad.checkgrad(verbose=1)
+        print model
+        assert grad.checkgrad()
+
+    @with_setup(setUp, tearDown)
+    def t_d3logpdf_df3(self, model, Y, f):
+        print "\n{}".format(inspect.stack()[0][3])
+        d2logpdf_df2 = functools.partial(model.d2logpdf_df2, y=Y)
+        d3logpdf_df3 = functools.partial(model.d3logpdf_df3, y=Y)
+        grad = GradientChecker(d2logpdf_df2, d3logpdf_df3, f.copy(), 'g')
+        grad.randomize()
+        grad.checkgrad(verbose=1)
+        print model
+        assert grad.checkgrad()
+
+    ##############
+    # df_dparams #
+    ##############
+    @with_setup(setUp, tearDown)
+    def t_dlogpdf_dparams(self, model, Y, f, params, param_constraints):
+        print "\n{}".format(inspect.stack()[0][3])
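+        #moves each likelihood parameter (e.g. t_noise) with f and Y held fixed,
+        #comparing the analytic dlogpdf_dtheta against numerical differences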
+
+        print model
+        assert (
+            dparam_checkgrad(model.logpdf, model.dlogpdf_dtheta,
+                             params, args=(f, Y), constraints=param_constraints,
+                             randomize=True, verbose=True)
+        )
+
+    @with_setup(setUp, tearDown)
+    def t_dlogpdf_df_dparams(self, model, Y, f, params, param_constraints):
+        print "\n{}".format(inspect.stack()[0][3])
+        print model
+        assert (
+            dparam_checkgrad(model.dlogpdf_df, model.dlogpdf_df_dtheta,
+                             params, args=(f, Y), constraints=param_constraints,
+                             randomize=True, verbose=True)
+        )
+
+    @with_setup(setUp, tearDown)
+    def t_d2logpdf2_df2_dparams(self, model, Y, f, params, param_constraints):
+        print "\n{}".format(inspect.stack()[0][3])
+        print model
+        assert (
+            dparam_checkgrad(model.d2logpdf_df2, model.d2logpdf_df2_dtheta,
+                             params, args=(f, Y), constraints=param_constraints,
+                             randomize=True, verbose=True)
+        )
+
+    ################
+    # dpdf_dlink's #
+    ################
+    @with_setup(setUp, tearDown)
+    def t_dlogpdf_dlink(self, model, Y, f, link_f_constraints):
+        print "\n{}".format(inspect.stack()[0][3])
+        logpdf = functools.partial(model.logpdf_link, y=Y)
+        dlogpdf_dlink = functools.partial(model.dlogpdf_dlink, y=Y)
+        grad = GradientChecker(logpdf, dlogpdf_dlink, f.copy(), 'g')
+
+        #Apply constraints to link_f values
+        for constraint in link_f_constraints:
+            constraint('g', grad)
+
+        grad.randomize()
+        print grad
+        grad.checkgrad(verbose=1)
+        assert grad.checkgrad()
+
+    @with_setup(setUp, tearDown)
+    def t_d2logpdf_dlink2(self, model, Y, f, link_f_constraints):
+        print "\n{}".format(inspect.stack()[0][3])
+        dlogpdf_dlink = functools.partial(model.dlogpdf_dlink, y=Y)
+        d2logpdf_dlink2 = functools.partial(model.d2logpdf_dlink2, y=Y)
+        grad = GradientChecker(dlogpdf_dlink, d2logpdf_dlink2, f.copy(), 'g')
+
+        #Apply constraints to link_f values
+        for constraint in link_f_constraints:
+            constraint('g', grad)
+
+        grad.randomize()
+        grad.checkgrad(verbose=1)
+        print grad
+        assert grad.checkgrad()
+
+    @with_setup(setUp, tearDown)
+    def t_d3logpdf_dlink3(self, model, Y, f, link_f_constraints):
+        print "\n{}".format(inspect.stack()[0][3])
+        d2logpdf_dlink2 = functools.partial(model.d2logpdf_dlink2, y=Y)
+        d3logpdf_dlink3 = functools.partial(model.d3logpdf_dlink3, y=Y)
+        grad = GradientChecker(d2logpdf_dlink2, d3logpdf_dlink3, f.copy(), 'g')
+
+        #Apply constraints to link_f values
+        for constraint in link_f_constraints:
+            constraint('g', grad)
+
+        grad.randomize()
+        grad.checkgrad(verbose=1)
+        print grad
+        assert grad.checkgrad()
+
+    #################
+    # dlink_dparams #
+    #################
+    @with_setup(setUp, tearDown)
+    def t_dlogpdf_link_dparams(self, model, Y, f, params, param_constraints):
+        print "\n{}".format(inspect.stack()[0][3])
+        print model
+        assert (
+            dparam_checkgrad(model.logpdf_link, model.dlogpdf_link_dtheta,
+                             params, args=(f, Y), constraints=param_constraints,
+                             randomize=False, verbose=True)
+        )
+
+    @with_setup(setUp, tearDown)
+    def t_dlogpdf_dlink_dparams(self, model, Y, f, params, param_constraints):
+        print "\n{}".format(inspect.stack()[0][3])
+        print model
+        assert (
+            dparam_checkgrad(model.dlogpdf_dlink, model.dlogpdf_dlink_dtheta,
+                             params, args=(f, Y), constraints=param_constraints,
+                             randomize=False, verbose=True)
+        )
+
+    @with_setup(setUp, tearDown)
+    def t_d2logpdf2_dlink2_dparams(self, model, Y, f, params, param_constraints):
+        print "\n{}".format(inspect.stack()[0][3])
+        print model
+        assert (
+            dparam_checkgrad(model.d2logpdf_dlink2, model.d2logpdf_dlink2_dtheta,
+                             params, args=(f, Y), constraints=param_constraints,
+                             randomize=False, verbose=True)
+        )
+
+    ################
+    # laplace test #
+    ################
+    @with_setup(setUp, tearDown)
+    def t_laplace_fit_rbf_white(self, model, X, Y, f, step, param_vals, param_names, constraints):
+        print "\n{}".format(inspect.stack()[0][3])
+        #Normalize
+        Y = Y/Y.max()
+        white_var = 1e-6
+        kernel = GPy.kern.rbf(X.shape[1]) + GPy.kern.white(X.shape[1])
+        laplace_likelihood = GPy.likelihoods.Laplace(Y.copy(), model)
+        m = GPy.models.GPRegression(X.copy(), Y.copy(), kernel, likelihood=laplace_likelihood)
+        m.ensure_default_constraints()
+        m.constrain_fixed('white', white_var)
+
+        for param_num in range(len(param_names)):
+            name = param_names[param_num]
+            m[name] = param_vals[param_num]
+            constraints[param_num](name, m)
+
+        print m
+        m.randomize()
+        #m.optimize(max_iters=8)
+        print m
+        m.checkgrad(verbose=1, step=step)
+        #if not m.checkgrad(step=step):
+            #m.checkgrad(verbose=1, step=step)
+            #import ipdb; ipdb.set_trace()
+        #NOTE this test appears to be stochastic for some likelihoods (student t?)
+        # appears to all be working in test mode right now...
+        assert m.checkgrad(step=step)
+
+    ###########
+    # EP test #
+    ###########
+    @with_setup(setUp, tearDown)
+    def t_ep_fit_rbf_white(self, model, X, Y, f, step, param_vals, param_names, constraints):
+        print "\n{}".format(inspect.stack()[0][3])
+        #Normalize
+        Y = Y/Y.max()
+        white_var = 1e-6
+        kernel = GPy.kern.rbf(X.shape[1]) + GPy.kern.white(X.shape[1])
+        ep_likelihood = GPy.likelihoods.EP(Y.copy(), model)
+        m = GPy.models.GPRegression(X.copy(), Y.copy(), kernel, likelihood=ep_likelihood)
+        m.ensure_default_constraints()
+        m.constrain_fixed('white', white_var)
+
+        for param_num in range(len(param_names)):
+            name = param_names[param_num]
+            m[name] = param_vals[param_num]
+            constraints[param_num](name, m)
+
+        m.randomize()
+        m.checkgrad(verbose=1, step=step)
+        print m
+        assert m.checkgrad(step=step)
+
+
+class LaplaceTests(unittest.TestCase):
+    """
+    Specific likelihood tests, not general enough for the above tests
+    """
+
+    def setUp(self):
+        self.N = 5
+        self.D = 3
+        self.X = np.random.rand(self.N, self.D)*10
+
+        self.real_std = 0.1
+        noise = np.random.randn(*self.X[:, 0].shape)*self.real_std
+        self.Y = (np.sin(self.X[:, 0]*2*np.pi) + noise)[:, None]
+        self.f = np.random.rand(self.N, 1)
+
+        self.var = 0.2
+
+        self.var = np.random.rand(1)
+        self.stu_t = GPy.likelihoods.student_t(deg_free=5, sigma2=self.var)
+        self.gauss = GPy.likelihoods.gaussian(gp_transformations.Log(), variance=self.var, D=self.D, N=self.N)
+
+        #Make a bigger step as lower bound can be quite curved
+        self.step = 1e-6
+
+    def tearDown(self):
+        self.stu_t = None
+        self.gauss = None
+        self.Y = None
+        self.f = None
+        self.X = None
+
+    def test_gaussian_d2logpdf_df2_2(self):
+        print "\n{}".format(inspect.stack()[0][3])
+        self.Y = None
+        self.gauss = None
+
+        self.N = 2
+        self.D = 1
+        self.X = np.linspace(0, self.D, self.N)[:, None]
+        self.real_std = 0.2
+        noise = np.random.randn(*self.X.shape)*self.real_std
+        self.Y = np.sin(self.X*2*np.pi) + noise
+        self.f = np.random.rand(self.N, 1)
+        self.gauss = GPy.likelihoods.gaussian(variance=self.var, D=self.D, N=self.N)
+
+        dlogpdf_df = functools.partial(self.gauss.dlogpdf_df, y=self.Y)
+        d2logpdf_df2 = functools.partial(self.gauss.d2logpdf_df2, y=self.Y)
+        grad = GradientChecker(dlogpdf_df, d2logpdf_df2, self.f.copy(), 'g')
+        grad.randomize()
+        grad.checkgrad(verbose=1)
+        self.assertTrue(grad.checkgrad())
+
+    #@unittest.skip('Not working yet, needs to be checked')
+    def test_laplace_log_likelihood(self):
+        debug = False
+        real_std = 0.1
+        initial_var_guess = 0.5
+
+        #Start a function, any function
+        X = np.linspace(0.0, np.pi*2, 100)[:, None]
+        Y = np.sin(X) + np.random.randn(*X.shape)*real_std
+        Y = Y/Y.max()
+        #Yc = Y.copy()
+        #Yc[75:80] += 1
+        kernel1 = GPy.kern.rbf(X.shape[1]) + GPy.kern.white(X.shape[1])
+        kernel2 = kernel1.copy()
+
+        m1 = GPy.models.GPRegression(X, Y.copy(), kernel=kernel1)
+        m1.constrain_fixed('white', 1e-6)
+        m1['noise'] = initial_var_guess
+        m1.constrain_bounded('noise', 1e-4, 10)
+        m1.constrain_bounded('rbf', 1e-4, 10)
+        m1.ensure_default_constraints()
+        m1.randomize()
+
+        gauss_distr = GPy.likelihoods.gaussian(variance=initial_var_guess, D=1, N=Y.shape[0])
+        laplace_likelihood = GPy.likelihoods.Laplace(Y.copy(), gauss_distr)
+        m2 = GPy.models.GPRegression(X, Y.copy(), kernel=kernel2, likelihood=laplace_likelihood)
+        m2.ensure_default_constraints()
+        m2.constrain_fixed('white', 1e-6)
+        m2.constrain_bounded('rbf', 1e-4, 10)
+        m2.constrain_bounded('noise', 1e-4, 10)
+        m2.randomize()
+
+        if debug:
+            print m1
+            print m2
+        optimizer = 'scg'
+        print "Gaussian"
+        m1.optimize(optimizer, messages=debug)
+        print "Laplace Gaussian"
+        m2.optimize(optimizer, messages=debug)
+        if debug:
+            print m1
+            print m2
+
+        m2._set_params(m1._get_params())
+
+        #Predict for training points to get posterior mean and variance
+        post_mean, post_var, _, _ = m1.predict(X)
+        post_mean_approx, post_var_approx, _, _ = m2.predict(X)
+
+        if debug:
+            import pylab as pb
+            pb.figure(5)
+            pb.title('posterior means')
+            pb.scatter(X, post_mean, c='g')
+            pb.scatter(X, post_mean_approx, c='r', marker='x')
+
+            pb.figure(6)
+            pb.title('plot_f')
+            m1.plot_f(fignum=6)
+            m2.plot_f(fignum=6)
+            fig, axes = pb.subplots(2, 1)
+            fig.suptitle('Covariance matrices')
+            a1 = pb.subplot(121)
+            a1.matshow(m1.likelihood.covariance_matrix)
+            a2 = pb.subplot(122)
+            a2.matshow(m2.likelihood.covariance_matrix)
+
+            pb.figure(8)
+            pb.scatter(X, m1.likelihood.Y, c='g')
+            pb.scatter(X, m2.likelihood.Y, c='r', marker='x')
+
+
+
+        #Check Y's are the same
+        np.testing.assert_almost_equal(Y, m2.likelihood.Y, decimal=5)
+        #Check marginals are the same
+        np.testing.assert_almost_equal(m1.log_likelihood(), m2.log_likelihood(), decimal=2)
+        #Check marginals are the same with random
+        m1.randomize()
+        m2._set_params(m1._get_params())
+        np.testing.assert_almost_equal(m1.log_likelihood(), m2.log_likelihood(), decimal=2)
+
+        #Check they are checkgradding
+        #m1.checkgrad(verbose=1)
+        #m2.checkgrad(verbose=1)
+        self.assertTrue(m1.checkgrad())
+        self.assertTrue(m2.checkgrad())
+
+if __name__ == "__main__":
+    print "Running unit tests"
+    unittest.main()
diff --git a/GPy/testing/mapping_tests.py b/GPy/testing/mapping_tests.py
new file mode 100644
index 00000000..cd28e71a
--- /dev/null
+++ b/GPy/testing/mapping_tests.py
@@ -0,0 +1,34 @@
+# Copyright (c) 2012, 2013 GPy authors (see AUTHORS.txt).
+# Licensed under the BSD 3-clause license (see LICENSE.txt)
+
+import unittest
+import numpy as np
+import GPy
+
+
+
+class MappingTests(unittest.TestCase):
+
+    def test_kernelmapping(self):
+        verbose = False
+        mapping = GPy.mappings.Kernel(np.random.rand(10, 3), 2)
+        self.assertTrue(GPy.core.mapping.Mapping_check_df_dtheta(mapping=mapping).checkgrad(verbose=verbose))
+        self.assertTrue(GPy.core.mapping.Mapping_check_df_dX(mapping=mapping).checkgrad(verbose=verbose))
+
+    def test_linearmapping(self):
+        verbose = False
+        mapping = GPy.mappings.Linear(3, 2)
+        self.assertTrue(GPy.core.Mapping_check_df_dtheta(mapping=mapping).checkgrad(verbose=verbose))
+        self.assertTrue(GPy.core.Mapping_check_df_dX(mapping=mapping).checkgrad(verbose=verbose))
+
+    def test_mlpmapping(self):
+        verbose = False
+        mapping = GPy.mappings.MLP(input_dim=2, hidden_dim=[3, 4, 8, 2], output_dim=2)
+        self.assertTrue(GPy.core.Mapping_check_df_dtheta(mapping=mapping).checkgrad(verbose=verbose))
+        self.assertTrue(GPy.core.Mapping_check_df_dX(mapping=mapping).checkgrad(verbose=verbose))
+
+
+
+if __name__ == "__main__":
+    print "Running unit tests, please be (very) patient..."
+    unittest.main()
diff --git a/GPy/testing/psi_stat_expactation_tests.py b/GPy/testing/psi_stat_expectation_tests.py
similarity index 53%
rename from GPy/testing/psi_stat_expactation_tests.py
rename to GPy/testing/psi_stat_expectation_tests.py
index da71754b..90252197 100644
--- a/GPy/testing/psi_stat_expactation_tests.py
+++ b/GPy/testing/psi_stat_expectation_tests.py
@@ -7,9 +7,14 @@ import unittest
 import GPy
 import numpy as np
 from GPy import testing
+import sys
+import numpy
+from GPy.kern.parts.rbf import RBF
+from GPy.kern.parts.linear import Linear
+from copy import deepcopy
 
-__test__ = False
-np.random.seed(0)
+__test__ = lambda: 'deep' in sys.argv
+# np.random.seed(0)
 
 def ard(p):
     try:
@@ -19,28 +24,50 @@ def ard(p):
         pass
     return ""
 
-@testing.deepTest(__test__)
+@testing.deepTest(__test__())
 class Test(unittest.TestCase):
     input_dim = 9
-    num_inducing = 4
-    N = 3
-    Nsamples = 6e6
+    num_inducing = 13
+    N = 300
+    Nsamples = 1e6
 
     def setUp(self):
+        i_s_dim_list = [2,4,3]
+        indices = numpy.cumsum(i_s_dim_list).tolist()
+        input_slices = [slice(a,b) for a,b in zip([None]+indices, indices)]
+        #input_slices[2] = deepcopy(input_slices[1])
+        input_slice_kern = GPy.kern.kern(9,
+                [
+                 RBF(i_s_dim_list[0], np.random.rand(), np.random.rand(i_s_dim_list[0]), ARD=True),
+                 RBF(i_s_dim_list[1], np.random.rand(), np.random.rand(i_s_dim_list[1]), ARD=True),
+                 Linear(i_s_dim_list[2], np.random.rand(i_s_dim_list[2]), ARD=True)
+                 ],
+                input_slices = input_slices
+                )
         self.kerns = (
+#                      input_slice_kern,
 #                      (GPy.kern.rbf(self.input_dim, ARD=True) +
 #                       GPy.kern.linear(self.input_dim, ARD=True) +
 #                       GPy.kern.bias(self.input_dim) +
 #                       GPy.kern.white(self.input_dim)),
-                      (GPy.kern.rbf(self.input_dim, np.random.rand(), np.random.rand(self.input_dim), ARD=True) +
-                       GPy.kern.rbf(self.input_dim, np.random.rand(), np.random.rand(self.input_dim), ARD=True) +
-                       GPy.kern.linear(self.input_dim, np.random.rand(self.input_dim), ARD=True) +
-                       GPy.kern.bias(self.input_dim) +
-                       GPy.kern.white(self.input_dim)),
-#                      GPy.kern.rbf(self.input_dim), GPy.kern.rbf(self.input_dim, ARD=True),
+                      (#GPy.kern.rbf(self.input_dim, np.random.rand(), np.random.rand(self.input_dim), ARD=True)
+                       GPy.kern.linear(self.input_dim, np.random.rand(self.input_dim), ARD=True)
+                       +GPy.kern.rbf(self.input_dim, np.random.rand(), np.random.rand(self.input_dim), ARD=True)
+#                       +GPy.kern.bias(self.input_dim)
+#                       +GPy.kern.white(self.input_dim)),
+                       ),
+#                      (GPy.kern.rbf(self.input_dim, np.random.rand(), np.random.rand(self.input_dim), ARD=True) +
+#                      GPy.kern.bias(self.input_dim, np.random.rand())),
+#                      (GPy.kern.rbf(self.input_dim, np.random.rand(), np.random.rand(self.input_dim), ARD=True)
+#                      +GPy.kern.rbf(self.input_dim, np.random.rand(), np.random.rand(self.input_dim), ARD=True)
+#                      #+GPy.kern.bias(self.input_dim, np.random.rand())
+#                      #+GPy.kern.white(self.input_dim, np.random.rand())),
+#                      ),
+#                      GPy.kern.white(self.input_dim, np.random.rand())),
+#                      GPy.kern.rbf(self.input_dim), GPy.kern.rbf(self.input_dim, ARD=True),
 #                      GPy.kern.linear(self.input_dim, ARD=False), GPy.kern.linear(self.input_dim, ARD=True),
 #                      GPy.kern.linear(self.input_dim) + GPy.kern.bias(self.input_dim),
-#                      GPy.kern.rbf(self.input_dim) + GPy.kern.bias(self.input_dim),
+#                      GPy.kern.rbf(self.input_dim) + GPy.kern.bias(self.input_dim),
 #                      GPy.kern.linear(self.input_dim) + GPy.kern.bias(self.input_dim) + GPy.kern.white(self.input_dim),
 #                      GPy.kern.rbf(self.input_dim) + GPy.kern.bias(self.input_dim) + GPy.kern.white(self.input_dim),
 #                      GPy.kern.bias(self.input_dim), GPy.kern.white(self.input_dim),
@@ -61,22 +88,22 @@ class Test(unittest.TestCase):
 
     def test_psi1(self):
         for kern in self.kerns:
-            Nsamples = 100
+            Nsamples = np.floor(self.Nsamples/self.N)
             psi1 = kern.psi1(self.Z, self.q_x_mean, self.q_x_variance)
             K_ = np.zeros((Nsamples, self.num_inducing))
             diffs = []
             for i, q_x_sample_stripe in enumerate(np.array_split(self.q_x_samples, self.Nsamples / Nsamples)):
-                K = kern.K(q_x_sample_stripe, self.Z)
+                K = kern.K(q_x_sample_stripe[:Nsamples], self.Z)
                 K_ += K
-                diffs.append(((psi1 - (K_ / (i + 1)))).mean())
+                diffs.append((np.abs(psi1 - (K_ / (i + 1)))**2).mean())
             K_ /= self.Nsamples / Nsamples
             msg = "psi1: " + "+".join([p.name + ard(p) for p in kern.parts])
             try:
                 import pylab
                 pylab.figure(msg)
                 pylab.plot(diffs)
-                self.assertTrue(np.allclose(psi1.squeeze(), K_,
-                                            rtol=1e-1, atol=.1),
+#                print msg, ((psi1.squeeze() - K_)**2).mean() < .01
+                self.assertTrue(((psi1.squeeze() - K_)**2).mean() < .01,
                                 msg=msg + ": not matching")
 #                sys.stdout.write(".")
             except:
@@ -87,38 +114,37 @@ def test_psi2(self):
 
     def test_psi2(self):
         for kern in self.kerns:
-            Nsamples = 100
+            Nsamples = int(np.floor(self.Nsamples/self.N))
             psi2 = kern.psi2(self.Z, self.q_x_mean, self.q_x_variance)
             K_ = np.zeros((self.num_inducing, self.num_inducing))
             diffs = []
             for i, q_x_sample_stripe in enumerate(np.array_split(self.q_x_samples, self.Nsamples / Nsamples)):
                 K = kern.K(q_x_sample_stripe, self.Z)
-                K = (K[:, :, None] * K[:, None, :]).mean(0)
-                K_ += K
-                diffs.append(((psi2 - (K_ / (i + 1)))).mean())
-            K_ /= self.Nsamples / Nsamples
+                K = (K[:, :, None] * K[:, None, :])
+                K_ += K.sum(0) / self.Nsamples
+                diffs.append(((psi2 - (K_*self.Nsamples/((i+1)*Nsamples)))**2).mean())
+            #K_ /= self.Nsamples / Nsamples
             msg = "psi2: {}".format("+".join([p.name + ard(p) for p in kern.parts]))
             try:
                 import pylab
                 pylab.figure(msg)
-                pylab.plot(diffs)
-                self.assertTrue(np.allclose(psi2.squeeze(), K_,
-                                            rtol=1e-1, atol=.1),
+                pylab.plot(diffs, marker='x', mew=.2)
+#                print msg, np.allclose(psi2.squeeze(), K_, rtol=1e-1, atol=.1)
+                self.assertTrue(np.allclose(psi2.squeeze(), K_),
+                                #rtol=1e-1, atol=.1),
                                 msg=msg + ": not matching")
 #                sys.stdout.write(".")
             except:
-#                import ipdb;ipdb.set_trace()
 #                kern.psi2(self.Z, self.q_x_mean, self.q_x_variance)
 #                sys.stdout.write("E")
                 print msg + ": not matching"
+                import ipdb;ipdb.set_trace()
                 pass
 
 if __name__ == "__main__":
-    import sys
-    __test__ = 'deep' in sys.argv
     sys.argv = ['',
-               'Test.test_psi0',
-               'Test.test_psi1',
+               #'Test.test_psi0',
+               #'Test.test_psi1',
                'Test.test_psi2',
                ]
     unittest.main()
diff --git a/GPy/testing/psi_stat_gradient_tests.py b/GPy/testing/psi_stat_gradient_tests.py
index de670f41..e373aaa3 100644
--- a/GPy/testing/psi_stat_gradient_tests.py
+++ b/GPy/testing/psi_stat_gradient_tests.py
@@ -40,10 +40,9 @@ class PsiStatModel(Model):
         return self.kern.__getattribute__(self.which)(self.Z, self.X, self.X_variance).sum()
     def _log_likelihood_gradients(self):
         psimu, psiS = self.kern.__getattribute__("d" + self.which + "_dmuS")(numpy.ones_like(self.psi_), self.Z, self.X, self.X_variance)
-        try:
-            psiZ = self.kern.__getattribute__("d" + self.which + "_dZ")(numpy.ones_like(self.psi_), self.Z, self.X, self.X_variance)
-        except AttributeError:
-            psiZ = numpy.zeros(self.num_inducing * self.input_dim)
+        #psimu, psiS = numpy.ones(self.N * self.input_dim), numpy.ones(self.N * self.input_dim)
+        psiZ = self.kern.__getattribute__("d" + self.which + "_dZ")(numpy.ones_like(self.psi_), self.Z, self.X, self.X_variance)
+        #psiZ = numpy.ones(self.num_inducing * self.input_dim)
         thetagrad = self.kern.__getattribute__("d" + self.which + "_dtheta")(numpy.ones_like(self.psi_), self.Z, self.X, self.X_variance).flatten()
         return numpy.hstack((psimu.flatten(), psiS.flatten(), psiZ.flatten(), thetagrad))
@@ -64,40 +63,54 @@ class DPsiStatTest(unittest.TestCase):
 
     def testPsi0(self):
         for k in self.kernels:
-            m = PsiStatModel('psi0', X=self.X, X_variance=self.X_var, Z=self.Z,
+            m = PsiStatModel('psi0', X=self.X, X_variance=self.X_var, Z=self.Z,\
                     num_inducing=self.num_inducing, kernel=k)
+            m.ensure_default_constraints()
+            m.randomize()
             assert m.checkgrad(), "{} x psi0".format("+".join(map(lambda x: x.name, k.parts)))
-
-#     def testPsi1(self):
-#         for k in self.kernels:
-#             m = PsiStatModel('psi1', X=self.X, X_variance=self.X_var, Z=self.Z,
-#                     num_inducing=self.num_inducing, kernel=k)
-#             assert m.checkgrad(), "{} x psi1".format("+".join(map(lambda x: x.name, k.parts)))
+
+    def testPsi1(self):
+        for k in self.kernels:
+            m = PsiStatModel('psi1', X=self.X, X_variance=self.X_var, Z=self.Z,
+                    num_inducing=self.num_inducing, kernel=k)
+            m.ensure_default_constraints()
+            m.randomize()
+            assert m.checkgrad(), "{} x psi1".format("+".join(map(lambda x: x.name, k.parts)))
 
     def testPsi2_lin(self):
         k = self.kernels[0]
         m = PsiStatModel('psi2', X=self.X, X_variance=self.X_var, Z=self.Z,
-                num_inducing=self.num_inducing, kernel=k)
+                num_inducing=self.num_inducing, kernel=k)
+        m.ensure_default_constraints()
+        m.randomize()
        assert m.checkgrad(), "{} x psi2".format("+".join(map(lambda x: x.name, k.parts)))
 
     def testPsi2_lin_bia(self):
         k = self.kernels[3]
         m = PsiStatModel('psi2', X=self.X, X_variance=self.X_var, Z=self.Z,
                 num_inducing=self.num_inducing, kernel=k)
+        m.ensure_default_constraints()
+        m.randomize()
         assert m.checkgrad(), "{} x psi2".format("+".join(map(lambda x: x.name, k.parts)))
 
     def testPsi2_rbf(self):
         k = self.kernels[1]
         m = PsiStatModel('psi2', X=self.X, X_variance=self.X_var, Z=self.Z,
                 num_inducing=self.num_inducing, kernel=k)
+        m.ensure_default_constraints()
+        m.randomize()
         assert m.checkgrad(), "{} x psi2".format("+".join(map(lambda x: x.name, k.parts)))
 
     def testPsi2_rbf_bia(self):
         k = self.kernels[-1]
         m = PsiStatModel('psi2', X=self.X, X_variance=self.X_var, Z=self.Z,
                 num_inducing=self.num_inducing, kernel=k)
+        m.ensure_default_constraints()
+        m.randomize()
         assert m.checkgrad(), "{} x psi2".format("+".join(map(lambda x: x.name, k.parts)))
 
     def testPsi2_bia(self):
         k = self.kernels[2]
         m = PsiStatModel('psi2', X=self.X, X_variance=self.X_var, Z=self.Z,
                 num_inducing=self.num_inducing, kernel=k)
+        m.ensure_default_constraints()
+        m.randomize()
         assert m.checkgrad(), "{} x psi2".format("+".join(map(lambda x: x.name, k.parts)))
@@ -116,9 +129,9 @@ if __name__ == "__main__":
 #             m.randomize()
 # #             self.assertTrue(m.checkgrad())
     numpy.random.seed(0)
-    input_dim = 5
-    N = 50
-    num_inducing = 10
+    input_dim = 3
+    N = 3
+    num_inducing = 2
     D = 15
     X = numpy.random.randn(N, input_dim)
     X_var = .5 * numpy.ones_like(X) + .1 * numpy.clip(numpy.random.randn(*X.shape), 0, 1)
@@ -135,18 +148,35 @@ if __name__ == "__main__":
 #             num_inducing=num_inducing, kernel=k)
 #         assert m.checkgrad(), "{} x psi1".format("+".join(map(lambda x: x.name, k.parts)))
 #
-#     m0 = PsiStatModel('psi0', X=X, X_variance=X_var, Z=Z,
-#             num_inducing=num_inducing, kernel=GPy.kern.linear(input_dim))
+    m0 = PsiStatModel('psi0', X=X, X_variance=X_var, Z=Z,
+            num_inducing=num_inducing, kernel=GPy.kern.rbf(input_dim)+GPy.kern.bias(input_dim))
 #     m1 = PsiStatModel('psi1', X=X, X_variance=X_var, Z=Z,
 #             num_inducing=num_inducing, kernel=kernel)
 #     m1 = PsiStatModel('psi1', X=X, X_variance=X_var, Z=Z,
 #             num_inducing=num_inducing, kernel=kernel)
 #     m2 = PsiStatModel('psi2', X=X, X_variance=X_var, Z=Z,
 #             num_inducing=num_inducing, kernel=GPy.kern.rbf(input_dim))
-    m3 = PsiStatModel('psi2', X=X, X_variance=X_var, Z=Z,
-            num_inducing=num_inducing, kernel=GPy.kern.linear(input_dim, ARD=True, variances=numpy.random.rand(input_dim)))
+#    m3 = PsiStatModel('psi2', X=X, X_variance=X_var, Z=Z,
+#            num_inducing=num_inducing, kernel=GPy.kern.linear(input_dim, ARD=True, variances=numpy.random.rand(input_dim)))
 #             + GPy.kern.bias(input_dim))
-#     m4 = PsiStatModel('psi2', X=X, X_variance=X_var, Z=Z,
-#             num_inducing=num_inducing, kernel=GPy.kern.rbf(input_dim) + GPy.kern.bias(input_dim))
+#    m = PsiStatModel('psi2', X=X, X_variance=X_var, Z=Z,
+#            num_inducing=num_inducing,
+#            kernel=(
+#                    GPy.kern.rbf(input_dim, ARD=1)
+#                    +GPy.kern.linear(input_dim, ARD=1)
+#                    +GPy.kern.bias(input_dim))
+#            )
+#    m.ensure_default_constraints()
    m2 = PsiStatModel('psi2', X=X, X_variance=X_var, Z=Z,
            num_inducing=num_inducing, kernel=(
                    GPy.kern.rbf(input_dim, numpy.random.rand(), numpy.random.rand(input_dim), ARD=1)
                    #+GPy.kern.linear(input_dim, numpy.random.rand(input_dim), ARD=1)
                    #+GPy.kern.rbf(input_dim, numpy.random.rand(), numpy.random.rand(input_dim), ARD=1)
                    #+GPy.kern.rbf(input_dim, numpy.random.rand(), numpy.random.rand(), ARD=0)
                    +GPy.kern.bias(input_dim)
                    +GPy.kern.white(input_dim)
                    )
            )
    m2.ensure_default_constraints()
 else:
     unittest.main()
diff --git a/GPy/testing/sparse_gplvm_tests.py b/GPy/testing/sparse_gplvm_tests.py
index e27fccff..c3942b95 100644
--- a/GPy/testing/sparse_gplvm_tests.py
+++ b/GPy/testing/sparse_gplvm_tests.py
@@ -4,7 +4,7 @@
 import unittest
 import numpy as np
 import GPy
-from GPy.models.sparse_gplvm import SparseGPLVM
+from ..models import SparseGPLVM
 
 class sparse_GPLVMTests(unittest.TestCase):
     def test_bias_kern(self):
diff --git a/GPy/testing/unit_tests.py b/GPy/testing/unit_tests.py
index 6e504a69..9269a4c4 100644
--- a/GPy/testing/unit_tests.py
+++ b/GPy/testing/unit_tests.py
@@ -5,7 +5,6 @@
 import unittest
 import numpy as np
 import GPy
-from GPy.likelihoods.likelihood_functions import Binomial
 
 class GradientTests(unittest.TestCase):
     def setUp(self):
@@ -23,7 +22,7 @@ class GradientTests(unittest.TestCase):
         self.X2D = np.random.uniform(-3., 3., (40, 2))
         self.Y2D = np.sin(self.X2D[:, 0:1]) * np.sin(self.X2D[:, 1:2]) + np.random.randn(40, 1) * 0.05
 
-    def check_model_with_white(self, kern, model_type='GPRegression', dimension=1):
+    def check_model(self, kern, model_type='GPRegression', dimension=1, uncertain_inputs=False):
         # Get the correct gradients
         if dimension == 1:
             X = self.X1D
@@ -34,9 +33,12 @@ class GradientTests(unittest.TestCase):
 
         # Get model type (GPRegression, SparseGPRegression, etc)
         model_fit = getattr(GPy.models, model_type)
 
-        noise = GPy.kern.white(dimension)
-        kern = kern + noise
-        m = model_fit(X, Y, kernel=kern)
+        # noise = GPy.kern.white(dimension)
+        kern = kern # + noise
+        if uncertain_inputs:
+            m = model_fit(X, Y, kernel=kern, X_variance=np.random.rand(X.shape[0], X.shape[1]))
+        else:
+            m = model_fit(X, Y, kernel=kern)
         m.randomize()
         # contrain all parameters to be positive
         self.assertTrue(m.checkgrad())
 
     def test_GPRegression_rbf_1d(self):
         ''' Testing the GP regression with rbf kernel with white kernel on 1d data '''
         rbf = GPy.kern.rbf(1)
-        self.check_model_with_white(rbf, model_type='GPRegression', dimension=1)
+        self.check_model(rbf, model_type='GPRegression', dimension=1)
 
     def test_GPRegression_rbf_2D(self):
-        ''' Testing the GP regression with rbf and white kernel on 2d data '''
+        ''' Testing the GP regression with rbf kernel on 2d data '''
         rbf = GPy.kern.rbf(2)
-        self.check_model_with_white(rbf, model_type='GPRegression', dimension=2)
+        self.check_model(rbf, model_type='GPRegression', dimension=2)
 
     def test_GPRegression_rbf_ARD_2D(self):
-        ''' Testing the GP regression with rbf and white kernel on 2d data '''
+        ''' Testing the GP regression with rbf kernel on 2d data '''
         k = GPy.kern.rbf(2, ARD=True)
-        self.check_model_with_white(k, model_type='GPRegression', dimension=2)
+        self.check_model(k, model_type='GPRegression', dimension=2)
+
+    def test_GPRegression_mlp_1d(self):
+        ''' Testing the GP regression with mlp kernel on 1d data '''
+        mlp = GPy.kern.mlp(1)
+        self.check_model(mlp, model_type='GPRegression', dimension=1)
+
+    def test_GPRegression_poly_1d(self):
+        ''' Testing the GP regression with polynomial kernel on 1d data '''
+        mlp = GPy.kern.poly(1, degree=5)
+        self.check_model(mlp, model_type='GPRegression', dimension=1)
 
     def test_GPRegression_matern52_1D(self):
         ''' Testing the GP regression with matern52 kernel on 1d data '''
         matern52 = GPy.kern.Matern52(1)
-        self.check_model_with_white(matern52, model_type='GPRegression', dimension=1)
+        self.check_model(matern52, model_type='GPRegression', dimension=1)
 
     def test_GPRegression_matern52_2D(self):
         ''' Testing the GP regression with matern52 kernel on 2d data '''
         matern52 = GPy.kern.Matern52(2)
-        self.check_model_with_white(matern52, model_type='GPRegression', dimension=2)
+        self.check_model(matern52, model_type='GPRegression', dimension=2)
 
     def test_GPRegression_matern52_ARD_2D(self):
         ''' Testing the GP regression with matern52 kernel on 2d data '''
         matern52 = GPy.kern.Matern52(2, ARD=True)
-        self.check_model_with_white(matern52, model_type='GPRegression', dimension=2)
+        self.check_model(matern52, model_type='GPRegression', dimension=2)
 
     def test_GPRegression_matern32_1D(self):
         ''' Testing the GP regression with matern32 kernel on 1d data '''
         matern32 = GPy.kern.Matern32(1)
-        self.check_model_with_white(matern32, model_type='GPRegression', dimension=1)
+        self.check_model(matern32, model_type='GPRegression', dimension=1)
 
     def test_GPRegression_matern32_2D(self):
         ''' Testing the GP regression with matern32 kernel on 2d data '''
         matern32 = GPy.kern.Matern32(2)
-        self.check_model_with_white(matern32, model_type='GPRegression', dimension=2)
+        self.check_model(matern32, model_type='GPRegression', dimension=2)
 
     def test_GPRegression_matern32_ARD_2D(self):
         ''' Testing the GP regression with matern32 kernel on 2d data '''
         matern32 = GPy.kern.Matern32(2, ARD=True)
-        self.check_model_with_white(matern32, model_type='GPRegression', dimension=2)
+        self.check_model(matern32, model_type='GPRegression', dimension=2)
 
     def test_GPRegression_exponential_1D(self):
         ''' Testing the GP regression with exponential kernel on 1d data '''
         exponential = GPy.kern.exponential(1)
-        self.check_model_with_white(exponential, model_type='GPRegression', dimension=1)
+        self.check_model(exponential, model_type='GPRegression', dimension=1)
 
     def test_GPRegression_exponential_2D(self):
         ''' Testing the GP regression with exponential kernel on 2d data '''
         exponential = GPy.kern.exponential(2)
-        self.check_model_with_white(exponential, model_type='GPRegression', dimension=2)
+        self.check_model(exponential, model_type='GPRegression', dimension=2)
 
     def test_GPRegression_exponential_ARD_2D(self):
         ''' Testing the GP regression with exponential kernel on 2d data '''
         exponential = GPy.kern.exponential(2, ARD=True)
-        self.check_model_with_white(exponential, model_type='GPRegression', dimension=2)
+        self.check_model(exponential, model_type='GPRegression', dimension=2)
 
     def test_GPRegression_bias_kern_1D(self):
         ''' Testing the GP regression with bias kernel on 1d data '''
         bias = GPy.kern.bias(1)
-        self.check_model_with_white(bias, model_type='GPRegression', dimension=1)
+        self.check_model(bias, model_type='GPRegression', dimension=1)
 
     def test_GPRegression_bias_kern_2D(self):
         ''' Testing the GP regression with bias kernel on 2d data '''
         bias = GPy.kern.bias(2)
-        self.check_model_with_white(bias, model_type='GPRegression', dimension=2)
+        self.check_model(bias, model_type='GPRegression', dimension=2)
 
     def test_GPRegression_linear_kern_1D_ARD(self):
         ''' Testing the GP regression with linear kernel on 1d data '''
         linear = GPy.kern.linear(1, ARD=True)
-        self.check_model_with_white(linear, model_type='GPRegression', dimension=1)
+        self.check_model(linear, model_type='GPRegression', dimension=1)
 
     def test_GPRegression_linear_kern_2D_ARD(self):
         ''' Testing the GP regression with linear kernel on 2d data '''
         linear = GPy.kern.linear(2, ARD=True)
-        self.check_model_with_white(linear, model_type='GPRegression', dimension=2)
+        self.check_model(linear, model_type='GPRegression', dimension=2)
 
     def test_GPRegression_linear_kern_1D(self):
         ''' Testing the GP regression with linear kernel on 1d data '''
         linear = GPy.kern.linear(1)
-        self.check_model_with_white(linear, model_type='GPRegression', dimension=1)
+        self.check_model(linear, model_type='GPRegression', dimension=1)
 
     def test_GPRegression_linear_kern_2D(self):
         ''' Testing the GP regression with linear kernel on 2d data '''
         linear = GPy.kern.linear(2)
-        self.check_model_with_white(linear, model_type='GPRegression', dimension=2)
+        self.check_model(linear, model_type='GPRegression', dimension=2)
 
     def test_SparseGPRegression_rbf_white_kern_1d(self):
         ''' Testing the sparse GP regression with rbf kernel with white kernel on 1d data '''
         rbf = GPy.kern.rbf(1)
-        self.check_model_with_white(rbf, model_type='SparseGPRegression', dimension=1)
+        self.check_model(rbf, model_type='SparseGPRegression', dimension=1)
 
     def test_SparseGPRegression_rbf_white_kern_2D(self):
-        ''' Testing the sparse GP regression with rbf and white kernel on 2d data '''
+        ''' Testing the sparse GP regression with rbf kernel on 2d data '''
         rbf = GPy.kern.rbf(2)
-        self.check_model_with_white(rbf, model_type='SparseGPRegression', dimension=2)
+        self.check_model(rbf, model_type='SparseGPRegression', dimension=2)
+
+    def test_SparseGPRegression_rbf_linear_white_kern_1D(self):
+        ''' Testing the sparse GP regression with rbf and linear kernel on 1d data '''
+        rbflin = GPy.kern.rbf(1) + GPy.kern.linear(1)
+        self.check_model(rbflin, model_type='SparseGPRegression', dimension=1)
+
+    def test_SparseGPRegression_rbf_linear_white_kern_2D(self):
+        ''' Testing the sparse GP regression with rbf and linear kernel on 2d data '''
+        rbflin = GPy.kern.rbf(2) + GPy.kern.linear(2)
+        self.check_model(rbflin, model_type='SparseGPRegression', dimension=2)
+
+    #@unittest.expectedFailure
+    def test_SparseGPRegression_rbf_linear_white_kern_2D_uncertain_inputs(self):
+        ''' Testing the sparse GP regression with rbf, linear kernel on 2d data with uncertain inputs'''
+        rbflin = GPy.kern.rbf(2) + GPy.kern.linear(2)
+        raise unittest.SkipTest("This is not implemented yet!")
+        self.check_model(rbflin, model_type='SparseGPRegression', dimension=2, uncertain_inputs=1)
+
+    #@unittest.expectedFailure
+    def test_SparseGPRegression_rbf_linear_white_kern_1D_uncertain_inputs(self):
+        ''' Testing the sparse GP regression with rbf, linear kernel on 1d data with uncertain inputs'''
+        rbflin = GPy.kern.rbf(1) + GPy.kern.linear(1)
+        raise unittest.SkipTest("This is not implemented yet!")
+        self.check_model(rbflin, model_type='SparseGPRegression', dimension=1, uncertain_inputs=1)
 
     def test_GPLVM_rbf_bias_white_kern_2D(self):
-        """ Testing GPLVM with rbf + bias and white kernel """
+        """ Testing GPLVM with rbf + bias kernel """
         N, input_dim, D = 50, 1, 2
         X = np.random.rand(N, input_dim)
         k = GPy.kern.rbf(input_dim, 0.5, 0.9 * np.ones((1,))) + GPy.kern.bias(input_dim, 0.1) + GPy.kern.white(input_dim, 0.05)
@@ -152,7 +188,7 @@ class GradientTests(unittest.TestCase):
         self.assertTrue(m.checkgrad())
 
     def test_GPLVM_rbf_linear_white_kern_2D(self):
-        """ Testing GPLVM with rbf + bias and white kernel """
+        """ Testing GPLVM with linear + bias kernel """
         N, input_dim, D = 50, 1, 2
         X = np.random.rand(N, input_dim)
         k = GPy.kern.linear(input_dim) + GPy.kern.bias(input_dim, 0.1) + GPy.kern.white(input_dim, 0.05)
@@ -166,10 +202,7 @@ class GradientTests(unittest.TestCase):
         X = np.hstack([np.random.normal(5, 2, N / 2), np.random.normal(10, 2, N / 2)])[:, None]
         Y = np.hstack([np.ones(N / 2), np.zeros(N / 2)])[:, None]
         kernel = GPy.kern.rbf(1)
-        distribution = GPy.likelihoods.likelihood_functions.Binomial()
-        likelihood = GPy.likelihoods.EP(Y, distribution)
-        m = GPy.core.GP(X, likelihood, kernel)
-        m.ensure_default_constraints()
+        m = GPy.models.GPClassification(X,Y,kernel=kernel)
         m.update_likelihood_approximation()
         self.assertTrue(m.checkgrad())
 
@@ -179,10 +212,11 @@ class GradientTests(unittest.TestCase):
         Y = np.hstack([np.ones(N / 2), np.zeros(N / 2)])[:, None]
         Z = np.linspace(0, 15, 4)[:, None]
         kernel = GPy.kern.rbf(1)
-        distribution = GPy.likelihoods.likelihood_functions.Binomial()
-        likelihood = GPy.likelihoods.EP(Y, distribution)
-        m = GPy.core.SparseGP(X, likelihood, kernel, Z)
-        m.ensure_default_constraints()
+        m = GPy.models.SparseGPClassification(X,Y,kernel=kernel,Z=Z)
+        #distribution = GPy.likelihoods.likelihood_functions.Bernoulli()
+        #likelihood = GPy.likelihoods.EP(Y, distribution)
+        #m = GPy.core.SparseGP(X, likelihood, kernel, Z)
+        #m.ensure_default_constraints()
         m.update_likelihood_approximation()
         self.assertTrue(m.checkgrad())
 
@@ -191,10 +225,36 @@ class GradientTests(unittest.TestCase):
         X = np.hstack([np.random.rand(N / 2) + 1, np.random.rand(N / 2) - 1])[:, None]
         k = GPy.kern.rbf(1) + GPy.kern.white(1)
         Y = np.hstack([np.ones(N/2),np.zeros(N/2)])[:,None]
-        m = GPy.models.FITCClassification(X, Y=Y)
+        m = GPy.models.FITCClassification(X, Y, kernel = k)
         m.update_likelihood_approximation()
         self.assertTrue(m.checkgrad())
 
+    def multioutput_regression_1D(self):
+        X1 = np.random.rand(50, 1) * 8
+        X2 = np.random.rand(30, 1) * 5
+        X = np.vstack((X1, X2))
+        Y1 = np.sin(X1) + np.random.randn(*X1.shape) * 0.05
+        Y2 = -np.sin(X2) + np.random.randn(*X2.shape) * 0.05
+        Y = np.vstack((Y1, Y2))
+
+        k1 = GPy.kern.rbf(1)
+        m = GPy.models.GPMultioutputRegression(X_list=[X1,X2],Y_list=[Y1,Y2],kernel_list=[k1])
+        m.constrain_fixed('.*rbf_var', 1.)
+        self.assertTrue(m.checkgrad())
+
+    def multioutput_sparse_regression_1D(self):
+        X1 = np.random.rand(500, 1) * 8
+        X2 = np.random.rand(300, 1) * 5
+        X = np.vstack((X1, X2))
+        Y1 = np.sin(X1) + np.random.randn(*X1.shape) * 0.05
+        Y2 = -np.sin(X2) + np.random.randn(*X2.shape) * 0.05
+        Y = np.vstack((Y1, Y2))
+
+        k1 = GPy.kern.rbf(1)
+        m = GPy.models.SparseGPMultioutputRegression(X_list=[X1,X2],Y_list=[Y1,Y2],kernel_list=[k1])
+        m.constrain_fixed('.*rbf_var', 1.)
+        self.assertTrue(m.checkgrad())
+
 if __name__ == "__main__":
     print "Running unit tests, please be (very) patient..."
     unittest.main()
diff --git a/GPy/util/__init__.py b/GPy/util/__init__.py
index 27d25518..2d2b6e17 100644
--- a/GPy/util/__init__.py
+++ b/GPy/util/__init__.py
@@ -7,10 +7,22 @@ import misc
 import plot
 import squashers
 import Tango
-import misc
 import warping_functions
 import datasets
 import mocap
 import visualize
 import decorators
 import classification
+import latent_space_visualizations
+
+try:
+    import sympy
+    _sympy_available = True
+    del sympy
+except ImportError as e:
+    _sympy_available = False
+
+if _sympy_available:
+    import symbolic
+
+import netpbmfile
diff --git a/GPy/util/block_matrices.py b/GPy/util/block_matrices.py
new file mode 100644
index 00000000..8fd5f89d
--- /dev/null
+++ b/GPy/util/block_matrices.py
@@ -0,0 +1,24 @@
+import numpy as np
+
+def get_blocks(A, blocksizes):
+    assert (A.shape[0]==A.shape[1]) and len(A.shape)==2, "can't blockify this non-square matrix"
+    N = np.sum(blocksizes)
+    assert A.shape[0] == N, "bad blocksizes"
+    num_blocks = len(blocksizes)
+    B = np.empty(shape=(num_blocks, num_blocks), dtype=np.object)
+    count_i = 0
+    for Bi, i in enumerate(blocksizes):
+        count_j = 0
+        for Bj, j in enumerate(blocksizes):
+            B[Bi, Bj] = A[count_i:count_i + i, count_j : count_j + j]
+            count_j += j
+        count_i += i
+    return B
+
+
+
+if __name__=='__main__':
+    A = np.zeros((5,5))
+    B = get_blocks(A,[2,3])
+    B[0,0] += 7
+    print B
diff --git a/GPy/util/config.py b/GPy/util/config.py
new file mode 100644
index 00000000..02796e0b
--- /dev/null
+++ b/GPy/util/config.py
@@ -0,0 +1,22 @@
+#
+# This loads the configuration
+#
+import ConfigParser
+import os
+config = ConfigParser.ConfigParser()
+
+home = os.getenv('HOME') or os.getenv('USERPROFILE')
+user_file = os.path.join(home,'.gpy_config.cfg')
+default_file = os.path.abspath(os.path.join(os.path.dirname( __file__ ), '..', 'gpy_config.cfg'))
+print user_file, os.path.isfile(user_file)
+print default_file, os.path.isfile(default_file)
+
+# 1. check if the user has a ~/.gpy_config.cfg
+if os.path.isfile(user_file):
+    config.read(user_file)
+elif os.path.isfile(default_file):
+    # 2. if not, use the default one
+    config.read(default_file)
+else:
+    # 3. panic
+    raise ValueError, "no configuration file found"
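+
+# A sketch of how client code might consume this module (hypothetical section
+# and option names; the gpy_config.cfg contents are not shown in this patch):
+#
+#     from GPy.util.config import config
+#     if config.has_option('plotting', 'library'):
+#         backend = config.get('plotting', 'library')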
panic + raise ValueError, "no configuration file found" diff --git a/GPy/util/data_resources.json b/GPy/util/data_resources.json new file mode 100644 index 00000000..d86d9088 --- /dev/null +++ b/GPy/util/data_resources.json @@ -0,0 +1,319 @@ +{ + "rogers_girolami_data":{ + "files":[ + [ + "firstcoursemldata.tar.gz" + ] + ], + "license":null, + "citation":"A First Course in Machine Learning. Simon Rogers and Mark Girolami: Chapman & Hall/CRC, ISBN-13: 978-1439824146", + "details":"Data from the textbook 'A First Course in Machine Learning'. Available from http://www.dcs.gla.ac.uk/~srogers/firstcourseml/.", + "urls":[ + "https://www.dropbox.com/sh/7p6tu1t29idgliq/_XqlH_3nt9/" + ], + "suffices":[ + [ + "?dl=1" + ] + ], + "size":21949154 + }, + "ankur_pose_data":{ + "files":[ + [ + "ankurDataPoseSilhouette.mat" + ] + ], + "citation":"3D Human Pose from Silhouettes by Relevance Vector Regression (In CVPR'04). A. Agarwal and B. Triggs.", + "license":null, + "urls":[ + "http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/dataset_mirror/ankur_pose_data/" + ], + "details":"Artificially generated data of silhouettes given poses. Note that the data does not display a left/right ambiguity because across the entire data set one of the arms sticks out more the the other, disambiguating the pose as to which way the individual is facing." + }, + "osu_accad":{ + "files":[ + [ + "swagger1TXT.ZIP", + "handspring1TXT.ZIP", + "quickwalkTXT.ZIP", + "run1TXT.ZIP", + "sprintTXT.ZIP", + "dogwalkTXT.ZIP", + "camper_04TXT.ZIP", + "dance_KB3_TXT.ZIP", + "per20_TXT.ZIP", + "perTWO07_TXT.ZIP", + "perTWO13_TXT.ZIP", + "perTWO14_TXT.ZIP", + "perTWO15_TXT.ZIP", + "perTWO16_TXT.ZIP" + ], + [ + "connections.txt" + ] + ], + "license":"Data is licensed under a Creative Commons Attribution-NonCommercial-ShareAlike 3.0 Unported License (http://creativecommons.org/licenses/by-nc-sa/3.0/).", + "citation":"The Open Motion Data Project by The Ohio State University Advanced Computing Center for the Arts and Design, http://accad.osu.edu/research/mocap/mocap_data.htm.", + "details":"Motion capture data of different motions from the Open Motion Data Project at Ohio State University.", + "urls":[ + "http://accad.osu.edu/research/mocap/data/", + "http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/dataset_mirror/stick/" + ], + "size":15922790 + }, + "isomap_face_data":{ + "files":[ + [ + "face_data.mat" + ] + ], + "license":null, + "citation":"A Global Geometric Framework for Nonlinear Dimensionality Reduction, J. B. Tenenbaum, V. de Silva and J. C. Langford, Science 290 (5500): 2319-2323, 22 December 2000", + "details":"Face data made available by Tenenbaum, de Silva and Langford to demonstrate isomap, available from http://isomap.stanford.edu/datasets.html.", + "urls":[ + "http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/dataset_mirror/isomap_face_data/" + ], + "size":24229368 + }, + "boston_housing":{ + "files":[ + [ + "Index", + "housing.data", + "housing.names" + ] + ], + "license":null, + "citation":"Harrison, D. and Rubinfeld, D.L. 'Hedonic prices and the demand for clean air', J. Environ. Economics & Management, vol.5, 81-102, 1978.", + "details":"The Boston Housing data relates house values in Boston to a range of input variables.", + "urls":[ + "http://archive.ics.uci.edu/ml/machine-learning-databases/housing/" + ], + "size":51276 + }, + "cmu_mocap_full":{ + "files":[ + [ + "allasfamc.zip" + ] + ], + "license":"From http://mocap.cs.cmu.edu. This data is free for use in research projects. 
You may include this data in commercially-sold products, but you may not resell this data directly, even in converted form. If you publish results obtained using this data, we would appreciate it if you would send the citation to your published paper to jkh+mocap@cs.cmu.edu, and also would add this text to your acknowledgments section: The data used in this project was obtained from mocap.cs.cmu.edu. The database was created with funding from NSF EIA-0196217.", + "citation":"Please include this in your acknowledgements: The data used in this project was obtained from mocap.cs.cmu.edu.\nThe database was created with funding from NSF EIA-0196217.", + "details":"CMU Motion Capture data base. Captured by a Vicon motion capture system consisting of 12 infrared MX-40 cameras, each of which is capable of recording at 120 Hz with images of 4 megapixel resolution. Motions are captured in a working volume of approximately 3m x 8m. The capture subject wears 41 markers and a stylish black garment.", + "urls":[ + "http://mocap.cs.cmu.edu/subjects" + ], + "size":null + }, + "brendan_faces":{ + "files":[ + [ + "frey_rawface.mat" + ] + ], + "license":null, + "citation":"Frey, B. J., Colmenarez, A and Huang, T. S. Mixtures of Local Linear Subspaces for Face Recognition. Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition 1998, 32-37, June 1998. Computer Society Press, Los Alamitos, CA.", + "details":"A video of Brendan Frey's face popularized as a benchmark for visualization by the Locally Linear Embedding.", + "urls":[ + "http://www.cs.nyu.edu/~roweis/data/" + ], + "size":1100584 + }, + "olympic_marathon_men":{ + "files":[ + [ + "olympicMarathonTimes.csv" + ] + ], + "license":null, + "citation":null, + "details":"Olympic mens' marathon gold medal winning times from 1896 to 2012. Time given in pace (minutes per kilometer). Data is originally downloaded and collated from Wikipedia, we are not responsible for errors in the data", + "urls":[ + "http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/dataset_mirror/olympic_marathon_men/" + ], + "size":584 + }, + "pumadyn-32nm":{ + "files":[ + [ + "pumadyn-32nm.tar.gz" + ] + ], + "license":"Data is made available by the Delve system at the University of Toronto", + "citation":"Created by Zoubin Ghahramani using the Matlab Robotics Toolbox of Peter Corke. Corke, P. I. (1996). A Robotics Toolbox for MATLAB. IEEE Robotics and Automation Magazine, 3 (1): 24-32.", + "details":"Pumadyn non linear 32 input data set with moderate noise. See http://www.cs.utoronto.ca/~delve/data/pumadyn/desc.html for details.", + "urls":[ + "ftp://ftp.cs.toronto.edu/pub/neuron/delve/data/tarfiles/pumadyn-family/" + ], + "size":5861646 + }, + "ripley_prnn_data":{ + "files":[ + [ + "Cushings.dat", + "README", + "crabs.dat", + "fglass.dat", + "fglass.grp", + "pima.te", + "pima.tr", + "pima.tr2", + "synth.te", + "synth.tr", + "viruses.dat", + "virus3.dat" + ] + ], + "license":null, + "citation":"Pattern Recognition and Neural Networks by B.D. Ripley (1996) Cambridge University Press ISBN 0 521 46986 7", + "details":"Data sets from Brian Ripley's Pattern Recognition and Neural Networks", + "urls":[ + "http://www.stats.ox.ac.uk/pub/PRNN/" + ], + "size":93565 + }, + "three_phase_oil_flow":{ + "files":[ + [ + "DataTrnLbls.txt", + "DataTrn.txt", + "DataTst.txt", + "DataTstLbls.txt", + "DataVdn.txt", + "DataVdnLbls.txt" + ] + ], + "license":null, + "citation":"Bishop, C. M. and G. D. James (1993). 
Analysis of multiphase flows using dual-energy gamma densitometry and neural networks. Nuclear Instruments and Methods in Physics Research A327, 580-593", + "details":"The three phase oil data used initially for demonstrating the Generative Topographic mapping.", + "urls":[ + "http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/dataset_mirror/three_phase_oil_flow/" + ], + "size":712796 + }, + "robot_wireless":{ + "files":[ + [ + "uw-floor.txt" + ] + ], + "license":null, + "citation":"WiFi-SLAM using Gaussian Process Latent Variable Models by Brian Ferris, Dieter Fox and Neil Lawrence in IJCAI'07 Proceedings pages 2480-2485. Data used in A Unifying Probabilistic Perspective for Spectral Dimensionality Reduction: Insights and New Models by Neil D. Lawrence, JMLR 13 pg 1609--1638, 2012.", + "details":"Data created by Brian Ferris and Dieter Fox. Consists of WiFi access point strengths taken during a circuit of the Paul Allen building at the University of Washington.", + "urls":[ + "http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/dataset_mirror/robot_wireless/" + ], + "size":284390 + }, + "xw_pen":{ + "files":[ + [ + "xw_pen_15.csv" + ] + ], + "license":null, + "citation":"Michael E. Tipping and Neil D. Lawrence. Variational inference for Student-t models: Robust Bayesian interpolation and generalised component analysis. Neurocomputing, 69:123--141, 2005", + "details":"Accelerometer pen data used for robust regression by Tipping and Lawrence.", + "urls":[ + "http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/dataset_mirror/xw_pen/" + ], + "size":3410 + }, + "swiss_roll":{ + "files":[ + [ + "swiss_roll_data.mat" + ] + ], + "license":null, + "citation":"A Global Geometric Framework for Nonlinear Dimensionality Reduction, J. B. Tenenbaum, V. de Silva and J. C. Langford, Science 290 (5500): 2319-2323, 22 December 2000", + "details":"Swiss roll data made available by Tenenbaum, de Silva and Langford to demonstrate isomap, available from http://isomap.stanford.edu/datasets.html.", + "urls":[ + "http://isomap.stanford.edu/" + ], + "size":800256 + }, + "osu_run1":{ + "files":[ + [ + "run1TXT.ZIP" + ], + [ + "connections.txt" + ] + ], + "license":"Data is licensed under a Creative Commons Attribution-NonCommercial-ShareAlike 3.0 Unported License (http://creativecommons.org/licenses/by-nc-sa/3.0/).", + "citation":"The Open Motion Data Project by The Ohio State University Advanced Computing Center for the Arts and Design, http://accad.osu.edu/research/mocap/mocap_data.htm.", + "details":"Motion capture data of a stick man running from the Open Motion Data Project at Ohio State University.", + "urls":[ + "http://accad.osu.edu/research/mocap/data/", + "http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/dataset_mirror/stick/" + ], + "size":338103 + }, + "creep_rupture":{ + "files":[ + [ + "creeprupt.tar" + ] + ], + "license":null, + "citation":"Materials Algorithms Project Data Library: MAP_DATA_CREEP_RUPTURE. F. Brun and T. Yoshida.", + "details":"Provides 2066 creep rupture test results of steels (mainly of two kinds of steels: 2.25Cr and 9-12 wt% Cr ferritic steels). See http://www.msm.cam.ac.uk/map/data/materials/creeprupt-b.html.", + "urls":[ + "http://www.msm.cam.ac.uk/map/data/tar/" + ], + "size":602797 + }, + "olivetti_faces":{ + "files":[ + [ + "att_faces.zip" + ], + [ + "olivettifaces.mat" + ] + ], + "license":null, + "citation":"Ferdinando Samaria and Andy Harter, Parameterisation of a Stochastic Model for Human Face Identification. 
Proceedings of 2nd IEEE Workshop on Applications of Computer Vision, Sarasota FL, December 1994", + "details":"Olivetti Research Labs Face data base, acquired between December 1992 and December 1994 in the Olivetti Research Lab, Cambridge (which later became AT&T Laboratories, Cambridge). When using these images please give credit to AT&T Laboratories, Cambridge. ", + "urls":[ + "http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/dataset_mirror/olivetti_faces/", + "http://www.cs.nyu.edu/~roweis/data/" + ], + "size":8561331 + }, + "della_gatta":{ + "files":[ + [ + "DellaGattadata.mat" + ] + ], + "license":null, + "citation":"Direct targets of the TRP63 transcription factor revealed by a combination of gene expression profiling and reverse engineering. Giusy Della Gatta, Mukesh Bansal, Alberto Ambesi-Impiombato, Dario Antonini, Caterina Missero, and Diego di Bernardo, Genome Research 2008", + "details":"The full gene expression data set from della Gatta et al (http://www.ncbi.nlm.nih.gov/pmc/articles/PMC2413161/) processed by RMA.", + "urls":[ + "http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/dataset_mirror/della_gatta/" + ], + "size":3729650 + }, + "epomeo_gpx":{ + "files":[ + [ + "endomondo_1.gpx", + "endomondo_2.gpx", + "garmin_watch_via_endomondo.gpx", + "viewranger_phone.gpx", + "viewranger_tablet.gpx" + ] + ], + "license":null, + "citation":"", + "details":"Five different GPS traces of the same run up Mount Epomeo in Ischia. The traces are from different sources. endomondo_1 and endomondo_2 are traces from the mobile phone app Endomondo, with a split in the middle. garmin_watch_via_endomondo is the trace from a Garmin watch, with a segment missing about 4 kilometers in. viewranger_phone and viewranger_tablet are traces from a phone and a tablet through the viewranger app. The viewranger_phone data comes from the same mobile phone as the Endomondo data (i.e. there are 3 GPS devices, but one device recorded two traces).", + "urls":[ + "http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/dataset_mirror/epomeo_gpx/" + ], + "size":2031872 + } +} \ No newline at end of file diff --git a/GPy/util/datasets.py b/GPy/util/datasets.py index fb47646f..7fd1b6c5 100644 --- a/GPy/util/datasets.py +++ b/GPy/util/datasets.py @@ -1,69 +1,240 @@ import os -import pylab as pb import numpy as np import GPy -import scipy.sparse import scipy.io import cPickle as pickle -import urllib2 as url +import zipfile +import tarfile +import datetime +import json +ipython_available=True +try: + import IPython +except ImportError: + ipython_available=False + +import sys, urllib2 + +def reporthook(a,b,c): + # ',' at the end of the line is important! + #print "% 3.1f%% of %d bytes\r" % (min(100, float(a * b) / c * 100), c), + #you can also use sys.stdout.write + sys.stdout.write("\r% 3.1f%% of %d bytes" % (min(100, float(a * b) / c * 100), c)) + sys.stdout.flush() + +# Global variables data_path = os.path.join(os.path.dirname(__file__), 'datasets') default_seed = 10000 -neil_url = 'http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/' +overide_manual_authorize=False +neil_url = 'http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/dataset_mirror/' -def prompt_user(): +# Read data resources from json file. 
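+# Each entry of the json file maps a data set name to a dictionary with
+# parallel 'urls' and 'files' lists (one list of files per url), plus
+# 'citation', 'details', 'license' and 'size' fields, and an optional
+# 'suffices' list of query strings appended to the download urls.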
+path = os.path.join(os.path.dirname(__file__), 'data_resources.json') +json_data=open(path).read() +data_resources = json.loads(json_data) + + +def prompt_user(prompt): + """Ask user for agreeing to data set licenses.""" # raw_input returns the empty string for "enter" yes = set(['yes', 'y']) no = set(['no','n']) - choice = raw_input().lower() + try: + print(prompt) + choice = raw_input().lower() + # would like to test for exception here, but not sure if we can do that without importing IPython + except: + print('Stdin is not implemented.') + print('You need to set') + print('overide_manual_authorize=True') + print('to proceed with the download. Please set that variable and continue.') + raise + + if choice in yes: return True elif choice in no: return False else: - sys.stdout.write("Please respond with 'yes', 'y' or 'no', 'n'") - return prompt_user() - -def download_data(dataset_name=None): - """Helper function which contains the resource locations for each data set in one place""" - - # Note: there may be a better way of doing this. One of the pythonistas will need to take a look. Neil - data_resources = {'oil': {'urls' : [neil_url + 'oil_data/'], - 'files' : [['DataTrnLbls.txt', 'DataTrn.txt']], - 'citation' : 'Bishop, C. M. and G. D. James (1993). Analysis of multiphase flows using dual-energy gamma densitometry and neural networks. Nuclear Instruments and Methods in Physics Research A327, 580-593', - 'details' : """The three phase oil data used initially for demonstrating the Generative Topographic mapping.""", - 'agreement' : None}, - 'brendan_faces' : {'url' : ['http://www.cs.nyu.edu/~roweis/data/'], - 'files': [['frey_rawface.mat']], - 'citation' : 'Frey, B. J., Colmenarez, A and Huang, T. S. Mixtures of Local Linear Subspaces for Face Recognition. Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition 1998, 32-37, June 1998. Computer Society Press, Los Alamitos, CA.', - 'details' : """A video of Brendan Frey's face popularized as a benchmark for visualization by the Locally Linear Embedding.""", - 'agreement': None} - } + print("Your response was a " + choice) + print("Please respond with 'yes', 'y' or 'no', 'n'") + #return prompt_user() +def data_available(dataset_name=None): + """Check if the data set is available on the local machine already.""" + for file_list in data_resources[dataset_name]['files']: + for file in file_list: + if not os.path.exists(os.path.join(data_path, dataset_name, file)): + return False + return True + +def download_url(url, store_directory, save_name = None, messages = True, suffix=''): + """Download a file from a url and save it to disk.""" + i = url.rfind('/') + file = url[i+1:] + print file + dir_name = os.path.join(data_path, store_directory) + save_name = os.path.join(dir_name, file) + print "Downloading ", url, "->", os.path.join(store_directory, file) + if not os.path.exists(dir_name): + os.makedirs(dir_name) + try: + response = urllib2.urlopen(url+suffix) + except urllib2.URLError, e: + if not hasattr(e, "code"): + raise + response = e + if response.code > 399 and response.code<500: + raise ValueError('Tried url ' + url + suffix + ' and received client error ' + str(response.code)) + elif response.code > 499: + raise ValueError('Tried url ' + url + suffix + ' and received server error ' + str(response.code)) + # if we wanted to get more sophisticated maybe we should check the response code here again even for successes. 
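+    # Note: for HTTP-level failures urllib2 raises HTTPError, a URLError
+    # subclass that carries a .code attribute; the hasattr test above is
+    # what separates those from network-level errors such as DNS failures.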
+    with open(save_name, 'wb') as f:
+        f.write(response.read())
+
+    #urllib.urlretrieve(url+suffix, save_name, reporthook)
+
+def authorize_download(dataset_name=None):
+    """Check with the user that they are happy with the terms and conditions for the data set."""
     print('Acquiring resource: ' + dataset_name)
     # TODO, check resource is in dictionary!
+    print('')
     dr = data_resources[dataset_name]
     print('Details of data: ')
     print(dr['details'])
+    print('')
     if dr['citation']:
         print('Please cite:')
         print(dr['citation'])
-    if dr['agreement']:
-        print('You must also agree to the following:')
-        print(dr['agreement'])
-    print('Do you wish to proceed with the download? [yes/no]')
-    if prompt_user()==False:
-        return False
+        print('')
+    if dr['size']:
+        print('After downloading the data will take up ' + str(dr['size']) + ' bytes of space.')
+        print('')
+    print('Data will be stored in ' + os.path.join(data_path, dataset_name) + '.')
+    print('')
+    if overide_manual_authorize:
+        if dr['license']:
+            print('You have agreed to the following license:')
+            print(dr['license'])
+            print('')
+        return True
+    else:
+        if dr['license']:
+            print('You must also agree to the following license:')
+            print(dr['license'])
+            print('')
+        return prompt_user('Do you wish to proceed with the download? [yes/no]')

-    for url, files in zip(dr['urls'], dr['files']):
-        for file in files:
-            download_resource(url + file)
+def download_data(dataset_name=None):
+    """Check with the user that they are happy with the terms and conditions for the data set, then download it."""
+
+    dr = data_resources[dataset_name]
+    if not authorize_download(dataset_name):
+        raise Exception("Permission to download data set denied.")
+
+    if dr.has_key('suffices'):
+        for url, files, suffices in zip(dr['urls'], dr['files'], dr['suffices']):
+            for file, suffix in zip(files, suffices):
+                download_url(os.path.join(url,file), dataset_name, dataset_name, suffix=suffix)
+    else:
+        for url, files in zip(dr['urls'], dr['files']):
+            for file in files:
+                download_url(os.path.join(url,file), dataset_name, dataset_name)
     return True
-
-
+def data_details_return(data, data_set):
+    """Update the data component of the data dictionary with details drawn from the data_resources."""
+    data.update(data_resources[data_set])
+    return data
+
+
+def cmu_urls_files(subj_motions, messages = True):
+    '''
+    Find which resources are missing on the local disk for the requested CMU motion capture motions.
+    '''
+    dr = data_resources['cmu_mocap_full']
+    cmu_url = dr['urls'][0]
+
+    subjects_num = subj_motions[0]
+    motions_num = subj_motions[1]
+
+    resource = {'urls' : [], 'files' : []}
+    # Convert numbers to strings
+    subjects = []
+    motions = [list() for _ in range(len(subjects_num))]
+    for i in range(len(subjects_num)):
+        curSubj = str(int(subjects_num[i]))
+        if int(subjects_num[i]) < 10:
+            curSubj = '0' + curSubj
+        subjects.append(curSubj)
+        for j in range(len(motions_num[i])):
+            curMot = str(int(motions_num[i][j]))
+            if int(motions_num[i][j]) < 10:
+                curMot = '0' + curMot
+            motions[i].append(curMot)
+
+    all_skels = []
+
+    assert len(subjects) == len(motions)
+
+    all_motions = []
+
+    for i in range(len(subjects)):
+        skel_dir = os.path.join(data_path, 'cmu_mocap')
+        cur_skel_file = os.path.join(skel_dir, subjects[i] + '.asf')
+
+        url_required = False
+        file_download = []
+        if not os.path.exists(cur_skel_file):
+            # Current skel file doesn't exist.
+            if not os.path.isdir(skel_dir):
+                os.mkdir(skel_dir)
+            # Add skel file to list.
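+            # (the .asf skeleton, and below any missing .amc motion files,
+            # are queued for download from the subject's directory on the server).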
+ url_required = True + file_download.append(subjects[i] + '.asf') + for j in range(len(motions[i])): + file_name = subjects[i] + '_' + motions[i][j] + '.amc' + cur_motion_file = os.path.join(skel_dir, file_name) + if not os.path.exists(cur_motion_file): + url_required = True + file_download.append(subjects[i] + '_' + motions[i][j] + '.amc') + if url_required: + resource['urls'].append(cmu_url + '/' + subjects[i] + '/') + resource['files'].append(file_download) + return resource + +try: + import gpxpy + import gpxpy.gpx + gpxpy_available = True + +except ImportError: + gpxpy_available = False + +if gpxpy_available: + def epomeo_gpx(data_set='epomeo_gpx', sample_every=4): + if not data_available(data_set): + download_data(data_set) + files = ['endomondo_1', 'endomondo_2', 'garmin_watch_via_endomondo','viewranger_phone', 'viewranger_tablet'] + + X = [] + for file in files: + gpx_file = open(os.path.join(data_path, 'epomeo_gpx', file + '.gpx'), 'r') + + gpx = gpxpy.parse(gpx_file) + segment = gpx.tracks[0].segments[0] + points = [point for track in gpx.tracks for segment in track.segments for point in segment.points] + data = [[(point.time-datetime.datetime(2013,8,21)).total_seconds(), point.latitude, point.longitude, point.elevation] for point in points] + X.append(np.asarray(data)[::sample_every, :]) + gpx_file.close() + return data_details_return({'X' : X, 'info' : 'Data is an array containing time in seconds, latitude, longitude and elevation in that order.'}, data_set) + +del gpxpy_available + + # Some general utilities. def sample_class(f): @@ -72,25 +243,25 @@ def sample_class(f): c = np.where(c, 1, -1) return c -def download_resource(resource, save_name = None, save_file = True, messages = True): - if messages: - print "Downloading resource: " , resource, " ... ", - response = url.urlopen(resource) - # TODO: Some error checking... - # ... - html = response.read() - response.close() - if save_file: - # TODO: Check if already exists... - # ... - with open(save_name, "w") as text_file: - text_file.write("%s"%html) - if messages: - print "Done!" 
- return html +def boston_housing(data_set='boston_housing'): + if not data_available(data_set): + download_data(data_set) + all_data = np.genfromtxt(os.path.join(data_path, data_set, 'housing.data')) + X = all_data[:, 0:13] + Y = all_data[:, 13:14] + return data_details_return({'X' : X, 'Y': Y}, data_set) -def della_gatta_TRP63_gene_expression(gene_number=None): - mat_data = scipy.io.loadmat(os.path.join(data_path, 'DellaGattadata.mat')) +def brendan_faces(data_set='brendan_faces'): + if not data_available(data_set): + download_data(data_set) + mat_data = scipy.io.loadmat(os.path.join(data_path, data_set, 'frey_rawface.mat')) + Y = mat_data['ff'].T + return data_details_return({'Y': Y}, data_set) + +def della_gatta_TRP63_gene_expression(data_set='della_gatta', gene_number=None): + if not data_available(data_set): + download_data(data_set) + mat_data = scipy.io.loadmat(os.path.join(data_path, data_set, 'DellaGattadata.mat')) X = np.double(mat_data['timepoints']) if gene_number == None: Y = mat_data['exprs_tp53_RMA'] @@ -98,45 +269,62 @@ def della_gatta_TRP63_gene_expression(gene_number=None): Y = mat_data['exprs_tp53_RMA'][:, gene_number] if len(Y.shape) == 1: Y = Y[:, None] - return {'X': X, 'Y': Y, 'info': "The full gene expression data set from della Gatta et al (http://www.ncbi.nlm.nih.gov/pmc/articles/PMC2413161/) processed by RMA."} + return data_details_return({'X': X, 'Y': Y, 'gene_number' : gene_number}, data_set) -def simulation_BGPLVM(): - mat_data = scipy.io.loadmat(os.path.join(data_path, 'BGPLVMSimulation.mat')) - Y = np.array(mat_data['Y'], dtype=float) - S = np.array(mat_data['initS'], dtype=float) - mu = np.array(mat_data['initMu'], dtype=float) - return {'Y': Y, 'S': S, - 'mu' : mu, - 'info': "Simulated test dataset generated in MATLAB to compare BGPLVM between python and MATLAB"} # The data sets -def oil(): - #if download_data('oil'): - oil_train_file = os.path.join(data_path, 'oil', 'DataTrn.txt') - oil_trainlbls_file = os.path.join(data_path, 'oil', 'DataTrnLbls.txt') +def oil(data_set='three_phase_oil_flow'): + """The three phase oil data from Bishop and James (1993).""" + if not data_available(data_set): + download_data(data_set) + oil_train_file = os.path.join(data_path, data_set, 'DataTrn.txt') + oil_trainlbls_file = os.path.join(data_path, data_set, 'DataTrnLbls.txt') + oil_test_file = os.path.join(data_path, data_set, 'DataTst.txt') + oil_testlbls_file = os.path.join(data_path, data_set, 'DataTstLbls.txt') + oil_valid_file = os.path.join(data_path, data_set, 'DataVdn.txt') + oil_validlbls_file = os.path.join(data_path, data_set, 'DataVdnLbls.txt') fid = open(oil_train_file) X = np.fromfile(fid, sep='\t').reshape((-1, 12)) fid.close() + fid = open(oil_test_file) + Xtest = np.fromfile(fid, sep='\t').reshape((-1, 12)) + fid.close() + fid = open(oil_valid_file) + Xvalid = np.fromfile(fid, sep='\t').reshape((-1, 12)) + fid.close() fid = open(oil_trainlbls_file) Y = np.fromfile(fid, sep='\t').reshape((-1, 3)) * 2. - 1. fid.close() - return {'X': X, 'Y': Y, 'info': "The oil data from Bishop and James (1993)."} + fid = open(oil_testlbls_file) + Ytest = np.fromfile(fid, sep='\t').reshape((-1, 3)) * 2. - 1. + fid.close() + fid = open(oil_validlbls_file) + Yvalid = np.fromfile(fid, sep='\t').reshape((-1, 3)) * 2. - 1. 
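+    # The label files are 0/1 one-hot class indicators; the '* 2. - 1.' above
+    # rescales them to the -1/+1 coding used elsewhere in this module.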
+    fid.close()
+    return data_details_return({'X': X, 'Y': Y, 'Xtest': Xtest, 'Ytest': Ytest, 'Xvalid': Xvalid, 'Yvalid': Yvalid}, data_set)
     #else:
     # throw an error
-
-def oil_100(seed=default_seed):
+
+def oil_100(seed=default_seed, data_set = 'three_phase_oil_flow'):
     np.random.seed(seed=seed)
     data = oil()
     indices = np.random.permutation(1000)
     indices = indices[0:100]
     X = data['X'][indices, :]
     Y = data['Y'][indices, :]
-    return {'X': X, 'Y': Y, 'info': "Subsample of the oil data extracting 100 values randomly without replacement."}
+    return data_details_return({'X': X, 'Y': Y, 'info': "Subsample of the full oil data extracting 100 values randomly without replacement; seed used was " + str(seed)}, data_set)

-def pumadyn(seed=default_seed):
+def pumadyn(seed=default_seed, data_set='pumadyn-32nm'):
+    if not data_available(data_set):
+        download_data(data_set)
+        path = os.path.join(data_path, data_set)
+        tar = tarfile.open(os.path.join(path, 'pumadyn-32nm.tar.gz'))
+        print('Extracting file.')
+        tar.extractall(path=path)
+        tar.close()
     # Data is variance 1, no need to normalize.
-    data = np.loadtxt(os.path.join(data_path, 'pumadyn-32nm/Dataset.data.gz'))
+    data = np.loadtxt(os.path.join(data_path, data_set, 'pumadyn-32nm', 'Dataset.data.gz'))
     indices = np.random.permutation(data.shape[0])
     indicesTrain = indices[0:7168]
     indicesTest = indices[7168:-1]
@@ -146,20 +334,54 @@ def pumadyn(seed=default_seed):
     Y = data[indicesTrain, -1][:, None]
     Xtest = data[indicesTest, 0:-2]
     Ytest = data[indicesTest, -1][:, None]
-    return {'X': X, 'Y': Y, 'Xtest': Xtest, 'Ytest': Ytest, 'info': "The puma robot arm data with 32 inputs. This data is the non linear case with medium noise (pumadyn-32nm). For training 7,168 examples are sampled without replacement."}
+    return data_details_return({'X': X, 'Y': Y, 'Xtest': Xtest, 'Ytest': Ytest, 'seed': seed}, data_set)

+def robot_wireless(data_set='robot_wireless'):
+    # WiFi access point strengths on a tour around UW Paul Allen building.
+    if not data_available(data_set):
+        download_data(data_set)
+    file_name = os.path.join(data_path, data_set, 'uw-floor.txt')
+    all_time = np.genfromtxt(file_name, usecols=(0))
+    macaddress = np.genfromtxt(file_name, usecols=(1), dtype='string')
+    x = np.genfromtxt(file_name, usecols=(2))
+    y = np.genfromtxt(file_name, usecols=(3))
+    strength = np.genfromtxt(file_name, usecols=(4))
+    addresses = np.unique(macaddress)
+    times = np.unique(all_time)
+    addresses.sort()
+    times.sort()
+    allY = np.zeros((len(times), len(addresses)))
+    allX = np.zeros((len(times), 2))
+    allY[:]=-92.
+    strengths={}
+    for address, j in zip(addresses, range(len(addresses))):
+        ind = np.nonzero(address==macaddress)
+        temp_strengths=strength[ind]
+        temp_x=x[ind]
+        temp_y=y[ind]
+        temp_times = all_time[ind]
+        for time in temp_times:
+            vals = time==temp_times
+            if any(vals):
+                ind2 = np.nonzero(vals)
+                i = np.nonzero(time==times)
+                allY[i, j] = temp_strengths[ind2]
+                allX[i, 0] = temp_x[ind2]
+                allX[i, 1] = temp_y[ind2]
+    allY = (allY + 85.)/15.
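+    # Readings are in dB; missing access point measurements were filled in
+    # with -92. above, and this affine rescaling just brings the values into
+    # an O(1) range for modelling.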
-def brendan_faces():
-    mat_data = scipy.io.loadmat(os.path.join(data_path, 'frey_rawface.mat'))
-    Y = mat_data['ff'].T
-    return {'Y': Y, 'info': "Face data made available by Brendan Frey"}
+    X = allX[0:215, :]
+    Y = allY[0:215, :]
+    Xtest = allX[215:, :]
+    Ytest = allY[215:, :]
+    return data_details_return({'X': X, 'Y': Y, 'Xtest': Xtest, 'Ytest': Ytest, 'addresses' : addresses, 'times' : times}, data_set)
-
-
-def silhouette():
+def silhouette(data_set='ankur_pose_data'):
     # Ankur Agarwal and Bill Triggs' silhouette data.
-    mat_data = scipy.io.loadmat(os.path.join(data_path, 'mocap', 'ankur', 'ankurDataPoseSilhouette.mat'))
+    if not data_available(data_set):
+        download_data(data_set)
+    mat_data = scipy.io.loadmat(os.path.join(data_path, data_set, 'ankurDataPoseSilhouette.mat'))
     inMean = np.mean(mat_data['Y'])
     inScales = np.sqrt(np.var(mat_data['Y']))
     X = mat_data['Y'] - inMean
@@ -168,22 +390,35 @@ def silhouette():
     Xtest = Xtest / inScales
     Y = mat_data['Z']
     Ytest = mat_data['Z_test']
-    return {'X': X, 'Y': Y, 'Xtest': Xtest, 'Ytest': Ytest, 'info': "Artificial silhouette simulation data developed from Agarwal and Triggs (2004)."}
+    return data_details_return({'X': X, 'Y': Y, 'Xtest': Xtest, 'Ytest': Ytest}, data_set)

-def stick():
-    #if download_data('stick'):
-    Y, connect = GPy.util.mocap.load_text_data('run1', data_path)
-    Y = Y[0:-1:4, :]
-    lbls = 'connect'
-    return {'Y': Y, 'connect' : connect, 'info': "Stick man data from Ohio."}
-    # else:
-    # throw an error.
+def ripley_synth(data_set='ripley_prnn_data'):
+    if not data_available(data_set):
+        download_data(data_set)
+    train = np.genfromtxt(os.path.join(data_path, data_set, 'synth.tr'), skip_header=1)
+    X = train[:, 0:2]
+    y = train[:, 2:3]
+    test = np.genfromtxt(os.path.join(data_path, data_set, 'synth.te'), skip_header=1)
+    Xtest = test[:, 0:2]
+    ytest = test[:, 2:3]
+    return data_details_return({'X': X, 'y': y, 'Xtest': Xtest, 'ytest': ytest, 'info': 'Synthetic data generated by Ripley for a two class classification problem.'}, data_set)

-def swiss_roll_generated(N=1000, sigma=0.0):
+def osu_run1(data_set='osu_run1', sample_every=4):
+    path = os.path.join(data_path, data_set)
+    if not data_available(data_set):
+        download_data(data_set)
+        zip = zipfile.ZipFile(os.path.join(data_path, data_set, 'run1TXT.ZIP'), 'r')
+        for name in zip.namelist():
+            zip.extract(name, path)
+    Y, connect = GPy.util.mocap.load_text_data('Aug210106', path)
+    Y = Y[0:-1:sample_every, :]
+    return data_details_return({'Y': Y, 'connect' : connect}, data_set)
+
+def swiss_roll_generated(num_samples=1000, sigma=0.0):
     with open(os.path.join(data_path, 'swiss_roll.pickle')) as f:
         data = pickle.load(f)
     Na = data['Y'].shape[0]
-    perm = np.random.permutation(np.r_[:Na])[:N]
+    perm = np.random.permutation(np.r_[:Na])[:num_samples]
     Y = data['Y'][perm, :]
     t = data['t'][perm]
     c = data['colors'][perm, :]
@@ -194,27 +429,52 @@ def swiss_roll_generated(N=1000, sigma=0.0):
     return {'Y':Y, 't':t, 'colors':c}

 def swiss_roll_1000():
-    mat_data = scipy.io.loadmat(os.path.join(data_path, 'swiss_roll_data'))
-    Y = mat_data['X_data'][:, 0:1000].transpose()
-    return {'Y': Y, 'info': "Subsample of the swiss roll data extracting only the first 1000 values."}
+    return swiss_roll(num_samples=1000)

-def swiss_roll(N=3000):
-    mat_data = scipy.io.loadmat(os.path.join(data_path, 'swiss_roll_data.mat'))
-    Y = mat_data['X_data'][:, 0:N].transpose()
-    return {'Y': Y, 'X': mat_data['X_data'], 'info': "The first 3,000 points from the swiss roll data of Tennenbaum, de Silva and Langford (2001)."}
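+# A minimal usage sketch for the loaders in this module (a sketch only: it
+# assumes the files are already cached locally, or that the user answers
+# 'yes' to the license prompt on first download):
+#
+#   data = oil()                        # 'X', 'Y' plus test/validation splits
+#   faces = brendan_faces()             # 'Y' holds one flattened face per row
+#   roll = swiss_roll(num_samples=500)  # 'Y' is a (500, 3) array of points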
+def swiss_roll(num_samples=3000, data_set='swiss_roll'):
+    if not data_available(data_set):
+        download_data(data_set)
+    mat_data = scipy.io.loadmat(os.path.join(data_path, data_set, 'swiss_roll_data.mat'))
+    Y = mat_data['X_data'][:, 0:num_samples].transpose()
+    return data_details_return({'Y': Y, 'X': mat_data['X_data'], 'info': "The first " + str(num_samples) + " points from the swiss roll data of Tenenbaum, de Silva and Langford (2001)."}, data_set)

-def toy_rbf_1d(seed=default_seed):
+def isomap_faces(num_samples=698, data_set='isomap_face_data'):
+    if not data_available(data_set):
+        download_data(data_set)
+    mat_data = scipy.io.loadmat(os.path.join(data_path, data_set, 'face_data.mat'))
+    Y = mat_data['images'][:, 0:num_samples].transpose()
+    return data_details_return({'Y': Y, 'poses' : mat_data['poses'], 'lights': mat_data['lights'], 'info': "The first " + str(num_samples) + " points from the face data of Tenenbaum, de Silva and Langford (2001)."}, data_set)
+
+def simulation_BGPLVM():
+    mat_data = scipy.io.loadmat(os.path.join(data_path, 'BGPLVMSimulation.mat'))
+    Y = np.array(mat_data['Y'], dtype=float)
+    S = np.array(mat_data['initS'], dtype=float)
+    mu = np.array(mat_data['initMu'], dtype=float)
+    #return data_details_return({'S': S, 'Y': Y, 'mu': mu}, data_set)
+    return {'Y': Y, 'S': S,
+            'mu' : mu,
+            'info': "Simulated test dataset generated in MATLAB to compare BGPLVM between Python and MATLAB"}
+
+def toy_rbf_1d(seed=default_seed, num_samples=500):
+    """
+    Samples values of a function from an RBF covariance with very small noise for inputs uniformly distributed between -1 and 1.
+
+    :param seed: seed to use for random sampling.
+    :type seed: int
+    :param num_samples: number of samples to draw from the function (default 500).
+    :type num_samples: int
+
+    """
     np.random.seed(seed=seed)
-    numIn = 1
-    N = 500
-    X = np.random.uniform(low= -1.0, high=1.0, size=(N, numIn))
+    num_in = 1
+    X = np.random.uniform(low= -1.0, high=1.0, size=(num_samples, num_in))
     X.sort(axis=0)
-    rbf = GPy.kern.rbf(numIn, variance=1., lengthscale=np.array((0.25,)))
-    white = GPy.kern.white(numIn, variance=1e-2)
+    rbf = GPy.kern.rbf(num_in, variance=1., lengthscale=np.array((0.25,)))
+    white = GPy.kern.white(num_in, variance=1e-2)
     kernel = rbf + white
     K = kernel.K(X)
-    y = np.reshape(np.random.multivariate_normal(np.zeros(N), K), (N, 1))
-    return {'X':X, 'Y':y, 'info': "Samples 500 values of a function from an RBF covariance with very small noise for inputs uniformly distributed between -1 and 1."}
+    y = np.reshape(np.random.multivariate_normal(np.zeros(num_samples), K), (num_samples, 1))
+    return {'X':X, 'Y':y, 'info': "Sampled " + str(num_samples) + " values of a function from an RBF covariance with very small noise for inputs uniformly distributed between -1 and 1."}

 def toy_rbf_1d_50(seed=default_seed):
     np.random.seed(seed=seed)
@@ -224,7 +484,7 @@ def toy_rbf_1d_50(seed=default_seed):
     indices.sort(axis=0)
     X = data['X'][indices, :]
     Y = data['Y'][indices, :]
-    return {'X': X, 'Y': Y, 'info': "Subsamples the toy_rbf_sample with 50 values randomly taken from the original sample."}
+    return {'X': X, 'Y': Y, 'info': "Subsamples the toy_rbf_sample with 50 values randomly taken from the original sample.", 'seed' : seed}


 def toy_linear_1d_classification(seed=default_seed):
@@ -232,13 +492,131 @@ def toy_linear_1d_classification(seed=default_seed):
     x1 = np.random.normal(-3, 5, 20)
     x2 = np.random.normal(3, 5, 20)
     X = (np.r_[x1, x2])[:, None]
-    return {'X': X, 'Y': sample_class(2.*X), 'F': 2.*X}
+    return {'X':
X, 'Y': sample_class(2.*X), 'F': 2.*X, 'seed' : seed} + +def olivetti_faces(data_set='olivetti_faces'): + path = os.path.join(data_path, data_set) + if not data_available(data_set): + download_data(data_set) + zip = zipfile.ZipFile(os.path.join(path, 'att_faces.zip'), 'r') + for name in zip.namelist(): + zip.extract(name, path) + Y = [] + lbls = [] + for subject in range(40): + for image in range(10): + image_path = os.path.join(path, 'orl_faces', 's'+str(subject+1), str(image+1) + '.pgm') + Y.append(GPy.util.netpbmfile.imread(image_path).flatten()) + lbls.append(subject) + Y = np.asarray(Y) + lbls = np.asarray(lbls)[:, None] + return data_details_return({'Y': Y, 'lbls' : lbls, 'info': "ORL Faces processed to 64x64 images."}, data_set) + +def xw_pen(data_set='xw_pen'): + if not data_available(data_set): + download_data(data_set) + Y = np.loadtxt(os.path.join(data_path, data_set, 'xw_pen_15.csv'), delimiter=',') + X = np.arange(485)[:, None] + return data_details_return({'Y': Y, 'X': X, 'info': "Tilt data from a personalized digital assistant pen. Plot in original paper showed regression between time steps 175 and 275."}, data_set) + + +def download_rogers_girolami_data(data_set='rogers_girolami_data'): + if not data_available('rogers_girolami_data'): + download_data(data_set) + path = os.path.join(data_path, data_set) + tar_file = os.path.join(path, 'firstcoursemldata.tar.gz') + tar = tarfile.open(tar_file) + print('Extracting file.') + tar.extractall(path=path) + tar.close() + +def olympic_100m_men(data_set='rogers_girolami_data'): + download_rogers_girolami_data() + olympic_data = scipy.io.loadmat(os.path.join(data_path, data_set, 'data', 'olympics.mat'))['male100'] -def rogers_girolami_olympics(): - olympic_data = scipy.io.loadmat(os.path.join(data_path, 'olympics.mat'))['male100'] X = olympic_data[:, 0][:, None] Y = olympic_data[:, 1][:, None] - return {'X': X, 'Y': Y, 'info': "Olympic sprint times for 100 m men from 1896 until 2008. Example is from Rogers and Girolami's First Course in Machine Learning."} + return data_details_return({'X': X, 'Y': Y, 'info': "Olympic sprint times for 100 m men from 1896 until 2008. Example is from Rogers and Girolami's First Course in Machine Learning."}, data_set) + +def olympic_100m_women(data_set='rogers_girolami_data'): + download_rogers_girolami_data() + olympic_data = scipy.io.loadmat(os.path.join(data_path, data_set, 'data', 'olympics.mat'))['female100'] + + X = olympic_data[:, 0][:, None] + Y = olympic_data[:, 1][:, None] + return data_details_return({'X': X, 'Y': Y, 'info': "Olympic sprint times for 100 m women from 1896 until 2008. Example is from Rogers and Girolami's First Course in Machine Learning."}, data_set) + +def olympic_200m_women(data_set='rogers_girolami_data'): + download_rogers_girolami_data() + olympic_data = scipy.io.loadmat(os.path.join(data_path, data_set, 'data', 'olympics.mat'))['female200'] + + X = olympic_data[:, 0][:, None] + Y = olympic_data[:, 1][:, None] + return data_details_return({'X': X, 'Y': Y, 'info': "Olympic 200 m winning times for women from 1896 until 2008. 
Data is from Rogers and Girolami's First Course in Machine Learning."}, data_set)
+
+def olympic_200m_men(data_set='rogers_girolami_data'):
+    download_rogers_girolami_data()
+    olympic_data = scipy.io.loadmat(os.path.join(data_path, data_set, 'data', 'olympics.mat'))['male200']
+
+    X = olympic_data[:, 0][:, None]
+    Y = olympic_data[:, 1][:, None]
+    return data_details_return({'X': X, 'Y': Y, 'info': "Olympic 200 m winning times for men from 1896 until 2008. Data is from Rogers and Girolami's First Course in Machine Learning."}, data_set)
+
+def olympic_400m_women(data_set='rogers_girolami_data'):
+    download_rogers_girolami_data()
+    olympic_data = scipy.io.loadmat(os.path.join(data_path, data_set, 'data', 'olympics.mat'))['female400']
+
+    X = olympic_data[:, 0][:, None]
+    Y = olympic_data[:, 1][:, None]
+    return data_details_return({'X': X, 'Y': Y, 'info': "Olympic 400 m winning times for women until 2008. Data is from Rogers and Girolami's First Course in Machine Learning."}, data_set)
+
+def olympic_400m_men(data_set='rogers_girolami_data'):
+    download_rogers_girolami_data()
+    olympic_data = scipy.io.loadmat(os.path.join(data_path, data_set, 'data', 'olympics.mat'))['male400']
+
+    X = olympic_data[:, 0][:, None]
+    Y = olympic_data[:, 1][:, None]
+    return data_details_return({'X': X, 'Y': Y, 'info': "Olympic 400 m winning times for men until 2008. Data is from Rogers and Girolami's First Course in Machine Learning."}, data_set)
+
+def olympic_marathon_men(data_set='olympic_marathon_men'):
+    if not data_available(data_set):
+        download_data(data_set)
+    olympics = np.genfromtxt(os.path.join(data_path, data_set, 'olympicMarathonTimes.csv'), delimiter=',')
+    X = olympics[:, 0:1]
+    Y = olympics[:, 1:2]
+    return data_details_return({'X': X, 'Y': Y}, data_set)
+
+def olympic_sprints(data_set='rogers_girolami_data'):
+    """All Olympic sprint winning times for multiple output prediction."""
+    X = np.zeros((0, 2))
+    Y = np.zeros((0, 1))
+    for i, dataset in enumerate([olympic_100m_men,
+                                 olympic_100m_women,
+                                 olympic_200m_men,
+                                 olympic_200m_women,
+                                 olympic_400m_men,
+                                 olympic_400m_women]):
+        data = dataset()
+        year = data['X']
+        time = data['Y']
+        X = np.vstack((X, np.hstack((year, np.ones_like(year)*i))))
+        Y = np.vstack((Y, time))
+    data['X'] = X
+    data['Y'] = Y
+    data['info'] = "Olympic sprint event winning times for men and women to 2008. Data is from Rogers and Girolami's First Course in Machine Learning."
+    return data_details_return({
+        'X': X,
+        'Y': Y,
+        'info': "Olympic sprint event winning times for men and women to 2008. Data is from Rogers and Girolami's First Course in Machine Learning.",
+        'output_info': {
+            0:'100m Men',
+            1:'100m Women',
+            2:'200m Men',
+            3:'200m Women',
+            4:'400m Men',
+            5:'400m Women'}
+        }, data_set)
+
 # def movielens_small(partNo=1,seed=default_seed):
 #     np.random.seed(seed=seed)
@@ -272,14 +650,16 @@ def rogers_girolami_olympics():
 #     return {'Y':Y, 'lbls':lbls, 'Ytest':Ytest, 'lblstest':lblstest}

-
-
 def crescent_data(num_data=200, seed=default_seed):
-    """Data set formed from a mixture of four Gaussians. In each class two of the Gaussians are elongated at right angles to each other and offset to form an approximation to the crescent data that is popular in semi-supervised learning as a toy problem.
+    """
+Data set formed from a mixture of four Gaussians. In each class two of the Gaussians are elongated at right angles to each other and offset to form an approximation to the crescent data that is popular in semi-supervised learning as a toy problem.
+    :param num_data: number of data to be sampled (default is 200).
     :type num_data: int
     :param seed: random seed to be used for data generation.
-    :type seed: int"""
+    :type seed: int
+
+    """
     np.random.seed(seed=seed)
     sqrt2 = np.sqrt(2)
     # Rotation matrix
@@ -302,34 +682,41 @@ def crescent_data(num_data=200, seed=default_seed):
     for i in range(0, 4):
         num_data_part.append(round(((i + 1) * num_data) / 4.))
         num_data_part[i] -= num_data_total
-        # print num_data_part[i]
         part = np.random.normal(size=(num_data_part[i], 2))
         part = np.dot(np.dot(part, scales[i]), R) + means[i]
         Xparts.append(part)
         num_data_total += num_data_part[i]
     X = np.vstack((Xparts[0], Xparts[1], Xparts[2], Xparts[3]))
-
     Y = np.vstack((np.ones((num_data_part[0] + num_data_part[1], 1)), -np.ones((num_data_part[2] + num_data_part[3], 1))))
     return {'X':X, 'Y':Y, 'info': "Two separate classes of data formed approximately in the shape of two crescents."}

-def creep_data():
-    all_data = np.loadtxt(os.path.join(data_path, 'creep', 'taka'))
+def creep_data(data_set='creep_rupture'):
+    """Brun and Yoshida's metal creep rupture data."""
+    if not data_available(data_set):
+        download_data(data_set)
+        path = os.path.join(data_path, data_set)
+        tar_file = os.path.join(path, 'creeprupt.tar')
+        tar = tarfile.open(tar_file)
+        print('Extracting file.')
+        tar.extractall(path=path)
+        tar.close()
+    all_data = np.loadtxt(os.path.join(data_path, data_set, 'taka'))
     y = all_data[:, 1:2].copy()
     features = [0]
     features.extend(range(2, 31))
     X = all_data[:, features].copy()
-    return {'X': X, 'y' : y}
+    return data_details_return({'X': X, 'y': y}, data_set)

-def cmu_mocap_49_balance():
+def cmu_mocap_49_balance(data_set='cmu_mocap'):
     """Load CMU subject 49's one legged balancing motion that was used by Alvarez, Luengo and Lawrence at AISTATS 2009."""
     train_motions = ['18', '19']
     test_motions = ['20']
-    data = cmu_mocap('49', train_motions, test_motions, sample_every=4)
+    data = cmu_mocap('49', train_motions, test_motions, sample_every=4, data_set=data_set)
     data['info'] = "One legged balancing motions from CMU data base subject 49. As used in Alvarez, Luengo and Lawrence at AISTATS 2009. It consists of " + data['info']
     return data

-def cmu_mocap_35_walk_jog():
+def cmu_mocap_35_walk_jog(data_set='cmu_mocap'):
     """Load CMU subject 35's walking and jogging motions, the same data that was used by Taylor, Roweis and Hinton at NIPS 2007, but without their preprocessing. Also used by Lawrence at AISTATS 2007."""
     train_motions = ['01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12',
@@ -337,18 +724,23 @@ def cmu_mocap_35_walk_jog():
                      '20', '21', '22', '23', '24', '25', '26', '28', '30', '31', '32', '33', '34']
     test_motions = ['18', '29']
-    data = cmu_mocap('35', train_motions, test_motions, sample_every=4)
+    data = cmu_mocap('35', train_motions, test_motions, sample_every=4, data_set=data_set)
     data['info'] = "Walk and jog data from CMU data base subject 35. As used in Taylor, Roweis and Hinton at NIPS 2007, but without their pre-processing (i.e. as used by Lawrence at AISTATS 2007). It consists of " + data['info']
     return data

-def cmu_mocap(subject, train_motions, test_motions=[], sample_every=4):
+def cmu_mocap(subject, train_motions, test_motions=[], sample_every=4, data_set='cmu_mocap'):
     """Load a given subject's training and test motions from the CMU motion capture data."""
-    # Load in subject skeleton.
- subject_dir = os.path.join(data_path, 'mocap', 'cmu', subject) + subject_dir = os.path.join(data_path, data_set) # Make sure the data is downloaded. - mocap.fetch_cmu(([subject], [train_motions]), skel_store_dir=subject_dir,motion_store_dir=subject_dir) + all_motions = train_motions + test_motions + resource = cmu_urls_files(([subject], [all_motions])) + data_resources[data_set] = data_resources['cmu_mocap_full'].copy() + data_resources[data_set]['files'] = resource['files'] + data_resources[data_set]['urls'] = resource['urls'] + if resource['urls']: + download_data(data_set) skel = GPy.util.mocap.acclaim_skeleton(os.path.join(subject_dir, subject + '.asf')) @@ -413,4 +805,6 @@ def cmu_mocap(subject, train_motions, test_motions=[], sample_every=4): info += '.' if sample_every != 1: info += ' Data is sub-sampled to every ' + str(sample_every) + ' frames.' - return {'Y': Y, 'lbls' : lbls, 'Ytest': Ytest, 'lblstest' : lblstest, 'info': info, 'skel': skel} + return data_details_return({'Y': Y, 'lbls' : lbls, 'Ytest': Ytest, 'lblstest' : lblstest, 'info': info, 'skel': skel}, data_set) + + diff --git a/GPy/util/datasets/DellaGattadata.mat b/GPy/util/datasets/DellaGattadata.mat deleted file mode 100644 index fa70953e..00000000 Binary files a/GPy/util/datasets/DellaGattadata.mat and /dev/null differ diff --git a/GPy/util/datasets/connections.txt b/GPy/util/datasets/connections.txt new file mode 100644 index 00000000..e1886100 --- /dev/null +++ b/GPy/util/datasets/connections.txt @@ -0,0 +1,22 @@ +LFHD, RFHD +RFHD, RBHD +RBHD, LBHD +LBHD, LFHD +LELB, LWRB +LWRB, LFIN +LELB, LSHO +LSHO, RSHO +RSHO, STRN +LSHO, STRN +RSHO, RELB +RELB, RWRB +RWRB, RFIN +LSHO, LFWT +RSHO, RFWT +LFWT, RFWT +LFWT, LKNE +RFWT, RKNE +LKNE, LHEE +RKNE, RHEE +RMT5, RHEE +LMT5, LHEE diff --git a/GPy/util/datasets/crabs.dat b/GPy/util/datasets/crabs.dat deleted file mode 100644 index 74145ab0..00000000 --- a/GPy/util/datasets/crabs.dat +++ /dev/null @@ -1,201 +0,0 @@ -sp sex index FL RW CL CW BD -B M 1 8.1 6.7 16.1 19.0 7.0 -B M 2 8.8 7.7 18.1 20.8 7.4 -B M 3 9.2 7.8 19.0 22.4 7.7 -B M 4 9.6 7.9 20.1 23.1 8.2 -B M 5 9.8 8.0 20.3 23.0 8.2 -B M 6 10.8 9.0 23.0 26.5 9.8 -B M 7 11.1 9.9 23.8 27.1 9.8 -B M 8 11.6 9.1 24.5 28.4 10.4 -B M 9 11.8 9.6 24.2 27.8 9.7 -B M 10 11.8 10.5 25.2 29.3 10.3 -B M 11 12.2 10.8 27.3 31.6 10.9 -B M 12 12.3 11.0 26.8 31.5 11.4 -B M 13 12.6 10.0 27.7 31.7 11.4 -B M 14 12.8 10.2 27.2 31.8 10.9 -B M 15 12.8 10.9 27.4 31.5 11.0 -B M 16 12.9 11.0 26.8 30.9 11.4 -B M 17 13.1 10.6 28.2 32.3 11.0 -B M 18 13.1 10.9 28.3 32.4 11.2 -B M 19 13.3 11.1 27.8 32.3 11.3 -B M 20 13.9 11.1 29.2 33.3 12.1 -B M 21 14.3 11.6 31.3 35.5 12.7 -B M 22 14.6 11.3 31.9 36.4 13.7 -B M 23 15.0 10.9 31.4 36.4 13.2 -B M 24 15.0 11.5 32.4 37.0 13.4 -B M 25 15.0 11.9 32.5 37.2 13.6 -B M 26 15.2 12.1 32.3 36.7 13.6 -B M 27 15.4 11.8 33.0 37.5 13.6 -B M 28 15.7 12.6 35.8 40.3 14.5 -B M 29 15.9 12.7 34.0 38.9 14.2 -B M 30 16.1 11.6 33.8 39.0 14.4 -B M 31 16.1 12.8 34.9 40.7 15.7 -B M 32 16.2 13.3 36.0 41.7 15.4 -B M 33 16.3 12.7 35.6 40.9 14.9 -B M 34 16.4 13.0 35.7 41.8 15.2 -B M 35 16.6 13.5 38.1 43.4 14.9 -B M 36 16.8 12.8 36.2 41.8 14.9 -B M 37 16.9 13.2 37.3 42.7 15.6 -B M 38 17.1 12.6 36.4 42.0 15.1 -B M 39 17.1 12.7 36.7 41.9 15.6 -B M 40 17.2 13.5 37.6 43.9 16.1 -B M 41 17.7 13.6 38.7 44.5 16.0 -B M 42 17.9 14.1 39.7 44.6 16.8 -B M 43 18.0 13.7 39.2 44.4 16.2 -B M 44 18.8 15.8 42.1 49.0 17.8 -B M 45 19.3 13.5 41.6 47.4 17.8 -B M 46 19.3 13.8 40.9 46.5 16.8 -B M 47 19.7 15.3 41.9 48.5 17.8 -B M 48 19.8 14.2 43.2 49.7 18.6 -B M 
49 19.8 14.3 42.4 48.9 18.3 -B M 50 21.3 15.7 47.1 54.6 20.0 -B F 1 7.2 6.5 14.7 17.1 6.1 -B F 2 9.0 8.5 19.3 22.7 7.7 -B F 3 9.1 8.1 18.5 21.6 7.7 -B F 4 9.1 8.2 19.2 22.2 7.7 -B F 5 9.5 8.2 19.6 22.4 7.8 -B F 6 9.8 8.9 20.4 23.9 8.8 -B F 7 10.1 9.3 20.9 24.4 8.4 -B F 8 10.3 9.5 21.3 24.7 8.9 -B F 9 10.4 9.7 21.7 25.4 8.3 -B F 10 10.8 9.5 22.5 26.3 9.1 -B F 11 11.0 9.8 22.5 25.7 8.2 -B F 12 11.2 10.0 22.8 26.9 9.4 -B F 13 11.5 11.0 24.7 29.2 10.1 -B F 14 11.6 11.0 24.6 28.5 10.4 -B F 15 11.6 11.4 23.7 27.7 10.0 -B F 16 11.7 10.6 24.9 28.5 10.4 -B F 17 11.9 11.4 26.0 30.1 10.9 -B F 18 12.0 10.7 24.6 28.9 10.5 -B F 19 12.0 11.1 25.4 29.2 11.0 -B F 20 12.6 12.2 26.1 31.6 11.2 -B F 21 12.8 11.7 27.1 31.2 11.9 -B F 22 12.8 12.2 26.7 31.1 11.1 -B F 23 12.8 12.2 27.9 31.9 11.5 -B F 24 13.0 11.4 27.3 31.8 11.3 -B F 25 13.1 11.5 27.6 32.6 11.1 -B F 26 13.2 12.2 27.9 32.1 11.5 -B F 27 13.4 11.8 28.4 32.7 11.7 -B F 28 13.7 12.5 28.6 33.8 11.9 -B F 29 13.9 13.0 30.0 34.9 13.1 -B F 30 14.7 12.5 30.1 34.7 12.5 -B F 31 14.9 13.2 30.1 35.6 12.0 -B F 32 15.0 13.8 31.7 36.9 14.0 -B F 33 15.0 14.2 32.8 37.4 14.0 -B F 34 15.1 13.3 31.8 36.3 13.5 -B F 35 15.1 13.5 31.9 37.0 13.8 -B F 36 15.1 13.8 31.7 36.6 13.0 -B F 37 15.2 14.3 33.9 38.5 14.7 -B F 38 15.3 14.2 32.6 38.3 13.8 -B F 39 15.4 13.3 32.4 37.6 13.8 -B F 40 15.5 13.8 33.4 38.7 14.7 -B F 41 15.6 13.9 32.8 37.9 13.4 -B F 42 15.6 14.7 33.9 39.5 14.3 -B F 43 15.7 13.9 33.6 38.5 14.1 -B F 44 15.8 15.0 34.5 40.3 15.3 -B F 45 16.2 15.2 34.5 40.1 13.9 -B F 46 16.4 14.0 34.2 39.8 15.2 -B F 47 16.7 16.1 36.6 41.9 15.4 -B F 48 17.4 16.9 38.2 44.1 16.6 -B F 49 17.5 16.7 38.6 44.5 17.0 -B F 50 19.2 16.5 40.9 47.9 18.1 -O M 1 9.1 6.9 16.7 18.6 7.4 -O M 2 10.2 8.2 20.2 22.2 9.0 -O M 3 10.7 8.6 20.7 22.7 9.2 -O M 4 11.4 9.0 22.7 24.8 10.1 -O M 5 12.5 9.4 23.2 26.0 10.8 -O M 6 12.5 9.4 24.2 27.0 11.2 -O M 7 12.7 10.4 26.0 28.8 12.1 -O M 8 13.2 11.0 27.1 30.4 12.2 -O M 9 13.4 10.1 26.6 29.6 12.0 -O M 10 13.7 11.0 27.5 30.5 12.2 -O M 11 14.0 11.5 29.2 32.2 13.1 -O M 12 14.1 10.4 28.9 31.8 13.5 -O M 13 14.1 10.5 29.1 31.6 13.1 -O M 14 14.1 10.7 28.7 31.9 13.3 -O M 15 14.2 10.6 28.7 31.7 12.9 -O M 16 14.2 10.7 27.8 30.9 12.7 -O M 17 14.2 11.3 29.2 32.2 13.5 -O M 18 14.6 11.3 29.9 33.5 12.8 -O M 19 14.7 11.1 29.0 32.1 13.1 -O M 20 15.1 11.4 30.2 33.3 14.0 -O M 21 15.1 11.5 30.9 34.0 13.9 -O M 22 15.4 11.1 30.2 33.6 13.5 -O M 23 15.7 12.2 31.7 34.2 14.2 -O M 24 16.2 11.8 32.3 35.3 14.7 -O M 25 16.3 11.6 31.6 34.2 14.5 -O M 26 17.1 12.6 35.0 38.9 15.7 -O M 27 17.4 12.8 36.1 39.5 16.2 -O M 28 17.5 12.0 34.4 37.3 15.3 -O M 29 17.5 12.7 34.6 38.4 16.1 -O M 30 17.8 12.5 36.0 39.8 16.7 -O M 31 17.9 12.9 36.9 40.9 16.5 -O M 32 18.0 13.4 36.7 41.3 17.1 -O M 33 18.2 13.7 38.8 42.7 17.2 -O M 34 18.4 13.4 37.9 42.2 17.7 -O M 35 18.6 13.4 37.8 41.9 17.3 -O M 36 18.6 13.5 36.9 40.2 17.0 -O M 37 18.8 13.4 37.2 41.1 17.5 -O M 38 18.8 13.8 39.2 43.3 17.9 -O M 39 19.4 14.1 39.1 43.2 17.8 -O M 40 19.4 14.4 39.8 44.3 17.9 -O M 41 20.1 13.7 40.6 44.5 18.0 -O M 42 20.6 14.4 42.8 46.5 19.6 -O M 43 21.0 15.0 42.9 47.2 19.4 -O M 44 21.5 15.5 45.5 49.7 20.9 -O M 45 21.6 15.4 45.7 49.7 20.6 -O M 46 21.6 14.8 43.4 48.2 20.1 -O M 47 21.9 15.7 45.4 51.0 21.1 -O M 48 22.1 15.8 44.6 49.6 20.5 -O M 49 23.0 16.8 47.2 52.1 21.5 -O M 50 23.1 15.7 47.6 52.8 21.6 -O F 1 10.7 9.7 21.4 24.0 9.8 -O F 2 11.4 9.2 21.7 24.1 9.7 -O F 3 12.5 10.0 24.1 27.0 10.9 -O F 4 12.6 11.5 25.0 28.1 11.5 -O F 5 12.9 11.2 25.8 29.1 11.9 -O F 6 14.0 11.9 27.0 31.4 12.6 -O F 7 14.0 12.8 28.8 32.4 12.7 -O F 8 14.3 12.2 28.1 31.8 
12.5 -O F 9 14.7 13.2 29.6 33.4 12.9 -O F 10 14.9 13.0 30.0 33.7 13.3 -O F 11 15.0 12.3 30.1 33.3 14.0 -O F 12 15.6 13.5 31.2 35.1 14.1 -O F 13 15.6 14.0 31.6 35.3 13.8 -O F 14 15.6 14.1 31.0 34.5 13.8 -O F 15 15.7 13.6 31.0 34.8 13.8 -O F 16 16.1 13.6 31.6 36.0 14.0 -O F 17 16.1 13.7 31.4 36.1 13.9 -O F 18 16.2 14.0 31.6 35.6 13.7 -O F 19 16.7 14.3 32.3 37.0 14.7 -O F 20 17.1 14.5 33.1 37.2 14.6 -O F 21 17.5 14.3 34.5 39.6 15.6 -O F 22 17.5 14.4 34.5 39.0 16.0 -O F 23 17.5 14.7 33.3 37.6 14.6 -O F 24 17.6 14.0 34.0 38.6 15.5 -O F 25 18.0 14.9 34.7 39.5 15.7 -O F 26 18.0 16.3 37.9 43.0 17.2 -O F 27 18.3 15.7 35.1 40.5 16.1 -O F 28 18.4 15.5 35.6 40.0 15.9 -O F 29 18.4 15.7 36.5 41.6 16.4 -O F 30 18.5 14.6 37.0 42.0 16.6 -O F 31 18.6 14.5 34.7 39.4 15.0 -O F 32 18.8 15.2 35.8 40.5 16.6 -O F 33 18.9 16.7 36.3 41.7 15.3 -O F 34 19.1 16.0 37.8 42.3 16.8 -O F 35 19.1 16.3 37.9 42.6 17.2 -O F 36 19.7 16.7 39.9 43.6 18.2 -O F 37 19.9 16.6 39.4 43.9 17.9 -O F 38 19.9 17.9 40.1 46.4 17.9 -O F 39 20.0 16.7 40.4 45.1 17.7 -O F 40 20.1 17.2 39.8 44.1 18.6 -O F 41 20.3 16.0 39.4 44.1 18.0 -O F 42 20.5 17.5 40.0 45.5 19.2 -O F 43 20.6 17.5 41.5 46.2 19.2 -O F 44 20.9 16.5 39.9 44.7 17.5 -O F 45 21.3 18.4 43.8 48.4 20.0 -O F 46 21.4 18.0 41.2 46.2 18.7 -O F 47 21.7 17.1 41.7 47.2 19.6 -O F 48 21.9 17.2 42.6 47.4 19.5 -O F 49 22.5 17.2 43.0 48.7 19.8 -O F 50 23.1 20.2 46.2 52.5 21.1 diff --git a/GPy/util/datasets/data_resources_create.py b/GPy/util/datasets/data_resources_create.py new file mode 100644 index 00000000..8ae62a85 --- /dev/null +++ b/GPy/util/datasets/data_resources_create.py @@ -0,0 +1,127 @@ +import json + +neil_url = 'http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/dataset_mirror/' +sam_url = 'http://www.cs.nyu.edu/~roweis/data/' +cmu_url = 'http://mocap.cs.cmu.edu/subjects/' + +data_resources = {'ankur_pose_data' : {'urls' : [neil_url + 'ankur_pose_data/'], + 'files' : [['ankurDataPoseSilhouette.mat']], + 'license' : None, + 'citation' : """3D Human Pose from Silhouettes by Relevance Vector Regression (In CVPR'04). A. Agarwal and B. Triggs.""", + 'details' : """Artificially generated data of silhouettes given poses. Note that the data does not display a left/right ambiguity because across the entire data set one of the arms sticks out more the the other, disambiguating the pose as to which way the individual is facing."""}, + + 'boston_housing' : {'urls' : ['http://archive.ics.uci.edu/ml/machine-learning-databases/housing/'], + 'files' : [['Index', 'housing.data', 'housing.names']], + 'citation' : """Harrison, D. and Rubinfeld, D.L. 'Hedonic prices and the demand for clean air', J. Environ. Economics & Management, vol.5, 81-102, 1978.""", + 'details' : """The Boston Housing data relates house values in Boston to a range of input variables.""", + 'license' : None, + 'size' : 51276 + }, + 'brendan_faces' : {'urls' : [sam_url], + 'files': [['frey_rawface.mat']], + 'citation' : 'Frey, B. J., Colmenarez, A and Huang, T. S. Mixtures of Local Linear Subspaces for Face Recognition. Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition 1998, 32-37, June 1998. 
Computer Society Press, Los Alamitos, CA.', + 'details' : """A video of Brendan Frey's face popularized as a benchmark for visualization by the Locally Linear Embedding.""", + 'license': None, + 'size' : 1100584}, + 'cmu_mocap_full' : {'urls' : ['http://mocap.cs.cmu.edu'], + 'files' : [['allasfamc.zip']], + 'citation' : """Please include this in your acknowledgements: The data used in this project was obtained from mocap.cs.cmu.edu. +The database was created with funding from NSF EIA-0196217.""", + 'details' : """CMU Motion Capture data base. Captured by a Vicon motion capture system consisting of 12 infrared MX-40 cameras, each of which is capable of recording at 120 Hz with images of 4 megapixel resolution. Motions are captured in a working volume of approximately 3m x 8m. The capture subject wears 41 markers and a stylish black garment.""", + 'license' : """From http://mocap.cs.cmu.edu. This data is free for use in research projects. You may include this data in commercially-sold products, but you may not resell this data directly, even in converted form. If you publish results obtained using this data, we would appreciate it if you would send the citation to your published paper to jkh+mocap@cs.cmu.edu, and also would add this text to your acknowledgments section: The data used in this project was obtained from mocap.cs.cmu.edu. The database was created with funding from NSF EIA-0196217.""", + 'size' : None}, + 'creep_rupture' : {'urls' : ['http://www.msm.cam.ac.uk/map/data/tar/'], + 'files' : [['creeprupt.tar']], + 'citation' : 'Materials Algorithms Project Data Library: MAP_DATA_CREEP_RUPTURE. F. Brun and T. Yoshida.', + 'details' : """Provides 2066 creep rupture test results of steels (mainly of two kinds of steels: 2.25Cr and 9-12 wt% Cr ferritic steels). See http://www.msm.cam.ac.uk/map/data/materials/creeprupt-b.html.""", + 'license' : None, + 'size' : 602797}, + 'della_gatta' : {'urls' : [neil_url + 'della_gatta/'], + 'files': [['DellaGattadata.mat']], + 'citation' : 'Direct targets of the TRP63 transcription factor revealed by a combination of gene expression profiling and reverse engineering. Giusy Della Gatta, Mukesh Bansal, Alberto Ambesi-Impiombato, Dario Antonini, Caterina Missero, and Diego di Bernardo, Genome Research 2008', + 'details': "The full gene expression data set from della Gatta et al (http://www.ncbi.nlm.nih.gov/pmc/articles/PMC2413161/) processed by RMA.", + 'license':None, + 'size':3729650}, + 'epomeo_gpx' : {'urls' : [neil_url + 'epomeo_gpx/'], + 'files': [['endomondo_1.gpx', 'endomondo_2.gpx', 'garmin_watch_via_endomondo.gpx','viewranger_phone.gpx','viewranger_tablet.gpx']], + 'citation' : '', + 'details': "Five different GPS traces of the same run up Mount Epomeo in Ischia. The traces are from different sources. endomondo_1 and endomondo_2 are traces from the mobile phone app Endomondo, with a split in the middle. garmin_watch_via_endomondo is the trace from a Garmin watch, with a segment missing about 4 kilometers in. viewranger_phone and viewranger_tablet are traces from a phone and a tablet through the viewranger app. The viewranger_phone data comes from the same mobile phone as the Endomondo data (i.e. there are 3 GPS devices, but one device recorded two traces).", + 'license':None, + 'size': 2031872}, + 'three_phase_oil_flow': {'urls' : [neil_url + 'three_phase_oil_flow/'], + 'files' : [['DataTrnLbls.txt', 'DataTrn.txt', 'DataTst.txt', 'DataTstLbls.txt', 'DataVdn.txt', 'DataVdnLbls.txt']], + 'citation' : 'Bishop, C. M. and G. D. James (1993). 
Analysis of multiphase flows using dual-energy gamma densitometry and neural networks. Nuclear Instruments and Methods in Physics Research A327, 580-593', + 'details' : """The three phase oil data used initially for demonstrating the Generative Topographic mapping.""", + 'license' : None, + 'size' : 712796}, + 'rogers_girolami_data' : {'urls' : ['https://www.dropbox.com/sh/7p6tu1t29idgliq/_XqlH_3nt9/'], + 'files' : [['firstcoursemldata.tar.gz']], + 'suffices' : [['?dl=1']], + 'citation' : 'A First Course in Machine Learning. Simon Rogers and Mark Girolami: Chapman & Hall/CRC, ISBN-13: 978-1439824146', + 'details' : """Data from the textbook 'A First Course in Machine Learning'. Available from http://www.dcs.gla.ac.uk/~srogers/firstcourseml/.""", + 'license' : None, + 'size' : 21949154}, + 'olivetti_faces' : {'urls' : [neil_url + 'olivetti_faces/', sam_url], + 'files' : [['att_faces.zip'], ['olivettifaces.mat']], + 'citation' : 'Ferdinando Samaria and Andy Harter, Parameterisation of a Stochastic Model for Human Face Identification. Proceedings of 2nd IEEE Workshop on Applications of Computer Vision, Sarasota FL, December 1994', + 'details' : """Olivetti Research Labs Face data base, acquired between December 1992 and December 1994 in the Olivetti Research Lab, Cambridge (which later became AT&T Laboratories, Cambridge). When using these images please give credit to AT&T Laboratories, Cambridge. """, + 'license': None, + 'size' : 8561331}, + 'olympic_marathon_men' : {'urls' : [neil_url + 'olympic_marathon_men/'], + 'files' : [['olympicMarathonTimes.csv']], + 'citation' : None, + 'details' : """Olympic mens' marathon gold medal winning times from 1896 to 2012. Time given in pace (minutes per kilometer). Data is originally downloaded and collated from Wikipedia, we are not responsible for errors in the data""", + 'license': None, + 'size' : 584}, + 'osu_run1' : {'urls': ['http://accad.osu.edu/research/mocap/data/', neil_url + 'stick/'], + 'files': [['run1TXT.ZIP'],['connections.txt']], + 'details' : "Motion capture data of a stick man running from the Open Motion Data Project at Ohio State University.", + 'citation' : 'The Open Motion Data Project by The Ohio State University Advanced Computing Center for the Arts and Design, http://accad.osu.edu/research/mocap/mocap_data.htm.', + 'license' : 'Data is licensed under a Creative Commons Attribution-NonCommercial-ShareAlike 3.0 Unported License (http://creativecommons.org/licenses/by-nc-sa/3.0/).', + 'size': 338103}, + 'osu_accad' : {'urls': ['http://accad.osu.edu/research/mocap/data/', neil_url + 'stick/'], + 'files': [['swagger1TXT.ZIP','handspring1TXT.ZIP','quickwalkTXT.ZIP','run1TXT.ZIP','sprintTXT.ZIP','dogwalkTXT.ZIP','camper_04TXT.ZIP','dance_KB3_TXT.ZIP','per20_TXT.ZIP','perTWO07_TXT.ZIP','perTWO13_TXT.ZIP','perTWO14_TXT.ZIP','perTWO15_TXT.ZIP','perTWO16_TXT.ZIP'],['connections.txt']], + 'details' : "Motion capture data of different motions from the Open Motion Data Project at Ohio State University.", + 'citation' : 'The Open Motion Data Project by The Ohio State University Advanced Computing Center for the Arts and Design, http://accad.osu.edu/research/mocap/mocap_data.htm.', + 'license' : 'Data is licensed under a Creative Commons Attribution-NonCommercial-ShareAlike 3.0 Unported License (http://creativecommons.org/licenses/by-nc-sa/3.0/).', + 'size': 15922790}, + 'pumadyn-32nm' : {'urls' : ['ftp://ftp.cs.toronto.edu/pub/neuron/delve/data/tarfiles/pumadyn-family/'], + 'files' : [['pumadyn-32nm.tar.gz']], + 'details' : """Pumadyn non 
linear 32 input data set with moderate noise. See http://www.cs.utoronto.ca/~delve/data/pumadyn/desc.html for details.""", + 'citation' : """Created by Zoubin Ghahramani using the Matlab Robotics Toolbox of Peter Corke. Corke, P. I. (1996). A Robotics Toolbox for MATLAB. IEEE Robotics and Automation Magazine, 3 (1): 24-32.""", + 'license' : """Data is made available by the Delve system at the University of Toronto""", + 'size' : 5861646}, + 'robot_wireless' : {'urls' : [neil_url + 'robot_wireless/'], + 'files' : [['uw-floor.txt']], + 'citation' : """WiFi-SLAM using Gaussian Process Latent Variable Models by Brian Ferris, Dieter Fox and Neil Lawrence in IJCAI'07 Proceedings pages 2480-2485. Data used in A Unifying Probabilistic Perspective for Spectral Dimensionality Reduction: Insights and New Models by Neil D. Lawrence, JMLR 13 pg 1609--1638, 2012.""", + 'details' : """Data created by Brian Ferris and Dieter Fox. Consists of WiFi access point strengths taken during a circuit of the Paul Allen building at the University of Washington.""", + 'license' : None, + 'size' : 284390}, + 'swiss_roll' : {'urls' : ['http://isomap.stanford.edu/'], + 'files' : [['swiss_roll_data.mat']], + 'details' : """Swiss roll data made available by Tenenbaum, de Silva and Langford to demonstrate isomap, available from http://isomap.stanford.edu/datasets.html.""", + 'citation' : 'A Global Geometric Framework for Nonlinear Dimensionality Reduction, J. B. Tenenbaum, V. de Silva and J. C. Langford, Science 290 (5500): 2319-2323, 22 December 2000', + 'license' : None, + 'size' : 800256}, + 'ripley_prnn_data' : {'urls' : ['http://www.stats.ox.ac.uk/pub/PRNN/'], + 'files' : [['Cushings.dat', 'README', 'crabs.dat', 'fglass.dat', 'fglass.grp', 'pima.te', 'pima.tr', 'pima.tr2', 'synth.te', 'synth.tr', 'viruses.dat', 'virus3.dat']], + 'details' : """Data sets from Brian Ripley's Pattern Recognition and Neural Networks""", + 'citation': """Pattern Recognition and Neural Networks by B.D. Ripley (1996) Cambridge University Press ISBN 0 521 46986 7""", + 'license' : None, + 'size' : 93565}, + 'isomap_face_data' : {'urls' : [neil_url + 'isomap_face_data/'], + 'files' : [['face_data.mat']], + 'details' : """Face data made available by Tenenbaum, de Silva and Langford to demonstrate isomap, available from http://isomap.stanford.edu/datasets.html.""", + 'citation' : 'A Global Geometric Framework for Nonlinear Dimensionality Reduction, J. B. Tenenbaum, V. de Silva and J. C. Langford, Science 290 (5500): 2319-2323, 22 December 2000', + 'license' : None, + 'size' : 24229368}, + 'xw_pen' : {'urls' : [neil_url + 'xw_pen/'], + 'files' : [['xw_pen_15.csv']], + 'details' : """Accelerometer pen data used for robust regression by Tipping and Lawrence.""", + 'citation' : 'Michael E. Tipping and Neil D. Lawrence. Variational inference for Student-t models: Robust Bayesian interpolation and generalised component analysis. 
Neurocomputing, 69:123--141, 2005',
+                 'license' : None,
+                 'size' : 3410}
+    }
+
+with open('data_resources.json', 'w') as file:
+    json.dump(data_resources, file)
diff --git a/GPy/util/datasets/mocap/ankur/ankurDataPoseSilhouette.mat b/GPy/util/datasets/mocap/ankur/ankurDataPoseSilhouette.mat
deleted file mode 100644
index 350618df..00000000
Binary files a/GPy/util/datasets/mocap/ankur/ankurDataPoseSilhouette.mat and /dev/null differ
diff --git a/GPy/util/datasets/oil/DataTrn.txt b/GPy/util/datasets/oil/DataTrn.txt
deleted file mode 100644
index bb427fc4..00000000
--- a/GPy/util/datasets/oil/DataTrn.txt
+++ /dev/null
@@ -1,1000 +0,0 @@
[1000 deleted lines of 12-column floating-point data omitted: the oil flow training set]
7.4290000e-01 1.3250000e-01 9.0450000e-01 8.1700000e-02 8.1100000e-01 4.6200000e-02 8.3910000e-01 1.7840000e-01 7.4060000e-01 1.3520000e-01 - 1.3350000e-01 9.6470000e-01 4.8200000e-02 1.0511000e+00 1.4080000e-01 6.7850000e-01 3.6300000e-02 1.0681000e+00 7.9700000e-02 7.3080000e-01 1.6410000e-01 9.1790000e-01 - 4.5800000e-02 9.0440000e-01 3.9320000e-01 8.6140000e-01 5.3220000e-01 8.4310000e-01 1.6753000e+00 6.2300000e-02 3.5000000e-02 1.9577000e+00 -3.2000000e-03 3.0000000e-03 - 1.0390000e+00 7.5700000e-02 7.3070000e-01 1.0098000e+00 5.9820000e-01 1.3770000e+00 7.3630000e-01 1.0043000e+00 6.7140000e-01 1.0315000e+00 8.4570000e-01 8.7750000e-01 - 5.7640000e-01 3.0320000e-01 9.5770000e-01 4.1200000e-01 9.6730000e-01 6.1530000e-01 9.3520000e-01 4.3920000e-01 9.7520000e-01 6.3140000e-01 7.5720000e-01 6.4800000e-01 - 1.1407000e+00 -4.5500000e-02 9.4170000e-01 7.7760000e-01 7.0750000e-01 5.7600000e-01 8.1990000e-01 9.2280000e-01 6.9800000e-01 5.3910000e-01 9.4800000e-01 7.6810000e-01 - -2.9000000e-02 6.7330000e-01 3.2020000e-01 6.3150000e-01 4.0430000e-01 6.6850000e-01 -7.3700000e-02 1.8137000e+00 1.2290000e-01 1.8512000e+00 4.1700000e-02 -5.2000000e-02 - 3.2650000e-01 4.8940000e-01 6.2270000e-01 5.1300000e-01 7.1860000e-01 5.4170000e-01 1.7655000e+00 -3.9300000e-02 -4.8400000e-02 2.0535000e+00 9.8000000e-03 -1.1900000e-02 - 1.1171000e+00 -8.6000000e-03 7.3020000e-01 3.6160000e-01 6.1300000e-01 2.1390000e-01 6.6460000e-01 4.4100000e-01 6.1130000e-01 1.9870000e-01 7.0620000e-01 3.8990000e-01 - 1.1419000e+00 -4.7000000e-02 6.5740000e-01 7.7730000e-01 5.4790000e-01 4.3820000e-01 7.2440000e-01 6.9540000e-01 5.7100000e-01 3.8230000e-01 7.0530000e-01 7.2030000e-01 - 4.9650000e-01 1.7380000e-01 7.8850000e-01 2.0350000e-01 9.6620000e-01 1.3570000e-01 1.8353000e+00 -1.1540000e-01 -1.5600000e-02 2.0217000e+00 -1.4500000e-02 1.6500000e-02 - 4.5440000e-01 3.7660000e-01 8.2700000e-01 4.5200000e-01 7.1250000e-01 7.8680000e-01 6.9320000e-01 6.1570000e-01 7.6690000e-01 7.4230000e-01 5.6730000e-01 7.6140000e-01 - 4.0550000e-01 3.6540000e-01 7.8230000e-01 3.9330000e-01 7.7590000e-01 5.9080000e-01 7.1250000e-01 4.7980000e-01 9.0260000e-01 4.5380000e-01 7.4340000e-01 4.4320000e-01 - 5.7280000e-01 2.9890000e-01 8.3060000e-01 3.6440000e-01 9.3040000e-01 3.9020000e-01 1.6960000e+00 4.1000000e-02 -8.4300000e-02 2.1010000e+00 -7.1300000e-02 8.5800000e-02 - 3.3760000e-01 7.7750000e-01 3.4640000e-01 1.3621000e+00 2.2320000e-01 9.4730000e-01 2.1150000e-01 1.5213000e+00 2.5510000e-01 8.7050000e-01 2.4350000e-01 1.4863000e+00 - 1.0906000e+00 1.8600000e-02 6.0900000e-01 1.1224000e+00 4.9010000e-01 5.5310000e-01 5.4310000e-01 1.2013000e+00 5.0420000e-01 5.0400000e-01 5.1070000e-01 1.2371000e+00 - 1.1143000e+00 -1.0800000e-02 5.2380000e-01 4.2600000e-01 3.5340000e-01 4.0490000e-01 5.2560000e-01 4.2350000e-01 4.2890000e-01 2.9810000e-01 5.3920000e-01 4.0560000e-01 - 4.6470000e-01 9.6100000e-02 8.4500000e-01 1.0900000e-02 9.3760000e-01 3.9800000e-02 7.9870000e-01 6.4000000e-02 8.0260000e-01 2.1850000e-01 7.8180000e-01 8.8800000e-02 - 1.7210000e-01 2.1830000e-01 2.8190000e-01 3.2800000e-01 3.5570000e-01 3.3640000e-01 2.3380000e-01 3.8470000e-01 3.2660000e-01 3.7750000e-01 2.2950000e-01 3.9090000e-01 - 1.8600000e-02 2.3300000e-01 8.2800000e-02 2.9810000e-01 6.3800000e-02 3.7890000e-01 -1.6600000e-02 4.1840000e-01 1.6100000e-01 2.7070000e-01 8.5800000e-02 2.9880000e-01 - 4.7820000e-01 4.6410000e-01 8.9460000e-01 3.4350000e-01 8.5730000e-01 5.3570000e-01 1.8757000e+00 -1.6910000e-01 -6.9000000e-03 2.0144000e+00 2.0900000e-02 
-2.3100000e-02 - 3.3090000e-01 4.2500000e-01 4.9280000e-01 6.9810000e-01 6.0510000e-01 7.4040000e-01 6.1550000e-01 5.5280000e-01 6.3820000e-01 7.2050000e-01 5.2330000e-01 6.6050000e-01 - 2.5360000e-01 1.0800000e-01 5.5540000e-01 1.2270000e-01 7.1700000e-01 7.0600000e-02 1.6969000e+00 4.1700000e-02 -1.4400000e-02 1.7600000e-02 -1.0160000e-01 1.2160000e-01 - 3.3900000e-01 3.0200000e-01 5.1530000e-01 4.9420000e-01 5.9360000e-01 5.5450000e-01 5.3140000e-01 4.7040000e-01 6.2250000e-01 5.3700000e-01 5.0930000e-01 4.9720000e-01 - 1.1679000e+00 -7.6700000e-02 1.1519000e+00 5.6570000e-01 8.2340000e-01 1.0579000e+00 1.0611000e+00 6.7780000e-01 6.2920000e-01 1.0533000e+00 1.0503000e+00 6.9420000e-01 - 4.1280000e-01 6.8860000e-01 7.7200000e-01 6.3330000e-01 8.1910000e-01 7.2760000e-01 1.6969000e+00 4.1700000e-02 -1.4000000e-03 1.9991000e+00 -2.7700000e-02 1.7638000e+00 - 2.4010000e-01 8.4560000e-01 2.8010000e-01 1.4405000e+00 3.1560000e-01 1.6452000e+00 4.9830000e-01 1.1820000e+00 4.5110000e-01 1.5130000e+00 4.3490000e-01 1.2603000e+00 - 3.1900000e-02 2.3270000e-01 1.3290000e-01 4.5030000e-01 1.8200000e-01 5.3820000e-01 1.3200000e-01 1.5797000e+00 6.8800000e-02 -8.4500000e-02 -1.0800000e-02 1.3900000e-02 - 4.0390000e-01 3.5730000e-01 6.5850000e-01 5.3340000e-01 6.3880000e-01 7.3200000e-01 6.2930000e-01 5.6030000e-01 6.8030000e-01 7.0470000e-01 5.6670000e-01 6.3580000e-01 - 1.0277000e+00 8.7600000e-02 5.3600000e-01 1.1746000e+00 3.2940000e-01 7.8920000e-01 3.7240000e-01 1.3661000e+00 2.9530000e-01 7.9050000e-01 4.6780000e-01 1.2532000e+00 - 6.9500000e-02 4.0760000e-01 1.7080000e-01 5.6360000e-01 1.3060000e-01 7.2050000e-01 -1.2900000e-02 7.8610000e-01 1.4150000e-01 7.2030000e-01 6.8500000e-02 6.8670000e-01 - 5.5780000e-01 4.9560000e-01 8.4140000e-01 5.3720000e-01 9.2650000e-01 5.6950000e-01 1.7200000e+00 1.9600000e-02 7.7400000e-02 1.9062000e+00 -9.7200000e-02 1.8503000e+00 - 2.0430000e-01 1.5280000e-01 3.9410000e-01 1.4830000e-01 4.3140000e-01 1.8990000e-01 4.2440000e-01 1.1360000e-01 5.0530000e-01 1.1300000e-01 4.4280000e-01 8.9500000e-02 - 3.1150000e-01 6.2060000e-01 5.1960000e-01 9.3490000e-01 4.1600000e-01 1.2756000e+00 4.7730000e-01 9.8060000e-01 5.7550000e-01 1.1033000e+00 3.6500000e-01 1.1162000e+00 - 1.0367000e+00 8.2200000e-02 8.8680000e-01 8.5280000e-01 7.2600000e-01 4.4640000e-01 1.0599000e+00 6.5390000e-01 6.6810000e-01 4.7690000e-01 9.6390000e-01 7.6060000e-01 - 1.0698000e+00 4.0800000e-02 4.5070000e-01 2.5530000e-01 3.5200000e-01 2.3470000e-01 4.4030000e-01 2.7150000e-01 3.8980000e-01 1.7760000e-01 4.8800000e-01 2.1270000e-01 - 9.3980000e-01 9.4900000e-02 1.5149000e+00 9.6200000e-02 1.6572000e+00 1.8440000e-01 1.5923000e+00 4.3000000e-03 1.7853000e+00 7.1500000e-02 1.4903000e+00 1.2330000e-01 - 2.9160000e-01 8.1540000e-01 2.4960000e-01 7.5570000e-01 7.5700000e-02 7.3020000e-01 2.2930000e-01 7.7970000e-01 1.2230000e-01 6.5400000e-01 1.6390000e-01 8.5900000e-01 - 1.0270000e-01 1.0161000e+00 1.5560000e-01 1.5667000e+00 1.1830000e-01 9.7860000e-01 1.3650000e-01 1.5895000e+00 1.1330000e-01 9.5590000e-01 1.2480000e-01 1.6054000e+00 - 1.1035000e+00 3.5000000e-03 5.6190000e-01 1.9300000e-01 4.2700000e-01 1.9610000e-01 5.0090000e-01 2.6240000e-01 4.6150000e-01 1.3910000e-01 5.8090000e-01 1.7120000e-01 - 1.0000000e-03 9.4400000e-01 3.0680000e-01 9.5550000e-01 4.6420000e-01 9.0270000e-01 -1.1520000e-01 1.8659000e+00 -8.0800000e-02 2.0988000e+00 -8.5000000e-03 1.0200000e-02 - 4.4550000e-01 6.6060000e-01 2.3770000e-01 6.7920000e-01 2.4480000e-01 4.7130000e-01 2.5680000e-01 6.5230000e-01 3.0880000e-01 
3.7940000e-01 2.0700000e-01 7.1390000e-01 - 3.4310000e-01 3.8720000e-01 5.2620000e-01 6.1810000e-01 5.9250000e-01 7.1690000e-01 6.1760000e-01 5.1520000e-01 6.1260000e-01 7.1070000e-01 4.9960000e-01 6.5540000e-01 - 3.3900000e-02 6.7090000e-01 4.0340000e-01 6.0580000e-01 4.8490000e-01 6.5300000e-01 1.6960000e+00 4.9400000e-02 -7.4600000e-02 2.0886000e+00 -6.1000000e-03 7.9000000e-03 - -3.3400000e-02 1.1367000e+00 8.8600000e-02 1.3305000e+00 2.9220000e-01 1.2353000e+00 1.8300000e-02 1.7128000e+00 -8.1600000e-02 2.1002000e+00 3.5000000e-03 1.7263000e+00 - 1.1589000e+00 -6.4600000e-02 4.9030000e-01 1.2574000e+00 5.8720000e-01 4.2860000e-01 5.7960000e-01 1.1499000e+00 5.2930000e-01 4.6810000e-01 5.7130000e-01 1.1585000e+00 - 1.0614000e+00 5.4600000e-02 4.6240000e-01 4.1380000e-01 4.5380000e-01 2.3950000e-01 6.2610000e-01 2.1940000e-01 4.3830000e-01 2.3730000e-01 5.5590000e-01 2.9800000e-01 - -6.8700000e-02 1.1770000e+00 3.0700000e-01 1.0942000e+00 3.0660000e-01 1.2395000e+00 8.4200000e-02 1.6366000e+00 -2.1000000e-03 1.9998000e+00 -3.1400000e-02 1.7687000e+00 - 4.3580000e-01 5.6870000e-01 7.7500000e-01 5.4020000e-01 9.5380000e-01 4.6870000e-01 1.7507000e+00 -2.5600000e-02 1.5630000e-01 1.8170000e+00 6.0300000e-02 -7.1600000e-02 - 3.4930000e-01 3.8790000e-01 4.4680000e-01 7.2770000e-01 5.0660000e-01 8.3430000e-01 6.3640000e-01 5.0050000e-01 6.5280000e-01 6.8120000e-01 4.9850000e-01 6.6940000e-01 - 3.4510000e-01 3.3040000e-01 4.1100000e-01 6.6780000e-01 4.9580000e-01 7.3280000e-01 4.9740000e-01 5.7120000e-01 4.8490000e-01 7.6110000e-01 3.3850000e-01 7.5430000e-01 - 6.6280000e-01 4.2500000e-01 2.7480000e-01 2.0460000e-01 1.5550000e-01 2.6840000e-01 2.4390000e-01 2.4130000e-01 2.9850000e-01 8.7000000e-02 2.8190000e-01 1.9620000e-01 - 5.3810000e-01 2.0080000e-01 9.1850000e-01 2.2810000e-01 1.0664000e+00 2.3080000e-01 9.0620000e-01 2.3680000e-01 9.8140000e-01 3.5350000e-01 8.8560000e-01 2.6790000e-01 - 1.1129000e+00 -9.1000000e-03 8.9640000e-01 8.3690000e-01 6.8710000e-01 7.3480000e-01 8.8730000e-01 8.5290000e-01 7.3080000e-01 6.1210000e-01 8.8800000e-01 8.4950000e-01 - 9.4080000e-01 1.5720000e-01 1.1803000e+00 3.3680000e-01 1.2962000e+00 3.4250000e-01 1.6847000e+00 5.9100000e-02 2.0016000e+00 2.3000000e-03 4.3200000e-02 1.6859000e+00 - 1.5810000e-01 3.7130000e-01 3.2740000e-01 4.8370000e-01 2.7310000e-01 6.7400000e-01 4.1870000e-01 3.7310000e-01 3.7210000e-01 5.6500000e-01 3.4010000e-01 4.6730000e-01 - 3.3930000e-01 1.3880000e-01 2.2350000e-01 3.1400000e-02 7.3200000e-02 1.6480000e-01 2.1430000e-01 4.1100000e-02 1.4660000e-01 7.9500000e-02 1.8400000e-01 7.6100000e-02 - 2.4490000e-01 1.4850000e-01 1.7480000e-01 3.9900000e-02 7.9000000e-02 1.2290000e-01 1.6390000e-01 5.2500000e-02 1.9080000e-01 -1.5500000e-02 7.4400000e-02 1.6310000e-01 - 3.5390000e-01 7.2240000e-01 4.4080000e-01 1.2744000e+00 6.7960000e-01 1.2379000e+00 4.9890000e-01 1.1996000e+00 6.0230000e-01 1.3623000e+00 4.9240000e-01 1.2095000e+00 - 4.3410000e-01 4.4680000e-01 6.1160000e-01 7.8350000e-01 7.3140000e-01 8.5030000e-01 6.3830000e-01 7.5110000e-01 8.3380000e-01 7.5230000e-01 6.8460000e-01 6.9880000e-01 - 5.3800000e-01 4.6970000e-01 7.4070000e-01 8.6280000e-01 7.8850000e-01 1.0488000e+00 7.1240000e-01 8.9690000e-01 9.3900000e-01 9.0410000e-01 7.3190000e-01 8.7540000e-01 - 2.9200000e-01 8.9000000e-02 4.2560000e-01 1.7470000e-01 4.7040000e-01 2.2010000e-01 4.9250000e-01 9.6800000e-02 4.6910000e-01 2.2920000e-01 4.4260000e-01 1.5390000e-01 - 4.1220000e-01 8.9400000e-02 5.8340000e-01 2.5710000e-01 6.9540000e-01 2.6380000e-01 1.6774000e+00 
6.4800000e-02 -2.7000000e-03 2.0000000e-03 1.8700000e-02 -2.0200000e-02 - -5.0000000e-04 1.1208000e+00 6.3500000e-02 1.6651000e+00 -5.9800000e-02 1.5827000e+00 -7.1000000e-03 1.7469000e+00 8.8400000e-02 1.3325000e+00 1.0320000e-01 1.6179000e+00 - 2.2500000e-02 4.1860000e-01 2.2500000e-02 2.1580000e-01 4.4800000e-02 1.5560000e-01 6.0300000e-02 1.7440000e-01 6.0300000e-02 1.3890000e-01 -2.9400000e-02 2.8250000e-01 - 1.8560000e-01 3.4670000e-01 6.8200000e-02 2.1940000e-01 4.4800000e-02 2.0590000e-01 9.4900000e-02 1.8820000e-01 3.8400000e-02 2.0950000e-01 5.2700000e-02 2.3880000e-01 - 5.0030000e-01 2.5360000e-01 1.0019000e+00 1.3100000e-01 1.0543000e+00 2.5310000e-01 8.9350000e-01 2.6080000e-01 1.0176000e+00 3.1720000e-01 9.8220000e-01 1.5660000e-01 - 4.2050000e-01 6.8780000e-01 8.4300000e-02 1.6795000e+00 1.5850000e-01 9.9730000e-01 2.6640000e-01 1.4648000e+00 1.6630000e-01 9.5230000e-01 3.4390000e-01 1.3753000e+00 - 7.2300000e-01 2.8970000e-01 1.0617000e+00 5.3990000e-01 1.2918000e+00 5.1180000e-01 1.1537000e+00 4.3070000e-01 1.2086000e+00 6.4270000e-01 9.5710000e-01 6.6200000e-01 - 2.1240000e-01 6.1690000e-01 4.3370000e-01 8.4920000e-01 4.2790000e-01 1.0418000e+00 4.3110000e-01 8.4750000e-01 4.5540000e-01 1.0337000e+00 3.4600000e-01 9.5280000e-01 - 1.0870000e+00 2.0500000e-02 3.8140000e-01 9.1840000e-01 3.3110000e-01 6.0460000e-01 3.6960000e-01 9.3460000e-01 3.3960000e-01 5.6830000e-01 4.5330000e-01 8.3280000e-01 - 1.1225000e+00 -2.0000000e-02 7.0350000e-01 1.0289000e+00 5.0010000e-01 6.3630000e-01 7.5240000e-01 9.6420000e-01 5.6880000e-01 5.1810000e-01 7.3090000e-01 9.9580000e-01 - 1.1227000e+00 -2.1200000e-02 7.3450000e-01 1.1260000e-01 5.6200000e-01 1.2540000e-01 7.8460000e-01 5.5800000e-02 5.2390000e-01 1.5480000e-01 7.2560000e-01 1.2180000e-01 - 2.3070000e-01 7.5210000e-01 4.2150000e-01 8.9680000e-01 4.8950000e-01 9.5830000e-01 1.7062000e+00 3.0100000e-02 2.5100000e-02 1.9761000e+00 5.1300000e-02 -5.9900000e-02 - 3.1680000e-01 1.1690000e-01 7.9080000e-01 -7.2600000e-02 8.9580000e-01 -5.6900000e-02 1.5964000e+00 1.5940000e-01 -4.3500000e-02 5.0300000e-02 -4.9900000e-02 5.9700000e-02 - 1.0986000e+00 1.0600000e-02 1.1702000e+00 5.6800000e-01 8.7320000e-01 2.2720000e-01 1.1428000e+00 5.9460000e-01 8.2190000e-01 2.5280000e-01 1.1957000e+00 5.3190000e-01 - 1.8230000e-01 6.2650000e-01 1.9400000e-01 1.0896000e+00 2.8390000e-01 1.1665000e+00 2.7850000e-01 9.8650000e-01 3.3790000e-01 1.1246000e+00 2.2030000e-01 1.0591000e+00 - 4.1500000e-02 7.6180000e-01 2.8640000e-01 8.3240000e-01 4.1100000e-01 8.2520000e-01 4.5000000e-03 1.7251000e+00 -2.5900000e-02 2.0277000e+00 -2.9600000e-02 3.4200000e-02 - 5.9090000e-01 4.0020000e-01 8.2410000e-01 7.4340000e-01 1.1374000e+00 6.1160000e-01 9.5190000e-01 5.8970000e-01 1.0614000e+00 7.3070000e-01 8.9740000e-01 6.6020000e-01 - 7.9070000e-01 2.0810000e-01 1.2880000e+00 2.7270000e-01 1.4417000e+00 3.3420000e-01 1.2602000e+00 3.0460000e-01 1.5019000e+00 2.9530000e-01 1.2352000e+00 3.3370000e-01 - 5.2210000e-01 5.6330000e-01 9.3890000e-01 7.3400000e-01 9.7140000e-01 9.5610000e-01 9.0440000e-01 7.7300000e-01 1.1164000e+00 8.0800000e-01 9.2040000e-01 7.5410000e-01 - 1.1437000e+00 -4.7200000e-02 5.8480000e-01 9.8450000e-01 6.1450000e-01 3.8590000e-01 6.8350000e-01 8.5770000e-01 5.8570000e-01 3.9280000e-01 6.8920000e-01 8.5990000e-01 - 1.6520000e-01 5.2200000e-01 7.5640000e-01 1.9590000e-01 7.3520000e-01 3.6190000e-01 1.7983000e+00 -8.0000000e-02 -6.1100000e-02 2.0729000e+00 -3.1700000e-02 3.5900000e-02 - 2.4290000e-01 8.1920000e-01 5.5220000e-01 8.2540000e-01 
6.5840000e-01 8.3970000e-01 1.7585000e+00 -2.9700000e-02 1.1170000e-01 1.8712000e+00 2.0100000e-02 1.7049000e+00 - 1.0275000e+00 9.2900000e-02 7.7700000e-01 9.6470000e-01 7.0270000e-01 6.2310000e-01 8.0900000e-01 9.2810000e-01 7.1080000e-01 5.5480000e-01 9.4170000e-01 7.7370000e-01 - 2.0980000e-01 4.6970000e-01 2.1480000e-01 8.6910000e-01 2.7090000e-01 9.6030000e-01 2.0650000e-01 8.8090000e-01 1.7510000e-01 1.0908000e+00 1.6340000e-01 9.3650000e-01 - 8.1820000e-01 2.8600000e-01 1.2287000e+00 2.1210000e-01 1.2751000e+00 2.9800000e-01 1.7315000e+00 2.5000000e-03 2.0293000e+00 -3.2400000e-02 2.8700000e-02 1.6999000e+00 - 1.4160000e-01 9.6640000e-01 7.0300000e-02 8.9760000e-01 1.4430000e-01 5.9910000e-01 4.9800000e-02 9.2500000e-01 1.0070000e-01 6.3280000e-01 6.1300000e-02 9.0410000e-01 - 6.4200000e-01 1.7120000e-01 8.5870000e-01 4.4320000e-01 9.4610000e-01 5.4240000e-01 1.0491000e+00 2.2200000e-01 1.1326000e+00 3.4750000e-01 9.1130000e-01 3.8150000e-01 - 6.4480000e-01 4.6500000e-01 3.6590000e-01 8.7610000e-01 2.2140000e-01 7.0230000e-01 3.7020000e-01 8.7500000e-01 2.9810000e-01 5.8700000e-01 3.1300000e-01 9.3740000e-01 - 5.0130000e-01 9.7100000e-02 7.7490000e-01 1.5990000e-01 8.7160000e-01 1.9460000e-01 7.9860000e-01 1.3040000e-01 8.8740000e-01 1.9740000e-01 6.9910000e-01 2.4990000e-01 - 3.9080000e-01 4.8420000e-01 5.1480000e-01 8.7380000e-01 6.9250000e-01 8.6810000e-01 5.9850000e-01 7.7000000e-01 5.3250000e-01 1.0790000e+00 6.0690000e-01 7.5540000e-01 - 1.0384000e+00 8.2400000e-02 1.5784000e+00 1.8190000e-01 1.2421000e+00 5.3290000e-01 1.7806000e+00 -5.6800000e-02 1.2397000e+00 3.6900000e-01 1.6569000e+00 8.2400000e-02 - -9.1000000e-03 6.4770000e-01 3.5100000e-02 9.0810000e-01 3.8100000e-02 1.0318000e+00 6.7800000e-02 1.6569000e+00 3.9500000e-02 1.9503000e+00 -2.4900000e-02 3.0900000e-02 - 6.4580000e-01 2.9650000e-01 9.2460000e-01 3.3790000e-01 1.0542000e+00 3.2630000e-01 1.7519000e+00 -2.2300000e-02 2.0708000e+00 -7.8600000e-02 -1.0100000e-02 1.0300000e-02 - 4.9510000e-01 5.8990000e-01 9.0690000e-01 7.7000000e-01 8.3670000e-01 1.1014000e+00 7.9520000e-01 9.0140000e-01 9.1850000e-01 1.0396000e+00 7.0240000e-01 1.0089000e+00 - 1.1363000e+00 -3.5300000e-02 3.8380000e-01 2.0790000e-01 3.3850000e-01 1.5360000e-01 4.5690000e-01 1.1990000e-01 2.9720000e-01 1.9290000e-01 3.5050000e-01 2.4410000e-01 - 1.6460000e-01 5.1480000e-01 4.5580000e-01 5.4050000e-01 5.8550000e-01 5.2580000e-01 1.7270000e+00 4.4000000e-03 -1.2500000e-02 2.0190000e+00 3.9600000e-02 -4.6300000e-02 - 2.0280000e-01 3.8660000e-01 2.6590000e-01 6.7120000e-01 3.3680000e-01 7.2500000e-01 3.4950000e-01 5.7040000e-01 3.3580000e-01 7.3720000e-01 2.5090000e-01 6.8340000e-01 - 6.9300000e-01 1.4070000e-01 1.0153000e+00 3.0200000e-01 1.1141000e+00 3.9900000e-01 1.0706000e+00 2.3700000e-01 1.1068000e+00 4.2510000e-01 1.0414000e+00 2.7510000e-01 - 5.7530000e-01 5.2570000e-01 8.6930000e-01 5.9060000e-01 8.9920000e-01 6.9830000e-01 1.6700000e+00 7.5100000e-02 1.9422000e+00 6.4000000e-02 -2.0000000e-02 1.7531000e+00 - 8.4000000e-03 5.5670000e-01 2.4800000e-02 8.5600000e-01 1.3770000e-01 8.4140000e-01 8.5300000e-02 7.8660000e-01 -6.2100000e-02 1.0986000e+00 9.4000000e-02 7.7360000e-01 - 1.1220000e-01 2.2120000e-01 2.7670000e-01 2.2580000e-01 4.0650000e-01 1.4610000e-01 2.2360000e-01 2.8910000e-01 2.2710000e-01 3.7090000e-01 2.5220000e-01 2.5230000e-01 - 2.7650000e-01 8.2220000e-01 6.0550000e-01 9.0750000e-01 7.4800000e-01 8.8320000e-01 1.7497000e+00 -2.0600000e-02 4.5400000e-02 1.9501000e+00 4.8000000e-02 1.6723000e+00 - 3.0000000e-01 
2.8430000e-01 5.0360000e-01 4.0810000e-01 6.1040000e-01 4.2290000e-01 4.8100000e-01 4.3800000e-01 5.6000000e-01 5.0120000e-01 4.7910000e-01 4.3640000e-01 - 1.1427000e+00 -4.4000000e-02 8.8700000e-01 1.0080000e-01 7.3550000e-01 2.9400000e-02 9.6600000e-01 8.2000000e-03 6.7650000e-01 7.8000000e-02 1.0125000e+00 -4.3600000e-02 - 2.7880000e-01 1.9470000e-01 4.1530000e-01 3.3300000e-01 4.4930000e-01 4.0330000e-01 3.6330000e-01 3.9060000e-01 4.5440000e-01 4.1060000e-01 3.1820000e-01 4.3970000e-01 - 4.4480000e-01 6.4220000e-01 8.1920000e-01 5.6340000e-01 8.8530000e-01 6.2920000e-01 1.7546000e+00 -2.4600000e-02 2.8700000e-02 1.9639000e+00 -1.7100000e-02 1.7546000e+00 - 1.1696000e+00 -7.9500000e-02 6.5950000e-01 3.3010000e-01 5.6470000e-01 2.0090000e-01 7.1080000e-01 2.6440000e-01 4.9900000e-01 2.6000000e-01 6.6600000e-01 3.2460000e-01 - 2.8000000e-02 4.7490000e-01 3.0820000e-01 5.1650000e-01 4.9950000e-01 4.2920000e-01 -6.6800000e-02 1.8095000e+00 -5.3100000e-02 6.3400000e-02 7.0200000e-02 -8.2500000e-02 - -2.0500000e-02 1.0804000e+00 1.9900000e-01 1.1713000e+00 2.0440000e-01 1.3011000e+00 -6.3000000e-02 1.8070000e+00 -1.0900000e-01 2.1311000e+00 9.1100000e-02 1.6254000e+00 - -1.9900000e-02 1.1294000e+00 6.4000000e-03 1.4753000e+00 1.1950000e-01 1.4794000e+00 -8.5700000e-02 1.8333000e+00 -5.2500000e-02 2.0615000e+00 8.6000000e-02 1.6302000e+00 - 7.7340000e-01 3.1080000e-01 1.0783000e+00 6.4180000e-01 1.2671000e+00 6.8870000e-01 1.2128000e+00 4.8440000e-01 1.2609000e+00 7.2410000e-01 1.1978000e+00 5.0270000e-01 - 4.7400000e-01 6.3790000e-01 2.7100000e-01 7.7200000e-01 2.1480000e-01 5.9210000e-01 2.1970000e-01 8.2940000e-01 2.1980000e-01 5.6640000e-01 2.3870000e-01 8.0730000e-01 - 1.4120000e-01 9.8140000e-01 1.2100000e-01 5.8910000e-01 5.2800000e-02 5.3810000e-01 1.4000000e-01 5.6030000e-01 1.4420000e-01 4.1710000e-01 1.0870000e-01 6.0310000e-01 - 8.0930000e-01 1.4470000e-01 1.0198000e+00 2.6900000e-01 1.2023000e+00 2.0140000e-01 1.6420000e+00 1.0330000e-01 2.1177000e+00 -1.3910000e-01 -8.4900000e-02 1.0150000e-01 - 6.0510000e-01 3.1290000e-01 1.0136000e+00 4.1450000e-01 1.0628000e+00 5.7650000e-01 9.3270000e-01 5.0890000e-01 1.0946000e+00 5.6870000e-01 9.3480000e-01 5.0660000e-01 - 1.0444000e+00 7.2500000e-02 7.8410000e-01 6.7010000e-01 5.8300000e-01 4.0920000e-01 7.3400000e-01 7.4070000e-01 4.9500000e-01 4.8860000e-01 7.5330000e-01 7.1480000e-01 - 6.8470000e-01 3.8970000e-01 9.8170000e-01 4.1010000e-01 1.0455000e+00 4.7340000e-01 1.7347000e+00 -3.0000000e-04 1.9866000e+00 1.3200000e-02 5.5500000e-02 1.6658000e+00 - 1.7400000e-02 8.3440000e-01 -7.0700000e-02 1.2515000e+00 4.4900000e-02 1.2502000e+00 -8.9100000e-02 1.8348000e+00 3.7700000e-02 1.9520000e+00 1.8500000e-02 -2.1000000e-02 - 2.6220000e-01 8.3930000e-01 1.8880000e-01 7.9980000e-01 2.0620000e-01 5.5590000e-01 2.3300000e-01 7.4650000e-01 1.0650000e-01 6.5580000e-01 1.1070000e-01 8.9620000e-01 - 6.6000000e-03 1.0970000e+00 4.7510000e-01 9.0950000e-01 5.5170000e-01 9.6400000e-01 1.7779000e+00 -5.2900000e-02 -2.3500000e-02 2.0283000e+00 5.5200000e-02 1.6669000e+00 - 4.7350000e-01 2.9500000e-02 6.1490000e-01 2.0030000e-01 7.8150000e-01 1.2880000e-01 6.3220000e-01 1.7940000e-01 6.8740000e-01 2.5750000e-01 6.0460000e-01 2.0990000e-01 - 6.4800000e-02 5.5140000e-01 3.7300000e-02 9.3920000e-01 1.5050000e-01 9.4010000e-01 2.3500000e-02 9.5640000e-01 1.4240000e-01 9.6960000e-01 3.1400000e-02 9.4410000e-01 - 1.2719000e+00 -1.9400000e-01 4.1950000e-01 1.0213000e+00 3.3790000e-01 6.5560000e-01 4.2000000e-01 1.0272000e+00 3.8940000e-01 5.6640000e-01 
4.4300000e-01 1.0011000e+00 - 3.0840000e-01 7.8970000e-01 1.5220000e-01 1.5769000e+00 7.1800000e-02 9.7820000e-01 2.2200000e-01 1.4985000e+00 4.4300000e-02 9.8100000e-01 9.3400000e-02 1.6466000e+00 - 5.8180000e-01 2.6610000e-01 8.6780000e-01 4.6880000e-01 9.5460000e-01 5.7280000e-01 7.3040000e-01 6.3340000e-01 8.5400000e-01 7.1340000e-01 8.6070000e-01 4.7920000e-01 - 5.9400000e-02 7.0900000e-01 4.5630000e-01 6.1080000e-01 5.6790000e-01 6.2200000e-01 1.7787000e+00 -5.6900000e-02 3.9700000e-02 1.9523000e+00 3.5900000e-02 -4.2700000e-02 - 5.9330000e-01 2.3040000e-01 9.5290000e-01 3.3190000e-01 1.1514000e+00 2.9910000e-01 9.8220000e-01 3.0420000e-01 1.1840000e+00 2.9010000e-01 9.7250000e-01 3.1200000e-01 - 8.7190000e-01 2.3120000e-01 4.3100000e-01 5.5070000e-01 3.1460000e-01 4.5670000e-01 4.2580000e-01 5.5630000e-01 3.7400000e-01 3.7000000e-01 3.3300000e-01 6.6460000e-01 - 1.3600000e-02 9.9610000e-01 2.4660000e-01 1.0775000e+00 3.3790000e-01 1.1120000e+00 5.6200000e-02 1.6623000e+00 -4.0800000e-02 2.0461000e+00 -1.2500000e-02 1.6100000e-02 - 1.2180000e+00 -1.3380000e-01 6.0160000e-01 1.1368000e+00 4.4340000e-01 7.7370000e-01 6.4930000e-01 1.0746000e+00 5.3330000e-01 6.2330000e-01 5.9160000e-01 1.1466000e+00 - 7.6360000e-01 1.1100000e-01 1.1721000e+00 4.0000000e-04 1.3075000e+00 -1.8100000e-02 1.7561000e+00 -3.1500000e-02 1.8769000e+00 1.4640000e-01 4.9100000e-02 -5.8200000e-02 - 4.7330000e-01 5.5330000e-01 7.8780000e-01 5.4790000e-01 9.1360000e-01 5.4190000e-01 1.8099000e+00 -8.9200000e-02 -7.8700000e-02 2.0888000e+00 2.5700000e-02 -3.1400000e-02 - 2.0050000e-01 5.8290000e-01 6.2130000e-01 4.5440000e-01 6.1850000e-01 6.0310000e-01 1.6748000e+00 6.3700000e-02 4.6900000e-02 1.9445000e+00 4.5400000e-02 -5.3300000e-02 - 2.8730000e-01 3.9040000e-01 5.5420000e-01 4.8620000e-01 6.0850000e-01 5.7710000e-01 6.2280000e-01 4.0630000e-01 6.2410000e-01 5.7690000e-01 5.5900000e-01 4.7890000e-01 - 1.0658000e+00 4.8900000e-02 9.7540000e-01 2.3390000e-01 7.4180000e-01 1.5050000e-01 8.2480000e-01 4.0870000e-01 6.8960000e-01 1.8740000e-01 8.4150000e-01 3.9020000e-01 - 2.7490000e-01 8.3220000e-01 7.5500000e-02 8.4700000e-01 1.1790000e-01 6.0810000e-01 2.0790000e-01 6.9380000e-01 1.1330000e-01 5.9860000e-01 1.8560000e-01 7.1870000e-01 - 1.1604000e+00 -7.0700000e-02 3.8730000e-01 4.7900000e-01 3.7020000e-01 3.1350000e-01 3.7310000e-01 4.9340000e-01 4.2040000e-01 2.4150000e-01 3.7680000e-01 4.8840000e-01 - 7.8500000e-01 3.0970000e-01 1.0392000e+00 6.8990000e-01 1.0074000e+00 9.1790000e-01 1.6067000e+00 1.4940000e-01 2.0236000e+00 -2.7500000e-02 7.7400000e-02 1.6430000e+00 - 4.7670000e-01 6.2000000e-01 8.1240000e-01 6.2010000e-01 8.2010000e-01 7.4760000e-01 1.7621000e+00 -3.2000000e-02 2.8500000e-02 1.9625000e+00 -2.4000000e-03 1.7352000e+00 - 7.0000000e-02 5.8040000e-01 3.6030000e-01 6.0510000e-01 5.0320000e-01 5.8040000e-01 1.8080000e+00 -9.0500000e-02 2.4500000e-02 1.9718000e+00 5.3800000e-02 -6.2900000e-02 - 2.3980000e-01 5.3810000e-01 4.2800000e-01 7.8270000e-01 4.5940000e-01 9.2490000e-01 4.5430000e-01 7.4980000e-01 4.8370000e-01 9.1780000e-01 3.5580000e-01 8.6880000e-01 - 6.7720000e-01 4.1530000e-01 8.7020000e-01 6.6730000e-01 1.0976000e+00 5.3710000e-01 1.6714000e+00 7.2300000e-02 1.9134000e+00 1.0310000e-01 4.3600000e-02 1.6821000e+00 - 5.7710000e-01 2.6900000e-01 9.6260000e-01 1.8650000e-01 9.7480000e-01 3.0900000e-01 1.7208000e+00 1.2000000e-02 1.9040000e+00 1.1570000e-01 4.1800000e-02 -5.0900000e-02 - 9.6780000e-01 1.2050000e-01 4.1690000e-01 7.3650000e-01 2.9570000e-01 5.8020000e-01 4.3170000e-01 
7.1450000e-01 3.1420000e-01 5.3550000e-01 3.2430000e-01 8.4960000e-01 - 3.0390000e-01 8.2500000e-02 5.0320000e-01 1.0010000e-01 6.3130000e-01 4.2000000e-02 5.4430000e-01 4.8100000e-02 6.1870000e-01 7.0400000e-02 5.8020000e-01 5.8000000e-03 - 1.0660000e+00 4.7400000e-02 8.3900000e-01 3.3010000e-01 6.5010000e-01 2.2950000e-01 8.4270000e-01 3.2390000e-01 6.7700000e-01 1.7150000e-01 7.6320000e-01 4.2130000e-01 - 4.4450000e-01 4.3280000e-01 7.3540000e-01 6.3060000e-01 7.3510000e-01 8.4230000e-01 7.1420000e-01 6.5970000e-01 8.0770000e-01 7.8140000e-01 6.8670000e-01 6.9330000e-01 - 1.1138000e+00 -1.2000000e-02 4.5090000e-01 1.2854000e+00 4.2550000e-01 1.1156000e+00 4.5810000e-01 1.2714000e+00 3.9180000e-01 1.0656000e+00 4.9000000e-01 1.2340000e+00 - 3.9290000e-01 1.1680000e-01 6.1460000e-01 2.2380000e-01 8.5540000e-01 8.0000000e-02 1.6777000e+00 6.2400000e-02 -6.7100000e-02 7.8300000e-02 3.8400000e-02 -4.4600000e-02 - 1.1749000e+00 -7.8200000e-02 5.3180000e-01 1.2082000e+00 4.0900000e-01 8.0320000e-01 6.1210000e-01 1.1060000e+00 4.8240000e-01 6.7670000e-01 5.0830000e-01 1.2340000e+00 - 1.1125000e+00 -1.9000000e-03 7.9530000e-01 4.1820000e-01 5.7720000e-01 3.3170000e-01 8.6660000e-01 3.3760000e-01 6.8220000e-01 1.8370000e-01 7.5940000e-01 4.6070000e-01 - 1.0643000e+00 4.8300000e-02 4.0310000e-01 4.7150000e-01 4.2170000e-01 2.6100000e-01 5.2850000e-01 3.1660000e-01 4.1230000e-01 2.5490000e-01 4.9660000e-01 3.5440000e-01 - 1.4620000e-01 1.7410000e-01 3.3550000e-01 3.2100000e-01 5.9010000e-01 1.6450000e-01 1.8199000e+00 -1.0750000e-01 4.4800000e-02 -5.0600000e-02 -1.5400000e-02 1.9000000e-02 - 1.0826000e+00 2.9000000e-02 5.7590000e-01 1.1436000e+00 3.5870000e-01 1.0011000e+00 3.9840000e-01 1.3519000e+00 3.6970000e-01 9.3110000e-01 4.6090000e-01 1.2805000e+00 - 1.7490000e-01 9.3370000e-01 4.9710000e-01 9.7990000e-01 5.1580000e-01 1.1065000e+00 1.8580000e+00 -1.4770000e-01 5.8800000e-02 1.9321000e+00 -9.4800000e-02 1.8403000e+00 - 2.3200000e-02 4.4720000e-01 2.8800000e-02 7.5780000e-01 1.6400000e-02 8.9300000e-01 -2.8400000e-02 1.7682000e+00 -4.3000000e-03 5.0000000e-03 1.6300000e-02 -1.9200000e-02 - -1.8900000e-02 9.0490000e-01 2.7330000e-01 9.2400000e-01 4.0540000e-01 9.1170000e-01 -1.6300000e-02 1.7494000e+00 8.6000000e-03 1.9898000e+00 1.6500000e-02 -1.9700000e-02 - 3.0170000e-01 1.8100000e-02 5.9040000e-01 5.2500000e-02 8.3120000e-01 -9.4900000e-02 1.7939000e+00 -7.1400000e-02 2.2000000e-03 -2.4000000e-03 1.7000000e-03 -1.6000000e-03 - 3.0390000e-01 3.1210000e-01 6.0460000e-01 3.2400000e-01 8.1460000e-01 2.1880000e-01 1.6989000e+00 4.0700000e-02 -9.3700000e-02 2.1047000e+00 -1.6100000e-02 2.0600000e-02 - 1.2490000e-01 9.8270000e-01 6.2600000e-02 1.6752000e+00 8.9000000e-02 9.5630000e-01 1.0850000e-01 1.6157000e+00 5.5300000e-02 9.6140000e-01 3.0700000e-02 1.7138000e+00 - 1.1557000e+00 -5.8600000e-02 3.9130000e-01 1.3615000e+00 4.1940000e-01 8.1760000e-01 4.4290000e-01 1.2958000e+00 4.2180000e-01 7.6740000e-01 4.7230000e-01 1.2706000e+00 - 2.4590000e-01 7.7840000e-01 5.4810000e-01 7.9020000e-01 6.9020000e-01 7.7150000e-01 1.7652000e+00 -3.9400000e-02 -1.5600000e-02 2.0200000e+00 4.0100000e-02 -4.9000000e-02 - 6.2200000e-01 4.9510000e-01 3.0170000e-01 7.1150000e-01 2.1430000e-01 5.8160000e-01 2.7240000e-01 7.4360000e-01 2.2400000e-01 5.5040000e-01 3.2880000e-01 6.8090000e-01 - 3.2490000e-01 2.3440000e-01 5.2910000e-01 3.4400000e-01 5.3900000e-01 4.6440000e-01 6.5310000e-01 1.9410000e-01 5.9350000e-01 4.1410000e-01 5.1120000e-01 3.6320000e-01 - 3.1860000e-01 6.1890000e-01 7.3530000e-01 5.0080000e-01 
7.8070000e-01 5.8740000e-01 1.7592000e+00 -3.6700000e-02 1.7000000e-02 1.9832000e+00 -3.5700000e-02 4.1600000e-02 - 8.1100000e-02 5.7300000e-01 -5.4000000e-03 1.0595000e+00 4.6200000e-02 1.1418000e+00 9.5100000e-02 9.3770000e-01 4.3300000e-02 1.1576000e+00 4.8700000e-02 9.9160000e-01 - 2.5940000e-01 3.8920000e-01 5.2200000e-01 4.5110000e-01 6.6000000e-01 4.2740000e-01 1.7167000e+00 1.8200000e-02 -2.3400000e-02 2.0239000e+00 2.9100000e-02 -3.2300000e-02 - 5.5160000e-01 4.3230000e-01 9.5370000e-01 3.2890000e-01 1.0199000e+00 3.9330000e-01 1.7653000e+00 -3.4100000e-02 2.0264000e+00 -2.9800000e-02 1.7800000e-02 -2.1500000e-02 - 1.1115000e+00 -6.9000000e-03 5.7010000e-01 1.1575000e+00 4.3810000e-01 1.5352000e+00 4.5600000e-01 1.2902000e+00 4.3600000e-01 1.3827000e+00 5.7610000e-01 1.1560000e+00 - 4.9610000e-01 1.8900000e-01 7.0770000e-01 3.7800000e-01 8.0670000e-01 4.3090000e-01 7.2110000e-01 3.6450000e-01 8.0280000e-01 4.5560000e-01 7.3520000e-01 3.4420000e-01 - 1.1331000e+00 -3.6400000e-02 4.3250000e-01 4.3070000e-01 3.7800000e-01 3.1090000e-01 4.3640000e-01 4.2850000e-01 3.3260000e-01 3.5050000e-01 4.7870000e-01 3.8150000e-01 - 4.6490000e-01 6.4390000e-01 7.7750000e-01 6.8260000e-01 9.9820000e-01 5.6380000e-01 1.7573000e+00 -2.2500000e-02 -1.5800000e-02 2.0113000e+00 4.9700000e-02 1.6713000e+00 - 5.7820000e-01 6.3000000e-03 9.6170000e-01 -6.0700000e-02 1.0348000e+00 4.3000000e-03 9.0530000e-01 7.6000000e-03 1.0513000e+00 2.4000000e-03 1.0077000e+00 -1.1580000e-01 - 7.9180000e-01 3.0770000e-01 5.1340000e-01 9.9190000e-01 2.7000000e-01 7.4730000e-01 4.5210000e-01 1.0581000e+00 2.5350000e-01 7.3520000e-01 4.2080000e-01 1.0961000e+00 - 5.9280000e-01 5.2220000e-01 1.0036000e+00 4.5790000e-01 1.0781000e+00 5.0700000e-01 1.6470000e+00 9.8600000e-02 2.1061000e+00 -1.1670000e-01 3.7100000e-02 1.6833000e+00 - 5.9600000e-02 7.6290000e-01 3.2390000e-01 8.1810000e-01 4.2590000e-01 8.4400000e-01 1.6702000e+00 7.1900000e-02 -2.6300000e-02 2.0354000e+00 9.0800000e-02 -1.0850000e-01 - 1.0498000e+00 6.4100000e-02 5.9190000e-01 2.1740000e-01 3.9790000e-01 2.7180000e-01 4.7880000e-01 3.4860000e-01 4.1190000e-01 2.3610000e-01 5.0730000e-01 3.1690000e-01 - 1.1555000e+00 -5.9900000e-02 1.1906000e+00 2.9810000e-01 9.9630000e-01 -1.4300000e-02 1.0789000e+00 4.2940000e-01 8.7090000e-01 1.0260000e-01 1.2618000e+00 2.1110000e-01 - 6.2150000e-01 2.1200000e-01 1.0451000e+00 2.4940000e-01 1.1394000e+00 3.4510000e-01 1.0849000e+00 1.9670000e-01 1.1759000e+00 3.2350000e-01 1.1392000e+00 1.3580000e-01 - 2.3490000e-01 1.4420000e-01 2.2650000e-01 3.9290000e-01 2.7360000e-01 4.2960000e-01 2.9310000e-01 3.1120000e-01 2.3630000e-01 4.8020000e-01 2.4540000e-01 3.7260000e-01 - 3.0190000e-01 8.0050000e-01 2.3990000e-01 1.4774000e+00 1.6810000e-01 1.1787000e+00 1.2560000e-01 1.6129000e+00 1.9980000e-01 1.0850000e+00 1.1930000e-01 1.6217000e+00 - 1.1476000e+00 -4.7700000e-02 6.6030000e-01 5.8890000e-01 4.9820000e-01 4.2220000e-01 6.5750000e-01 5.9070000e-01 5.0410000e-01 3.8740000e-01 5.8510000e-01 6.7890000e-01 - 1.1012000e+00 7.3000000e-03 8.4260000e-01 1.0420000e-01 6.8910000e-01 5.5400000e-02 9.4690000e-01 -1.9000000e-02 7.6820000e-01 -6.1700000e-02 8.7420000e-01 6.5800000e-02 - -4.2100000e-02 6.2060000e-01 -8.9000000e-03 9.0800000e-01 4.4400000e-02 9.6450000e-01 2.5600000e-02 8.6780000e-01 3.3900000e-02 9.9270000e-01 1.3810000e-01 7.3450000e-01 - 3.1730000e-01 1.4390000e-01 4.6520000e-01 2.6720000e-01 4.7180000e-01 3.7320000e-01 4.2240000e-01 3.1860000e-01 5.4220000e-01 2.9980000e-01 4.6610000e-01 2.6600000e-01 - 3.0980000e-01 
5.2660000e-01 7.1390000e-01 4.2100000e-01 7.5260000e-01 5.1230000e-01 1.8442000e+00 -1.2760000e-01 4.1600000e-02 1.9502000e+00 3.5600000e-02 -4.2300000e-02 - 1.1133000e+00 -1.0200000e-02 6.5860000e-01 3.1860000e-01 5.5280000e-01 2.0980000e-01 6.5480000e-01 3.2030000e-01 4.6920000e-01 2.8830000e-01 5.4170000e-01 4.5620000e-01 - 1.1205000e+00 -1.2500000e-02 4.2730000e-01 1.3050000e+00 4.0010000e-01 1.5690000e+00 5.6070000e-01 1.1524000e+00 4.1320000e-01 1.3408000e+00 3.8410000e-01 1.3542000e+00 - 6.9900000e-01 3.9680000e-01 1.7460000e-01 6.3640000e-01 2.2410000e-01 4.2110000e-01 3.1940000e-01 4.6430000e-01 2.1550000e-01 4.1680000e-01 4.0410000e-01 3.6300000e-01 - 4.0870000e-01 1.1440000e-01 6.5180000e-01 1.9590000e-01 7.5400000e-01 2.2400000e-01 1.7393000e+00 -8.9000000e-03 4.3000000e-03 -5.2000000e-03 -2.8200000e-02 3.5800000e-02 - 8.9900000e-02 4.9620000e-01 1.6700000e-01 7.4780000e-01 1.8310000e-01 8.5790000e-01 1.6030000e-01 7.5590000e-01 1.4810000e-01 9.1700000e-01 1.5390000e-01 7.6050000e-01 - 7.2750000e-01 8.8000000e-02 1.0263000e+00 2.7730000e-01 1.2038000e+00 2.7120000e-01 1.0262000e+00 2.6890000e-01 1.2536000e+00 2.3430000e-01 9.7830000e-01 3.3080000e-01 - 5.4950000e-01 5.4370000e-01 2.6860000e-01 7.1070000e-01 1.7930000e-01 5.9210000e-01 2.2320000e-01 7.6450000e-01 2.5580000e-01 4.8460000e-01 3.5180000e-01 6.0740000e-01 - 5.9790000e-01 2.6310000e-01 9.2460000e-01 2.4540000e-01 1.0407000e+00 2.4970000e-01 1.7432000e+00 -1.2100000e-02 2.0357000e+00 -4.2800000e-02 5.5100000e-02 -6.8600000e-02 - 4.0530000e-01 3.1930000e-01 7.1280000e-01 4.0650000e-01 6.6000000e-01 6.4500000e-01 6.4350000e-01 4.9220000e-01 7.3260000e-01 5.7840000e-01 6.6100000e-01 4.6860000e-01 - 3.1010000e-01 7.8820000e-01 2.0580000e-01 6.5240000e-01 1.5780000e-01 5.3370000e-01 1.5780000e-01 7.1000000e-01 1.4070000e-01 5.3900000e-01 1.6590000e-01 7.0290000e-01 - 3.4480000e-01 7.7330000e-01 2.2260000e-01 3.7450000e-01 1.8560000e-01 3.1020000e-01 2.0020000e-01 3.9920000e-01 2.5830000e-01 2.1510000e-01 1.7750000e-01 4.2660000e-01 - 1.1630000e-01 6.2810000e-01 6.2400000e-02 1.1285000e+00 1.5770000e-01 1.1817000e+00 8.5700000e-02 1.1037000e+00 1.1710000e-01 1.2524000e+00 2.2700000e-01 9.3350000e-01 - 1.8410000e-01 9.0890000e-01 9.6200000e-02 1.6317000e+00 3.0600000e-02 1.1532000e+00 4.4200000e-02 1.6942000e+00 1.4400000e-02 1.1366000e+00 9.7300000e-02 1.6302000e+00 - 2.8710000e-01 4.3910000e-01 5.5450000e-01 5.5860000e-01 5.6100000e-01 7.1820000e-01 6.1550000e-01 4.8530000e-01 6.9820000e-01 5.7630000e-01 5.2840000e-01 5.9290000e-01 - 6.2660000e-01 4.8690000e-01 8.6590000e-01 8.9000000e-01 1.0302000e+00 9.5530000e-01 1.9557000e+00 -2.5830000e-01 1.9543000e+00 5.2500000e-02 1.8700000e-02 1.7097000e+00 - 2.5210000e-01 5.2080000e-01 4.8740000e-01 6.1310000e-01 7.0960000e-01 4.9270000e-01 1.6930000e+00 4.6700000e-02 2.2500000e-02 1.9677000e+00 1.4100000e-02 -1.7900000e-02 - 1.1157000e+00 -1.1300000e-02 5.8250000e-01 6.1350000e-01 6.0440000e-01 2.5980000e-01 5.3690000e-01 6.6770000e-01 5.0630000e-01 3.5210000e-01 5.3270000e-01 6.7010000e-01 - 4.3430000e-01 2.8920000e-01 6.9430000e-01 4.3790000e-01 8.3660000e-01 4.4580000e-01 8.2590000e-01 2.8380000e-01 8.6700000e-01 4.2630000e-01 7.6930000e-01 3.4940000e-01 - 3.7670000e-01 6.6100000e-01 7.3480000e-01 6.0400000e-01 9.2300000e-01 5.2510000e-01 1.7403000e+00 -4.5000000e-03 -4.6000000e-02 2.0542000e+00 2.1600000e-02 -2.4700000e-02 - 1.7760000e-01 9.4470000e-01 6.2920000e-01 9.1010000e-01 7.0550000e-01 9.5750000e-01 1.7973000e+00 -8.0500000e-02 -8.9800000e-02 2.1110000e+00 
-2.0000000e-03 1.7336000e+00 - 2.9230000e-01 5.5790000e-01 3.7040000e-01 9.8290000e-01 3.5580000e-01 1.1918000e+00 3.3510000e-01 1.0251000e+00 3.1760000e-01 1.2665000e+00 3.6190000e-01 9.8760000e-01 - 1.1005000e+00 6.6000000e-03 4.4210000e-01 8.5600000e-01 3.8230000e-01 5.4870000e-01 4.9090000e-01 7.9660000e-01 3.2410000e-01 5.9070000e-01 5.0040000e-01 7.8380000e-01 - 1.0480000e+00 6.7600000e-02 7.7880000e-01 9.4750000e-01 6.5920000e-01 1.1568000e+00 7.7520000e-01 9.5440000e-01 6.5850000e-01 9.6740000e-01 7.5350000e-01 9.7870000e-01 - 6.9700000e-02 9.3520000e-01 2.5930000e-01 1.0610000e+00 3.4190000e-01 1.1080000e+00 7.3000000e-03 1.7191000e+00 -7.6000000e-03 2.0048000e+00 3.5200000e-02 -4.4000000e-02 - 3.2600000e-01 2.1600000e-01 7.5280000e-01 7.9000000e-02 7.9240000e-01 1.7420000e-01 1.6870000e+00 5.0200000e-02 -3.9400000e-02 4.6700000e-02 -6.0300000e-02 7.0900000e-02 - 1.0500000e-01 6.6630000e-01 3.3480000e-01 7.6010000e-01 5.8750000e-01 6.1100000e-01 1.7132000e+00 2.6100000e-02 1.2280000e-01 1.8555000e+00 -2.3200000e-02 2.8600000e-02 - 1.0027000e+00 1.2520000e-01 6.1660000e-01 4.8140000e-01 4.7980000e-01 3.6170000e-01 7.0880000e-01 3.7550000e-01 5.6360000e-01 2.3770000e-01 6.2390000e-01 4.7720000e-01 - 4.4960000e-01 2.1690000e-01 6.3920000e-01 3.6300000e-01 8.2000000e-01 2.8990000e-01 1.7704000e+00 -4.5200000e-02 -1.1000000e-02 2.0112000e+00 1.7600000e-02 -1.9100000e-02 - 4.9260000e-01 6.1370000e-01 3.1860000e-01 1.4046000e+00 2.4770000e-01 1.2605000e+00 1.9280000e-01 1.5521000e+00 2.1020000e-01 1.2252000e+00 2.5420000e-01 1.4818000e+00 - 1.1387000e+00 -4.2700000e-02 8.8790000e-01 1.3010000e-01 6.6930000e-01 1.2690000e-01 8.1050000e-01 2.2190000e-01 6.3060000e-01 1.4970000e-01 8.7110000e-01 1.5170000e-01 - 9.7110000e-01 1.0220000e-01 1.4946000e+00 2.0160000e-01 1.7635000e+00 1.5830000e-01 1.5542000e+00 1.2900000e-01 1.8343000e+00 1.0140000e-01 1.4889000e+00 2.0660000e-01 - 2.1050000e-01 1.5220000e-01 5.7530000e-01 9.3600000e-02 6.6960000e-01 1.2340000e-01 1.7630000e+00 -3.6600000e-02 4.1000000e-03 -4.0000000e-03 -1.8600000e-02 2.3200000e-02 - 5.3640000e-01 5.6970000e-01 2.7540000e-01 1.4599000e+00 2.3400000e-01 9.7700000e-01 2.4800000e-01 1.4892000e+00 3.0990000e-01 8.4840000e-01 2.9890000e-01 1.4283000e+00 - 1.1653000e+00 -7.4100000e-02 4.1470000e-01 6.8310000e-01 3.3220000e-01 5.0730000e-01 4.4040000e-01 6.5150000e-01 3.8820000e-01 4.2030000e-01 3.9100000e-01 7.0640000e-01 - 5.9290000e-01 4.7530000e-01 7.6720000e-01 9.4240000e-01 8.5720000e-01 1.0864000e+00 7.9780000e-01 8.9580000e-01 9.4690000e-01 1.0056000e+00 7.5370000e-01 9.5060000e-01 - 1.9820000e-01 7.6110000e-01 4.9010000e-01 7.8440000e-01 6.0650000e-01 7.8510000e-01 1.7347000e+00 -4.1000000e-03 9.3500000e-02 1.8908000e+00 -3.7900000e-02 4.5300000e-02 - 1.5460000e-01 4.1710000e-01 2.1190000e-01 6.8680000e-01 2.0170000e-01 8.3550000e-01 3.1200000e-01 5.7040000e-01 2.8650000e-01 7.4730000e-01 2.1010000e-01 6.9320000e-01 - 5.0300000e-02 4.6740000e-01 7.3400000e-02 7.6430000e-01 2.0450000e-01 7.5580000e-01 -4.6000000e-03 1.7365000e+00 -6.4600000e-02 7.5500000e-02 -1.4800000e-02 1.9300000e-02 - 1.0410000e-01 6.7620000e-01 3.9260000e-01 7.0500000e-01 5.1570000e-01 7.0680000e-01 1.7745000e+00 -5.1300000e-02 3.8100000e-02 1.9580000e+00 -2.2100000e-02 2.5300000e-02 - 6.7000000e-03 5.9810000e-01 -6.6500000e-02 1.0335000e+00 -2.7400000e-02 1.1121000e+00 3.6100000e-02 9.1090000e-01 1.0800000e-02 1.0856000e+00 -6.4300000e-02 1.0264000e+00 - 2.6010000e-01 8.4260000e-01 1.7480000e-01 5.5360000e-01 1.4880000e-01 4.4360000e-01 2.1770000e-01 
4.9870000e-01 1.1350000e-01 4.7780000e-01 1.0670000e-01 6.3210000e-01 - 7.4950000e-01 3.6380000e-01 3.0870000e-01 7.1320000e-01 2.2890000e-01 5.7040000e-01 4.0130000e-01 5.9870000e-01 4.1470000e-01 3.3380000e-01 3.7950000e-01 6.3000000e-01 - 2.5360000e-01 3.0320000e-01 3.8900000e-01 4.8360000e-01 4.2850000e-01 5.6350000e-01 4.0990000e-01 4.5580000e-01 5.1440000e-01 4.7860000e-01 3.6650000e-01 5.1070000e-01 - 1.1087000e+00 -1.4000000e-03 6.2310000e-01 1.1034000e+00 5.1750000e-01 5.2170000e-01 5.1510000e-01 1.2323000e+00 5.3080000e-01 4.7320000e-01 4.8140000e-01 1.2696000e+00 - 1.2430000e-01 8.5520000e-01 2.6070000e-01 1.2543000e+00 2.2240000e-01 1.5176000e+00 2.0600000e-01 1.3228000e+00 2.7950000e-01 1.4760000e+00 3.0780000e-01 1.2011000e+00 - 9.2130000e-01 1.8300000e-01 1.2711000e+00 2.1820000e-01 1.4360000e+00 1.6850000e-01 1.7391000e+00 -6.5000000e-03 2.1182000e+00 -1.3820000e-01 2.5100000e-02 1.7038000e+00 - 4.9060000e-01 2.2840000e-01 7.6070000e-01 3.6950000e-01 7.5480000e-01 5.5660000e-01 8.1020000e-01 3.0680000e-01 8.2650000e-01 4.8960000e-01 7.1110000e-01 4.2830000e-01 - 5.0230000e-01 2.2960000e-01 8.0950000e-01 3.3910000e-01 8.6450000e-01 4.5460000e-01 7.0440000e-01 4.6200000e-01 8.4310000e-01 5.0250000e-01 7.6050000e-01 3.9840000e-01 - 1.1565000e+00 -5.7600000e-02 6.4270000e-01 1.0852000e+00 4.4900000e-01 9.3780000e-01 5.6510000e-01 1.1727000e+00 4.1250000e-01 9.1920000e-01 6.7130000e-01 1.0480000e+00 - 3.4710000e-01 4.9690000e-01 5.6720000e-01 6.0260000e-01 6.6930000e-01 6.2480000e-01 1.6422000e+00 1.0360000e-01 2.1800000e-02 1.9766000e+00 -6.9000000e-03 6.9000000e-03 - 1.7720000e-01 7.1120000e-01 3.1030000e-01 1.0796000e+00 2.5510000e-01 1.3394000e+00 2.3380000e-01 1.1686000e+00 3.0750000e-01 1.3050000e+00 2.4370000e-01 1.1531000e+00 - 4.8190000e-01 5.0600000e-01 7.0120000e-01 8.5590000e-01 8.2850000e-01 9.3760000e-01 6.9440000e-01 8.6150000e-01 7.0040000e-01 1.1127000e+00 7.2760000e-01 8.1930000e-01 - 4.5970000e-01 2.7490000e-01 7.4820000e-01 3.9900000e-01 9.1980000e-01 3.7460000e-01 7.8800000e-01 3.5290000e-01 8.2530000e-01 5.0670000e-01 7.3850000e-01 4.1400000e-01 - 4.5230000e-01 6.4700000e-01 2.4130000e-01 1.3122000e+00 1.8940000e-01 8.2520000e-01 1.8030000e-01 1.3793000e+00 1.9610000e-01 7.9190000e-01 1.9980000e-01 1.3618000e+00 - 4.2700000e-02 6.1640000e-01 1.5160000e-01 8.6570000e-01 8.4100000e-02 1.0956000e+00 1.2610000e-01 8.9920000e-01 1.5490000e-01 1.0291000e+00 7.6300000e-02 9.6250000e-01 - 4.8100000e-02 6.3010000e-01 3.6410000e-01 6.2740000e-01 4.2130000e-01 6.9750000e-01 4.6500000e-02 1.6775000e+00 -2.9700000e-02 2.0379000e+00 1.5000000e-03 -2.2000000e-03 - -5.4100000e-02 8.4240000e-01 2.8300000e-01 8.1340000e-01 4.0240000e-01 8.1350000e-01 6.6000000e-03 1.7243000e+00 -8.1300000e-02 2.0994000e+00 -3.5900000e-02 4.3700000e-02 - 1.4920000e-01 9.4330000e-01 4.5810000e-01 1.2633000e+00 4.3400000e-01 1.5177000e+00 1.6926000e+00 4.6300000e-02 -5.2700000e-02 2.0564000e+00 -1.0430000e-01 1.8550000e+00 - 5.9500000e-01 5.0370000e-01 8.4180000e-01 6.5930000e-01 1.0356000e+00 5.6840000e-01 1.7362000e+00 -4.0000000e-03 1.9987000e+00 2.9000000e-03 7.4500000e-02 1.6417000e+00 - 1.0412000e+00 7.6900000e-02 3.4290000e-01 5.1310000e-01 4.0830000e-01 2.5630000e-01 4.4570000e-01 3.8720000e-01 3.6860000e-01 2.8750000e-01 4.2010000e-01 4.1960000e-01 - 5.7500000e-01 9.8300000e-02 7.3590000e-01 3.5240000e-01 9.2260000e-01 3.0270000e-01 7.9220000e-01 2.8490000e-01 9.7580000e-01 2.5880000e-01 8.2020000e-01 2.5140000e-01 - 2.3560000e-01 2.6080000e-01 3.4910000e-01 4.3120000e-01 5.0150000e-01 
3.6560000e-01 3.2590000e-01 4.5890000e-01 3.6230000e-01 5.4800000e-01 3.4150000e-01 4.3910000e-01 - -4.4000000e-03 5.7500000e-02 2.9990000e-01 5.5900000e-02 3.4360000e-01 1.4950000e-01 6.2200000e-02 -7.2600000e-02 3.7700000e-02 -4.6200000e-02 -1.9300000e-02 2.4600000e-02 - 3.8530000e-01 2.2660000e-01 6.3980000e-01 2.9400000e-01 7.4650000e-01 3.0860000e-01 1.8898000e+00 -1.8040000e-01 2.0800000e-02 1.9773000e+00 -1.6600000e-02 1.8400000e-02 - 5.7320000e-01 9.8200000e-02 9.4950000e-01 2.1500000e-02 1.0106000e+00 9.0400000e-02 1.6897000e+00 5.0200000e-02 1.8551000e+00 1.6630000e-01 -1.4400000e-02 1.5600000e-02 - 5.9750000e-01 9.5900000e-02 8.7090000e-01 2.2770000e-01 9.4480000e-01 3.0960000e-01 8.9840000e-01 1.9090000e-01 1.0404000e+00 2.1440000e-01 8.6950000e-01 2.2800000e-01 - 1.1617000e+00 -6.8000000e-02 5.1030000e-01 1.2134000e+00 4.3220000e-01 6.7670000e-01 5.0400000e-01 1.2196000e+00 3.9560000e-01 6.8590000e-01 5.2040000e-01 1.2014000e+00 - 1.0217000e+00 9.8800000e-02 4.3180000e-01 4.9480000e-01 3.2710000e-01 4.0960000e-01 4.7880000e-01 4.3440000e-01 3.3610000e-01 3.8750000e-01 4.1070000e-01 5.1750000e-01 - 9.9100000e-02 6.0970000e-01 1.9280000e-01 9.1260000e-01 2.8490000e-01 9.6500000e-01 1.8190000e-01 9.3130000e-01 2.5870000e-01 1.0148000e+00 2.2170000e-01 8.8270000e-01 - 4.1960000e-01 4.0700000e-01 8.5090000e-01 2.7050000e-01 8.0900000e-01 4.5840000e-01 1.6335000e+00 1.1200000e-01 -6.6500000e-02 2.0807000e+00 1.6600000e-02 -1.9500000e-02 - 6.4420000e-01 4.6570000e-01 2.8050000e-01 9.2110000e-01 3.0620000e-01 5.7540000e-01 3.2030000e-01 8.7360000e-01 3.4280000e-01 5.1070000e-01 2.1950000e-01 9.9370000e-01 - 1.0830000e-01 3.7040000e-01 2.1980000e-01 5.2390000e-01 1.9730000e-01 6.5790000e-01 1.0380000e-01 6.6410000e-01 2.7240000e-01 5.8150000e-01 2.6000000e-01 4.7340000e-01 - 1.1434000e+00 -4.7000000e-02 6.3940000e-01 1.0840000e+00 5.0720000e-01 8.2190000e-01 7.2740000e-01 9.7700000e-01 4.1170000e-01 8.7920000e-01 6.1330000e-01 1.1156000e+00 - 4.8940000e-01 2.3220000e-01 9.4440000e-01 1.5990000e-01 1.1365000e+00 1.1000000e-01 9.9700000e-01 9.4000000e-02 1.0839000e+00 1.9250000e-01 9.9860000e-01 9.7000000e-02 - 6.9400000e-02 4.4960000e-01 3.8610000e-01 4.4310000e-01 5.0830000e-01 4.4200000e-01 1.7225000e+00 8.0000000e-03 -2.1800000e-02 2.5800000e-02 -1.2600000e-02 1.5300000e-02 - 4.7090000e-01 1.6370000e-01 7.5400000e-01 2.3680000e-01 9.1680000e-01 1.9870000e-01 7.0550000e-01 2.9190000e-01 8.7990000e-01 2.5970000e-01 6.8790000e-01 3.1190000e-01 - 3.6950000e-01 7.4750000e-01 7.1780000e-01 7.0920000e-01 8.7490000e-01 6.6760000e-01 1.7200000e+00 6.4000000e-03 -9.8000000e-03 2.0150000e+00 5.2300000e-02 1.6716000e+00 - 2.8330000e-01 6.9290000e-01 3.3430000e-01 1.2073000e+00 4.8480000e-01 1.2596000e+00 4.2010000e-01 1.1084000e+00 4.5280000e-01 1.3130000e+00 3.4830000e-01 1.2002000e+00 - 3.2490000e-01 2.6630000e-01 2.1350000e-01 8.3800000e-02 2.1760000e-01 3.2300000e-02 1.6330000e-01 1.4400000e-01 1.6430000e-01 8.8700000e-02 2.1860000e-01 7.8500000e-02 - 2.5090000e-01 4.4150000e-01 3.2530000e-01 7.7740000e-01 3.2370000e-01 9.3790000e-01 2.0930000e-01 9.1460000e-01 3.4700000e-01 9.3130000e-01 2.3180000e-01 8.8540000e-01 - 4.0890000e-01 3.1290000e-01 5.5810000e-01 5.0680000e-01 8.1380000e-01 3.4620000e-01 1.6779000e+00 6.8400000e-02 2.0800000e-02 1.9740000e+00 -2.2900000e-02 2.7100000e-02 - 6.6430000e-01 4.4400000e-01 4.7090000e-01 3.0210000e-01 3.0440000e-01 3.3730000e-01 3.3180000e-01 4.6830000e-01 2.8920000e-01 3.4210000e-01 3.2020000e-01 4.8280000e-01 - 1.0898000e+00 1.8200000e-02 
- [... deleted data rows omitted: 12-column feature vectors from the oil data set training file under GPy/util/datasets/oil/, removed wholesale in this diff ...]
diff --git a/GPy/util/datasets/oil/DataTrnLbls.txt b/GPy/util/datasets/oil/DataTrnLbls.txt
deleted file mode 100644
index a563bfc5..00000000
--- a/GPy/util/datasets/oil/DataTrnLbls.txt
+++ /dev/null
@@ -1,1000 +0,0 @@
- [... 1000 deleted label rows omitted: one-hot class indicators, 3 columns per row, matching the oil data set training features above ...]
0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 
1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 
1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 
0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 
1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 
0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 
1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 1.0000000e+00 0.0000000e+00 0.0000000e+00 - 0.0000000e+00 1.0000000e+00 0.0000000e+00 - 0.0000000e+00 0.0000000e+00 1.0000000e+00 diff --git a/GPy/util/datasets/oil/DataTst.txt b/GPy/util/datasets/oil/DataTst.txt deleted file mode 100644 index 47421b29..00000000 --- a/GPy/util/datasets/oil/DataTst.txt +++ /dev/null @@ -1,1000 +0,0 @@ - 5.80300000e-01 4.98000000e-01 8.80900000e-01 8.21500000e-01 1.01330000e+00 9.18700000e-01 8.50800000e-01 8.49600000e-01 9.99600000e-01 9.67000000e-01 9.11100000e-01 7.81400000e-01 - 2.60000000e-03 6.08400000e-01 2.80800000e-01 6.28200000e-01 2.99500000e-01 7.51300000e-01 -1.07000000e-02 1.75030000e+00 -6.22000000e-02 2.07040000e+00 -9.99000000e-02 1.21400000e-01 - -1.15000000e-02 1.11930000e+00 2.20100000e-01 1.31890000e+00 2.65600000e-01 
1.40390000e+00 -6.70000000e-03 1.74380000e+00 1.30000000e-03 2.00200000e+00 -4.01000000e-02 1.77970000e+00 - -5.36000000e-02 1.03700000e+00 5.58000000e-02 1.24830000e+00 2.64000000e-01 1.14560000e+00 1.40000000e-02 1.71560000e+00 1.25000000e-02 1.98890000e+00 -5.98000000e-02 7.09000000e-02 - 5.02100000e-01 4.27000000e-01 6.45000000e-01 8.32500000e-01 7.56000000e-01 9.23500000e-01 7.28900000e-01 7.35200000e-01 7.73500000e-01 9.29800000e-01 6.01300000e-01 8.84200000e-01 - 4.80100000e-01 5.52000000e-01 8.00500000e-01 5.46800000e-01 8.60500000e-01 6.15000000e-01 1.70040000e+00 3.52000000e-02 9.88000000e-02 1.88340000e+00 5.37000000e-02 -6.45000000e-02 - 2.91400000e-01 5.18200000e-01 4.75900000e-01 7.85000000e-01 5.53100000e-01 8.76500000e-01 4.60900000e-01 8.01100000e-01 6.07000000e-01 8.37400000e-01 4.75200000e-01 7.87000000e-01 - 2.18500000e-01 1.38000000e-01 4.10300000e-01 1.36400000e-01 3.32200000e-01 3.13600000e-01 3.04500000e-01 2.59600000e-01 3.96400000e-01 2.47400000e-01 3.81300000e-01 1.71100000e-01 - 4.10800000e-01 3.17000000e-01 6.42500000e-01 5.09700000e-01 7.46700000e-01 5.55900000e-01 7.08200000e-01 4.24300000e-01 8.34700000e-01 4.71900000e-01 7.83900000e-01 3.38200000e-01 - 3.62200000e-01 2.81200000e-01 5.29100000e-01 4.86300000e-01 7.31100000e-01 3.99200000e-01 5.88700000e-01 4.12900000e-01 6.43400000e-01 5.20600000e-01 5.69600000e-01 4.38200000e-01 - 4.58400000e-01 4.86600000e-01 8.14000000e-01 4.38200000e-01 9.03700000e-01 4.72800000e-01 1.75720000e+00 -2.69000000e-02 1.87000000e-02 1.97780000e+00 2.39000000e-02 -2.74000000e-02 - 1.84000000e-01 3.62500000e-01 2.03300000e-01 6.70000000e-01 2.95900000e-01 6.81700000e-01 2.82000000e-01 5.72700000e-01 1.46100000e-01 8.74700000e-01 1.78800000e-01 6.96200000e-01 - 1.13140000e+00 -3.03000000e-02 9.67100000e-01 7.72900000e-01 7.87000000e-01 2.47100000e-01 1.01810000e+00 7.12900000e-01 8.37800000e-01 1.56000000e-01 1.00370000e+00 7.32500000e-01 - 4.13100000e-01 5.05100000e-01 5.25300000e-01 9.34200000e-01 8.01500000e-01 8.24200000e-01 6.03000000e-01 8.43400000e-01 6.78600000e-01 9.92900000e-01 6.07500000e-01 8.32600000e-01 - 5.45900000e-01 3.51400000e-01 8.76300000e-01 3.30600000e-01 1.03000000e+00 2.95400000e-01 1.70370000e+00 3.55000000e-02 1.94770000e+00 5.87000000e-02 9.80000000e-03 -1.45000000e-02 - 1.10560000e+00 7.00000000e-04 5.51700000e-01 1.37300000e-01 5.02900000e-01 5.52000000e-02 6.00000000e-01 7.76000000e-02 4.44800000e-01 1.13000000e-01 5.44300000e-01 1.47000000e-01 - 1.03730000e+00 8.16000000e-02 4.90100000e-01 3.62500000e-01 5.03000000e-01 1.64000000e-01 5.35700000e-01 3.13600000e-01 4.11500000e-01 2.58000000e-01 4.04400000e-01 4.66100000e-01 - -1.24000000e-02 6.51600000e-01 1.78000000e-01 7.85900000e-01 3.60200000e-01 7.13900000e-01 -1.13400000e-01 1.86640000e+00 -7.78000000e-02 2.09350000e+00 -5.62000000e-02 6.81000000e-02 - 7.61000000e-02 4.39400000e-01 1.56100000e-01 6.44500000e-01 1.22400000e-01 7.97800000e-01 9.04000000e-02 7.21000000e-01 1.81800000e-01 7.43600000e-01 1.87800000e-01 6.08200000e-01 - 1.10550000e+00 -2.60000000e-03 7.16900000e-01 1.01230000e+00 5.23100000e-01 1.05420000e+00 7.66100000e-01 9.55300000e-01 6.01700000e-01 8.60700000e-01 7.10000000e-01 1.02370000e+00 - 1.07290000e+00 3.62000000e-02 7.60200000e-01 9.50800000e-01 4.80000000e-01 6.31300000e-01 6.66400000e-01 1.06120000e+00 5.22300000e-01 5.46600000e-01 6.59400000e-01 1.06770000e+00 - 5.60700000e-01 2.71100000e-01 7.21000000e-01 6.09100000e-01 9.12600000e-01 5.80800000e-01 8.18400000e-01 4.94000000e-01 9.75400000e-01 5.35700000e-01 
8.98000000e-01 4.00100000e-01 - 5.52500000e-01 5.67700000e-01 2.86100000e-01 4.57000000e-01 2.36800000e-01 3.74200000e-01 4.00800000e-01 3.26400000e-01 3.26500000e-01 2.57600000e-01 3.42100000e-01 3.96000000e-01 - 7.61700000e-01 2.79100000e-01 1.29580000e+00 3.11200000e-01 1.35940000e+00 4.91700000e-01 1.35130000e+00 2.41500000e-01 1.60670000e+00 2.20000000e-01 1.35450000e+00 2.41300000e-01 - 1.17520000e+00 -8.48000000e-02 4.84100000e-01 4.30900000e-01 3.75000000e-01 3.58500000e-01 5.93000000e-01 3.06000000e-01 4.48800000e-01 2.52200000e-01 5.52000000e-01 3.49600000e-01 - 1.04800000e-01 7.48300000e-01 5.00000000e-01 6.50800000e-01 5.58800000e-01 7.22200000e-01 1.67170000e+00 7.56000000e-02 3.40000000e-03 1.99730000e+00 -4.60000000e-03 4.00000000e-03 - 7.31300000e-01 -3.03000000e-02 1.07620000e+00 -7.06000000e-02 1.18330000e+00 -5.36000000e-02 1.73470000e+00 -3.40000000e-03 2.02720000e+00 -3.40000000e-02 3.39000000e-02 -4.02000000e-02 - 8.37900000e-01 2.65200000e-01 3.88100000e-01 1.34080000e+00 2.50600000e-01 1.50760000e+00 2.56300000e-01 1.49590000e+00 3.20500000e-01 1.27210000e+00 4.05600000e-01 1.32090000e+00 - 8.01600000e-01 3.08400000e-01 1.14870000e+00 2.66300000e-01 1.30040000e+00 2.31500000e-01 1.67380000e+00 7.25000000e-02 1.97510000e+00 2.53000000e-02 6.47000000e-02 1.65580000e+00 - 1.03710000e+00 8.37000000e-02 1.02670000e+00 7.17400000e-01 7.09800000e-01 9.12000000e-01 1.12770000e+00 6.01800000e-01 7.72100000e-01 7.38800000e-01 1.12220000e+00 6.07900000e-01 - 2.50600000e-01 4.81500000e-01 3.63500000e-01 7.91800000e-01 5.26200000e-01 7.66800000e-01 3.85000000e-01 7.65100000e-01 4.19900000e-01 9.08700000e-01 3.32200000e-01 8.26000000e-01 - 1.30400000e-01 4.48500000e-01 4.64100000e-01 4.26800000e-01 5.83400000e-01 4.26300000e-01 1.66400000e+00 7.38000000e-02 9.35000000e-02 1.88710000e+00 1.65000000e-02 -1.89000000e-02 - 1.07320000e+00 3.81000000e-02 5.05600000e-01 4.40100000e-01 4.69500000e-01 2.68300000e-01 5.86300000e-01 3.42300000e-01 4.57600000e-01 2.66500000e-01 6.06000000e-01 3.22200000e-01 - 1.34600000e-01 1.54600000e-01 9.67000000e-02 6.88000000e-02 1.12700000e-01 2.65000000e-02 1.07700000e-01 5.49000000e-02 1.22700000e-01 1.12000000e-02 4.48000000e-02 1.31900000e-01 - 1.04800000e+00 6.90000000e-02 5.17700000e-01 1.20690000e+00 3.87000000e-01 9.81800000e-01 5.72600000e-01 1.14760000e+00 4.06900000e-01 9.01200000e-01 4.30100000e-01 1.31670000e+00 - 1.14600000e-01 6.64000000e-01 4.91100000e-01 5.86700000e-01 5.48500000e-01 6.63800000e-01 1.65800000e+00 8.56000000e-02 -1.53200000e-01 2.18760000e+00 -4.85000000e-02 5.81000000e-02 - 4.73000000e-02 6.98000000e-01 3.60900000e-01 6.98800000e-01 5.08500000e-01 6.65700000e-01 1.70920000e+00 2.77000000e-02 9.67000000e-02 1.88870000e+00 4.96000000e-02 -5.92000000e-02 - 1.13630000e+00 -3.51000000e-02 4.86200000e-01 3.15800000e-01 3.40200000e-01 3.16800000e-01 5.57900000e-01 2.29900000e-01 4.16500000e-01 2.16200000e-01 5.22500000e-01 2.73500000e-01 - 6.04500000e-01 4.91000000e-01 1.93500000e-01 1.55930000e+00 2.50900000e-01 8.82400000e-01 2.22700000e-01 1.52020000e+00 2.45900000e-01 8.52300000e-01 3.58500000e-01 1.36390000e+00 - 4.23100000e-01 2.75500000e-01 7.46900000e-01 2.60400000e-01 8.59500000e-01 2.68200000e-01 1.76160000e+00 -2.69000000e-02 1.75800000e-01 1.79570000e+00 2.66000000e-02 -3.14000000e-02 - 7.11800000e-01 2.10300000e-01 1.04160000e+00 1.89900000e-01 1.25160000e+00 8.46000000e-02 1.72690000e+00 1.00000000e-04 1.99170000e+00 9.90000000e-03 -5.83000000e-02 6.74000000e-02 - 4.91900000e-01 5.98100000e-01 6.55700000e-01 
9.37300000e-01 9.05800000e-01 7.85600000e-01 1.66980000e+00 7.21000000e-02 9.47000000e-02 1.88640000e+00 3.53000000e-02 1.69210000e+00 - 7.49000000e-02 2.03000000e-01 1.40200000e-01 2.88400000e-01 1.30700000e-01 3.62500000e-01 1.29800000e-01 3.00900000e-01 2.13700000e-01 2.69900000e-01 2.15100000e-01 1.99900000e-01 - 1.09410000e+00 1.48000000e-02 5.75800000e-01 1.43000000e-01 5.51300000e-01 2.44000000e-02 5.17800000e-01 2.12300000e-01 4.39400000e-01 1.42500000e-01 5.69300000e-01 1.52700000e-01 - 2.37900000e-01 5.99200000e-01 5.39200000e-01 6.12000000e-01 6.67800000e-01 6.05000000e-01 1.63930000e+00 1.09700000e-01 -2.42000000e-02 2.02880000e+00 1.74000000e-02 -1.94000000e-02 - 1.02390000e+00 8.07000000e-02 3.62600000e-01 7.87000000e-02 4.32400000e-01 -8.07000000e-02 3.97400000e-01 3.82000000e-02 3.05300000e-01 6.35000000e-02 3.84500000e-01 5.42000000e-02 - 3.53600000e-01 5.82700000e-01 7.21000000e-01 5.21700000e-01 8.35400000e-01 5.25000000e-01 1.83170000e+00 -1.13500000e-01 -3.96000000e-02 2.04650000e+00 2.17000000e-02 -2.64000000e-02 - -1.27000000e-02 5.92800000e-01 3.31000000e-02 8.64700000e-01 1.02600000e-01 9.07400000e-01 5.74000000e-02 8.35300000e-01 3.67000000e-02 1.00290000e+00 3.10000000e-02 8.66800000e-01 - 3.13600000e-01 8.01800000e-01 7.50900000e-01 7.13400000e-01 7.56600000e-01 8.45900000e-01 1.67200000e+00 7.62000000e-02 5.75000000e-02 1.93200000e+00 2.93000000e-02 1.69620000e+00 - 6.69700000e-01 4.20000000e-02 9.08300000e-01 1.30800000e-01 1.02660000e+00 1.36600000e-01 1.78870000e+00 -6.75000000e-02 1.98000000e+00 2.43000000e-02 -1.04000000e-02 1.47000000e-02 - -1.43000000e-02 5.55900000e-01 2.52000000e-02 8.13100000e-01 7.52000000e-02 8.69800000e-01 1.22500000e-01 6.96200000e-01 8.26000000e-02 8.75600000e-01 7.33000000e-02 7.54800000e-01 - 1.07460000e+00 3.28000000e-02 5.76200000e-01 7.70800000e-01 5.08300000e-01 4.41000000e-01 6.20200000e-01 7.17000000e-01 4.88100000e-01 4.38800000e-01 4.99100000e-01 8.54900000e-01 - 3.45500000e-01 1.09600000e-01 5.96000000e-01 1.09200000e-01 7.07700000e-01 9.14000000e-02 6.37200000e-01 5.93000000e-02 7.79600000e-01 1.85000000e-02 6.50000000e-01 4.55000000e-02 - 8.81400000e-01 7.41000000e-02 1.17760000e+00 9.63000000e-02 1.22800000e+00 1.79000000e-01 1.78240000e+00 -6.10000000e-02 2.09890000e+00 -1.12800000e-01 -4.17000000e-02 4.92000000e-02 - 1.09580000e+00 1.39000000e-02 6.06300000e-01 2.39900000e-01 3.85300000e-01 3.09700000e-01 5.39000000e-01 3.17800000e-01 5.31900000e-01 1.18300000e-01 5.87200000e-01 2.56700000e-01 - 1.06220000e+00 5.37000000e-02 7.13700000e-01 5.66900000e-01 5.50600000e-01 3.80100000e-01 6.08700000e-01 6.88500000e-01 6.20900000e-01 2.71100000e-01 6.38500000e-01 6.57800000e-01 - 3.77700000e-01 7.25600000e-01 7.50500000e-01 8.81300000e-01 9.08100000e-01 8.34800000e-01 1.82330000e+00 -1.09800000e-01 3.53000000e-02 1.96020000e+00 7.32000000e-02 1.64800000e+00 - 1.93300000e-01 5.41900000e-01 4.16400000e-01 6.44500000e-01 5.96800000e-01 5.74000000e-01 1.72550000e+00 7.20000000e-03 1.43000000e-02 1.98370000e+00 -9.40000000e-03 9.90000000e-03 - 4.84200000e-01 2.08000000e-01 6.48000000e-01 4.62100000e-01 8.72900000e-01 3.62500000e-01 6.71300000e-01 4.29900000e-01 8.94400000e-01 3.58200000e-01 6.60000000e-01 4.41800000e-01 - 6.67500000e-01 3.47200000e-01 1.08870000e+00 4.97900000e-01 1.14000000e+00 6.85900000e-01 1.09180000e+00 4.93200000e-01 1.23650000e+00 5.94100000e-01 1.13790000e+00 4.40500000e-01 - 1.08400000e+00 2.85000000e-02 3.43500000e-01 7.87600000e-01 3.96500000e-01 4.41700000e-01 4.21600000e-01 6.92500000e-01 
3.75100000e-01 4.44500000e-01 4.29500000e-01 6.78700000e-01 - 2.64300000e-01 3.53300000e-01 4.94200000e-01 4.60500000e-01 6.04600000e-01 4.73000000e-01 3.99000000e-01 5.69900000e-01 5.65000000e-01 5.38400000e-01 4.71800000e-01 4.85000000e-01 - 5.19000000e-01 5.89500000e-01 7.47600000e-01 7.31900000e-01 9.99100000e-01 5.71500000e-01 1.75460000e+00 -2.53000000e-02 2.07000000e-02 1.97530000e+00 -1.94000000e-02 1.75480000e+00 - 3.38400000e-01 5.28500000e-01 5.78900000e-01 7.69800000e-01 8.80000000e-01 6.08500000e-01 6.29400000e-01 7.05800000e-01 6.86400000e-01 8.63900000e-01 6.12500000e-01 7.21900000e-01 - 8.09100000e-01 1.14900000e-01 1.03660000e+00 2.23700000e-01 1.29290000e+00 5.88000000e-02 1.70900000e+00 2.43000000e-02 2.00540000e+00 -1.50000000e-03 8.57000000e-02 -1.01400000e-01 - 4.08500000e-01 6.87800000e-01 2.75000000e-01 2.66700000e-01 1.44700000e-01 3.28700000e-01 2.41500000e-01 3.09500000e-01 7.82000000e-02 3.97300000e-01 2.84800000e-01 2.57800000e-01 - 3.52000000e-01 3.50600000e-01 3.66500000e-01 7.63200000e-01 4.76000000e-01 8.04700000e-01 3.86400000e-01 7.41200000e-01 4.93400000e-01 8.05100000e-01 4.28000000e-01 6.96300000e-01 - 5.63000000e-01 3.47000000e-01 8.90300000e-01 5.35700000e-01 9.27200000e-01 7.07000000e-01 7.12200000e-01 7.41900000e-01 9.29300000e-01 7.29400000e-01 8.72100000e-01 5.54400000e-01 - 7.45200000e-01 2.80900000e-01 1.14580000e+00 4.55400000e-01 1.24900000e+00 5.86500000e-01 1.12450000e+00 4.85500000e-01 1.28650000e+00 5.74400000e-01 1.05400000e+00 5.64900000e-01 - 4.33600000e-01 6.86400000e-01 2.94700000e-01 3.58200000e-01 1.92100000e-01 3.57300000e-01 2.44900000e-01 4.18300000e-01 3.37800000e-01 1.74100000e-01 2.36400000e-01 4.31100000e-01 - 5.30000000e-02 7.20100000e-01 2.63100000e-01 8.43600000e-01 4.87900000e-01 7.20100000e-01 2.31000000e-02 1.70620000e+00 -1.11400000e-01 2.13290000e+00 4.96000000e-02 -5.81000000e-02 - 3.11800000e-01 4.66400000e-01 6.50200000e-01 4.37700000e-01 6.83300000e-01 5.41900000e-01 1.70470000e+00 3.12000000e-02 7.61000000e-02 1.90900000e+00 -5.28000000e-02 6.31000000e-02 - 5.68900000e-01 1.78400000e-01 7.58200000e-01 4.38700000e-01 9.44000000e-01 3.99400000e-01 8.11900000e-01 3.72900000e-01 9.13400000e-01 4.62000000e-01 6.80800000e-01 5.27500000e-01 - 4.68300000e-01 2.92000000e-01 8.04300000e-01 3.72900000e-01 9.92500000e-01 3.32500000e-01 8.56000000e-01 3.09100000e-01 9.29500000e-01 4.26500000e-01 8.21000000e-01 3.52700000e-01 - 1.18160000e+00 -8.79000000e-02 1.52620000e+00 1.87800000e-01 9.55800000e-01 1.02010000e+00 1.45450000e+00 2.69000000e-01 1.07820000e+00 7.09100000e-01 1.38340000e+00 3.52600000e-01 - 1.76400000e-01 8.38500000e-01 3.84200000e-01 9.58900000e-01 6.07800000e-01 8.42100000e-01 1.68200000e+00 5.69000000e-02 3.43000000e-02 1.96260000e+00 3.20000000e-02 -3.85000000e-02 - 5.61700000e-01 5.35500000e-01 2.07100000e-01 4.24200000e-01 2.39300000e-01 2.76100000e-01 2.25200000e-01 4.02400000e-01 2.69400000e-01 2.24800000e-01 1.87900000e-01 4.47600000e-01 - 2.53800000e-01 1.53300000e-01 3.62600000e-01 2.81000000e-01 4.68600000e-01 2.51000000e-01 4.02500000e-01 2.34400000e-01 4.08200000e-01 3.39100000e-01 3.52400000e-01 2.91700000e-01 - 9.07300000e-01 2.36000000e-01 4.90200000e-01 1.94700000e-01 4.24000000e-01 1.37500000e-01 5.86700000e-01 7.81000000e-02 3.79100000e-01 1.78800000e-01 5.53200000e-01 1.17900000e-01 - 4.51100000e-01 9.79000000e-02 8.37100000e-01 1.23000000e-02 8.50800000e-01 1.41800000e-01 1.83390000e+00 -1.20400000e-01 -1.13000000e-02 1.32000000e-02 5.60000000e-03 -6.70000000e-03 - 3.40500000e-01 
3.73400000e-01 5.58900000e-01 5.53600000e-01 6.31800000e-01 6.32700000e-01 5.15800000e-01 6.00800000e-01 6.00500000e-01 6.90200000e-01 4.41100000e-01 6.92800000e-01 - 7.39000000e-01 9.63000000e-02 1.03070000e+00 1.21300000e-01 1.15630000e+00 1.14800000e-01 1.77530000e+00 -5.66000000e-02 2.05890000e+00 -7.59000000e-02 3.33000000e-02 -3.79000000e-02 - 6.18000000e-01 9.19000000e-02 8.64500000e-01 1.70800000e-01 1.04330000e+00 1.01000000e-01 1.70950000e+00 2.81000000e-02 1.96420000e+00 4.13000000e-02 4.70000000e-03 -6.60000000e-03 - 1.06690000e+00 4.57000000e-02 1.13840000e+00 5.93800000e-01 7.58300000e-01 7.54400000e-01 1.13210000e+00 6.03700000e-01 7.44600000e-01 6.88300000e-01 1.09990000e+00 6.44200000e-01 - 1.03630000e+00 6.87000000e-02 3.56500000e-01 1.38930000e+00 3.78800000e-01 6.46000000e-01 3.66000000e-01 1.37120000e+00 3.15200000e-01 6.94900000e-01 4.27700000e-01 1.30180000e+00 - 8.01500000e-01 1.87200000e-01 1.22990000e+00 3.21300000e-01 1.41580000e+00 3.43500000e-01 1.26180000e+00 2.83100000e-01 1.42180000e+00 3.63800000e-01 1.32640000e+00 2.11400000e-01 - 3.50000000e-02 5.03000000e-01 2.43500000e-01 6.18100000e-01 3.53500000e-01 6.26000000e-01 -3.00000000e-02 1.77210000e+00 -1.31400000e-01 1.58500000e-01 -3.14000000e-02 3.86000000e-02 - 1.10200000e+00 5.30000000e-03 7.89200000e-01 8.00300000e-01 7.73900000e-01 2.18600000e-01 8.08600000e-01 7.78200000e-01 7.17900000e-01 2.57700000e-01 8.13100000e-01 7.80600000e-01 - 8.43000000e-02 1.01440000e+00 4.69000000e-02 6.89900000e-01 -2.59000000e-02 6.41200000e-01 -4.30000000e-03 7.51300000e-01 5.20000000e-02 5.36800000e-01 -5.55000000e-02 8.13400000e-01 - 1.08800000e+00 1.80000000e-02 4.09000000e-01 2.87700000e-01 3.05600000e-01 2.75400000e-01 3.64900000e-01 3.41300000e-01 3.24800000e-01 2.42000000e-01 3.72300000e-01 3.33900000e-01 - 1.14990000e+00 -5.23000000e-02 3.86800000e-01 3.87200000e-01 3.13100000e-01 3.18900000e-01 4.05900000e-01 3.65300000e-01 3.71000000e-01 2.36700000e-01 5.25400000e-01 2.24200000e-01 - 3.46800000e-01 6.55000000e-01 6.05400000e-01 7.15200000e-01 7.15400000e-01 7.30400000e-01 1.69130000e+00 4.76000000e-02 -3.78000000e-02 2.05070000e+00 5.44000000e-02 -6.49000000e-02 - 4.46700000e-01 6.65700000e-01 8.34700000e-01 6.93700000e-01 9.44100000e-01 7.09800000e-01 1.68190000e+00 6.10000000e-02 3.40000000e-02 1.96040000e+00 2.38000000e-02 1.70430000e+00 - 6.87000000e-01 4.09600000e-01 9.01700000e-01 6.46000000e-01 1.10700000e+00 5.43500000e-01 1.83670000e+00 -1.22400000e-01 2.06060000e+00 -6.77000000e-02 1.08000000e-02 1.72070000e+00 - 6.20300000e-01 4.78400000e-01 7.97800000e-01 9.54200000e-01 9.38600000e-01 1.04470000e+00 9.70200000e-01 7.47200000e-01 1.10720000e+00 8.79800000e-01 8.36900000e-01 9.03000000e-01 - 3.56000000e-01 4.54100000e-01 5.84900000e-01 6.77600000e-01 6.67800000e-01 7.68100000e-01 6.88500000e-01 5.56700000e-01 7.60400000e-01 6.86600000e-01 5.34500000e-01 7.38500000e-01 - 1.20600000e-01 6.81500000e-01 4.79000000e-01 6.28400000e-01 5.18700000e-01 7.24200000e-01 1.74990000e+00 -2.13000000e-02 -2.09000000e-02 2.02460000e+00 -5.04000000e-02 5.76000000e-02 - 4.63200000e-01 4.67700000e-01 7.81900000e-01 4.60600000e-01 8.89900000e-01 4.72800000e-01 1.76310000e+00 -3.84000000e-02 -8.40000000e-03 2.00930000e+00 4.97000000e-02 -6.04000000e-02 - 4.17600000e-01 7.63000000e-02 5.72200000e-01 2.63200000e-01 7.94600000e-01 1.38900000e-01 1.79840000e+00 -7.19000000e-02 -7.74000000e-02 9.25000000e-02 2.96000000e-02 -3.40000000e-02 - 3.36500000e-01 7.50600000e-01 1.57200000e-01 8.90400000e-01 1.21300000e-01 
6.85400000e-01 1.96400000e-01 8.38900000e-01 8.56000000e-02 7.10700000e-01 1.56000000e-01 8.91700000e-01 - 1.03730000e+00 8.40000000e-02 1.74370000e+00 -1.62000000e-02 1.07820000e+00 2.34400000e-01 1.76840000e+00 -4.12000000e-02 1.12690000e+00 1.18800000e-01 1.74390000e+00 -1.41000000e-02 - 3.98100000e-01 3.49600000e-01 5.38400000e-01 6.52800000e-01 7.02300000e-01 6.38700000e-01 5.24600000e-01 6.60600000e-01 6.17300000e-01 7.58000000e-01 6.45800000e-01 5.21300000e-01 - 4.76900000e-01 6.17200000e-01 6.38500000e-01 8.23900000e-01 8.64000000e-01 7.00100000e-01 1.68450000e+00 5.69000000e-02 3.00000000e-04 1.99610000e+00 -8.51000000e-02 1.83120000e+00 - 6.29400000e-01 1.00400000e-01 9.72700000e-01 1.79200000e-01 1.11400000e+00 1.96700000e-01 9.91300000e-01 1.58700000e-01 1.18640000e+00 1.36800000e-01 9.02700000e-01 2.62500000e-01 - 4.63800000e-01 2.74200000e-01 6.41900000e-01 5.30100000e-01 9.30300000e-01 3.67700000e-01 6.77300000e-01 4.90500000e-01 8.16800000e-01 5.21800000e-01 7.43800000e-01 4.09900000e-01 - 4.53800000e-01 6.49300000e-01 3.40600000e-01 1.37400000e+00 2.12200000e-01 1.03460000e+00 2.02600000e-01 1.53320000e+00 3.00100000e-01 8.81700000e-01 2.67800000e-01 1.45730000e+00 - 4.68600000e-01 6.33400000e-01 2.74600000e-01 2.31300000e-01 1.77800000e-01 2.55700000e-01 2.66700000e-01 2.37100000e-01 2.68000000e-01 1.39700000e-01 2.12700000e-01 3.00800000e-01 - 4.69100000e-01 6.21400000e-01 2.76300000e-01 3.24300000e-01 1.35600000e-01 3.82500000e-01 1.87100000e-01 4.32300000e-01 2.16800000e-01 2.76400000e-01 1.90700000e-01 4.26500000e-01 - 1.09170000e+00 1.74000000e-02 6.63900000e-01 1.05820000e+00 5.04700000e-01 6.04700000e-01 6.10300000e-01 1.12430000e+00 4.88900000e-01 5.86600000e-01 6.58300000e-01 1.07260000e+00 - 2.23100000e-01 5.62800000e-01 4.78100000e-01 7.26000000e-01 4.82100000e-01 8.99400000e-01 4.17700000e-01 7.96200000e-01 5.89100000e-01 7.90800000e-01 3.92500000e-01 8.24900000e-01 - 1.15770000e+00 -6.39000000e-02 6.07300000e-01 3.06700000e-01 5.69300000e-01 1.45600000e-01 6.73600000e-01 2.27400000e-01 5.11500000e-01 1.98000000e-01 6.77600000e-01 2.23200000e-01 - 7.26600000e-01 3.88200000e-01 3.90800000e-01 4.62700000e-01 3.33200000e-01 3.47800000e-01 3.39000000e-01 5.20400000e-01 3.00100000e-01 3.73800000e-01 3.96000000e-01 4.56500000e-01 - -1.49000000e-02 1.12420000e+00 2.46000000e-02 5.56500000e-01 6.16000000e-02 4.11400000e-01 1.16000000e-02 5.71700000e-01 1.25000000e-02 4.63800000e-01 1.87000000e-02 5.62100000e-01 - 2.00200000e-01 3.90200000e-01 1.88500000e-01 7.58300000e-01 3.07300000e-01 7.51300000e-01 2.21600000e-01 7.17400000e-01 3.20200000e-01 7.50000000e-01 2.47200000e-01 6.86900000e-01 - 4.07500000e-01 6.80400000e-01 5.98600000e-01 9.48100000e-01 7.86400000e-01 8.62500000e-01 1.81260000e+00 -9.12000000e-02 -6.70000000e-02 2.08300000e+00 -1.53400000e-01 1.91390000e+00 - 7.02300000e-01 4.10200000e-01 1.05780000e+00 6.72700000e-01 1.18140000e+00 6.87100000e-01 1.71390000e+00 2.01000000e-02 1.98290000e+00 1.83000000e-02 1.40000000e-02 1.71350000e+00 - 5.74000000e-01 4.04900000e-01 9.10600000e-01 6.24000000e-01 9.72100000e-01 7.84900000e-01 8.85200000e-01 6.49800000e-01 1.03860000e+00 7.32200000e-01 9.73300000e-01 5.51600000e-01 - 6.80800000e-01 4.17500000e-01 1.03230000e+00 6.83000000e-01 1.01320000e+00 8.52600000e-01 1.65610000e+00 9.23000000e-02 2.02220000e+00 -2.48000000e-02 -6.39000000e-02 1.80120000e+00 - 3.29700000e-01 4.79300000e-01 6.49200000e-01 4.75600000e-01 7.48800000e-01 4.94800000e-01 1.67970000e+00 6.57000000e-02 -8.06000000e-02 2.09680000e+00 
[data file payload omitted: consecutive rows of twelve floating-point values in scientific notation (a 12-column data matrix added by this diff); the original one-value-row-per-line layout was collapsed during extraction and is not reproduced here]
9.51000000e-02 1.70780000e+00 3.28000000e-02 -2.07000000e-02 2.02500000e+00 -6.40000000e-03 5.50000000e-03 - -1.77000000e-02 1.12750000e+00 3.84000000e-02 8.98700000e-01 1.06000000e-02 7.27700000e-01 1.63000000e-02 9.22100000e-01 -3.61000000e-02 7.68100000e-01 -2.62000000e-02 9.80100000e-01 - 4.42300000e-01 3.51400000e-01 5.71200000e-01 6.95700000e-01 6.95000000e-01 7.40900000e-01 5.54100000e-01 7.19800000e-01 6.92700000e-01 7.66800000e-01 6.29500000e-01 6.26200000e-01 - 3.05800000e-01 3.86400000e-01 4.66700000e-01 6.23200000e-01 5.26400000e-01 7.16300000e-01 4.99700000e-01 5.81600000e-01 4.70900000e-01 8.02000000e-01 4.99800000e-01 5.89400000e-01 - 1.48100000e-01 6.04700000e-01 4.14700000e-01 6.58600000e-01 5.43400000e-01 6.46500000e-01 1.57030000e+00 1.85300000e-01 7.57000000e-02 1.91200000e+00 -6.10000000e-03 7.20000000e-03 - 1.10090000e+00 5.90000000e-03 7.13000000e-01 1.28400000e-01 6.13200000e-01 5.83000000e-02 6.61200000e-01 1.91200000e-01 5.80300000e-01 8.07000000e-02 7.81300000e-01 4.64000000e-02 - 5.52900000e-01 5.56600000e-01 3.50500000e-01 2.33400000e-01 2.34500000e-01 2.64500000e-01 3.07700000e-01 2.82300000e-01 3.33400000e-01 1.37500000e-01 3.03800000e-01 2.90500000e-01 - 1.11330000e+00 -6.20000000e-03 3.99100000e-01 5.52800000e-01 3.83600000e-01 3.55000000e-01 3.98100000e-01 5.53300000e-01 3.99200000e-01 3.21700000e-01 4.47800000e-01 4.94300000e-01 - 1.76900000e-01 2.88500000e-01 4.04600000e-01 3.00400000e-01 4.45400000e-01 3.59900000e-01 3.85100000e-01 3.21200000e-01 4.39700000e-01 3.78900000e-01 2.99000000e-01 4.26800000e-01 - 3.95300000e-01 7.09400000e-01 7.20500000e-01 6.93400000e-01 8.37100000e-01 6.97200000e-01 1.75580000e+00 -2.66000000e-02 1.14200000e-01 1.86370000e+00 3.45000000e-02 1.69300000e+00 - 6.06400000e-01 1.19000000e-02 8.08100000e-01 1.85700000e-01 9.51500000e-01 1.76800000e-01 9.05300000e-01 7.33000000e-02 1.03270000e+00 9.55000000e-02 7.66100000e-01 2.36900000e-01 - -8.94000000e-02 1.15630000e+00 2.31200000e-01 1.13450000e+00 3.07700000e-01 1.19400000e+00 8.57000000e-02 1.63110000e+00 -5.41000000e-02 2.06170000e+00 -8.10000000e-03 9.70000000e-03 - 7.05400000e-01 4.11600000e-01 9.92900000e-01 6.21900000e-01 1.06040000e+00 6.76200000e-01 1.74390000e+00 -1.48000000e-02 1.94180000e+00 6.78000000e-02 -1.42500000e-01 1.90610000e+00 - 8.74000000e-02 1.03800000e+00 5.31000000e-01 9.00900000e-01 5.87700000e-01 9.76200000e-01 1.74050000e+00 -5.10000000e-03 -4.60000000e-03 2.00280000e+00 -7.05000000e-02 1.81350000e+00 - 9.75400000e-01 1.16700000e-01 3.63800000e-01 6.22800000e-01 2.35900000e-01 5.47200000e-01 3.52500000e-01 6.34300000e-01 3.38000000e-01 4.07300000e-01 4.74900000e-01 4.87700000e-01 - 6.61800000e-01 1.26800000e-01 1.04200000e+00 1.92700000e-01 1.11020000e+00 3.16500000e-01 9.89200000e-01 2.58000000e-01 1.10930000e+00 3.37100000e-01 1.08010000e+00 1.48500000e-01 - 1.38000000e-02 5.28000000e-01 8.90000000e-03 8.44800000e-01 3.42000000e-02 9.29000000e-01 6.53000000e-02 7.78000000e-01 1.64700000e-01 7.91000000e-01 6.08000000e-02 7.80800000e-01 - 1.15470000e+00 -5.62000000e-02 5.96900000e-01 2.94000000e-01 5.25300000e-01 1.80300000e-01 6.12600000e-01 2.75800000e-01 4.58400000e-01 2.39500000e-01 6.26400000e-01 2.59900000e-01 - 1.13370000e+00 -3.64000000e-02 4.02700000e-01 2.20000000e-03 3.76800000e-01 -3.99000000e-02 4.29500000e-01 -3.20000000e-02 2.65800000e-01 8.85000000e-02 4.48400000e-01 -5.42000000e-02 - 1.16360000e+00 -6.79000000e-02 6.57300000e-01 1.12000000e-01 4.55700000e-01 1.82400000e-01 5.34200000e-01 2.52900000e-01 4.23800000e-01 2.09000000e-01 
5.19200000e-01 2.73200000e-01 - 5.46700000e-01 4.09100000e-01 7.75800000e-01 7.38400000e-01 9.01300000e-01 8.16600000e-01 7.49900000e-01 7.66300000e-01 8.53500000e-01 9.06500000e-01 7.61800000e-01 7.52000000e-01 - 7.01800000e-01 -3.79000000e-02 9.04900000e-01 9.29000000e-02 1.04630000e+00 6.89000000e-02 1.70260000e+00 3.79000000e-02 1.97090000e+00 3.46000000e-02 4.64000000e-02 -5.44000000e-02 - 3.10000000e-01 5.50100000e-01 3.83100000e-01 9.85400000e-01 5.44500000e-01 9.93500000e-01 4.49600000e-01 9.03700000e-01 4.34500000e-01 1.14910000e+00 4.96700000e-01 8.44300000e-01 - 1.03550000e+00 8.49000000e-02 8.11900000e-01 5.25700000e-01 6.23900000e-01 3.28500000e-01 8.86600000e-01 4.42200000e-01 6.10300000e-01 3.19000000e-01 8.06700000e-01 5.39100000e-01 - 1.05680000e+00 5.98000000e-02 5.03200000e-01 1.22150000e+00 3.66000000e-01 9.96000000e-01 4.69300000e-01 1.26810000e+00 4.27500000e-01 8.65000000e-01 6.64400000e-01 1.03040000e+00 - 2.30900000e-01 8.86000000e-01 1.80100000e-01 1.55070000e+00 9.79000000e-02 1.63470000e+00 1.66000000e-01 1.57030000e+00 1.19700000e-01 1.46740000e+00 2.85800000e-01 1.42350000e+00 - 2.09200000e-01 5.97600000e-01 3.09800000e-01 9.56400000e-01 2.81800000e-01 1.17180000e+00 2.92800000e-01 9.82500000e-01 3.58700000e-01 1.10690000e+00 1.95600000e-01 1.09530000e+00 - 6.10600000e-01 5.09600000e-01 1.02830000e+00 5.05900000e-01 9.47800000e-01 7.35900000e-01 1.81360000e+00 -9.67000000e-02 2.01870000e+00 -2.31000000e-02 -8.66000000e-02 1.83360000e+00 - 2.41600000e-01 5.40800000e-01 5.72500000e-01 5.23600000e-01 5.43800000e-01 6.95600000e-01 1.76670000e+00 -4.16000000e-02 -7.80000000e-03 2.00660000e+00 -4.72000000e-02 5.65000000e-02 - 1.19680000e+00 -1.02700000e-01 6.22600000e-01 1.11430000e+00 5.69900000e-01 9.08900000e-01 6.50000000e-01 1.08070000e+00 4.75900000e-01 9.42600000e-01 5.69500000e-01 1.17480000e+00 - 5.88500000e-01 3.38700000e-01 8.50000000e-01 6.16400000e-01 1.05500000e+00 5.98200000e-01 9.38400000e-01 5.10700000e-01 1.03850000e+00 6.37400000e-01 8.76700000e-01 5.83400000e-01 - 1.07710000e+00 2.96000000e-02 9.10300000e-01 8.43500000e-01 8.38500000e-01 8.79800000e-01 9.87100000e-01 7.48500000e-01 7.35600000e-01 8.55000000e-01 9.51500000e-01 7.87500000e-01 - 1.04740000e+00 7.21000000e-02 5.77200000e-01 3.69800000e-01 5.16500000e-01 2.22600000e-01 5.65200000e-01 3.85100000e-01 4.96800000e-01 2.24400000e-01 5.51400000e-01 4.03100000e-01 - 2.42000000e-01 1.29600000e-01 3.99100000e-01 1.78700000e-01 4.53200000e-01 2.06000000e-01 4.15900000e-01 1.58300000e-01 4.17400000e-01 2.55700000e-01 4.41800000e-01 1.27400000e-01 - 1.80000000e-02 6.81600000e-01 2.07000000e-01 8.04900000e-01 3.90500000e-01 7.32900000e-01 6.50000000e-02 1.65450000e+00 -9.80000000e-03 2.01250000e+00 -3.51000000e-02 4.16000000e-02 - 2.90200000e-01 3.46000000e-02 5.14800000e-01 -2.08000000e-02 5.65200000e-01 2.40000000e-03 5.13400000e-01 -1.48000000e-02 4.32000000e-01 1.73900000e-01 5.27600000e-01 -3.50000000e-02 - 2.32800000e-01 7.30000000e-02 3.50300000e-01 1.30300000e-01 4.08600000e-01 1.33700000e-01 3.79300000e-01 9.70000000e-02 3.74900000e-01 1.88500000e-01 2.69900000e-01 2.26200000e-01 - 5.67900000e-01 5.51600000e-01 2.73200000e-01 5.01200000e-01 1.30200000e-01 5.17000000e-01 3.59900000e-01 3.97900000e-01 3.00500000e-01 3.00300000e-01 3.34300000e-01 4.26300000e-01 - -5.21000000e-02 5.35000000e-01 2.35500000e-01 5.52800000e-01 3.85900000e-01 5.14600000e-01 7.29000000e-02 1.64770000e+00 5.34000000e-02 -6.16000000e-02 -5.64000000e-02 6.77000000e-02 - 1.09870000e+00 1.10000000e-02 1.15350000e+00 
4.36200000e-01 8.56200000e-01 1.63900000e-01 1.14500000e+00 4.44000000e-01 8.42900000e-01 1.49700000e-01 1.10780000e+00 4.86500000e-01 - 3.87700000e-01 5.10100000e-01 7.96600000e-01 5.75900000e-01 8.25800000e-01 7.50100000e-01 7.75600000e-01 6.01300000e-01 8.51000000e-01 7.42000000e-01 7.11800000e-01 6.75700000e-01 - -9.20000000e-03 6.39100000e-01 1.27300000e-01 8.19400000e-01 3.02500000e-01 7.53100000e-01 -7.50000000e-03 1.74280000e+00 -4.09000000e-02 2.04430000e+00 5.51000000e-02 -6.81000000e-02 - 3.29400000e-01 7.74100000e-01 6.21900000e-01 9.50900000e-01 7.06600000e-01 9.86200000e-01 1.74470000e+00 -1.54000000e-02 -8.53000000e-02 2.10650000e+00 -6.74000000e-02 1.80810000e+00 - 4.25400000e-01 2.80000000e-02 2.23200000e-01 2.61000000e-02 1.46100000e-01 8.13000000e-02 2.48700000e-01 -1.70000000e-03 2.23600000e-01 -1.60000000e-02 2.29400000e-01 2.17000000e-02 - 3.11000000e-02 1.07210000e+00 -5.19000000e-02 1.79760000e+00 1.30000000e-02 1.10240000e+00 -1.49000000e-02 1.75170000e+00 7.80000000e-02 9.90900000e-01 -7.92000000e-02 1.82600000e+00 - 3.94200000e-01 5.78400000e-01 7.27200000e-01 5.52000000e-01 8.26600000e-01 5.78100000e-01 1.65740000e+00 8.12000000e-02 1.31300000e-01 1.84850000e+00 -9.30000000e-03 9.80000000e-03 - 1.89700000e-01 9.10300000e-01 3.93000000e-02 1.70410000e+00 1.20100000e-01 1.30850000e+00 7.34000000e-02 1.66570000e+00 -1.75000000e-02 1.40520000e+00 8.13000000e-02 1.65340000e+00 - 3.56800000e-01 7.47000000e-01 6.35300000e-01 1.01370000e+00 8.40500000e-01 9.09500000e-01 1.75560000e+00 -2.57000000e-02 1.73800000e-01 1.79270000e+00 2.51000000e-02 1.70100000e+00 - 5.35500000e-01 3.93300000e-01 7.58800000e-01 7.08500000e-01 8.84800000e-01 7.79800000e-01 6.50900000e-01 8.34300000e-01 8.00400000e-01 9.12700000e-01 7.54200000e-01 7.12400000e-01 - 4.42900000e-01 2.29100000e-01 5.43500000e-01 5.40900000e-01 7.17200000e-01 4.96100000e-01 5.82700000e-01 4.95000000e-01 7.35100000e-01 4.97500000e-01 5.51800000e-01 5.30700000e-01 - 5.86100000e-01 3.97100000e-01 9.46300000e-01 5.92400000e-01 1.03230000e+00 7.27700000e-01 1.02060000e+00 5.03400000e-01 1.03330000e+00 7.52600000e-01 1.01880000e+00 5.10800000e-01 - 2.21100000e-01 4.82100000e-01 2.16000000e-01 9.07300000e-01 2.43700000e-01 1.03610000e+00 2.03600000e-01 9.23700000e-01 2.09800000e-01 1.09660000e+00 2.52500000e-01 8.65100000e-01 - 4.09100000e-01 1.42200000e-01 5.65000000e-01 3.18800000e-01 6.34300000e-01 3.75000000e-01 5.89300000e-01 2.90000000e-01 5.53200000e-01 4.87100000e-01 5.63300000e-01 3.20100000e-01 - 6.10900000e-01 1.51800000e-01 9.95500000e-01 7.08000000e-02 1.11880000e+00 6.53000000e-02 1.68790000e+00 4.66000000e-02 2.04040000e+00 -4.49000000e-02 -3.02000000e-02 3.54000000e-02 - 3.59900000e-01 4.93800000e-01 5.87300000e-01 7.41900000e-01 4.33200000e-01 1.12780000e+00 4.74600000e-01 8.74900000e-01 4.47900000e-01 1.12810000e+00 4.87600000e-01 8.62900000e-01 - 4.93300000e-01 6.06200000e-01 8.67000000e-01 7.76600000e-01 8.79200000e-01 9.04900000e-01 1.69160000e+00 4.68000000e-02 -9.60000000e-03 2.01290000e+00 1.21600000e-01 1.58210000e+00 - 2.66700000e-01 8.35700000e-01 6.53000000e-01 8.18100000e-01 6.73000000e-01 9.39400000e-01 1.79740000e+00 -7.51000000e-02 6.25000000e-02 1.91920000e+00 5.72000000e-02 1.66680000e+00 - 1.09300000e+00 1.69000000e-02 4.44900000e-01 1.30150000e+00 4.81900000e-01 6.19100000e-01 5.78300000e-01 1.14530000e+00 3.57000000e-01 7.36000000e-01 5.50500000e-01 1.17900000e+00 - 1.39000000e-02 2.19300000e-01 4.74000000e-02 8.53000000e-02 2.33000000e-02 9.48000000e-02 6.58000000e-02 6.21000000e-02 
-3.51000000e-02 1.62800000e-01 -4.31000000e-02 1.93000000e-01 - 2.07800000e-01 4.76200000e-01 4.65600000e-01 5.81000000e-01 7.15500000e-01 4.41000000e-01 4.69200000e-01 5.78000000e-01 5.97300000e-01 6.01900000e-01 4.81300000e-01 5.65400000e-01 - 8.78200000e-01 1.07600000e-01 1.29470000e+00 -1.81000000e-02 1.39720000e+00 4.20000000e-03 1.69070000e+00 4.42000000e-02 2.17620000e+00 -2.02500000e-01 3.64000000e-02 -4.44000000e-02 - 2.84000000e-01 8.09000000e-01 1.00800000e-01 7.63700000e-01 7.97000000e-02 6.10300000e-01 1.17400000e-01 7.45100000e-01 1.00400000e-01 5.75000000e-01 2.02200000e-01 6.40800000e-01 - 4.60800000e-01 -5.26000000e-02 7.45300000e-01 -1.75000000e-02 8.17400000e-01 4.05000000e-02 1.75140000e+00 -2.07000000e-02 2.13000000e-02 -2.60000000e-02 4.55000000e-02 -5.48000000e-02 - 1.12820000e+00 -2.99000000e-02 3.54600000e-01 4.15200000e-01 3.71900000e-01 2.42100000e-01 4.40900000e-01 3.12400000e-01 2.48100000e-01 3.74700000e-01 4.20400000e-01 3.42100000e-01 - 3.96200000e-01 5.49500000e-01 5.24300000e-01 9.78500000e-01 5.82200000e-01 1.13180000e+00 4.84700000e-01 1.02150000e+00 5.54900000e-01 1.18390000e+00 5.84800000e-01 9.03800000e-01 - 5.96400000e-01 4.99600000e-01 9.01100000e-01 7.97000000e-01 9.51100000e-01 8.77600000e-01 1.82520000e+00 -1.06300000e-01 -7.82000000e-02 2.09250000e+00 4.34000000e-02 1.68160000e+00 - 3.06000000e-01 1.73100000e-01 5.56300000e-01 1.76700000e-01 7.11300000e-01 1.12500000e-01 5.00800000e-01 2.44500000e-01 6.65100000e-01 1.78000000e-01 5.88600000e-01 1.37900000e-01 - 3.28300000e-01 -1.64000000e-02 2.41900000e-01 -7.37000000e-02 1.83800000e-01 -3.27000000e-02 2.15200000e-01 -4.09000000e-02 1.70800000e-01 -1.97000000e-02 8.02000000e-02 1.26200000e-01 - 1.52400000e-01 3.23800000e-01 3.79100000e-01 4.22100000e-01 6.12000000e-01 2.89900000e-01 1.71300000e+00 2.45000000e-02 1.95000000e-02 -2.12000000e-02 4.60000000e-03 -4.00000000e-03 - 3.19800000e-01 2.98400000e-01 4.88900000e-01 4.66200000e-01 5.87700000e-01 4.94300000e-01 1.75780000e+00 -2.63000000e-02 3.43000000e-02 1.96280000e+00 1.15000000e-02 -1.42000000e-02 - 1.70500000e-01 4.62200000e-01 5.65000000e-01 3.66900000e-01 5.69900000e-01 4.98600000e-01 1.73190000e+00 4.60000000e-03 -4.38000000e-02 2.05380000e+00 -2.86000000e-02 3.34000000e-02 - 1.93400000e-01 4.67500000e-01 6.37300000e-01 3.11600000e-01 6.70400000e-01 4.08800000e-01 1.82950000e+00 -1.14500000e-01 -1.51000000e-02 2.01300000e+00 4.03000000e-02 -4.75000000e-02 - 3.25500000e-01 2.59700000e-01 5.70500000e-01 3.38200000e-01 6.48900000e-01 3.81900000e-01 5.94300000e-01 3.05300000e-01 6.51100000e-01 3.95700000e-01 5.66600000e-01 3.41100000e-01 - 3.02800000e-01 2.59200000e-01 6.19000000e-01 2.36800000e-01 6.10800000e-01 3.80700000e-01 5.32300000e-01 3.42500000e-01 6.71000000e-01 3.27500000e-01 5.59700000e-01 3.10500000e-01 - 1.11040000e+00 -5.10000000e-03 3.55300000e-01 1.10660000e+00 4.50100000e-01 5.24700000e-01 5.73100000e-01 8.50700000e-01 2.85800000e-01 6.87000000e-01 5.13400000e-01 9.21400000e-01 - 4.58500000e-01 5.00700000e-01 6.85500000e-01 8.28300000e-01 8.35700000e-01 8.70400000e-01 7.02200000e-01 8.08300000e-01 8.25000000e-01 9.11600000e-01 7.42900000e-01 7.53400000e-01 - 7.65500000e-01 3.55100000e-01 4.28000000e-01 2.03500000e-01 2.89700000e-01 2.49500000e-01 3.77400000e-01 2.65500000e-01 3.34000000e-01 1.85700000e-01 3.59500000e-01 2.86600000e-01 - 5.42000000e-01 5.58300000e-01 2.97400000e-01 6.87200000e-01 2.39500000e-01 5.32700000e-01 3.09800000e-01 6.73300000e-01 2.97300000e-01 4.43500000e-01 2.66600000e-01 7.22600000e-01 - 
5.24500000e-01 5.81000000e-01 3.00100000e-01 4.88300000e-01 2.50800000e-01 3.87000000e-01 2.18100000e-01 5.88200000e-01 2.73400000e-01 3.50100000e-01 2.61500000e-01 5.36900000e-01 - 4.37400000e-01 6.66800000e-01 1.83400000e-01 9.82100000e-01 1.69600000e-01 7.04900000e-01 2.14000000e-01 9.45700000e-01 1.81300000e-01 6.73100000e-01 2.05600000e-01 9.57700000e-01 - 2.04500000e-01 6.34200000e-01 3.05300000e-01 1.00650000e+00 5.15500000e-01 9.54300000e-01 4.08600000e-01 8.88900000e-01 5.09700000e-01 9.79000000e-01 3.33900000e-01 9.80300000e-01 - 3.80200000e-01 6.11000000e-01 5.16400000e-01 1.04750000e+00 6.13100000e-01 1.17010000e+00 5.40300000e-01 1.02090000e+00 6.35700000e-01 1.17390000e+00 4.80800000e-01 1.09120000e+00 - 2.86800000e-01 5.06300000e-01 7.49500000e-01 3.29000000e-01 6.81500000e-01 5.48500000e-01 1.61140000e+00 1.38600000e-01 -1.77000000e-02 2.02530000e+00 4.01000000e-02 -4.87000000e-02 - 4.81600000e-01 6.29500000e-01 7.79400000e-01 7.60000000e-01 8.33200000e-01 8.41500000e-01 1.74420000e+00 -1.20000000e-02 2.58000000e-02 1.97440000e+00 -7.50000000e-02 1.81760000e+00 - 3.45400000e-01 7.64400000e-01 2.20300000e-01 8.64700000e-01 1.52900000e-01 6.81100000e-01 2.43500000e-01 8.35300000e-01 2.04500000e-01 5.98900000e-01 2.25700000e-01 8.60900000e-01 - 8.54200000e-01 2.48200000e-01 3.93600000e-01 6.30000000e-02 2.54000000e-01 1.47700000e-01 4.74300000e-01 -3.14000000e-02 3.25500000e-01 5.51000000e-02 3.17300000e-01 1.53700000e-01 - 2.96100000e-01 1.88900000e-01 5.93300000e-01 2.05200000e-01 7.13100000e-01 2.05500000e-01 1.80780000e+00 -8.76000000e-02 -2.68000000e-02 2.97000000e-02 3.55000000e-02 -4.20000000e-02 - 1.54600000e-01 4.46100000e-01 1.88500000e-01 7.61400000e-01 2.20000000e-01 8.61900000e-01 1.73200000e-01 7.77500000e-01 1.39900000e-01 9.71100000e-01 2.45600000e-01 6.94500000e-01 - 1.53000000e-02 5.58400000e-01 2.11300000e-01 6.96200000e-01 4.39400000e-01 5.66400000e-01 -2.03000000e-02 1.75500000e+00 5.93000000e-02 1.92910000e+00 -3.80000000e-03 7.10000000e-03 - 2.66400000e-01 2.50100000e-01 3.68100000e-01 4.48800000e-01 3.52400000e-01 5.89700000e-01 3.64800000e-01 4.54500000e-01 4.98400000e-01 4.33100000e-01 3.21900000e-01 5.03600000e-01 - 5.96400000e-01 4.52300000e-01 9.32200000e-01 4.22400000e-01 1.08250000e+00 3.83200000e-01 1.74860000e+00 -1.37000000e-02 2.01880000e+00 -2.52000000e-02 2.18000000e-02 -2.69000000e-02 - 1.73000000e-02 4.22300000e-01 -2.20000000e-02 7.22600000e-01 1.26100000e-01 6.42400000e-01 -1.27000000e-02 7.11800000e-01 -1.03000000e-02 8.14700000e-01 1.20300000e-01 5.51700000e-01 - 1.13540000e+00 -3.50000000e-02 7.70800000e-01 9.49000000e-01 6.38100000e-01 4.11300000e-01 6.68900000e-01 1.07150000e+00 5.09900000e-01 5.26200000e-01 7.82300000e-01 9.37700000e-01 - 5.30000000e-03 1.11290000e+00 3.17000000e-02 6.17300000e-01 -1.76000000e-02 5.64700000e-01 1.60000000e-03 6.54400000e-01 6.10000000e-03 5.21700000e-01 -4.72000000e-02 7.11500000e-01 - 2.37200000e-01 5.59500000e-01 2.52200000e-01 1.01580000e+00 3.84000000e-01 1.04390000e+00 3.18200000e-01 9.39200000e-01 3.24300000e-01 1.13270000e+00 3.02100000e-01 9.55900000e-01 - 1.19760000e+00 -1.04200000e-01 7.25400000e-01 -7.50000000e-03 4.69400000e-01 1.39900000e-01 6.78300000e-01 5.03000000e-02 4.50000000e-01 1.50600000e-01 6.72700000e-01 5.19000000e-02 - 2.65000000e-02 8.23000000e-01 1.93800000e-01 9.57000000e-01 2.36500000e-01 1.05400000e+00 6.79000000e-02 1.65370000e+00 8.30000000e-02 1.90380000e+00 3.79000000e-02 -4.64000000e-02 - 1.11210000e+00 -1.06000000e-02 1.17040000e+00 5.75600000e-01 
9.48500000e-01 5.25700000e-01 1.21360000e+00 5.21100000e-01 8.55300000e-01 5.59300000e-01 1.25650000e+00 4.80000000e-01 - 2.68600000e-01 3.94500000e-01 3.56200000e-01 6.98200000e-01 4.46200000e-01 7.40800000e-01 3.55600000e-01 7.00100000e-01 4.79100000e-01 7.20600000e-01 4.95500000e-01 5.32900000e-01 - 2.71200000e-01 8.18600000e-01 5.51000000e-02 1.35830000e+00 1.35700000e-01 8.32400000e-01 9.13000000e-02 1.31800000e+00 1.27200000e-01 8.18600000e-01 7.60000000e-02 1.33570000e+00 - 2.93200000e-01 5.12500000e-01 5.62100000e-01 5.65600000e-01 7.16000000e-01 5.27400000e-01 1.77980000e+00 -5.33000000e-02 -1.13000000e-02 2.01910000e+00 -2.36000000e-02 3.02000000e-02 - 1.08280000e+00 2.74000000e-02 5.63700000e-01 3.87700000e-01 5.01100000e-01 2.43600000e-01 5.51700000e-01 4.05300000e-01 4.44700000e-01 2.87500000e-01 5.55000000e-01 4.00700000e-01 - 1.79100000e-01 6.83800000e-01 2.66800000e-01 1.09460000e+00 2.58700000e-01 1.29400000e+00 1.81200000e-01 1.19200000e+00 3.19500000e-01 1.24330000e+00 1.81300000e-01 1.19090000e+00 - 4.07500000e-01 6.13900000e-01 8.65600000e-01 6.88200000e-01 9.16600000e-01 8.70300000e-01 7.65700000e-01 8.08200000e-01 8.97500000e-01 9.20300000e-01 7.93200000e-01 7.76900000e-01 - 4.37000000e-01 6.56100000e-01 2.18700000e-01 3.69700000e-01 1.61800000e-01 3.33500000e-01 2.01600000e-01 3.89200000e-01 1.34200000e-01 3.53800000e-01 2.62800000e-01 3.17500000e-01 - 1.09000000e-01 6.44900000e-01 4.69600000e-01 5.94300000e-01 4.52800000e-01 7.57900000e-01 -4.65000000e-02 1.78430000e+00 -1.02700000e-01 2.12750000e+00 -5.87000000e-02 6.87000000e-02 - 1.04840000e+00 6.61000000e-02 6.35700000e-01 8.07200000e-01 5.07400000e-01 4.79300000e-01 5.91500000e-01 8.59600000e-01 5.08300000e-01 4.51200000e-01 6.89700000e-01 7.42600000e-01 - 1.06450000e+00 5.31000000e-02 6.89500000e-01 1.05580000e+00 6.54400000e-01 1.31560000e+00 7.18800000e-01 1.02710000e+00 5.94200000e-01 1.10340000e+00 8.04600000e-01 9.18700000e-01 - -7.22000000e-02 1.19190000e+00 1.61700000e-01 1.40680000e+00 4.31600000e-01 1.22700000e+00 -3.90000000e-02 1.77380000e+00 5.28000000e-02 1.93420000e+00 -7.32000000e-02 1.81580000e+00 - 2.56300000e-01 1.22900000e-01 3.19200000e-01 2.90700000e-01 4.07300000e-01 2.76100000e-01 3.92900000e-01 1.99800000e-01 4.72200000e-01 2.13000000e-01 3.53500000e-01 2.50600000e-01 - 1.09650000e+00 8.70000000e-03 5.27600000e-01 4.27100000e-01 5.28200000e-01 2.05800000e-01 5.57100000e-01 3.89100000e-01 5.31500000e-01 1.85000000e-01 6.91500000e-01 2.31100000e-01 - 5.12200000e-01 3.40500000e-01 6.78300000e-01 6.80300000e-01 8.98200000e-01 6.26100000e-01 6.58900000e-01 7.07900000e-01 8.58400000e-01 6.98100000e-01 7.47400000e-01 5.98000000e-01 - 6.73300000e-01 4.35700000e-01 1.09490000e+00 4.15300000e-01 1.19170000e+00 4.43600000e-01 1.68570000e+00 5.10000000e-02 1.96420000e+00 4.62000000e-02 -1.42900000e-01 1.90100000e+00 - 1.06950000e+00 4.30000000e-02 6.88300000e-01 1.04050000e+00 6.14100000e-01 8.14800000e-01 6.03900000e-01 1.13770000e+00 5.24100000e-01 8.48900000e-01 6.83600000e-01 1.04340000e+00 - 8.83000000e-01 2.31300000e-01 3.89900000e-01 3.66000000e-02 3.08400000e-01 5.88000000e-02 3.94300000e-01 3.26000000e-02 3.43500000e-01 7.20000000e-03 3.78900000e-01 4.91000000e-02 - 1.15460000e+00 -5.61000000e-02 1.12440000e+00 5.94000000e-01 7.50500000e-01 5.28200000e-01 1.00670000e+00 7.32300000e-01 8.52400000e-01 3.53600000e-01 9.06500000e-01 8.51900000e-01 - 5.87600000e-01 5.09700000e-01 3.12000000e-01 5.23700000e-01 1.99000000e-01 4.87700000e-01 2.77200000e-01 5.69500000e-01 2.64700000e-01 
3.92100000e-01 2.66500000e-01 5.81300000e-01 - 3.13600000e-01 7.89000000e-01 2.31400000e-01 1.49390000e+00 1.41400000e-01 1.83290000e+00 1.08500000e-01 1.64010000e+00 1.78800000e-01 1.73670000e+00 2.27300000e-01 1.49830000e+00 - 1.01000000e-01 5.67500000e-01 5.59900000e-01 3.97700000e-01 6.03100000e-01 4.92000000e-01 1.67420000e+00 6.70000000e-02 1.12100000e-01 1.86830000e+00 -3.80000000e-03 5.00000000e-03 - 3.65000000e-01 4.63700000e-01 7.69100000e-01 3.56900000e-01 9.18800000e-01 3.19700000e-01 1.75700000e+00 -3.49000000e-02 9.29000000e-02 1.89460000e+00 -3.44000000e-02 4.13000000e-02 - 8.04000000e-01 2.01800000e-01 1.29640000e+00 2.76700000e-01 1.46000000e+00 3.39100000e-01 1.24290000e+00 3.39000000e-01 1.42120000e+00 4.05200000e-01 1.17730000e+00 4.17900000e-01 - 2.13200000e-01 7.74500000e-01 6.10100000e-01 6.72100000e-01 6.43200000e-01 7.73500000e-01 1.77460000e+00 -5.07000000e-02 1.97000000e-02 1.97920000e+00 -6.48000000e-02 7.62000000e-02 - 1.05070000e+00 6.92000000e-02 7.01700000e-01 3.94900000e-01 6.10300000e-01 2.16200000e-01 7.92000000e-01 2.83600000e-01 5.53300000e-01 2.67800000e-01 7.62700000e-01 3.18600000e-01 - 4.87400000e-01 4.53100000e-01 8.93300000e-01 3.41900000e-01 1.02620000e+00 3.28300000e-01 1.78660000e+00 -6.76000000e-02 1.95180000e+00 5.61000000e-02 3.02000000e-02 -3.54000000e-02 - 8.43000000e-01 5.19000000e-02 1.23190000e+00 -3.71000000e-02 1.17890000e+00 1.66600000e-01 1.82370000e+00 -1.11700000e-01 2.10080000e+00 -1.15100000e-01 5.55000000e-02 -6.56000000e-02 - 7.29600000e-01 3.43200000e-01 1.20720000e+00 4.56700000e-01 1.27240000e+00 6.42500000e-01 1.13080000e+00 5.48400000e-01 1.36200000e+00 5.67400000e-01 1.08660000e+00 6.01500000e-01 - 1.09970000e+00 4.70000000e-03 4.11200000e-01 1.33930000e+00 3.91200000e-01 9.29100000e-01 5.19700000e-01 1.21130000e+00 4.10100000e-01 8.52100000e-01 4.80900000e-01 1.25340000e+00 - 4.14700000e-01 5.40900000e-01 5.89700000e-01 9.16200000e-01 5.66600000e-01 1.17000000e+00 5.82500000e-01 9.24600000e-01 7.14300000e-01 1.01500000e+00 6.08400000e-01 8.89700000e-01 - 1.14110000e+00 -4.03000000e-02 4.37400000e-01 2.74900000e-01 3.12200000e-01 2.82900000e-01 4.58000000e-01 2.53500000e-01 3.07800000e-01 2.77300000e-01 5.24400000e-01 1.71700000e-01 - 4.60800000e-01 2.20500000e-01 7.63200000e-01 3.03400000e-01 8.58000000e-01 3.50800000e-01 7.29400000e-01 3.40900000e-01 8.18900000e-01 4.15200000e-01 6.93900000e-01 3.82800000e-01 - 1.27100000e-01 7.32800000e-01 1.94000000e-01 1.14790000e+00 2.62100000e-01 1.26070000e+00 1.95400000e-01 1.15510000e+00 1.67000000e-01 1.39630000e+00 1.34500000e-01 1.22370000e+00 - 3.00000000e-03 3.71700000e-01 4.31000000e-02 5.40300000e-01 1.27000000e-01 5.20300000e-01 5.98000000e-02 5.20900000e-01 1.10900000e-01 5.50800000e-01 7.87000000e-02 4.97900000e-01 - 2.46000000e-01 8.71700000e-01 1.94900000e-01 7.05900000e-01 1.21000000e-01 6.01300000e-01 2.66200000e-01 6.21600000e-01 1.45700000e-01 5.56800000e-01 1.48100000e-01 7.61500000e-01 - -1.88000000e-02 7.23200000e-01 3.05800000e-01 6.95000000e-01 3.37300000e-01 7.99100000e-01 3.13000000e-02 1.68990000e+00 1.31000000e-02 1.98330000e+00 -3.56000000e-02 4.27000000e-02 - 4.52800000e-01 6.40800000e-01 1.97900000e-01 1.54150000e+00 2.25600000e-01 1.02590000e+00 1.74400000e-01 1.56430000e+00 2.27400000e-01 9.76000000e-01 1.53200000e-01 1.59440000e+00 - 1.11910000e+00 -1.61000000e-02 4.10400000e-01 8.46400000e-01 3.94300000e-01 5.13900000e-01 4.62600000e-01 7.83500000e-01 3.78200000e-01 5.08100000e-01 4.42500000e-01 8.05900000e-01 - 1.11400000e+00 -6.90000000e-03 
3.72600000e-01 9.71100000e-01 4.13300000e-01 5.25900000e-01 4.25700000e-01 9.04500000e-01 3.12300000e-01 6.16400000e-01 4.40000000e-01 8.87300000e-01 - 1.80400000e-01 9.07100000e-01 3.86100000e-01 1.29350000e+00 2.42100000e-01 1.70970000e+00 2.16300000e-01 1.49350000e+00 3.58400000e-01 1.60070000e+00 2.49900000e-01 1.46060000e+00 - 6.48300000e-01 4.61400000e-01 9.08300000e-01 5.50300000e-01 1.14490000e+00 4.15500000e-01 1.72620000e+00 6.70000000e-03 1.99370000e+00 2.30000000e-03 4.84000000e-02 1.67770000e+00 - 1.13910000e+00 -3.72000000e-02 5.83600000e-01 6.99700000e-01 4.79100000e-01 4.47700000e-01 6.01200000e-01 6.82500000e-01 3.87000000e-01 5.32400000e-01 6.39200000e-01 6.36900000e-01 - 4.46200000e-01 3.67800000e-01 5.46900000e-01 7.54400000e-01 6.19800000e-01 8.63400000e-01 5.47200000e-01 7.52600000e-01 5.70400000e-01 9.40700000e-01 5.57400000e-01 7.47500000e-01 - 3.58300000e-01 7.45800000e-01 2.07700000e-01 1.51990000e+00 1.08000000e-01 9.75200000e-01 1.66800000e-01 1.57000000e+00 1.27700000e-01 9.20800000e-01 1.92300000e-01 1.53940000e+00 - 5.19000000e-01 5.45400000e-01 8.21500000e-01 5.52900000e-01 9.02800000e-01 5.99100000e-01 1.69280000e+00 4.21000000e-02 6.75000000e-02 1.92300000e+00 9.56000000e-02 1.61970000e+00 - 3.74000000e-01 6.60300000e-01 7.32200000e-01 8.63800000e-01 8.29000000e-01 9.85500000e-01 7.45800000e-01 8.41100000e-01 8.00000000e-01 1.04420000e+00 7.36000000e-01 8.54000000e-01 - 1.17180000e+00 -8.16000000e-02 4.40700000e-01 2.60000000e-01 3.38600000e-01 2.44500000e-01 4.05900000e-01 3.05500000e-01 3.06500000e-01 2.70200000e-01 4.43800000e-01 2.58300000e-01 - 2.74000000e-01 2.77800000e-01 6.40700000e-01 2.12400000e-01 7.39800000e-01 2.35200000e-01 1.75120000e+00 -2.48000000e-02 1.02000000e-02 -1.34000000e-02 6.93000000e-02 -8.36000000e-02 - 1.16790000e+00 -7.20000000e-02 6.00700000e-01 4.26100000e-01 6.17700000e-01 1.55400000e-01 5.62700000e-01 4.67000000e-01 5.31100000e-01 2.37300000e-01 5.66600000e-01 4.64300000e-01 - 3.39000000e-02 5.30000000e-02 3.33300000e-01 6.70000000e-02 5.00100000e-01 1.22000000e-02 -5.25000000e-02 1.79080000e+00 -5.00000000e-02 5.95000000e-02 -2.77000000e-02 3.16000000e-02 - 3.88400000e-01 1.58000000e-01 8.19300000e-01 1.93000000e-02 9.14400000e-01 4.22000000e-02 1.72280000e+00 1.25000000e-02 -4.59000000e-02 5.58000000e-02 4.50000000e-03 -5.50000000e-03 - 3.81400000e-01 2.26500000e-01 6.55100000e-01 2.85000000e-01 7.87900000e-01 2.76000000e-01 7.48700000e-01 1.77400000e-01 7.76000000e-01 3.07400000e-01 6.23200000e-01 3.25300000e-01 - 4.96600000e-01 6.09700000e-01 7.71700000e-01 9.73800000e-01 8.60500000e-01 1.01430000e+00 1.78680000e+00 -6.60000000e-02 8.47000000e-02 1.89790000e+00 7.49000000e-02 1.64430000e+00 - 2.86800000e-01 2.34400000e-01 4.34900000e-01 3.84400000e-01 3.95300000e-01 5.56800000e-01 3.74000000e-01 4.55800000e-01 4.67500000e-01 4.84000000e-01 5.03900000e-01 2.97800000e-01 - 1.52700000e-01 4.68100000e-01 1.82900000e-01 7.96100000e-01 1.37100000e-01 9.91900000e-01 2.11100000e-01 7.65500000e-01 3.03100000e-01 8.15100000e-01 2.01100000e-01 7.74000000e-01 - 3.41000000e-02 1.07450000e+00 -2.73000000e-02 5.91300000e-01 3.38000000e-02 4.27400000e-01 1.00000000e-01 4.37900000e-01 9.91000000e-02 3.40400000e-01 2.27000000e-02 5.30900000e-01 - 1.10460000e+00 8.00000000e-04 3.14700000e-01 1.43960000e+00 3.69400000e-01 8.82900000e-01 3.73400000e-01 1.36950000e+00 4.44500000e-01 7.48700000e-01 4.70200000e-01 1.25450000e+00 - 9.09000000e-02 3.71300000e-01 1.10300000e-01 6.16600000e-01 1.36500000e-01 6.90900000e-01 1.75700000e-01 
5.37900000e-01 1.26300000e-01 7.11900000e-01 1.91600000e-01 5.22300000e-01 - 3.13400000e-01 7.89600000e-01 2.75200000e-01 1.44130000e+00 1.59200000e-01 1.39820000e+00 2.32900000e-01 1.49330000e+00 9.79000000e-02 1.38240000e+00 2.85600000e-01 1.42630000e+00 - 3.18700000e-01 6.44700000e-01 7.36900000e-01 5.16300000e-01 8.41700000e-01 5.36800000e-01 1.70130000e+00 4.20000000e-02 1.55000000e-02 1.97800000e+00 -5.30000000e-02 6.26000000e-02 - -2.20000000e-03 5.77500000e-01 2.30600000e-01 6.62400000e-01 3.15400000e-01 7.06800000e-01 -1.56200000e-01 1.91160000e+00 -3.26000000e-02 2.03550000e+00 -5.31000000e-02 6.38000000e-02 - 4.16300000e-01 6.78300000e-01 1.07400000e-01 8.20000000e-01 2.05700000e-01 5.10200000e-01 1.61000000e-01 7.55700000e-01 1.94100000e-01 5.04400000e-01 1.83300000e-01 7.26900000e-01 - 5.17700000e-01 3.90400000e-01 8.68100000e-01 5.38400000e-01 1.00030000e+00 6.04500000e-01 8.99300000e-01 5.04500000e-01 1.01260000e+00 6.15000000e-01 9.59700000e-01 4.32000000e-01 - 2.40500000e-01 5.88500000e-01 5.51200000e-01 5.91300000e-01 8.01000000e-01 4.39900000e-01 1.67510000e+00 6.95000000e-02 -1.63000000e-02 2.01750000e+00 -6.00000000e-04 2.00000000e-04 - 3.56300000e-01 5.90400000e-01 6.99600000e-01 7.59300000e-01 6.49300000e-01 1.03570000e+00 5.51400000e-01 9.33400000e-01 7.52200000e-01 9.36600000e-01 6.30100000e-01 8.38100000e-01 - 8.75800000e-01 2.32300000e-01 3.72400000e-01 6.18300000e-01 3.83900000e-01 3.73000000e-01 3.44100000e-01 6.49600000e-01 3.82700000e-01 3.60800000e-01 4.37500000e-01 5.40700000e-01 - 1.06650000e+00 4.23000000e-02 6.29400000e-01 9.86800000e-01 5.99600000e-01 4.15000000e-01 7.00300000e-01 9.06300000e-01 5.89700000e-01 3.98900000e-01 7.93500000e-01 7.99800000e-01 - 1.15090000e+00 -5.24000000e-02 8.91600000e-01 2.59500000e-01 7.14700000e-01 1.49800000e-01 9.07100000e-01 2.40200000e-01 6.92300000e-01 1.51100000e-01 8.86100000e-01 2.70100000e-01 - 1.07830000e+00 3.58000000e-02 4.02600000e-01 6.20400000e-01 2.97000000e-01 5.06300000e-01 3.87200000e-01 6.42900000e-01 3.59500000e-01 4.13500000e-01 5.25400000e-01 4.80600000e-01 - 1.07750000e+00 3.41000000e-02 3.97200000e-01 5.27800000e-01 4.16400000e-01 3.01100000e-01 4.24900000e-01 4.95200000e-01 2.89700000e-01 4.34900000e-01 3.30700000e-01 6.05800000e-01 - 2.62600000e-01 5.41900000e-01 3.62800000e-01 9.03500000e-01 5.05900000e-01 9.21100000e-01 5.00200000e-01 7.46200000e-01 4.31800000e-01 1.02960000e+00 3.50600000e-01 9.20000000e-01 - 2.54400000e-01 3.09500000e-01 3.10100000e-01 5.90700000e-01 4.94000000e-01 5.08500000e-01 3.89200000e-01 5.00200000e-01 4.60700000e-01 5.62200000e-01 4.34600000e-01 4.42100000e-01 - 6.48900000e-01 1.78300000e-01 9.08900000e-01 2.39100000e-01 9.73600000e-01 3.07500000e-01 1.73840000e+00 -9.10000000e-03 2.04190000e+00 -5.09000000e-02 2.76000000e-02 -3.29000000e-02 - 4.51600000e-01 6.39900000e-01 2.80100000e-01 1.43960000e+00 2.25600000e-01 1.28320000e+00 2.32900000e-01 1.49470000e+00 2.07600000e-01 1.22530000e+00 1.41700000e-01 1.60540000e+00 - 3.84900000e-01 7.11000000e-02 5.86300000e-01 2.05600000e-01 6.72200000e-01 2.46200000e-01 1.76640000e+00 -4.08000000e-02 -1.81000000e-02 2.20000000e-02 -1.06000000e-02 1.29000000e-02 - 8.80100000e-01 3.56000000e-02 1.04620000e+00 2.11600000e-01 1.27100000e+00 8.88000000e-02 1.78450000e+00 -6.72000000e-02 2.05500000e+00 -6.45000000e-02 -6.66000000e-02 7.87000000e-02 - 4.73900000e-01 6.36300000e-01 3.06700000e-01 1.32080000e+00 1.74700000e-01 8.61200000e-01 2.89200000e-01 1.33320000e+00 1.96200000e-01 8.06400000e-01 2.84600000e-01 1.34100000e+00 - 
1.05260000e+00 5.96000000e-02 5.05300000e-01 1.23410000e+00 4.24600000e-01 9.39500000e-01 5.02700000e-01 1.23440000e+00 4.86100000e-01 8.10900000e-01 6.03200000e-01 1.12200000e+00 - 1.13540000e+00 -3.36000000e-02 4.06500000e-01 5.88900000e-01 3.20300000e-01 4.65000000e-01 4.77200000e-01 5.06000000e-01 3.67000000e-01 3.87400000e-01 3.99100000e-01 5.99300000e-01 - 2.94300000e-01 6.99700000e-01 7.68600000e-01 5.14800000e-01 7.79700000e-01 6.41100000e-01 1.83180000e+00 -1.17900000e-01 -1.58700000e-01 2.18560000e+00 2.80000000e-03 -3.20000000e-03 - 1.01920000e+00 1.04200000e-01 4.57900000e-01 5.63700000e-01 4.55700000e-01 3.25200000e-01 4.95800000e-01 5.20800000e-01 4.09900000e-01 3.58900000e-01 4.62500000e-01 5.58200000e-01 - 6.41600000e-01 2.69700000e-01 1.14560000e+00 2.55300000e-01 1.21140000e+00 3.97800000e-01 1.11650000e+00 2.89900000e-01 1.12120000e+00 5.34000000e-01 9.65500000e-01 4.74000000e-01 - 1.96900000e-01 6.79500000e-01 5.11200000e-01 6.75700000e-01 6.75900000e-01 6.17300000e-01 1.80420000e+00 -8.76000000e-02 -2.76000000e-02 2.03390000e+00 -2.20000000e-03 2.50000000e-03 - 1.11300000e-01 7.67800000e-01 3.13500000e-01 8.91000000e-01 5.10300000e-01 8.04100000e-01 1.68890000e+00 5.29000000e-02 1.20700000e-01 1.85660000e+00 5.80000000e-03 -5.90000000e-03 - 3.40400000e-01 7.66000000e-01 1.53100000e-01 6.10700000e-01 2.07700000e-01 3.99600000e-01 2.67600000e-01 4.80000000e-01 2.15100000e-01 3.79000000e-01 2.81000000e-01 4.62300000e-01 - 2.96700000e-01 3.38400000e-01 5.26900000e-01 4.55700000e-01 5.81000000e-01 5.44100000e-01 4.87700000e-01 5.06100000e-01 5.62600000e-01 5.85500000e-01 4.97800000e-01 4.97300000e-01 - 4.89200000e-01 3.13700000e-01 7.79900000e-01 4.69000000e-01 9.37700000e-01 4.80900000e-01 7.70600000e-01 4.79300000e-01 8.84500000e-01 5.66200000e-01 8.26100000e-01 4.14800000e-01 - 2.39200000e-01 7.97100000e-01 3.36300000e-01 1.29300000e+00 5.09900000e-01 1.31900000e+00 3.52500000e-01 1.27380000e+00 3.52500000e-01 1.53920000e+00 3.36000000e-01 1.28920000e+00 - 1.40800000e-01 7.21900000e-01 5.89100000e-01 5.62600000e-01 6.44800000e-01 6.38900000e-01 1.71180000e+00 1.77000000e-02 -1.45000000e-02 2.01860000e+00 1.81000000e-02 -2.28000000e-02 - 1.07990000e+00 3.25000000e-02 5.33900000e-01 1.19760000e+00 4.44400000e-01 1.23100000e+00 5.49700000e-01 1.18250000e+00 4.69100000e-01 1.07640000e+00 4.80600000e-01 1.26300000e+00 - 6.04500000e-01 4.88300000e-01 2.72000000e-01 5.57100000e-01 1.93500000e-01 4.79500000e-01 3.08000000e-01 5.14800000e-01 2.29600000e-01 4.26300000e-01 2.74700000e-01 5.52900000e-01 - 9.90200000e-01 1.34700000e-01 5.89600000e-01 3.04000000e-01 5.12200000e-01 1.95800000e-01 5.75600000e-01 3.20600000e-01 5.35800000e-01 1.47300000e-01 5.36900000e-01 3.65400000e-01 - 2.22500000e-01 8.64000000e-01 1.11900000e-01 1.01600000e+00 5.89000000e-02 7.98300000e-01 1.25300000e-01 9.96300000e-01 1.01200000e-01 7.22800000e-01 5.86000000e-02 1.08090000e+00 - 2.37400000e-01 4.45400000e-01 4.78100000e-01 5.69300000e-01 4.18800000e-01 8.01500000e-01 4.52400000e-01 6.03100000e-01 4.09200000e-01 8.30700000e-01 4.32200000e-01 6.23400000e-01 - 9.84000000e-02 3.70400000e-01 1.86000000e-02 7.80800000e-01 1.05500000e-01 8.17500000e-01 3.95000000e-02 1.68340000e+00 -6.70000000e-02 7.87000000e-02 -6.50000000e-03 7.30000000e-03 - 5.80400000e-01 1.38700000e-01 8.13300000e-01 3.35100000e-01 9.07800000e-01 4.02400000e-01 8.14000000e-01 3.35500000e-01 9.24700000e-01 4.12400000e-01 8.91500000e-01 2.43800000e-01 - 5.11700000e-01 2.12000000e-02 7.23200000e-01 1.21300000e-01 9.21500000e-01 
2.17000000e-02 7.97400000e-01 3.10000000e-02 8.73500000e-01 9.54000000e-02 8.10800000e-01 1.64000000e-02 - 4.30400000e-01 6.81300000e-01 8.48600000e-01 8.22300000e-01 9.44000000e-01 8.51200000e-01 1.70700000e+00 3.10000000e-02 -3.65000000e-02 2.04710000e+00 5.92000000e-02 1.65800000e+00 - 3.29000000e-01 5.05000000e-01 5.53300000e-01 7.46300000e-01 6.42800000e-01 8.28500000e-01 6.35600000e-01 6.44900000e-01 6.71400000e-01 8.15900000e-01 6.51700000e-01 6.26100000e-01 - 5.00800000e-01 -1.45000000e-02 7.11200000e-01 1.10700000e-01 9.68500000e-01 -5.14000000e-02 1.77000000e+00 -4.45000000e-02 -3.29000000e-02 4.02000000e-02 -2.75000000e-02 3.26000000e-02 - -6.67000000e-02 4.18200000e-01 1.07800000e-01 5.50900000e-01 2.57200000e-01 5.09800000e-01 -1.25000000e-02 1.74600000e+00 6.67000000e-02 -8.33000000e-02 -3.00000000e-04 -8.00000000e-04 - 4.86000000e-02 9.99700000e-01 1.41400000e-01 1.22650000e+00 2.63900000e-01 1.22600000e+00 -2.80000000e-03 1.73480000e+00 3.50000000e-03 1.99700000e+00 6.92000000e-02 1.65070000e+00 - 1.30300000e-01 2.89900000e-01 1.51900000e-01 5.19900000e-01 1.37100000e-01 6.35800000e-01 1.10600000e-01 5.71100000e-01 1.52000000e-01 6.27000000e-01 5.65000000e-02 6.36100000e-01 - 1.07250000e+00 3.97000000e-02 4.42100000e-01 7.44300000e-01 3.76000000e-01 5.06600000e-01 3.98900000e-01 7.93800000e-01 3.67200000e-01 4.93100000e-01 5.05200000e-01 6.64800000e-01 - 3.06800000e-01 2.75800000e-01 4.48800000e-01 4.65300000e-01 5.16500000e-01 5.23100000e-01 4.67300000e-01 4.42500000e-01 4.75600000e-01 5.87600000e-01 4.51400000e-01 4.61000000e-01 - 1.00070000e+00 1.22000000e-01 8.85800000e-01 8.66900000e-01 7.36900000e-01 1.23730000e+00 9.41700000e-01 7.97000000e-01 8.73900000e-01 9.08300000e-01 1.00010000e+00 7.26700000e-01 - 1.07060000e+00 4.02000000e-02 5.98300000e-01 1.13740000e+00 5.75700000e-01 4.69000000e-01 6.96400000e-01 1.02560000e+00 4.76800000e-01 5.55500000e-01 5.73100000e-01 1.16790000e+00 - 6.45200000e-01 4.53800000e-01 2.77700000e-01 8.80700000e-01 1.78200000e-01 7.04200000e-01 2.95300000e-01 8.63000000e-01 2.51100000e-01 5.99300000e-01 2.64100000e-01 8.99800000e-01 - 9.25000000e-02 4.28800000e-01 1.83900000e-01 6.65400000e-01 2.82800000e-01 6.93300000e-01 -2.57000000e-02 1.76300000e+00 4.59000000e-02 -5.55000000e-02 -1.19000000e-02 1.45000000e-02 - 4.86400000e-01 3.77200000e-01 8.02300000e-01 5.39800000e-01 8.37600000e-01 7.04100000e-01 8.33500000e-01 5.01100000e-01 8.20200000e-01 7.49500000e-01 7.76800000e-01 5.71100000e-01 - 1.10000000e+00 5.90000000e-03 5.71500000e-01 5.08300000e-01 4.17900000e-01 4.14200000e-01 5.33500000e-01 5.47600000e-01 4.29000000e-01 3.83700000e-01 4.62900000e-01 6.34100000e-01 - 1.07650000e+00 3.45000000e-02 3.95800000e-01 1.33430000e+00 3.90000000e-01 7.43600000e-01 3.76500000e-01 1.36190000e+00 3.85000000e-01 7.08900000e-01 4.28500000e-01 1.29910000e+00 - 2.97300000e-01 7.83000000e-01 6.89700000e-01 9.64400000e-01 6.19400000e-01 1.29310000e+00 4.93700000e-01 1.19060000e+00 6.56300000e-01 1.27610000e+00 4.54800000e-01 1.23670000e+00 - 4.42600000e-01 6.63600000e-01 2.37100000e-01 1.49340000e+00 2.33300000e-01 8.54900000e-01 2.01000000e-01 1.53800000e+00 1.38600000e-01 9.37200000e-01 2.38300000e-01 1.49320000e+00 - 4.32100000e-01 1.89000000e-02 5.88700000e-01 1.31000000e-01 5.99800000e-01 2.33400000e-01 6.00700000e-01 1.21200000e-01 6.69600000e-01 1.67300000e-01 5.81400000e-01 1.41300000e-01 - 3.79600000e-01 7.27900000e-01 2.06100000e-01 1.52800000e+00 2.33500000e-01 1.07580000e+00 1.32600000e-01 1.61320000e+00 2.48000000e-01 1.00610000e+00 
2.84000000e-01 1.43290000e+00 - 1.06260000e+00 4.62000000e-02 1.72480000e+00 1.37000000e-02 1.59500000e+00 3.56800000e-01 1.69300000e+00 4.38000000e-02 1.41230000e+00 3.13900000e-01 1.72430000e+00 8.20000000e-03 - -5.00000000e-03 1.11770000e+00 3.36100000e-01 1.27740000e+00 4.10900000e-01 1.32290000e+00 2.25000000e-02 1.70350000e+00 -7.23000000e-02 2.08340000e+00 -5.10000000e-03 1.74010000e+00 - 1.12550000e+00 -2.45000000e-02 4.62300000e-01 1.26460000e+00 4.78600000e-01 9.48600000e-01 3.95800000e-01 1.34650000e+00 2.34600000e-01 1.16500000e+00 4.59100000e-01 1.27020000e+00 - 1.12240000e+00 -2.06000000e-02 6.77900000e-01 9.85800000e-01 4.80100000e-01 5.56000000e-01 6.68000000e-01 9.96000000e-01 5.14400000e-01 4.86200000e-01 6.66200000e-01 1.00100000e+00 - 4.42600000e-01 1.32200000e-01 6.21500000e-01 2.95000000e-01 7.80000000e-01 2.54100000e-01 6.50400000e-01 2.64800000e-01 8.29700000e-01 2.07300000e-01 6.92000000e-01 2.16300000e-01 - 6.48100000e-01 1.16500000e-01 7.80300000e-01 4.57000000e-01 1.09140000e+00 2.75700000e-01 9.18600000e-01 2.94800000e-01 9.24100000e-01 4.96100000e-01 8.51800000e-01 3.72300000e-01 - 1.06660000e+00 4.35000000e-02 5.77200000e-01 1.16190000e+00 5.69400000e-01 5.69400000e-01 6.04600000e-01 1.12500000e+00 4.15900000e-01 7.12700000e-01 7.26100000e-01 9.80400000e-01 - 1.03270000e+00 8.40000000e-02 4.56300000e-01 9.98100000e-01 3.63700000e-01 6.28300000e-01 5.76000000e-01 8.55100000e-01 3.35800000e-01 6.33300000e-01 4.05600000e-01 1.05670000e+00 - 1.07090000e+00 4.08000000e-02 6.11400000e-01 1.12600000e+00 4.82900000e-01 7.31400000e-01 5.82100000e-01 1.16170000e+00 4.96300000e-01 6.71100000e-01 6.64700000e-01 1.06190000e+00 - 3.32300000e-01 4.05300000e-01 6.74500000e-01 4.51000000e-01 6.90900000e-01 6.05300000e-01 6.30500000e-01 5.01400000e-01 7.19500000e-01 5.91600000e-01 5.66200000e-01 5.80700000e-01 - 3.23000000e-02 7.61600000e-01 1.47300000e-01 1.07860000e+00 2.86100000e-01 1.08240000e+00 1.15800000e-01 1.11460000e+00 2.58200000e-01 1.13960000e+00 2.04700000e-01 1.00650000e+00 - 3.86500000e-01 6.32000000e-01 8.57000000e-01 6.98300000e-01 8.43600000e-01 9.44100000e-01 7.28100000e-01 8.44100000e-01 9.45900000e-01 8.58000000e-01 8.31300000e-01 7.24600000e-01 - 3.91900000e-01 4.20100000e-01 6.69800000e-01 5.92500000e-01 7.67200000e-01 6.66500000e-01 6.62400000e-01 5.98900000e-01 7.88800000e-01 6.66800000e-01 5.81100000e-01 6.98000000e-01 - 1.05390000e+00 5.85000000e-02 8.99000000e-01 8.16900000e-01 6.08000000e-01 4.69800000e-01 9.13100000e-01 8.03800000e-01 6.43400000e-01 3.95200000e-01 8.01900000e-01 9.34800000e-01 - 3.34200000e-01 4.26700000e-01 4.74000000e-01 7.25000000e-01 4.64100000e-01 9.20000000e-01 3.38500000e-01 8.84900000e-01 5.10700000e-01 8.82200000e-01 3.47500000e-01 8.76800000e-01 - 7.97700000e-01 3.59000000e-02 1.23030000e+00 8.17000000e-02 1.36290000e+00 1.36100000e-01 1.21800000e+00 9.37000000e-02 1.36860000e+00 1.62600000e-01 1.05340000e+00 2.90500000e-01 - 3.89300000e-01 1.43800000e-01 6.81500000e-01 1.72800000e-01 7.80200000e-01 1.97000000e-01 1.84320000e+00 -1.26800000e-01 4.46000000e-02 -5.52000000e-02 2.22000000e-02 -2.85000000e-02 - 6.33600000e-01 2.69200000e-01 8.77600000e-01 5.64200000e-01 1.08870000e+00 5.39900000e-01 7.96500000e-01 6.57100000e-01 9.87700000e-01 6.75900000e-01 9.20600000e-01 5.11500000e-01 - 5.57600000e-01 5.56700000e-01 2.48200000e-01 6.16200000e-01 3.39700000e-01 3.31600000e-01 2.52800000e-01 6.11500000e-01 2.25900000e-01 4.51500000e-01 3.85300000e-01 4.53900000e-01 - 3.35300000e-01 6.32200000e-01 7.40200000e-01 
7.36700000e-01 6.98000000e-01 1.01050000e+00 6.65400000e-01 8.22800000e-01 7.11300000e-01 1.01790000e+00 6.47200000e-01 8.42000000e-01 - 1.06800000e-01 1.54000000e-02 1.54200000e-01 3.77000000e-02 1.44500000e-01 7.73000000e-02 9.81000000e-02 1.04300000e-01 1.49500000e-01 7.73000000e-02 1.18200000e-01 7.71000000e-02 - 4.43200000e-01 6.61200000e-01 3.44400000e-01 4.17800000e-01 2.18000000e-01 4.13300000e-01 2.32300000e-01 5.48600000e-01 8.86000000e-02 5.54500000e-01 2.38600000e-01 5.43400000e-01 - 5.25900000e-01 5.88700000e-01 2.42900000e-01 5.05600000e-01 3.30500000e-01 2.56700000e-01 3.06500000e-01 4.29800000e-01 1.54200000e-01 4.55100000e-01 3.08600000e-01 4.26300000e-01 - 1.13490000e+00 -3.09000000e-02 4.97800000e-01 5.04400000e-01 4.54400000e-01 3.21000000e-01 5.91400000e-01 3.91700000e-01 5.19200000e-01 2.21600000e-01 5.59000000e-01 4.30100000e-01 - 5.14700000e-01 3.74600000e-01 7.62900000e-01 6.39500000e-01 8.71500000e-01 7.17100000e-01 7.09200000e-01 7.02600000e-01 9.74100000e-01 6.31200000e-01 7.62000000e-01 6.40300000e-01 - 3.78000000e-02 7.04100000e-01 3.36500000e-01 7.20800000e-01 4.68900000e-01 7.04300000e-01 1.76820000e+00 -4.02000000e-02 9.00000000e-04 1.99820000e+00 -7.60000000e-03 8.00000000e-03 - 1.12830000e+00 -2.61000000e-02 3.98100000e-01 1.06870000e+00 3.64700000e-01 6.33300000e-01 5.10500000e-01 9.33700000e-01 4.25100000e-01 5.32300000e-01 4.54200000e-01 1.00120000e+00 - 5.60000000e-02 2.50700000e-01 1.24900000e-01 5.12800000e-01 2.58200000e-01 4.92800000e-01 7.00000000e-02 1.64840000e+00 5.29000000e-02 -6.29000000e-02 3.92000000e-02 -4.73000000e-02 - 1.06690000e+00 4.77000000e-02 4.30600000e-01 4.33300000e-01 4.57200000e-01 2.18200000e-01 6.07900000e-01 2.23800000e-01 4.05500000e-01 2.64500000e-01 4.73500000e-01 3.86000000e-01 - 3.76300000e-01 6.02700000e-01 7.31600000e-01 5.52800000e-01 9.13600000e-01 4.81700000e-01 1.62980000e+00 1.20300000e-01 -5.38000000e-02 2.06530000e+00 -5.83000000e-02 6.86000000e-02 - 2.61900000e-01 8.46200000e-01 1.83400000e-01 3.94000000e-01 1.88400000e-01 2.87300000e-01 1.54200000e-01 4.25800000e-01 1.53100000e-01 3.18300000e-01 1.20700000e-01 4.72300000e-01 - 2.81800000e-01 2.64800000e-01 6.02700000e-01 2.51600000e-01 5.89500000e-01 4.15100000e-01 1.81260000e+00 -8.90000000e-02 3.34000000e-02 -3.79000000e-02 3.42000000e-02 -4.25000000e-02 - 6.66200000e-01 2.92800000e-01 8.63500000e-01 6.67700000e-01 1.04500000e+00 6.96600000e-01 8.34200000e-01 7.02600000e-01 1.00460000e+00 7.65500000e-01 8.92200000e-01 6.38800000e-01 - 3.27400000e-01 5.63800000e-01 6.26700000e-01 5.79600000e-01 7.47900000e-01 5.77400000e-01 1.65380000e+00 9.09000000e-02 -1.18000000e-02 2.01260000e+00 -2.33000000e-02 2.99000000e-02 - 3.55000000e-01 4.13300000e-01 6.73500000e-01 5.08700000e-01 7.67200000e-01 5.75500000e-01 6.32000000e-01 5.55100000e-01 7.12900000e-01 6.60400000e-01 5.85300000e-01 6.08700000e-01 - 2.98000000e-01 1.45000000e-01 5.03400000e-01 1.87100000e-01 4.36500000e-01 3.75400000e-01 4.53100000e-01 2.47400000e-01 4.65600000e-01 3.55900000e-01 3.74500000e-01 3.41400000e-01 - 1.15370000e+00 -5.56000000e-02 1.00210000e+00 7.28300000e-01 7.50400000e-01 4.87300000e-01 9.92200000e-01 7.36900000e-01 6.97000000e-01 5.04900000e-01 9.68500000e-01 7.67100000e-01 - 2.08300000e-01 4.41600000e-01 3.57800000e-01 6.58600000e-01 3.85500000e-01 7.65800000e-01 4.25800000e-01 5.74500000e-01 4.84300000e-01 6.71000000e-01 3.16900000e-01 7.02000000e-01 - -1.55000000e-02 1.12630000e+00 1.11600000e-01 1.31600000e+00 1.46000000e-01 1.40970000e+00 6.72000000e-02 1.65170000e+00 
[... remaining deleted rows of the preceding oil test-data file omitted: 12 numeric columns per row ...]
diff --git a/GPy/util/datasets/oil/DataTstLbls.txt b/GPy/util/datasets/oil/DataTstLbls.txt
deleted file mode 100644
index 55cc558d..00000000
--- a/GPy/util/datasets/oil/DataTstLbls.txt
+++ /dev/null
@@ -1,1000 +0,0 @@
[... 1,000 deleted rows of one-hot class labels, 3 columns per row, omitted ...]
0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 
1.00000000e+00 0.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 
0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 
0.00000000e+00 0.00000000e+00 1.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 
1.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 diff --git a/GPy/util/datasets/oil/DataVdn.txt b/GPy/util/datasets/oil/DataVdn.txt deleted file mode 100644 index 2a31aad3..00000000 --- a/GPy/util/datasets/oil/DataVdn.txt +++ /dev/null @@ -1,1000 +0,0 @@ - 9.52000000e-02 3.52100000e-01 1.78400000e-01 5.96900000e-01 2.63700000e-01 6.35600000e-01 -2.58000000e-02 1.76100000e+00 -5.56000000e-02 6.57000000e-02 8.70000000e-03 -1.02000000e-02 - 2.67700000e-01 8.24000000e-01 4.34000000e-02 1.70560000e+00 7.18000000e-02 1.90800000e+00 8.29000000e-02 1.65300000e+00 8.15000000e-02 1.74990000e+00 1.07000000e-02 1.73760000e+00 - -3.02000000e-02 8.67200000e-01 2.35800000e-01 9.28400000e-01 4.38900000e-01 8.24600000e-01 -5.67000000e-02 1.79820000e+00 4.10000000e-02 1.94960000e+00 -1.65000000e-02 1.93000000e-02 - 1.09390000e+00 1.30000000e-02 5.31100000e-01 5.07900000e-01 5.33600000e-01 2.53300000e-01 4.84600000e-01 5.64100000e-01 4.00000000e-01 3.89800000e-01 5.23500000e-01 5.14300000e-01 - 1.07070000e+00 4.39000000e-02 5.52400000e-01 6.93400000e-01 
3.47100000e-01 5.80400000e-01 4.04200000e-01 8.70100000e-01 4.19800000e-01 4.74000000e-01 4.43200000e-01 8.28800000e-01 - 1.38700000e-01 4.95400000e-01 4.77200000e-01 4.69100000e-01 5.65200000e-01 5.09100000e-01 1.77670000e+00 -5.15000000e-02 -1.68000000e-02 2.01720000e+00 5.13000000e-02 -6.18000000e-02 - 4.93300000e-01 4.25500000e-01 8.25600000e-01 6.05500000e-01 8.45100000e-01 7.96900000e-01 7.79200000e-01 6.54500000e-01 9.11100000e-01 7.46200000e-01 7.31500000e-01 7.14800000e-01 - 2.75400000e-01 8.22500000e-01 7.16000000e-02 1.17590000e+00 9.47000000e-02 8.14700000e-01 2.45000000e-01 9.70600000e-01 8.82000000e-02 8.02500000e-01 1.31700000e-01 1.10730000e+00 - 1.45500000e-01 5.47300000e-01 3.61500000e-01 6.60200000e-01 5.76700000e-01 5.50000000e-01 1.72940000e+00 6.10000000e-03 -2.97000000e-02 2.03600000e+00 -7.78000000e-02 9.24000000e-02 - 6.32800000e-01 6.95000000e-02 9.02100000e-01 1.20100000e-01 1.01520000e+00 1.28300000e-01 1.89100000e+00 -1.88300000e-01 1.95280000e+00 5.52000000e-02 5.75000000e-02 -6.87000000e-02 - 5.11400000e-01 4.07500000e-01 9.70500000e-01 4.33600000e-01 1.02770000e+00 5.90100000e-01 8.75700000e-01 5.47600000e-01 1.15480000e+00 4.66000000e-01 8.52100000e-01 5.71700000e-01 - 4.93100000e-01 6.10300000e-01 9.19300000e-01 5.52600000e-01 8.31600000e-01 8.00300000e-01 1.65180000e+00 9.49000000e-02 -7.64000000e-02 2.09320000e+00 -6.74000000e-02 1.81510000e+00 - 5.41600000e-01 2.03500000e-01 9.16800000e-01 2.38900000e-01 1.01830000e+00 2.99700000e-01 8.39400000e-01 3.30400000e-01 1.02510000e+00 3.14200000e-01 8.52200000e-01 3.15700000e-01 - 1.29400000e-01 5.12600000e-01 4.18800000e-01 5.42400000e-01 4.91000000e-01 5.98000000e-01 1.79900000e+00 -8.68000000e-02 1.08800000e-01 1.86730000e+00 -7.70000000e-03 7.90000000e-03 - 3.46500000e-01 7.65400000e-01 6.57400000e-01 1.02620000e+00 7.43300000e-01 1.06200000e+00 1.74180000e+00 -1.10000000e-02 1.12300000e-01 1.86670000e+00 1.06000000e-02 1.71950000e+00 - 3.78700000e-01 6.49000000e-01 4.78400000e-01 1.15610000e+00 6.80300000e-01 1.15190000e+00 6.28700000e-01 9.77500000e-01 7.23400000e-01 1.13050000e+00 6.42000000e-01 9.62500000e-01 - 4.17300000e-01 4.48600000e-01 6.68000000e-01 5.20400000e-01 7.97300000e-01 5.13200000e-01 1.72500000e+00 6.80000000e-03 6.03000000e-02 1.92620000e+00 2.31000000e-02 -2.79000000e-02 - 2.03700000e-01 9.06700000e-01 1.42900000e-01 1.58850000e+00 1.20000000e-01 1.15170000e+00 1.25900000e-01 1.61020000e+00 8.99000000e-02 1.13720000e+00 2.02500000e-01 1.52010000e+00 - -4.26000000e-02 6.97000000e-01 3.37300000e-01 6.18900000e-01 4.16600000e-01 6.66800000e-01 1.33700000e-01 1.56850000e+00 4.83000000e-02 1.94660000e+00 5.61000000e-02 -6.65000000e-02 - 1.16110000e+00 -6.55000000e-02 5.16100000e-01 3.69700000e-01 4.94400000e-01 2.06000000e-01 6.32300000e-01 2.35400000e-01 5.53100000e-01 1.16600000e-01 6.18400000e-01 2.50900000e-01 - 4.18200000e-01 6.76500000e-01 4.94300000e-01 1.24660000e+00 6.28300000e-01 1.34700000e+00 5.59500000e-01 1.17230000e+00 7.99800000e-01 1.17470000e+00 4.81300000e-01 1.26780000e+00 - 8.16000000e-02 1.02500000e+00 3.46100000e-01 1.18280000e+00 3.84600000e-01 1.27600000e+00 1.76070000e+00 -3.37000000e-02 8.90000000e-02 1.89420000e+00 -1.31700000e-01 1.89120000e+00 - 1.91400000e-01 3.94700000e-01 2.90700000e-01 6.29600000e-01 3.71100000e-01 6.72400000e-01 2.50000000e-01 6.78400000e-01 2.66100000e-01 8.10100000e-01 2.08500000e-01 7.25400000e-01 - 1.13700000e+00 -3.49000000e-02 8.62700000e-01 8.68400000e-01 7.30500000e-01 6.55300000e-01 7.96200000e-01 9.44100000e-01 7.90900000e-01 
5.16500000e-01 8.29700000e-01 9.06100000e-01 - 1.16150000e+00 -6.61000000e-02 1.71430000e+00 1.60000000e-03 1.79290000e+00 1.34400000e-01 1.68570000e+00 5.17000000e-02 1.87020000e+00 1.47600000e-01 1.73690000e+00 -3.00000000e-03 - 1.15150000e+00 -5.21000000e-02 4.51100000e-01 8.37500000e-01 4.45500000e-01 4.71900000e-01 4.31100000e-01 8.62600000e-01 3.87500000e-01 5.11400000e-01 4.02600000e-01 8.97500000e-01 - -3.25000000e-02 7.40100000e-01 1.86000000e-01 8.06900000e-01 1.97000000e-01 9.39500000e-01 2.01000000e-02 1.70640000e+00 -5.83000000e-02 2.06980000e+00 7.43000000e-02 -8.99000000e-02 - 5.77200000e-01 5.37000000e-02 9.14100000e-01 2.40000000e-02 1.07910000e+00 -3.15000000e-02 1.72170000e+00 1.79000000e-02 2.07090000e+00 -7.73000000e-02 -1.74000000e-02 2.17000000e-02 - 1.10700000e+00 4.00000000e-04 5.66000000e-01 7.63200000e-01 5.31900000e-01 4.02500000e-01 5.72600000e-01 7.59900000e-01 3.99700000e-01 5.33400000e-01 5.48900000e-01 7.84500000e-01 - 4.09900000e-01 6.57100000e-01 8.42000000e-01 7.93700000e-01 9.60800000e-01 9.03300000e-01 7.73500000e-01 8.76400000e-01 8.74900000e-01 1.03340000e+00 8.18700000e-01 8.19500000e-01 - 1.81400000e-01 5.82700000e-01 5.53900000e-01 5.11500000e-01 7.06800000e-01 4.74100000e-01 1.77920000e+00 -5.67000000e-02 8.10000000e-03 1.98890000e+00 7.16000000e-02 -8.68000000e-02 - 7.01500000e-01 1.69900000e-01 1.06140000e+00 3.16600000e-01 1.13600000e+00 4.39900000e-01 1.00190000e+00 3.83800000e-01 1.12230000e+00 4.84000000e-01 1.06520000e+00 3.03900000e-01 - 2.41100000e-01 6.64100000e-01 5.61800000e-01 8.15200000e-01 4.72300000e-01 1.11920000e+00 4.11700000e-01 9.93600000e-01 6.37900000e-01 9.60300000e-01 4.87700000e-01 9.05500000e-01 - 3.43900000e-01 2.99600000e-01 6.03400000e-01 3.88700000e-01 7.84800000e-01 3.25800000e-01 5.97000000e-01 3.98200000e-01 7.50200000e-01 3.87000000e-01 5.48200000e-01 4.57500000e-01 - 6.49400000e-01 4.51700000e-01 2.85100000e-01 1.43170000e+00 2.72700000e-01 7.49700000e-01 2.13400000e-01 1.51530000e+00 2.83000000e-01 7.10700000e-01 3.72300000e-01 1.32940000e+00 - 8.83000000e-02 4.63200000e-01 7.15000000e-02 8.05900000e-01 7.91000000e-02 9.25500000e-01 -1.08000000e-02 9.05800000e-01 9.92000000e-02 9.10000000e-01 1.60700000e-01 7.00800000e-01 - 4.10000000e-02 7.01900000e-01 2.99900000e-01 7.65200000e-01 3.65500000e-01 8.29500000e-01 -7.21000000e-02 1.81760000e+00 5.59000000e-02 1.93410000e+00 -2.21000000e-02 2.60000000e-02 - 1.12860000e+00 -2.95000000e-02 8.16500000e-01 9.12700000e-01 6.30300000e-01 5.21400000e-01 7.27000000e-01 1.02270000e+00 7.36500000e-01 3.51900000e-01 7.98800000e-01 9.31100000e-01 - 1.67900000e-01 8.77600000e-01 2.54400000e-01 1.38870000e+00 3.30000000e-01 1.53420000e+00 3.78300000e-01 1.23900000e+00 3.30200000e-01 1.55980000e+00 2.95300000e-01 1.34160000e+00 - 1.16000000e-02 5.83100000e-01 7.25000000e-02 8.34500000e-01 1.76300000e-01 8.54000000e-01 7.74000000e-02 1.64470000e+00 -2.45000000e-02 2.02690000e+00 -3.86000000e-02 4.68000000e-02 - 3.00000000e-02 2.96200000e-01 3.43300000e-01 2.95400000e-01 4.35300000e-01 3.28200000e-01 1.32000000e-02 1.71720000e+00 5.03000000e-02 -5.99000000e-02 -7.01000000e-02 8.35000000e-02 - 3.10500000e-01 2.10200000e-01 4.47300000e-01 3.75900000e-01 5.67600000e-01 3.57900000e-01 5.43800000e-01 2.59900000e-01 6.01100000e-01 3.31900000e-01 4.38500000e-01 3.83900000e-01 - 1.08880000e+00 2.20000000e-02 8.21000000e-01 2.46400000e-01 5.96500000e-01 2.36300000e-01 8.38100000e-01 2.26300000e-01 6.96000000e-01 1.01500000e-01 8.46700000e-01 2.15700000e-01 - 1.96300000e-01 9.03600000e-01 
2.35700000e-01 1.47040000e+00 1.40000000e-01 9.91300000e-01 4.44000000e-02 1.69900000e+00 1.16300000e-01 9.82000000e-01 2.57000000e-02 1.72140000e+00 - 4.83400000e-01 4.71200000e-01 7.88600000e-01 7.00300000e-01 8.48500000e-01 8.47900000e-01 6.18800000e-01 8.96300000e-01 7.78100000e-01 9.60800000e-01 7.20600000e-01 7.77100000e-01 - 1.16000000e+00 -6.86000000e-02 5.52200000e-01 2.13800000e-01 5.38100000e-01 6.69000000e-02 6.75900000e-01 6.61000000e-02 5.40800000e-01 5.34000000e-02 6.46600000e-01 1.03000000e-01 - 2.76300000e-01 8.09800000e-01 5.65000000e-02 5.68100000e-01 1.07200000e-01 3.98600000e-01 4.64000000e-02 5.81700000e-01 6.50000000e-02 4.40500000e-01 7.98000000e-02 5.40400000e-01 - 1.15630000e+00 -6.18000000e-02 7.39400000e-01 1.01800000e-01 5.78700000e-01 1.01400000e-01 6.35000000e-01 2.26800000e-01 5.79700000e-01 8.26000000e-02 6.72700000e-01 1.83100000e-01 - 1.93000000e-01 3.93500000e-01 5.85600000e-01 2.97700000e-01 6.93300000e-01 3.12500000e-01 1.64000000e+00 1.06100000e-01 -4.61000000e-02 2.04430000e+00 4.19000000e-02 -5.01000000e-02 - 5.40000000e-03 8.39700000e-01 -3.26000000e-02 1.36370000e+00 -2.63000000e-02 1.53880000e+00 1.28000000e-02 1.30870000e+00 1.73000000e-02 1.50950000e+00 -3.98000000e-02 1.36960000e+00 - 5.89500000e-01 -5.00000000e-02 7.59200000e-01 1.11000000e-01 6.79300000e-01 3.48500000e-01 7.49900000e-01 1.24700000e-01 8.80600000e-01 1.23900000e-01 7.07400000e-01 1.76200000e-01 - 1.13120000e+00 -3.37000000e-02 7.37100000e-01 9.95300000e-01 6.51900000e-01 8.82400000e-01 8.17400000e-01 9.06000000e-01 5.83600000e-01 8.72500000e-01 8.11500000e-01 9.06300000e-01 - 1.08300000e+00 2.72000000e-02 7.83200000e-01 1.78800000e-01 6.69800000e-01 7.63000000e-02 8.82100000e-01 6.60000000e-02 6.53700000e-01 8.04000000e-02 9.10100000e-01 2.88000000e-02 - 5.14900000e-01 5.74700000e-01 2.87700000e-01 1.08360000e+00 1.21100000e-01 8.60500000e-01 2.25200000e-01 1.15610000e+00 2.12100000e-01 7.24900000e-01 2.27700000e-01 1.15410000e+00 - 1.11340000e+00 -1.03000000e-02 5.60400000e-01 1.84200000e-01 4.97100000e-01 1.07300000e-01 5.81800000e-01 1.57500000e-01 4.31100000e-01 1.69700000e-01 5.51200000e-01 1.99100000e-01 - 3.92600000e-01 1.19300000e-01 5.76300000e-01 2.31700000e-01 6.95800000e-01 2.13700000e-01 5.57100000e-01 2.52600000e-01 6.95800000e-01 2.27100000e-01 5.78100000e-01 2.28200000e-01 - 5.91700000e-01 4.79400000e-01 8.69100000e-01 8.22800000e-01 9.56300000e-01 9.80300000e-01 8.01000000e-01 9.10800000e-01 1.01790000e+00 9.37900000e-01 8.58600000e-01 8.42800000e-01 - 3.25200000e-01 7.84300000e-01 2.58200000e-01 7.85200000e-01 1.62800000e-01 6.51200000e-01 1.18800000e-01 9.52900000e-01 1.77500000e-01 6.10300000e-01 1.71100000e-01 8.89000000e-01 - 1.06000000e-02 5.49900000e-01 -1.12000000e-02 3.06300000e-01 8.52000000e-02 1.48600000e-01 5.98000000e-02 2.21100000e-01 5.09000000e-02 1.88100000e-01 -3.23000000e-02 3.33100000e-01 - 8.02500000e-01 2.91100000e-01 1.10730000e+00 3.00600000e-01 1.18770000e+00 3.49100000e-01 1.81600000e+00 -9.92000000e-02 2.02850000e+00 -3.15000000e-02 -6.71000000e-02 1.81150000e+00 - 1.73900000e-01 4.56100000e-01 5.29900000e-01 4.10500000e-01 5.97000000e-01 4.68100000e-01 1.76280000e+00 -3.82000000e-02 7.23000000e-02 1.91500000e+00 -6.00000000e-03 6.30000000e-03 - 1.90400000e-01 3.66900000e-01 2.07300000e-01 6.51000000e-02 7.60000000e-02 1.79200000e-01 1.11100000e-01 1.78800000e-01 5.27000000e-02 2.01000000e-01 6.85000000e-02 2.32400000e-01 - 9.19000000e-01 1.57200000e-01 1.43760000e+00 2.53900000e-01 1.58550000e+00 3.46700000e-01 1.45320000e+00 
2.35000000e-01 1.65440000e+00 2.98600000e-01 1.32570000e+00 3.77900000e-01 - 2.46900000e-01 1.75700000e-01 5.43700000e-01 1.96800000e-01 6.21600000e-01 2.43900000e-01 1.75460000e+00 -3.00000000e-02 2.78000000e-02 -3.39000000e-02 -2.68000000e-02 3.24000000e-02 - 1.01500000e-01 2.40000000e-02 4.10500000e-01 3.10000000e-02 4.86400000e-01 8.27000000e-02 1.65020000e+00 9.65000000e-02 -4.35000000e-02 5.24000000e-02 1.23000000e-02 -1.43000000e-02 - 5.81900000e-01 6.85000000e-02 8.45100000e-01 1.28100000e-01 9.69300000e-01 1.24600000e-01 1.65980000e+00 8.29000000e-02 -1.16100000e-01 2.13700000e+00 3.92000000e-02 -4.79000000e-02 - 6.99100000e-01 3.89300000e-01 9.87600000e-01 7.43800000e-01 1.16290000e+00 7.90400000e-01 9.45800000e-01 7.93200000e-01 1.15150000e+00 8.41600000e-01 8.48900000e-01 9.01900000e-01 - 1.08100000e-01 1.94100000e-01 1.00400000e-01 3.91600000e-01 1.41000000e-01 4.12500000e-01 9.49000000e-02 3.98300000e-01 1.15500000e-01 4.48000000e-01 9.51000000e-02 3.95900000e-01 - 4.70100000e-01 1.50300000e-01 8.08600000e-01 1.50800000e-01 9.78000000e-01 1.01400000e-01 8.94000000e-01 5.04000000e-02 1.01600000e+00 7.72000000e-02 8.93600000e-01 5.04000000e-02 - 1.10480000e+00 1.20000000e-03 3.44100000e-01 1.39840000e+00 2.72000000e-01 1.58180000e+00 3.43100000e-01 1.40020000e+00 3.59800000e-01 1.28200000e+00 3.20600000e-01 1.42900000e+00 - 4.11500000e-01 3.99600000e-01 7.82900000e-01 3.35900000e-01 8.53100000e-01 3.90600000e-01 1.71190000e+00 2.07000000e-02 -8.60000000e-02 2.10370000e+00 -2.89000000e-02 3.79000000e-02 - 4.61900000e-01 3.48900000e-01 7.47000000e-01 3.81000000e-01 7.86200000e-01 4.76700000e-01 1.71760000e+00 1.29000000e-02 -1.27700000e-01 2.14920000e+00 1.63000000e-02 -2.16000000e-02 - 4.28200000e-01 2.58000000e-01 7.91800000e-01 1.99300000e-01 8.59800000e-01 2.59400000e-01 1.76160000e+00 -3.41000000e-02 1.01000000e-01 1.87450000e+00 5.06000000e-02 -6.11000000e-02 - 1.03000000e-01 2.64700000e-01 2.37500000e-01 3.19900000e-01 3.29500000e-01 2.92200000e-01 2.45200000e-01 3.11600000e-01 2.42000000e-01 4.09600000e-01 1.54400000e-01 4.20300000e-01 - 4.91300000e-01 6.00000000e-01 3.02800000e-01 3.95100000e-01 1.56100000e-01 4.37600000e-01 2.29900000e-01 4.85300000e-01 1.56000000e-01 4.25700000e-01 2.57200000e-01 4.54200000e-01 - 4.73000000e-02 9.38200000e-01 1.87900000e-01 1.13340000e+00 4.01800000e-01 1.02380000e+00 2.00000000e-04 1.73230000e+00 7.85000000e-02 1.91060000e+00 -4.78000000e-02 5.72000000e-02 - 5.54200000e-01 4.26900000e-01 8.79500000e-01 6.62300000e-01 9.75400000e-01 7.85900000e-01 8.42600000e-01 7.09900000e-01 1.09390000e+00 6.73900000e-01 8.52200000e-01 6.97300000e-01 - 2.42400000e-01 8.60000000e-02 4.46900000e-01 5.40000000e-02 4.19100000e-01 1.71700000e-01 2.27300000e-01 3.17200000e-01 3.43100000e-01 2.70900000e-01 2.14300000e-01 3.30100000e-01 - 2.96500000e-01 8.17800000e-01 1.28100000e-01 1.16430000e+00 1.34000000e-01 8.00600000e-01 3.21100000e-01 9.32400000e-01 2.91300000e-01 5.82900000e-01 1.95900000e-01 1.08360000e+00 - 7.94000000e-01 3.18000000e-01 3.76400000e-01 9.42700000e-01 3.29800000e-01 6.13800000e-01 3.52400000e-01 9.68600000e-01 2.83700000e-01 6.40100000e-01 3.31500000e-01 9.96600000e-01 - 4.48000000e-02 3.62100000e-01 1.52400000e-01 4.66500000e-01 8.78000000e-02 6.34500000e-01 2.05300000e-01 4.10100000e-01 5.57000000e-02 6.82100000e-01 1.11500000e-01 5.16300000e-01 - 2.85600000e-01 1.98400000e-01 5.26500000e-01 2.19800000e-01 5.11400000e-01 3.52700000e-01 4.44600000e-01 3.11400000e-01 5.18200000e-01 3.56300000e-01 4.13300000e-01 3.52500000e-01 - 
2.77600000e-01 4.37900000e-01 6.45900000e-01 3.71500000e-01 7.30200000e-01 4.16000000e-01 1.65470000e+00 9.08000000e-02 -1.24700000e-01 2.15070000e+00 8.30000000e-03 -9.70000000e-03 - 2.74900000e-01 4.01800000e-01 3.17000000e-01 7.65200000e-01 4.01000000e-01 8.18700000e-01 4.42000000e-01 6.15900000e-01 3.72500000e-01 8.70500000e-01 3.52300000e-01 7.17100000e-01 - 1.02250000e+00 9.98000000e-02 5.99100000e-01 1.13270000e+00 5.13200000e-01 6.52800000e-01 4.90800000e-01 1.25360000e+00 5.54600000e-01 5.66400000e-01 5.16700000e-01 1.22220000e+00 - 1.09810000e+00 9.20000000e-03 5.49700000e-01 5.44500000e-01 5.51900000e-01 2.63700000e-01 6.50000000e-01 4.20500000e-01 4.47600000e-01 3.66400000e-01 6.78000000e-01 3.93200000e-01 - 5.10400000e-01 5.86900000e-01 3.03700000e-01 1.42160000e+00 2.28700000e-01 1.03390000e+00 2.78100000e-01 1.44700000e+00 1.70000000e-01 1.05820000e+00 2.50500000e-01 1.47880000e+00 - 3.14000000e-02 9.59000000e-02 4.51000000e-02 3.94400000e-01 7.57000000e-02 4.94100000e-01 -8.05000000e-02 1.82770000e+00 1.62000000e-02 -1.95000000e-02 -1.56000000e-02 2.01000000e-02 - 1.81800000e-01 4.72000000e-02 4.21000000e-02 1.05500000e-01 3.58000000e-02 9.18000000e-02 -1.86000000e-02 1.79600000e-01 3.76000000e-02 8.91000000e-02 -2.29000000e-02 1.83900000e-01 - -1.82000000e-02 1.13460000e+00 6.00000000e-03 4.21700000e-01 -4.57000000e-02 4.16400000e-01 4.92000000e-02 3.70800000e-01 -3.23000000e-02 3.94200000e-01 6.22000000e-02 3.51700000e-01 - 3.66100000e-01 3.20900000e-01 7.12400000e-01 2.78500000e-01 8.39500000e-01 2.74900000e-01 1.70540000e+00 3.19000000e-02 9.69000000e-02 1.88260000e+00 -3.67000000e-02 4.50000000e-02 - 3.97900000e-01 6.99400000e-01 6.83000000e-02 1.68470000e+00 1.53500000e-01 1.10180000e+00 2.19200000e-01 1.51370000e+00 1.52900000e-01 1.06170000e+00 1.35600000e-01 1.60940000e+00 - 3.36100000e-01 3.52100000e-01 6.82900000e-01 3.58000000e-01 6.26100000e-01 5.92900000e-01 6.49600000e-01 3.99100000e-01 7.08900000e-01 5.13300000e-01 6.01600000e-01 4.56000000e-01 - 1.13130000e+00 -2.79000000e-02 1.12290000e+00 4.20000000e-02 8.39100000e-01 3.36000000e-02 1.00720000e+00 1.77100000e-01 9.40100000e-01 -1.10500000e-01 1.15810000e+00 1.90000000e-03 - 3.60100000e-01 4.32600000e-01 6.17800000e-01 4.98800000e-01 7.71800000e-01 4.57600000e-01 1.73620000e+00 -7.50000000e-03 4.49000000e-02 1.94800000e+00 2.37000000e-02 -2.85000000e-02 - 4.80000000e-01 6.01500000e-01 6.05600000e-01 1.11290000e+00 7.47000000e-01 1.20240000e+00 5.68300000e-01 1.15330000e+00 7.40200000e-01 1.23530000e+00 5.69700000e-01 1.15210000e+00 - 1.08360000e+00 2.37000000e-02 9.22600000e-01 3.31700000e-01 6.85300000e-01 2.34500000e-01 9.00500000e-01 3.54800000e-01 6.53500000e-01 2.44900000e-01 8.36300000e-01 4.36900000e-01 - 4.76400000e-01 8.65000000e-02 7.63200000e-01 1.09800000e-01 8.68300000e-01 1.24600000e-01 7.08900000e-01 1.72900000e-01 8.59300000e-01 1.52500000e-01 6.99200000e-01 1.87900000e-01 - 1.17490000e+00 -7.95000000e-02 5.96600000e-01 2.81500000e-01 5.43300000e-01 1.46800000e-01 5.36700000e-01 3.50200000e-01 5.35200000e-01 1.43400000e-01 6.13500000e-01 2.60800000e-01 - 2.14500000e-01 1.30600000e-01 2.40100000e-01 3.24000000e-01 2.73600000e-01 3.66100000e-01 1.95100000e-01 3.75000000e-01 3.03200000e-01 3.44400000e-01 2.21400000e-01 3.46300000e-01 - 6.73000000e-02 1.03710000e+00 5.40000000e-02 6.04600000e-01 3.63000000e-02 5.09400000e-01 9.81000000e-02 5.53300000e-01 6.37000000e-02 4.68500000e-01 3.18000000e-02 6.32900000e-01 - 1.04920000e+00 6.91000000e-02 7.05800000e-01 6.97000000e-02 6.23200000e-01 
-3.70000000e-03 6.97300000e-01 7.93000000e-02 5.60000000e-01 5.96000000e-02 7.14700000e-01 6.00000000e-02 - 2.52700000e-01 6.10000000e-01 5.08400000e-01 8.20400000e-01 6.67200000e-01 8.29100000e-01 5.25100000e-01 8.03000000e-01 6.27600000e-01 9.03000000e-01 5.84400000e-01 7.34600000e-01 - 3.35000000e-01 3.52600000e-01 6.24700000e-01 3.78100000e-01 7.09400000e-01 4.24000000e-01 1.65260000e+00 8.87000000e-02 -4.50000000e-03 2.00690000e+00 -2.03000000e-02 2.32000000e-02 - 7.56800000e-01 -3.56000000e-02 1.07380000e+00 8.07000000e-02 1.36020000e+00 -7.23000000e-02 1.07780000e+00 7.75000000e-02 1.31060000e+00 1.13000000e-02 1.14550000e+00 -1.40000000e-03 - 6.30100000e-01 4.73000000e-01 1.85800000e-01 1.10980000e+00 3.31400000e-01 5.74900000e-01 3.72000000e-01 8.86700000e-01 2.51200000e-01 6.47800000e-01 2.63500000e-01 1.01160000e+00 - 5.06600000e-01 6.01400000e-01 2.44000000e-01 6.33400000e-01 2.33900000e-01 4.63900000e-01 3.31100000e-01 5.36800000e-01 1.74300000e-01 5.17100000e-01 3.29700000e-01 5.34000000e-01 - 6.35500000e-01 3.69100000e-01 1.06370000e+00 4.96600000e-01 1.18500000e+00 5.92500000e-01 1.06650000e+00 4.88300000e-01 1.31950000e+00 4.64900000e-01 1.15930000e+00 3.82400000e-01 - 2.45400000e-01 5.85400000e-01 5.18300000e-01 7.62700000e-01 5.63600000e-01 8.94700000e-01 5.57000000e-01 7.12700000e-01 5.90800000e-01 8.85100000e-01 5.43200000e-01 7.29500000e-01 - 2.22800000e-01 2.06500000e-01 5.08300000e-01 2.41400000e-01 6.70700000e-01 1.91500000e-01 1.72160000e+00 1.18000000e-02 3.31000000e-02 -4.00000000e-02 4.02000000e-02 -4.70000000e-02 - 6.04000000e-01 3.05500000e-01 1.06310000e+00 3.42600000e-01 1.15410000e+00 4.55100000e-01 1.09410000e+00 3.07000000e-01 1.13280000e+00 5.05100000e-01 9.41000000e-01 4.85600000e-01 - 3.83800000e-01 7.38400000e-01 8.62500000e-01 6.61700000e-01 9.89000000e-01 6.53200000e-01 1.77860000e+00 -4.79000000e-02 7.22000000e-02 1.91310000e+00 -4.91000000e-02 1.78690000e+00 - 5.42100000e-01 2.41300000e-01 7.59300000e-01 3.51500000e-01 8.78900000e-01 3.50900000e-01 1.76730000e+00 -4.17000000e-02 -4.80000000e-02 2.05730000e+00 3.60000000e-03 -5.80000000e-03 - 4.92600000e-01 5.90600000e-01 7.34300000e-01 9.67800000e-01 8.39900000e-01 1.09660000e+00 7.39300000e-01 9.63300000e-01 8.73700000e-01 1.09270000e+00 8.34000000e-01 8.48200000e-01 - 4.82700000e-01 2.12200000e-01 8.86000000e-01 1.79500000e-01 9.16700000e-01 3.17800000e-01 8.11900000e-01 2.65200000e-01 9.09400000e-01 3.41100000e-01 8.87600000e-01 1.77100000e-01 - 1.36700000e-01 5.10800000e-01 1.39500000e-01 8.86600000e-01 1.75500000e-01 9.90700000e-01 1.25900000e-01 9.06600000e-01 1.69300000e-01 1.01020000e+00 1.62800000e-01 8.58500000e-01 - 2.18100000e-01 6.30200000e-01 4.57100000e-01 7.15200000e-01 6.83400000e-01 5.92100000e-01 1.69420000e+00 4.82000000e-02 3.05000000e-02 1.96430000e+00 8.37000000e-02 -1.00100000e-01 - 3.68600000e-01 6.32500000e-01 5.24300000e-01 1.05700000e+00 5.68700000e-01 1.23700000e+00 4.56300000e-01 1.13460000e+00 6.66500000e-01 1.15680000e+00 4.41800000e-01 1.15650000e+00 - 1.16450000e+00 -7.23000000e-02 1.09660000e+00 6.33000000e-01 8.50800000e-01 4.10700000e-01 1.19390000e+00 5.20000000e-01 9.29800000e-01 2.63900000e-01 1.16610000e+00 5.55600000e-01 - 9.53400000e-01 1.36600000e-01 3.41200000e-01 6.51800000e-01 3.51700000e-01 4.15800000e-01 4.44100000e-01 5.32300000e-01 3.88900000e-01 3.54200000e-01 3.62900000e-01 6.31000000e-01 - 4.40900000e-01 1.80100000e-01 6.99700000e-01 2.72500000e-01 7.86900000e-01 3.21800000e-01 7.02300000e-01 2.69300000e-01 8.41000000e-01 2.80800000e-01 
6.38700000e-01 3.43800000e-01 - 7.22900000e-01 3.65500000e-01 2.91600000e-01 4.14500000e-01 3.07400000e-01 2.64200000e-01 3.62700000e-01 3.32900000e-01 2.84200000e-01 2.78300000e-01 3.61600000e-01 3.34400000e-01 - 5.65800000e-01 3.30500000e-01 7.35500000e-01 4.99300000e-01 8.95700000e-01 4.51500000e-01 1.82670000e+00 -1.13100000e-01 3.25000000e-02 1.95700000e+00 2.93000000e-02 -3.12000000e-02 - 8.01100000e-01 3.00400000e-01 4.17200000e-01 5.32400000e-01 2.52600000e-01 5.13400000e-01 3.17500000e-01 6.49000000e-01 2.80200000e-01 4.62700000e-01 4.13900000e-01 5.37200000e-01 - 2.71000000e-02 6.53300000e-01 2.63300000e-01 7.15000000e-01 2.04400000e-01 9.24500000e-01 -1.55000000e-02 1.75420000e+00 -8.00000000e-03 2.00990000e+00 5.10000000e-03 -5.90000000e-03 - 5.30000000e-02 4.91900000e-01 2.00100000e-01 6.72500000e-01 3.63800000e-01 6.15500000e-01 -3.10000000e-03 1.73710000e+00 5.07000000e-02 1.93960000e+00 1.78000000e-02 -1.97000000e-02 - 4.05000000e-02 6.08100000e-01 4.26300000e-01 5.21900000e-01 4.65200000e-01 6.16100000e-01 1.72650000e+00 9.30000000e-03 1.66000000e-02 1.97450000e+00 6.29000000e-02 -7.36000000e-02 - 2.36000000e-01 4.18600000e-01 5.21000000e-01 4.52900000e-01 6.22100000e-01 4.74700000e-01 1.66120000e+00 8.80000000e-02 -4.41000000e-02 2.05400000e+00 -1.01000000e-02 1.11000000e-02 - 8.73000000e-01 2.26800000e-01 3.35600000e-01 1.40600000e+00 3.59500000e-01 9.55100000e-01 3.42500000e-01 1.39320000e+00 2.35600000e-01 1.05110000e+00 4.09600000e-01 1.31610000e+00 - 3.02900000e-01 8.11300000e-01 6.83000000e-01 9.09600000e-01 8.10700000e-01 9.03800000e-01 1.61040000e+00 1.49700000e-01 3.36000000e-02 1.96110000e+00 -9.00000000e-04 1.73500000e+00 - 5.09100000e-01 4.24300000e-01 9.12700000e-01 3.15800000e-01 8.54400000e-01 5.30700000e-01 1.67960000e+00 6.43000000e-02 -8.99000000e-02 2.10400000e+00 1.06000000e-02 -1.13000000e-02 - 5.24100000e-01 3.05100000e-01 9.24700000e-01 3.69500000e-01 1.02790000e+00 4.39000000e-01 8.05000000e-01 5.05700000e-01 9.92600000e-01 5.04800000e-01 8.75000000e-01 4.22200000e-01 - 5.66000000e-02 9.03600000e-01 -2.59000000e-02 1.54860000e+00 8.25000000e-02 1.62770000e+00 1.91000000e-02 1.49140000e+00 9.97000000e-02 1.63300000e+00 1.32500000e-01 1.36140000e+00 - 2.68500000e-01 8.50400000e-01 2.02800000e-01 1.52440000e+00 2.00200000e-01 9.16400000e-01 2.54600000e-01 1.46870000e+00 1.67100000e-01 9.22000000e-01 1.32900000e-01 1.61050000e+00 - 9.25000000e-02 7.80100000e-01 3.66000000e-02 1.34840000e+00 6.76000000e-02 1.50690000e+00 1.52000000e-02 1.37890000e+00 4.06000000e-02 1.56410000e+00 5.06000000e-02 1.33300000e+00 - 3.85000000e-01 7.17200000e-01 7.27200000e-01 7.86500000e-01 7.30800000e-01 9.33500000e-01 1.77660000e+00 -4.95000000e-02 8.57000000e-02 1.90050000e+00 2.24000000e-02 1.70940000e+00 - 4.50800000e-01 6.53200000e-01 8.06200000e-01 8.71600000e-01 8.16700000e-01 1.00380000e+00 1.69070000e+00 4.85000000e-02 1.49000000e-02 1.98310000e+00 5.00000000e-03 1.72930000e+00 - 2.44000000e-01 3.36700000e-01 3.23900000e-01 5.99000000e-01 3.22900000e-01 7.32500000e-01 4.21400000e-01 4.83700000e-01 3.69600000e-01 6.96500000e-01 3.34700000e-01 5.82500000e-01 - 1.02870000e+00 9.05000000e-02 4.51100000e-01 6.87900000e-01 3.70500000e-01 4.93800000e-01 4.41700000e-01 7.04000000e-01 3.58300000e-01 4.84400000e-01 4.24000000e-01 7.24100000e-01 - 4.87200000e-01 4.03600000e-01 7.43800000e-01 4.64900000e-01 8.42800000e-01 4.98000000e-01 1.83620000e+00 -1.22600000e-01 -2.10000000e-02 2.02370000e+00 1.71000000e-02 -1.90000000e-02 - 1.90200000e-01 9.12000000e-02 3.28600000e-01 
1.08500000e-01 4.25800000e-01 5.64000000e-02 3.75300000e-01 5.03000000e-02 4.36900000e-01 5.58000000e-02 4.06200000e-01 1.43000000e-02 - 7.08200000e-01 3.84700000e-01 3.75900000e-01 1.34370000e+00 2.70200000e-01 7.84400000e-01 2.89200000e-01 1.44610000e+00 2.38800000e-01 7.90800000e-01 3.74600000e-01 1.34940000e+00 - 2.50900000e-01 5.53800000e-01 4.15600000e-01 8.40300000e-01 4.62300000e-01 9.72000000e-01 2.41800000e-01 1.03980000e+00 4.96600000e-01 9.42600000e-01 4.11000000e-01 8.44200000e-01 - 4.36300000e-01 2.49300000e-01 7.68800000e-01 2.90800000e-01 8.40700000e-01 3.71200000e-01 8.12000000e-01 2.41200000e-01 8.67400000e-01 3.59900000e-01 7.11800000e-01 3.57200000e-01 - 6.90700000e-01 2.87200000e-01 1.02870000e+00 5.09000000e-01 1.14430000e+00 6.09900000e-01 1.05670000e+00 4.73200000e-01 1.26680000e+00 4.90300000e-01 1.11250000e+00 4.03900000e-01 - 2.52200000e-01 8.43800000e-01 1.54900000e-01 6.69800000e-01 1.15900000e-01 5.49200000e-01 1.04800000e-01 7.28300000e-01 1.03300000e-01 5.48700000e-01 1.81900000e-01 6.34400000e-01 - 1.13080000e+00 -3.01000000e-02 6.61900000e-01 1.28300000e-01 5.90400000e-01 4.38000000e-02 7.00600000e-01 8.71000000e-02 5.60800000e-01 6.25000000e-02 7.24100000e-01 5.72000000e-02 - 1.06230000e+00 4.78000000e-02 9.95200000e-01 -6.22000000e-02 7.26700000e-01 1.95000000e-02 8.76600000e-01 8.07000000e-02 6.93100000e-01 3.64000000e-02 8.05400000e-01 1.59100000e-01 - 7.26900000e-01 3.43000000e-01 1.11940000e+00 5.49800000e-01 1.46900000e+00 4.00800000e-01 1.13730000e+00 5.28700000e-01 1.40990000e+00 5.02400000e-01 1.20160000e+00 4.55300000e-01 - 5.55300000e-01 4.89200000e-01 8.71900000e-01 7.66300000e-01 8.01300000e-01 1.09420000e+00 8.69600000e-01 7.69400000e-01 1.01960000e+00 8.68400000e-01 8.94000000e-01 7.42800000e-01 - 1.15110000e+00 -5.19000000e-02 7.58000000e-01 9.74000000e-02 5.49300000e-01 1.45200000e-01 6.45200000e-01 2.33800000e-01 5.33000000e-01 1.52300000e-01 6.23300000e-01 2.57100000e-01 - 4.75600000e-01 3.27000000e-01 8.85600000e-01 3.40600000e-01 9.34400000e-01 4.73900000e-01 8.97900000e-01 3.26900000e-01 9.05900000e-01 5.30900000e-01 7.36700000e-01 5.11700000e-01 - 6.01200000e-01 2.60800000e-01 7.98900000e-01 3.95600000e-01 9.37000000e-01 3.73100000e-01 1.67370000e+00 6.67000000e-02 -8.00000000e-03 2.01080000e+00 1.64000000e-02 -2.05000000e-02 - 5.63800000e-01 5.48100000e-01 3.50600000e-01 3.79500000e-01 2.90700000e-01 3.09300000e-01 3.48300000e-01 3.81300000e-01 3.15600000e-01 2.63300000e-01 4.06100000e-01 3.13000000e-01 - 2.57400000e-01 8.66900000e-01 5.88000000e-01 8.51200000e-01 7.89400000e-01 7.53600000e-01 1.79300000e+00 -7.37000000e-02 -1.02800000e-01 2.11630000e+00 1.72000000e-02 1.71300000e+00 - 4.18000000e-01 6.89000000e-01 2.43000000e-01 9.36600000e-01 2.20700000e-01 6.57100000e-01 1.80200000e-01 1.01420000e+00 2.46600000e-01 6.04500000e-01 3.14300000e-01 8.54300000e-01 - 2.16100000e-01 3.04100000e-01 3.52000000e-01 4.58200000e-01 4.99300000e-01 4.03800000e-01 3.41600000e-01 4.74000000e-01 4.57000000e-01 4.70300000e-01 3.15600000e-01 5.03900000e-01 - 1.03100000e-01 2.16300000e-01 3.28900000e-01 1.40500000e-01 3.15600000e-01 2.27600000e-01 1.90100000e-01 3.04200000e-01 2.49800000e-01 3.15400000e-01 2.91500000e-01 1.81800000e-01 - 5.57600000e-01 5.52800000e-01 2.40500000e-01 1.02160000e+00 2.49800000e-01 6.65800000e-01 2.96900000e-01 9.54800000e-01 1.50100000e-01 7.58400000e-01 2.58900000e-01 9.97700000e-01 - 1.02180000e+00 7.92000000e-02 3.44100000e-01 5.60700000e-01 2.82400000e-01 4.33600000e-01 3.88500000e-01 5.06900000e-01 3.75600000e-01 
3.13600000e-01 3.87900000e-01 5.09000000e-01 - 3.02900000e-01 6.21500000e-01 2.76300000e-01 9.85000000e-02 2.19600000e-01 1.01600000e-01 1.72100000e-01 2.20900000e-01 1.98900000e-01 1.22600000e-01 2.61200000e-01 1.17700000e-01 - 2.84200000e-01 1.36600000e-01 5.95200000e-01 1.38300000e-01 7.36200000e-01 1.15900000e-01 1.75100000e+00 -2.53000000e-02 -7.60000000e-02 9.07000000e-02 2.78000000e-02 -3.45000000e-02 - 9.48000000e-02 4.67300000e-01 4.96800000e-01 3.58200000e-01 4.77100000e-01 5.23500000e-01 1.74110000e+00 -1.43000000e-02 -7.47000000e-02 2.08990000e+00 5.00000000e-03 -8.30000000e-03 - 4.86400000e-01 3.45300000e-01 7.08000000e-01 6.09600000e-01 9.33500000e-01 5.36600000e-01 7.86700000e-01 5.11800000e-01 9.39500000e-01 5.52800000e-01 7.83900000e-01 5.17600000e-01 - 9.44600000e-01 2.26000000e-02 1.27260000e+00 6.30000000e-03 1.39140000e+00 5.20000000e-03 1.71000000e+00 2.16000000e-02 1.97290000e+00 3.93000000e-02 -2.40000000e-02 2.92000000e-02 - 1.10720000e+00 -5.80000000e-03 5.61000000e-01 1.16880000e+00 4.94400000e-01 1.46890000e+00 5.61400000e-01 1.16970000e+00 4.63100000e-01 1.26740000e+00 5.47800000e-01 1.17840000e+00 - 3.13000000e-02 -3.84000000e-02 -2.07000000e-02 9.55000000e-02 3.31000000e-02 1.55400000e-01 -5.82000000e-02 7.05000000e-02 1.52000000e-02 -1.66000000e-02 -4.43000000e-02 5.35000000e-02 - 1.14920000e+00 -5.43000000e-02 4.72200000e-01 3.70900000e-01 3.00100000e-01 3.91000000e-01 4.58100000e-01 3.86900000e-01 3.26400000e-01 3.46200000e-01 4.37900000e-01 4.10400000e-01 - 1.63600000e-01 9.28500000e-01 4.45000000e-02 7.57300000e-01 5.59000000e-02 5.88100000e-01 9.76000000e-02 6.93600000e-01 6.74000000e-02 5.66100000e-01 8.50000000e-03 8.00600000e-01 - 4.95600000e-01 5.37500000e-01 5.63200000e-01 1.09290000e+00 8.78100000e-01 9.66300000e-01 7.28600000e-01 8.93400000e-01 8.31400000e-01 1.04440000e+00 6.96900000e-01 9.32600000e-01 - -1.57000000e-02 1.12580000e+00 -1.49000000e-02 8.01100000e-01 -4.27000000e-02 6.86300000e-01 -5.00000000e-02 8.39000000e-01 4.20000000e-03 6.15300000e-01 5.46000000e-02 7.15100000e-01 - 1.08450000e+00 2.75000000e-02 7.63500000e-01 1.11000000e-01 6.18300000e-01 8.49000000e-02 6.60700000e-01 2.35100000e-01 5.56300000e-01 1.38700000e-01 7.29600000e-01 1.54400000e-01 - 5.69300000e-01 5.39900000e-01 8.39000000e-01 9.04400000e-01 9.44400000e-01 9.35900000e-01 1.77710000e+00 -5.44000000e-02 1.97990000e+00 2.66000000e-02 -2.54000000e-02 1.76200000e+00 - 2.90800000e-01 2.67600000e-01 3.23900000e-01 5.69300000e-01 3.78300000e-01 6.39200000e-01 3.54800000e-01 5.35900000e-01 4.15100000e-01 6.08100000e-01 2.94000000e-01 6.06900000e-01 - 1.08220000e+00 2.73000000e-02 7.01200000e-01 8.25600000e-01 6.68400000e-01 3.24600000e-01 7.55800000e-01 7.65400000e-01 5.91300000e-01 3.88100000e-01 8.01300000e-01 7.07600000e-01 - 5.19500000e-01 4.16300000e-01 9.03000000e-01 5.45900000e-01 9.02100000e-01 7.75100000e-01 7.43600000e-01 7.35300000e-01 8.64500000e-01 8.40500000e-01 7.73800000e-01 7.01400000e-01 - 1.79600000e-01 9.21200000e-01 7.84000000e-02 6.03900000e-01 1.15800000e-01 4.37800000e-01 4.31000000e-02 6.45800000e-01 3.87000000e-02 5.17700000e-01 1.28300000e-01 5.48500000e-01 - 6.61500000e-01 3.64800000e-01 9.02000000e-01 7.26900000e-01 1.07060000e+00 7.81500000e-01 9.64900000e-01 6.60100000e-01 1.07860000e+00 8.02300000e-01 9.48400000e-01 6.77900000e-01 - 4.57100000e-01 4.90400000e-01 7.92900000e-01 6.72500000e-01 6.68500000e-01 1.04330000e+00 7.56100000e-01 7.18600000e-01 1.00720000e+00 6.69000000e-01 7.43900000e-01 7.28300000e-01 - 1.14970000e+00 
- [several hundred removed lines of 12-column floating-point data from a dataset file; rows elided for brevity]
4.23000000e-02 1.94410000e+00 4.66000000e-02 -5.70000000e-02 - 2.13400000e-01 5.98700000e-01 3.82000000e-01 8.78800000e-01 3.39200000e-01 1.11150000e+00 2.83600000e-01 9.95500000e-01 3.52000000e-01 1.11860000e+00 3.17300000e-01 9.55500000e-01 - 1.10670000e+00 -2.60000000e-03 1.27120000e+00 4.75600000e-01 9.39900000e-01 1.03900000e+00 1.57610000e+00 1.22700000e-01 8.14300000e-01 1.05450000e+00 1.38120000e+00 3.43000000e-01 - 6.27400000e-01 4.75500000e-01 2.49500000e-01 1.49580000e+00 2.34200000e-01 8.38800000e-01 2.59500000e-01 1.48440000e+00 2.78700000e-01 7.53400000e-01 3.96700000e-01 1.32050000e+00 - 5.57000000e-01 1.23200000e-01 7.34300000e-01 3.58900000e-01 9.09200000e-01 3.23300000e-01 7.82000000e-01 3.04500000e-01 8.81800000e-01 3.78300000e-01 7.06100000e-01 3.98200000e-01 - 1.81600000e-01 4.11400000e-01 4.07000000e-01 5.10500000e-01 4.68700000e-01 5.87100000e-01 1.60970000e+00 1.41600000e-01 8.98000000e-02 1.89200000e+00 4.93000000e-02 -5.99000000e-02 - 5.23000000e-02 1.04630000e+00 -2.44000000e-02 1.22280000e+00 -1.14100000e-01 1.02290000e+00 9.85000000e-02 1.07960000e+00 7.96000000e-02 7.69900000e-01 -7.19000000e-02 1.28250000e+00 - 8.47000000e-02 6.42500000e-01 2.04400000e-01 9.27800000e-01 3.35700000e-01 9.34000000e-01 2.03500000e-01 9.30700000e-01 2.09200000e-01 1.10110000e+00 2.49600000e-01 8.73200000e-01 - 3.88700000e-01 1.75200000e-01 5.00400000e-01 4.09600000e-01 5.46600000e-01 4.89700000e-01 5.51600000e-01 3.50800000e-01 5.15400000e-01 5.43700000e-01 5.21100000e-01 3.83300000e-01 - 2.99600000e-01 4.86600000e-01 6.31100000e-01 4.64800000e-01 8.38000000e-01 3.58200000e-01 1.71900000e+00 1.50000000e-02 1.39000000e-02 1.98380000e+00 -2.12000000e-02 2.54000000e-02 - 2.81900000e-01 2.93800000e-01 5.12400000e-01 3.89500000e-01 6.98500000e-01 3.12600000e-01 1.64160000e+00 1.03300000e-01 -8.50000000e-02 2.10380000e+00 -2.97000000e-02 3.73000000e-02 - 1.06410000e+00 5.17000000e-02 6.02500000e-01 6.00000000e-03 5.54100000e-01 -5.53000000e-02 5.78600000e-01 4.22000000e-02 4.93600000e-01 3.60000000e-03 5.92400000e-01 2.60000000e-02 - 1.06040000e+00 5.02000000e-02 4.80500000e-01 7.02600000e-01 3.53500000e-01 5.35700000e-01 4.95800000e-01 6.84600000e-01 3.99200000e-01 4.58400000e-01 4.63100000e-01 7.24800000e-01 - 5.49000000e-01 4.84600000e-01 7.79200000e-01 5.86200000e-01 9.89800000e-01 4.76200000e-01 1.68030000e+00 6.23000000e-02 1.92990000e+00 8.32000000e-02 3.50000000e-03 -5.30000000e-03 - 1.69500000e-01 6.22000000e-01 5.56000000e-01 5.36000000e-01 6.32700000e-01 5.86400000e-01 1.77350000e+00 -4.76000000e-02 -2.06000000e-02 2.02340000e+00 9.30000000e-03 -9.80000000e-03 - 1.23700000e-01 2.78400000e-01 3.94700000e-01 1.93000000e-01 4.81100000e-01 1.80400000e-01 3.40200000e-01 2.57700000e-01 3.79300000e-01 3.12800000e-01 4.57900000e-01 1.18900000e-01 - 1.60800000e-01 7.78000000e-02 1.72200000e-01 2.12000000e-01 2.03800000e-01 2.36500000e-01 2.62600000e-01 1.06600000e-01 2.25800000e-01 2.17800000e-01 1.61600000e-01 2.27600000e-01 - 4.36000000e-02 6.15900000e-01 -4.08000000e-02 1.09200000e+00 -1.11100000e-01 1.32380000e+00 9.48000000e-02 9.33900000e-01 4.78000000e-02 1.14810000e+00 -1.70000000e-02 1.06470000e+00 - 1.06000000e-02 8.72400000e-01 3.00700000e-01 8.96600000e-01 4.36000000e-01 8.78400000e-01 1.05000000e-02 1.71740000e+00 2.85000000e-02 1.97010000e+00 2.73000000e-02 -3.56000000e-02 - 7.65600000e-01 -1.12300000e-01 9.86300000e-01 8.06000000e-02 1.17530000e+00 2.66000000e-02 1.01630000e+00 4.10000000e-02 1.13590000e+00 9.36000000e-02 1.09900000e+00 -5.37000000e-02 - 1.95100000e-01 
9.21200000e-01 1.03400000e-01 8.73200000e-01 1.40800000e-01 6.13800000e-01 2.69200000e-01 6.77800000e-01 1.64200000e-01 5.69800000e-01 1.59500000e-01 8.05300000e-01 - 3.77100000e-01 5.04000000e-01 5.15200000e-01 8.85600000e-01 7.19700000e-01 8.50000000e-01 5.71200000e-01 8.16200000e-01 7.00900000e-01 8.98400000e-01 5.12700000e-01 8.85400000e-01 - 4.20500000e-01 2.31500000e-01 7.02500000e-01 3.04900000e-01 8.17600000e-01 3.27600000e-01 7.14800000e-01 2.92400000e-01 8.54800000e-01 3.05000000e-01 7.21800000e-01 2.81900000e-01 - 4.09600000e-01 3.71000000e-01 6.49100000e-01 5.68100000e-01 7.21900000e-01 6.71500000e-01 7.16100000e-01 4.92400000e-01 6.87700000e-01 7.35800000e-01 6.70300000e-01 5.45200000e-01 - 3.03600000e-01 2.87900000e-01 5.12300000e-01 4.10700000e-01 5.93700000e-01 4.59900000e-01 1.71000000e+00 2.58000000e-02 2.50000000e-03 1.99880000e+00 -5.00000000e-02 5.89000000e-02 - 8.70700000e-01 2.20500000e-01 3.40100000e-01 1.39510000e+00 2.85100000e-01 7.77700000e-01 4.12300000e-01 1.31150000e+00 2.63200000e-01 7.67200000e-01 3.10700000e-01 1.42790000e+00 - 1.79500000e-01 8.30000000e-01 3.92400000e-01 9.49600000e-01 5.73800000e-01 8.73100000e-01 1.67540000e+00 6.85000000e-02 4.98000000e-02 1.94610000e+00 -5.70000000e-02 6.75000000e-02 - 1.36000000e-02 7.33900000e-01 2.06900000e-01 8.48500000e-01 1.99000000e-01 9.91500000e-01 2.70000000e-02 1.69890000e+00 1.48600000e-01 1.82570000e+00 7.20000000e-02 -8.63000000e-02 - 1.09580000e+00 1.33000000e-02 6.31500000e-01 1.08880000e+00 5.14500000e-01 9.03400000e-01 6.03100000e-01 1.12610000e+00 3.87200000e-01 9.80700000e-01 6.56800000e-01 1.06210000e+00 - 3.75000000e-01 1.92600000e-01 6.65400000e-01 2.20500000e-01 7.67300000e-01 2.41100000e-01 1.78490000e+00 -6.22000000e-02 4.85000000e-02 1.94210000e+00 -2.81000000e-02 3.25000000e-02 - 8.94100000e-01 2.04500000e-01 4.41200000e-01 1.27790000e+00 3.62900000e-01 1.01450000e+00 3.66400000e-01 1.37740000e+00 3.12300000e-01 1.01570000e+00 4.05500000e-01 1.31990000e+00 - 1.12730000e+00 -2.84000000e-02 4.85700000e-01 3.26600000e-01 4.15100000e-01 2.36900000e-01 4.63900000e-01 3.47600000e-01 3.96200000e-01 2.42500000e-01 4.93200000e-01 3.16500000e-01 - 5.35300000e-01 4.30800000e-01 8.95400000e-01 6.08300000e-01 9.69400000e-01 7.55900000e-01 7.66800000e-01 7.65300000e-01 1.04140000e+00 6.94300000e-01 8.04400000e-01 7.11900000e-01 - -9.50000000e-03 4.51400000e-01 2.82000000e-01 4.70600000e-01 4.71300000e-01 3.87700000e-01 -4.49000000e-02 1.78930000e+00 1.36000000e-02 -1.76000000e-02 -2.00000000e-04 3.00000000e-04 - 1.14200000e+00 -4.30000000e-02 5.37500000e-01 4.04900000e-01 3.36700000e-01 4.22300000e-01 4.40600000e-01 5.16800000e-01 4.08000000e-01 3.22600000e-01 4.31800000e-01 5.30200000e-01 - 4.91200000e-01 6.07600000e-01 1.42300000e-01 1.61320000e+00 1.71400000e-01 9.33500000e-01 1.75600000e-01 1.57710000e+00 2.37500000e-01 8.21000000e-01 2.23200000e-01 1.51680000e+00 - 4.67600000e-01 6.49600000e-01 8.41100000e-01 8.34700000e-01 8.66800000e-01 9.42700000e-01 1.75800000e+00 -3.08000000e-02 8.00000000e-04 1.99260000e+00 -1.71000000e-02 1.75120000e+00 - 4.83000000e-01 5.66700000e-01 8.69400000e-01 7.53500000e-01 8.44500000e-01 1.03520000e+00 8.56400000e-01 7.70400000e-01 1.01570000e+00 8.55200000e-01 8.01000000e-01 8.38100000e-01 - 9.85000000e-02 1.01230000e+00 1.93000000e-02 1.65990000e+00 3.87000000e-02 9.95600000e-01 6.17000000e-02 1.60860000e+00 4.84000000e-02 9.53600000e-01 5.37000000e-02 1.62360000e+00 - -2.40000000e-02 1.03200000e+00 1.88200000e-01 1.13250000e+00 4.07900000e-01 1.01500000e+00 
6.18000000e-02 1.65760000e+00 3.39000000e-02 1.95800000e+00 -1.84000000e-02 2.39000000e-02 - 3.26300000e-01 4.12000000e-01 4.95900000e-01 6.59400000e-01 5.90800000e-01 7.21800000e-01 5.92800000e-01 5.43300000e-01 6.22300000e-01 7.00100000e-01 5.51600000e-01 5.95300000e-01 - 1.00110000e+00 1.26600000e-01 8.04000000e-01 9.14300000e-01 5.85400000e-01 4.90500000e-01 6.79500000e-01 1.06140000e+00 5.95600000e-01 4.45300000e-01 6.51800000e-01 1.09750000e+00 - 1.24600000e-01 1.60500000e-01 4.42700000e-01 1.56000000e-01 4.93500000e-01 2.38800000e-01 1.68010000e+00 5.94000000e-02 2.74000000e-02 -3.14000000e-02 -2.61000000e-02 2.92000000e-02 - 4.95800000e-01 1.92300000e-01 8.55300000e-01 2.10000000e-01 9.13400000e-01 3.10000000e-01 7.73300000e-01 3.09500000e-01 9.53300000e-01 2.84900000e-01 8.89400000e-01 1.72600000e-01 - 1.10000000e+00 7.90000000e-03 6.74900000e-01 -1.20000000e-03 5.19500000e-01 4.00000000e-02 6.15100000e-01 6.75000000e-02 4.55300000e-01 1.04400000e-01 6.31700000e-01 4.48000000e-02 - 2.30900000e-01 7.20200000e-01 5.81500000e-01 6.75000000e-01 6.62100000e-01 7.21400000e-01 1.75090000e+00 -2.34000000e-02 -3.91000000e-02 2.04920000e+00 3.05000000e-02 -3.30000000e-02 - 1.21800000e-01 8.32700000e-01 5.17600000e-01 7.32300000e-01 5.33700000e-01 8.54500000e-01 1.86900000e+00 -1.62200000e-01 6.79000000e-02 1.92080000e+00 1.34000000e-02 -1.67000000e-02 - 6.61000000e-01 -1.44000000e-02 9.08800000e-01 6.40000000e-02 1.05100000e+00 3.64000000e-02 1.61510000e+00 1.37500000e-01 1.99990000e+00 2.00000000e-04 9.86000000e-02 -1.20300000e-01 - 5.24700000e-01 1.51900000e-01 8.00700000e-01 1.95800000e-01 9.53900000e-01 1.54000000e-01 1.72350000e+00 1.24000000e-02 -4.86000000e-02 2.05290000e+00 4.55000000e-02 -5.58000000e-02 - 1.01550000e+00 1.05900000e-01 9.46800000e-01 7.79200000e-01 7.60900000e-01 1.00270000e+00 9.43100000e-01 7.87400000e-01 6.87900000e-01 9.28100000e-01 9.15500000e-01 8.15200000e-01 - 3.49400000e-01 5.36900000e-01 6.41500000e-01 5.64600000e-01 7.41600000e-01 5.85200000e-01 1.66270000e+00 8.06000000e-02 3.47000000e-02 1.96080000e+00 -3.14000000e-02 3.67000000e-02 - 4.92900000e-01 3.35900000e-01 7.49400000e-01 4.01700000e-01 8.67900000e-01 3.99400000e-01 1.75540000e+00 -2.43000000e-02 -4.40000000e-03 2.00570000e+00 1.96000000e-02 -2.37000000e-02 - 3.85500000e-01 6.67700000e-01 8.24100000e-01 5.20700000e-01 8.21900000e-01 6.63500000e-01 1.64710000e+00 1.01700000e-01 7.10000000e-03 1.99710000e+00 6.08000000e-02 -7.25000000e-02 - 5.05900000e-01 1.20400000e-01 8.57600000e-01 1.09300000e-01 9.38100000e-01 1.71200000e-01 8.24900000e-01 1.53500000e-01 8.68900000e-01 2.74300000e-01 8.41200000e-01 1.34100000e-01 - 1.14590000e+00 -4.32000000e-02 5.32300000e-01 1.20540000e+00 4.45800000e-01 6.49400000e-01 6.24100000e-01 1.09310000e+00 4.13500000e-01 6.56400000e-01 4.64700000e-01 1.28520000e+00 - 5.87200000e-01 -5.06000000e-02 8.23400000e-01 3.53000000e-02 9.24200000e-01 6.07000000e-02 7.98600000e-01 6.97000000e-02 9.60500000e-01 2.89000000e-02 7.94800000e-01 7.39000000e-02 - 7.10500000e-01 2.53100000e-01 1.12810000e+00 1.27800000e-01 1.12880000e+00 2.64900000e-01 1.68060000e+00 6.10000000e-02 1.98920000e+00 1.19000000e-02 -4.17000000e-02 5.12000000e-02 - 1.13240000e+00 -3.12000000e-02 5.40500000e-01 1.38300000e-01 4.72700000e-01 8.03000000e-02 5.02100000e-01 1.81800000e-01 4.48000000e-01 1.00400000e-01 4.64500000e-01 2.31400000e-01 - 4.21000000e-02 1.05430000e+00 1.10000000e-01 1.59850000e+00 1.15100000e-01 1.78640000e+00 4.14000000e-02 1.68400000e+00 1.34400000e-01 1.84380000e+00 
-3.30000000e-02 1.77300000e+00 - 1.53000000e-01 4.79300000e-01 1.88100000e-01 8.07100000e-01 2.24000000e-01 9.11500000e-01 8.85000000e-02 9.26500000e-01 1.46100000e-01 1.01910000e+00 1.65300000e-01 8.36400000e-01 - -4.53000000e-02 2.93300000e-01 1.84100000e-01 3.77600000e-01 3.40100000e-01 3.34100000e-01 -9.38000000e-02 1.84160000e+00 3.23000000e-02 -3.94000000e-02 -3.35000000e-02 3.96000000e-02 - -1.20000000e-02 4.89300000e-01 1.60500000e-01 6.31200000e-01 2.12300000e-01 7.12200000e-01 9.10000000e-03 1.72430000e+00 -6.40000000e-03 7.40000000e-03 2.99000000e-02 -3.60000000e-02 - 8.63600000e-01 -1.01000000e-02 1.11560000e+00 5.91000000e-02 1.26770000e+00 1.88000000e-02 1.67770000e+00 6.74000000e-02 1.96380000e+00 3.89000000e-02 1.31000000e-02 -1.72000000e-02 - 3.53900000e-01 5.95700000e-01 5.79900000e-01 6.97200000e-01 7.66800000e-01 6.19300000e-01 1.71060000e+00 2.34000000e-02 -3.71000000e-02 2.04150000e+00 3.55000000e-02 -4.19000000e-02 - 1.13350000e+00 -3.25000000e-02 7.22200000e-01 1.00120000e+00 5.90100000e-01 8.28700000e-01 6.67900000e-01 1.06580000e+00 4.73600000e-01 8.98600000e-01 7.82900000e-01 9.40400000e-01 - 1.15490000e+00 -5.87000000e-02 7.63700000e-01 9.75700000e-01 6.11300000e-01 5.41100000e-01 8.77700000e-01 8.38300000e-01 6.41400000e-01 4.60800000e-01 7.43900000e-01 9.94700000e-01 - 1.02200000e-01 9.93300000e-01 6.44000000e-02 6.74600000e-01 -1.09000000e-02 6.24200000e-01 3.17000000e-02 7.12200000e-01 1.90000000e-02 5.77400000e-01 5.08000000e-02 6.91500000e-01 - 7.11400000e-01 1.41300000e-01 1.21750000e+00 1.03800000e-01 1.29290000e+00 2.26200000e-01 1.12490000e+00 2.17400000e-01 1.37540000e+00 1.57900000e-01 1.02940000e+00 3.22100000e-01 - 4.79900000e-01 4.71600000e-01 7.41100000e-01 7.49800000e-01 8.22600000e-01 8.79600000e-01 7.36400000e-01 7.62000000e-01 8.23800000e-01 9.04600000e-01 7.58500000e-01 7.27500000e-01 - 1.12520000e+00 -2.17000000e-02 5.52900000e-01 3.29500000e-01 3.65900000e-01 3.52800000e-01 5.26700000e-01 3.65600000e-01 4.85400000e-01 1.93300000e-01 4.94500000e-01 3.99900000e-01 - 1.23100000e-01 6.91900000e-01 1.40300000e-01 1.14400000e+00 1.40400000e-01 1.32340000e+00 2.13000000e-01 1.05620000e+00 1.96200000e-01 1.28140000e+00 2.76800000e-01 9.78900000e-01 - 5.45900000e-01 5.75900000e-01 3.05700000e-01 9.06200000e-01 1.81800000e-01 7.27700000e-01 2.94600000e-01 9.15500000e-01 2.97000000e-01 5.67300000e-01 2.86700000e-01 9.24600000e-01 - 4.66200000e-01 5.70600000e-01 8.30000000e-01 7.85400000e-01 8.70800000e-01 9.79900000e-01 5.64600000e-01 1.09410000e+00 8.59600000e-01 1.02300000e+00 8.07100000e-01 8.10700000e-01 - 2.69600000e-01 8.52600000e-01 6.12800000e-01 8.31400000e-01 7.79500000e-01 7.67100000e-01 1.77570000e+00 -5.05000000e-02 7.26000000e-02 1.91700000e+00 3.16000000e-02 1.70070000e+00 - 6.38700000e-01 1.42900000e-01 9.80800000e-01 2.46400000e-01 1.13880000e+00 2.53400000e-01 8.47900000e-01 4.02700000e-01 1.16500000e+00 2.43100000e-01 9.86100000e-01 2.38000000e-01 - 2.46100000e-01 6.91800000e-01 5.89300000e-01 6.53000000e-01 6.43000000e-01 7.35100000e-01 1.80830000e+00 -8.26000000e-02 6.23000000e-02 1.92860000e+00 -2.42000000e-02 2.81000000e-02 - 9.89700000e-01 1.35900000e-01 5.04400000e-01 1.22160000e+00 4.07600000e-01 1.00680000e+00 4.92100000e-01 1.23810000e+00 3.34400000e-01 1.02540000e+00 4.76700000e-01 1.25790000e+00 - 2.69000000e-01 8.14500000e-01 1.89400000e-01 1.52240000e+00 1.43800000e-01 1.10360000e+00 1.35600000e-01 1.58510000e+00 1.28000000e-02 1.21540000e+00 1.08700000e-01 1.62300000e+00 - 1.15400000e+00 -5.90000000e-02 6.02400000e-01 
4.91200000e-01 4.59000000e-01 3.78300000e-01 5.53900000e-01 5.49000000e-01 4.00300000e-01 4.28100000e-01 4.54500000e-01 6.67200000e-01 - 5.63500000e-01 1.81300000e-01 8.49900000e-01 2.08900000e-01 1.02400000e+00 1.43700000e-01 1.78100000e+00 -5.55000000e-02 4.68000000e-02 1.94560000e+00 7.58000000e-02 -9.07000000e-02 - 3.82400000e-01 3.85500000e-01 6.04900000e-01 5.96500000e-01 7.79100000e-01 5.74300000e-01 6.06600000e-01 5.93500000e-01 7.98700000e-01 5.71800000e-01 5.72300000e-01 6.33000000e-01 - 2.02400000e-01 2.74800000e-01 1.73200000e-01 7.30000000e-02 1.41200000e-01 7.67000000e-02 1.13000000e-01 1.48000000e-01 3.23000000e-02 2.01400000e-01 1.06900000e-01 1.53400000e-01 - 4.26800000e-01 6.32600000e-01 7.14300000e-01 6.66100000e-01 8.41200000e-01 6.56300000e-01 1.71090000e+00 3.20000000e-02 5.71000000e-02 1.93130000e+00 -1.01300000e-01 1.85110000e+00 - 2.44000000e-01 6.56000000e-01 5.46400000e-01 6.68800000e-01 7.56900000e-01 5.64500000e-01 1.79120000e+00 -6.75000000e-02 -4.81000000e-02 2.06080000e+00 1.34000000e-02 -1.59000000e-02 - 2.62600000e-01 8.58400000e-01 2.14000000e-01 6.57000000e-01 1.72300000e-01 5.24900000e-01 8.24000000e-02 8.15100000e-01 1.59700000e-01 5.24000000e-01 1.43900000e-01 7.43400000e-01 - 1.04070000e+00 8.04000000e-02 7.17400000e-01 1.03640000e+00 6.76700000e-01 1.09700000e+00 8.80100000e-01 8.45600000e-01 6.94000000e-01 9.09500000e-01 9.66100000e-01 7.35000000e-01 - 6.15900000e-01 1.55400000e-01 9.37500000e-01 2.77500000e-01 8.74200000e-01 5.43900000e-01 8.54800000e-01 3.71400000e-01 8.78200000e-01 5.64200000e-01 8.52500000e-01 3.76500000e-01 - 7.28100000e-01 3.25800000e-01 1.06100000e+00 6.02600000e-01 1.18340000e+00 7.10600000e-01 1.02220000e+00 6.47900000e-01 1.37690000e+00 5.20600000e-01 1.20430000e+00 4.36400000e-01 - 3.56200000e-01 2.63600000e-01 7.12500000e-01 2.15500000e-01 8.47500000e-01 1.95200000e-01 1.68350000e+00 5.63000000e-02 8.01000000e-02 1.90540000e+00 2.64000000e-02 -3.28000000e-02 - 1.18910000e+00 -9.94000000e-02 8.84300000e-01 5.90900000e-01 5.77600000e-01 4.26700000e-01 7.46500000e-01 7.45700000e-01 4.60000000e-01 5.39700000e-01 7.99000000e-01 6.89600000e-01 - 2.64800000e-01 6.72800000e-01 7.74900000e-01 4.41600000e-01 6.89800000e-01 6.79600000e-01 1.68860000e+00 5.09000000e-02 -5.22000000e-02 2.06440000e+00 2.93000000e-02 -3.73000000e-02 - 3.85400000e-01 6.28900000e-01 6.10700000e-01 7.38500000e-01 8.20500000e-01 6.27100000e-01 1.66490000e+00 7.66000000e-02 -1.80000000e-02 2.01760000e+00 6.42000000e-02 -7.48000000e-02 - 7.06900000e-01 4.18800000e-01 1.18690000e+00 5.31800000e-01 1.24050000e+00 6.38400000e-01 1.69270000e+00 4.75000000e-02 2.06110000e+00 -7.43000000e-02 -3.90000000e-03 1.73400000e+00 - 1.67000000e-02 -1.88000000e-02 9.29000000e-02 9.57000000e-02 2.47700000e-01 5.34000000e-02 5.39000000e-02 -6.36000000e-02 -2.80000000e-03 4.70000000e-03 -6.59000000e-02 7.89000000e-02 - 1.06460000e+00 4.60000000e-02 7.61600000e-01 4.22000000e-02 5.73300000e-01 8.53000000e-02 6.73900000e-01 1.49000000e-01 5.57400000e-01 8.78000000e-02 6.07500000e-01 2.26500000e-01 - 9.36600000e-01 1.60100000e-01 1.34720000e+00 2.46100000e-01 1.37630000e+00 3.51900000e-01 1.73070000e+00 -2.00000000e-03 2.00220000e+00 -3.50000000e-03 6.68000000e-02 1.65310000e+00 - 7.31300000e-01 2.20900000e-01 1.01260000e+00 5.08100000e-01 1.22850000e+00 4.85400000e-01 1.04620000e+00 4.70000000e-01 1.18830000e+00 5.61700000e-01 1.09410000e+00 4.14200000e-01 - 4.55000000e-01 4.55000000e-01 7.26000000e-01 5.04300000e-01 9.02100000e-01 4.39500000e-01 1.84270000e+00 -1.35500000e-01 
-3.93000000e-02 2.05060000e+00 7.15000000e-02 -8.37000000e-02 - 1.06200000e-01 7.29600000e-01 3.58300000e-01 8.05600000e-01 5.57800000e-01 7.08300000e-01 -6.29000000e-02 1.80470000e+00 -2.00000000e-02 2.02460000e+00 -4.50000000e-03 4.90000000e-03 - 4.49000000e-02 1.06590000e+00 1.13400000e-01 1.60910000e+00 -3.10000000e-03 1.04480000e+00 7.73000000e-02 1.64900000e+00 3.16000000e-02 9.75800000e-01 8.26000000e-02 1.64470000e+00 - 1.20100000e-01 7.88600000e-01 1.27500000e-01 1.30460000e+00 1.79700000e-01 1.44600000e+00 7.12000000e-02 1.36690000e+00 2.15600000e-01 1.42350000e+00 1.63000000e-01 1.26150000e+00 - 3.81600000e-01 7.18800000e-01 6.37800000e-01 1.10010000e+00 7.63100000e-01 1.11140000e+00 1.72550000e+00 1.24000000e-02 1.16300000e-01 1.86080000e+00 2.46000000e-02 1.70540000e+00 - 6.31400000e-01 1.04200000e-01 9.65600000e-01 1.94900000e-01 9.19900000e-01 4.40600000e-01 9.29500000e-01 2.40500000e-01 1.00250000e+00 3.67200000e-01 8.69100000e-01 3.09900000e-01 - 8.12000000e-02 5.30900000e-01 1.34900000e-01 8.16600000e-01 3.07800000e-01 7.54000000e-01 -2.59000000e-02 1.76410000e+00 -3.32000000e-02 2.03780000e+00 -2.51000000e-02 3.22000000e-02 - 5.24900000e-01 3.50600000e-01 7.06700000e-01 6.87400000e-01 8.31400000e-01 7.55400000e-01 5.79900000e-01 8.37500000e-01 8.36000000e-01 7.70600000e-01 7.02400000e-01 6.92300000e-01 - 1.06810000e+00 4.40000000e-02 7.40400000e-01 9.94100000e-01 6.10600000e-01 5.56300000e-01 6.33200000e-01 1.12290000e+00 4.82700000e-01 6.68500000e-01 7.68100000e-01 9.58900000e-01 - 3.21700000e-01 3.24300000e-01 6.52000000e-01 3.06500000e-01 8.49700000e-01 2.13700000e-01 1.75830000e+00 -3.43000000e-02 1.51000000e-02 1.98510000e+00 -4.52000000e-02 5.23000000e-02 - 1.09570000e+00 9.60000000e-03 1.49310000e+00 2.21700000e-01 9.00400000e-01 5.56000000e-01 1.39240000e+00 3.45300000e-01 1.00550000e+00 3.61600000e-01 1.45520000e+00 2.71200000e-01 - 2.16000000e-01 3.81400000e-01 4.44000000e-01 4.69800000e-01 4.28800000e-01 6.29900000e-01 4.41700000e-01 4.75400000e-01 3.98700000e-01 6.84300000e-01 5.00100000e-01 4.07300000e-01 - 8.02000000e-02 7.62900000e-01 4.19900000e-01 7.32000000e-01 4.58000000e-01 8.24100000e-01 1.67250000e+00 6.82000000e-02 8.71000000e-02 1.89790000e+00 -3.42000000e-02 4.31000000e-02 - 7.80900000e-01 2.38200000e-01 1.13820000e+00 1.89000000e-01 1.24870000e+00 1.95900000e-01 1.64810000e+00 9.92000000e-02 2.03470000e+00 -3.82000000e-02 1.10000000e-02 -1.61000000e-02 - 5.51000000e-01 5.46500000e-01 8.65900000e-01 5.94100000e-01 9.34500000e-01 6.50800000e-01 1.73820000e+00 -5.00000000e-03 -4.50000000e-03 2.00970000e+00 3.61000000e-02 1.69060000e+00 - 1.10040000e+00 8.60000000e-03 5.77900000e-01 7.52000000e-02 4.49200000e-01 9.86000000e-02 6.05500000e-01 4.51000000e-02 4.30600000e-01 1.05600000e-01 5.26400000e-01 1.39700000e-01 - -7.12000000e-02 3.60700000e-01 -5.66000000e-02 6.58000000e-01 8.67000000e-02 6.18900000e-01 -6.32000000e-02 1.80670000e+00 -4.80000000e-02 5.76000000e-02 -3.30000000e-03 4.30000000e-03 - 4.24200000e-01 6.85400000e-01 2.72000000e-01 3.83400000e-01 2.85700000e-01 2.48300000e-01 2.88800000e-01 3.62600000e-01 1.87600000e-01 3.52400000e-01 2.99200000e-01 3.53200000e-01 - 5.30800000e-01 4.53300000e-01 7.84200000e-01 5.20100000e-01 8.02800000e-01 6.36800000e-01 1.82020000e+00 -1.03200000e-01 6.78000000e-02 1.92300000e+00 6.00000000e-02 -7.22000000e-02 - 5.42600000e-01 5.25000000e-01 7.75800000e-01 9.09100000e-01 8.39500000e-01 1.09260000e+00 7.91300000e-01 8.94900000e-01 8.54700000e-01 1.10920000e+00 7.68600000e-01 9.22400000e-01 - 
1.01250000e+00 1.08500000e-01 6.22700000e-01 1.10950000e+00 5.20700000e-01 6.34100000e-01 6.30500000e-01 1.10120000e+00 4.67700000e-01 6.58300000e-01 6.90100000e-01 1.02700000e+00 - 1.74600000e-01 7.28000000e-01 4.15100000e-01 8.09000000e-01 5.06700000e-01 8.45600000e-01 1.74810000e+00 -1.91000000e-02 3.17000000e-02 1.96690000e+00 -4.23000000e-02 5.22000000e-02 - 4.81700000e-01 2.04900000e-01 6.43900000e-01 4.52700000e-01 7.49200000e-01 4.93600000e-01 6.97000000e-01 3.85200000e-01 8.61300000e-01 3.86700000e-01 7.95800000e-01 2.72200000e-01 - 6.46800000e-01 3.85800000e-01 9.76400000e-01 6.55400000e-01 1.11680000e+00 7.39200000e-01 9.84600000e-01 6.44700000e-01 1.04900000e+00 8.47500000e-01 9.58100000e-01 6.73400000e-01 - 1.65800000e-01 4.85800000e-01 2.09800000e-01 8.20300000e-01 1.61600000e-01 1.02370000e+00 1.68200000e-01 8.74200000e-01 3.49800000e-01 8.15900000e-01 3.11700000e-01 7.03100000e-01 - 1.12980000e+00 -2.74000000e-02 1.02470000e+00 3.94600000e-01 7.55400000e-01 2.30000000e-01 1.07830000e+00 3.28100000e-01 8.40200000e-01 9.92000000e-02 9.01900000e-01 5.39600000e-01 - 5.60000000e-03 1.11340000e+00 3.75600000e-01 1.32420000e+00 4.43600000e-01 1.38660000e+00 1.74890000e+00 -2.11000000e-02 -5.00000000e-04 1.99570000e+00 3.80000000e-02 1.68940000e+00 - 6.65500000e-01 2.97600000e-01 9.27400000e-01 6.01400000e-01 1.13120000e+00 5.90900000e-01 9.83200000e-01 5.32800000e-01 1.11620000e+00 6.33800000e-01 9.46600000e-01 5.72900000e-01 - 1.49900000e-01 3.54300000e-01 2.97900000e-01 4.79500000e-01 3.04100000e-01 5.85300000e-01 3.28400000e-01 4.40600000e-01 3.68200000e-01 5.23100000e-01 2.60000000e-01 5.23400000e-01 - 3.21900000e-01 1.55900000e-01 5.26100000e-01 2.20200000e-01 5.76100000e-01 2.77400000e-01 4.91700000e-01 2.61800000e-01 5.97100000e-01 2.70000000e-01 5.21200000e-01 2.27400000e-01 - 5.40400000e-01 5.65100000e-01 7.84700000e-01 7.22800000e-01 8.77000000e-01 7.56900000e-01 1.59890000e+00 1.52500000e-01 -4.09000000e-02 2.03980000e+00 1.29000000e-02 1.71700000e+00 - 1.14490000e+00 -4.70000000e-02 5.44100000e-01 1.17770000e+00 4.40800000e-01 1.10440000e+00 4.81400000e-01 1.24900000e+00 3.94200000e-01 1.07060000e+00 5.23900000e-01 1.20700000e+00 - 6.99600000e-01 -1.34000000e-02 9.96100000e-01 1.01000000e-01 1.10060000e+00 1.51900000e-01 1.12290000e+00 -5.29000000e-02 1.15230000e+00 1.10200000e-01 9.94700000e-01 1.00200000e-01 - 1.20200000e+00 -1.15900000e-01 4.57900000e-01 1.67200000e-01 4.53900000e-01 5.07000000e-02 4.32600000e-01 2.00700000e-01 3.73300000e-01 1.39600000e-01 5.64800000e-01 4.07000000e-02 - 1.63900000e-01 8.16500000e-01 6.05000000e-02 1.50630000e+00 1.20100000e-01 1.65420000e+00 8.41000000e-02 1.48050000e+00 2.75300000e-01 1.49720000e+00 2.99000000e-02 1.54150000e+00 - 4.62100000e-01 1.58600000e-01 8.13600000e-01 1.09400000e-01 9.10600000e-01 1.40300000e-01 1.77540000e+00 -5.13000000e-02 -2.52000000e-02 2.02630000e+00 -3.00000000e-04 8.00000000e-04 - 4.76100000e-01 1.11500000e-01 7.37400000e-01 1.84600000e-01 8.24000000e-01 2.29100000e-01 6.61800000e-01 2.72900000e-01 7.46300000e-01 3.37800000e-01 7.94400000e-01 1.20300000e-01 - 3.35300000e-01 5.74200000e-01 5.72700000e-01 6.61200000e-01 7.35600000e-01 6.12800000e-01 1.73960000e+00 -1.26000000e-02 -1.19000000e-02 2.01200000e+00 -5.16000000e-02 6.08000000e-02 - 1.55300000e-01 9.39800000e-01 5.20200000e-01 8.73000000e-01 5.61600000e-01 9.74900000e-01 1.88690000e+00 -1.82000000e-01 9.93000000e-02 1.88290000e+00 8.79000000e-02 1.62500000e+00 - 5.90500000e-01 5.10100000e-01 2.46900000e-01 3.66700000e-01 2.61400000e-01 
2.39200000e-01 2.77000000e-01 3.30400000e-01 2.57800000e-01 2.30900000e-01 3.44400000e-01 2.49600000e-01 - 1.76000000e-01 6.59600000e-01 4.88000000e-02 1.14550000e+00 2.51300000e-01 1.04550000e+00 -2.95000000e-02 1.76850000e+00 -5.62000000e-02 2.06250000e+00 1.45000000e-02 -2.00000000e-02 - -1.34000000e-02 1.03820000e+00 3.42300000e-01 9.88100000e-01 4.14400000e-01 1.04240000e+00 -7.90000000e-03 1.74440000e+00 -2.80000000e-02 2.03230000e+00 -9.00000000e-03 1.14000000e-02 - 3.35900000e-01 6.75400000e-01 4.77600000e-01 1.11970000e+00 5.36000000e-01 1.28370000e+00 3.30900000e-01 1.29070000e+00 5.52300000e-01 1.29180000e+00 4.87000000e-01 1.10440000e+00 - 4.69200000e-01 2.00300000e-01 7.76700000e-01 2.00800000e-01 8.50600000e-01 2.59600000e-01 1.70470000e+00 3.71000000e-02 4.13000000e-02 1.95520000e+00 -2.20000000e-02 2.67000000e-02 - -1.06000000e-02 7.31500000e-01 2.98000000e-02 9.95500000e-01 1.23700000e-01 1.01800000e+00 -4.87000000e-02 1.79030000e+00 -5.23000000e-02 2.06110000e+00 2.66000000e-02 -3.11000000e-02 - 6.46500000e-01 2.52700000e-01 1.02260000e+00 3.80700000e-01 1.18620000e+00 4.09200000e-01 8.34300000e-01 6.07700000e-01 9.34800000e-01 7.27400000e-01 9.38500000e-01 4.84800000e-01 - 9.00000000e-04 6.71800000e-01 1.28200000e-01 8.69100000e-01 2.88300000e-01 8.20200000e-01 -1.43000000e-02 1.74720000e+00 -3.98000000e-02 2.05040000e+00 -5.15000000e-02 6.09000000e-02 - 2.41000000e-01 4.82400000e-01 6.57300000e-01 3.61100000e-01 6.63300000e-01 4.94300000e-01 1.68670000e+00 5.73000000e-02 -6.47000000e-02 2.07720000e+00 7.60000000e-02 -9.06000000e-02 - -7.70000000e-03 3.02200000e-01 4.25000000e-02 4.14800000e-01 4.93000000e-02 4.71300000e-01 1.59100000e-01 2.75200000e-01 1.25100000e-01 3.87100000e-01 1.25700000e-01 3.14500000e-01 - 4.87200000e-01 6.17600000e-01 8.34100000e-01 8.80100000e-01 8.80100000e-01 9.73300000e-01 1.77070000e+00 -4.89000000e-02 3.74000000e-02 1.94900000e+00 9.91000000e-02 1.61030000e+00 - 5.25300000e-01 5.12700000e-01 7.97400000e-01 8.31200000e-01 9.16500000e-01 9.33800000e-01 8.08200000e-01 8.20600000e-01 1.07090000e+00 7.87400000e-01 7.77500000e-01 8.53600000e-01 - 5.34500000e-01 3.65500000e-01 6.34500000e-01 8.13900000e-01 7.45700000e-01 8.97000000e-01 7.40200000e-01 6.89300000e-01 8.47600000e-01 8.00700000e-01 7.07300000e-01 7.28100000e-01 - -9.10000000e-03 1.13030000e+00 4.20000000e-02 8.40200000e-01 7.58000000e-02 6.17000000e-01 7.71000000e-02 7.99800000e-01 -5.50000000e-03 6.99900000e-01 7.18000000e-02 8.03400000e-01 - 5.55600000e-01 3.41000000e-01 8.49400000e-01 5.61500000e-01 9.30200000e-01 6.78000000e-01 8.30700000e-01 5.82400000e-01 9.58700000e-01 6.72700000e-01 7.96800000e-01 6.20700000e-01 - 3.74400000e-01 2.18600000e-01 6.40300000e-01 2.75000000e-01 6.72200000e-01 3.86500000e-01 6.87000000e-01 2.18700000e-01 7.68000000e-01 2.88800000e-01 7.37200000e-01 1.61300000e-01 - 2.06000000e-01 4.81200000e-01 5.33900000e-01 4.64300000e-01 5.87200000e-01 5.41000000e-01 1.85800000e+00 -1.47400000e-01 1.08100000e-01 1.87720000e+00 1.03100000e-01 -1.25400000e-01 - -1.02000000e-02 4.11800000e-01 3.22500000e-01 3.87000000e-01 5.16200000e-01 2.99100000e-01 7.00000000e-02 1.65500000e+00 -2.53000000e-02 3.05000000e-02 4.01000000e-02 -5.01000000e-02 - 4.14000000e-01 4.14900000e-01 6.19700000e-01 5.42600000e-01 7.65200000e-01 5.07100000e-01 1.73680000e+00 -3.60000000e-03 2.81000000e-02 1.96730000e+00 -5.79000000e-02 6.83000000e-02 - 1.15450000e+00 -5.77000000e-02 6.15500000e-01 6.66800000e-01 4.85000000e-01 4.49100000e-01 6.44100000e-01 6.27100000e-01 5.00200000e-01 
4.08000000e-01 6.58100000e-01 6.18200000e-01 - 3.55500000e-01 3.22400000e-01 6.25200000e-01 4.24100000e-01 6.64900000e-01 5.40200000e-01 6.05300000e-01 4.48000000e-01 7.13700000e-01 4.98600000e-01 6.10200000e-01 4.36500000e-01 - 2.78200000e-01 3.09300000e-01 5.11300000e-01 4.02400000e-01 7.85300000e-01 2.20200000e-01 1.79200000e+00 -6.57000000e-02 -3.24000000e-02 2.03500000e+00 5.80000000e-03 -4.70000000e-03 - 3.70000000e-02 8.44300000e-01 3.56900000e-01 8.37500000e-01 4.51400000e-01 8.68100000e-01 1.70670000e+00 2.97000000e-02 -1.39500000e-01 2.16990000e+00 -3.41000000e-02 4.05000000e-02 - 5.80100000e-01 2.83000000e-01 9.34700000e-01 4.06500000e-01 1.11070000e+00 4.10700000e-01 8.64500000e-01 4.92300000e-01 1.13780000e+00 4.01300000e-01 9.63200000e-01 3.73200000e-01 - 3.56500000e-01 6.83000000e-01 6.76400000e-01 9.32400000e-01 7.34200000e-01 1.10520000e+00 7.27300000e-01 8.71600000e-01 9.51000000e-01 8.79300000e-01 6.82000000e-01 9.25900000e-01 - -3.57000000e-02 4.76500000e-01 3.28800000e-01 4.10800000e-01 3.86300000e-01 4.83900000e-01 3.50000000e-03 1.72580000e+00 -2.97000000e-02 3.65000000e-02 -6.70000000e-03 9.10000000e-03 - 3.90600000e-01 4.00400000e-01 7.47000000e-01 3.46800000e-01 8.13300000e-01 4.11700000e-01 1.73050000e+00 5.90000000e-03 3.21000000e-02 1.95920000e+00 -6.60000000e-03 8.80000000e-03 - 1.04210000e+00 7.61000000e-02 1.08590000e+00 6.39200000e-01 7.05300000e-01 3.64100000e-01 1.02520000e+00 7.12200000e-01 7.87400000e-01 2.34300000e-01 1.01950000e+00 7.15700000e-01 - 7.39000000e-02 1.25400000e-01 1.67900000e-01 1.33100000e-01 2.41100000e-01 9.01000000e-02 1.80600000e-01 1.18500000e-01 1.38600000e-01 2.20400000e-01 1.06000000e-01 2.07200000e-01 - 7.19100000e-01 1.27100000e-01 1.14980000e+00 1.69100000e-01 1.30030000e+00 2.07000000e-01 1.15430000e+00 1.63900000e-01 1.39780000e+00 1.10200000e-01 1.20260000e+00 1.08200000e-01 - 4.03000000e-02 6.10200000e-01 3.80700000e-01 5.76500000e-01 4.44800000e-01 6.41400000e-01 5.46000000e-02 1.66850000e+00 -5.30000000e-03 2.00850000e+00 -2.68000000e-02 3.44000000e-02 - 1.29400000e-01 3.40500000e-01 2.42500000e-01 4.86200000e-01 2.17500000e-01 6.24300000e-01 1.86300000e-01 5.55700000e-01 2.83300000e-01 5.61700000e-01 2.57300000e-01 4.70900000e-01 - 1.04360000e+00 2.57000000e-02 1.23860000e+00 1.62100000e-01 1.35760000e+00 1.68300000e-01 1.80530000e+00 -8.77000000e-02 2.01490000e+00 -2.01000000e-02 -5.03000000e-02 1.79310000e+00 - 9.61000000e-02 7.50900000e-01 3.86600000e-01 7.80900000e-01 5.41800000e-01 7.39400000e-01 1.66960000e+00 7.20000000e-02 -8.98000000e-02 2.10620000e+00 6.30000000e-03 -9.70000000e-03 - 6.67300000e-01 3.29300000e-01 8.74300000e-01 4.49700000e-01 1.16280000e+00 2.52600000e-01 1.68410000e+00 6.05000000e-02 2.04640000e+00 -5.42000000e-02 2.83000000e-02 -3.21000000e-02 - 2.09800000e-01 4.48200000e-01 2.99600000e-01 7.31900000e-01 3.02300000e-01 8.79800000e-01 2.92200000e-01 7.40700000e-01 3.03500000e-01 8.98600000e-01 1.89800000e-01 8.64100000e-01 - 9.87800000e-01 1.21200000e-01 3.58200000e-01 1.13520000e+00 2.71100000e-01 7.38700000e-01 3.33800000e-01 1.15670000e+00 3.48100000e-01 6.21100000e-01 3.88400000e-01 1.10150000e+00 - 7.73800000e-01 6.69000000e-02 1.16400000e+00 1.60000000e-01 1.23490000e+00 2.84700000e-01 1.06330000e+00 2.76300000e-01 1.37670000e+00 1.43800000e-01 1.04790000e+00 2.91000000e-01 - 1.13430000e+00 -3.44000000e-02 4.68700000e-01 6.61200000e-01 3.03300000e-01 5.67300000e-01 4.05100000e-01 7.40100000e-01 3.61800000e-01 4.77800000e-01 4.16300000e-01 7.23000000e-01 - 1.68000000e-01 9.37600000e-01 
3.56000000e-02 7.40800000e-01 1.76800000e-01 4.27600000e-01 1.25300000e-01 6.33300000e-01 5.52000000e-02 5.59100000e-01 1.37500000e-01 6.16800000e-01 - 1.08500000e+00 2.41000000e-02 4.56400000e-01 2.60200000e-01 4.43200000e-01 1.31300000e-01 5.70600000e-01 1.25000000e-01 4.03300000e-01 1.68900000e-01 5.54900000e-01 1.39700000e-01 - 1.92500000e-01 5.14400000e-01 2.46900000e-01 8.70200000e-01 2.81100000e-01 9.94900000e-01 2.43800000e-01 8.78200000e-01 3.49600000e-01 9.34100000e-01 2.83400000e-01 8.28400000e-01 - 3.87900000e-01 4.00000000e-01 5.89700000e-01 6.42900000e-01 8.32800000e-01 5.44900000e-01 6.38100000e-01 5.83800000e-01 7.86900000e-01 6.19500000e-01 7.08000000e-01 5.08000000e-01 - 1.56200000e-01 2.11800000e-01 5.96900000e-01 5.93000000e-02 7.00400000e-01 8.14000000e-02 1.64710000e+00 9.64000000e-02 2.54000000e-02 -2.93000000e-02 -1.60000000e-03 5.00000000e-04 - 6.20200000e-01 3.07800000e-01 1.06350000e+00 3.71500000e-01 1.14910000e+00 4.87800000e-01 9.37700000e-01 5.18200000e-01 1.06840000e+00 6.15200000e-01 9.65300000e-01 4.83900000e-01 - 8.73100000e-01 -2.09000000e-02 1.28640000e+00 6.21000000e-02 1.50650000e+00 1.42000000e-02 1.23390000e+00 1.20600000e-01 1.47540000e+00 7.86000000e-02 1.30170000e+00 4.01000000e-02 - 2.29900000e-01 4.42600000e-01 4.59200000e-01 5.76100000e-01 3.97700000e-01 7.99400000e-01 4.32400000e-01 6.07500000e-01 4.25000000e-01 7.92100000e-01 3.46200000e-01 7.07500000e-01 - 1.62900000e-01 3.70100000e-01 1.46100000e-01 7.14600000e-01 2.53700000e-01 7.06800000e-01 1.45700000e-01 7.13400000e-01 2.36400000e-01 7.40700000e-01 1.74800000e-01 6.82500000e-01 - -1.74000000e-02 6.53700000e-01 8.27000000e-02 8.94800000e-01 1.31900000e-01 9.76300000e-01 1.14900000e-01 8.56900000e-01 1.18100000e-01 1.00510000e+00 1.35800000e-01 8.31500000e-01 - 1.10360000e+00 -0.00000000e+00 4.79200000e-01 1.25890000e+00 4.09700000e-01 6.42100000e-01 5.31500000e-01 1.19180000e+00 3.52200000e-01 6.76900000e-01 5.28000000e-01 1.19750000e+00 - 3.76500000e-01 7.21100000e-01 5.73600000e-01 9.91700000e-01 8.11000000e-01 8.59900000e-01 1.82630000e+00 -1.12100000e-01 -7.90000000e-02 2.09460000e+00 6.44000000e-02 1.65770000e+00 - 1.13830000e+00 -3.66000000e-02 5.04300000e-01 1.23570000e+00 5.08200000e-01 1.13800000e+00 5.64100000e-01 1.16420000e+00 4.51400000e-01 1.08970000e+00 6.21100000e-01 1.10140000e+00 - 1.10420000e+00 2.80000000e-03 1.32060000e+00 4.06000000e-01 9.10100000e-01 5.33800000e-01 1.22790000e+00 5.16300000e-01 8.81900000e-01 4.92300000e-01 1.28510000e+00 4.46600000e-01 - 5.93000000e-02 1.20100000e-01 3.11000000e-01 1.88700000e-01 4.69600000e-01 1.45300000e-01 -7.92000000e-02 1.82590000e+00 -1.70000000e-03 2.20000000e-03 5.39000000e-02 -6.77000000e-02 - 3.81000000e-01 3.00300000e-01 5.70500000e-01 5.04100000e-01 6.08600000e-01 6.19400000e-01 4.69500000e-01 6.15700000e-01 5.36800000e-01 7.23200000e-01 4.86200000e-01 6.03400000e-01 - 2.91500000e-01 4.51200000e-01 4.08100000e-01 7.61700000e-01 5.95900000e-01 7.10800000e-01 4.85000000e-01 6.68300000e-01 5.66300000e-01 7.67200000e-01 5.19300000e-01 6.26300000e-01 - 2.28600000e-01 3.83900000e-01 5.16000000e-01 4.18900000e-01 6.46500000e-01 4.04400000e-01 1.81390000e+00 -9.54000000e-02 5.21000000e-02 1.93960000e+00 5.64000000e-02 -6.76000000e-02 - 2.58400000e-01 3.84300000e-01 4.85500000e-01 4.91600000e-01 6.28000000e-01 4.61600000e-01 1.69440000e+00 4.27000000e-02 2.71000000e-02 1.97170000e+00 -1.21000000e-02 1.31000000e-02 - 4.00000000e-03 -4.30000000e-03 -1.10000000e-03 9.53000000e-02 1.07300000e-01 1.14300000e-01 4.60000000e-02 
-5.49000000e-02 -1.38000000e-02 1.70000000e-02 9.03000000e-02 -1.09200000e-01 - 3.00900000e-01 7.95300000e-01 1.57500000e-01 5.55300000e-01 1.08900000e-01 4.77000000e-01 1.64700000e-01 5.43400000e-01 1.16500000e-01 4.59000000e-01 1.42700000e-01 5.71900000e-01 - 1.04390000e+00 7.47000000e-02 5.46300000e-01 2.21100000e-01 4.89200000e-01 1.28300000e-01 5.48300000e-01 2.17800000e-01 4.35500000e-01 1.77100000e-01 5.70700000e-01 1.92700000e-01 - 1.23940000e+00 -1.60500000e-01 6.37100000e-01 5.58200000e-01 4.55700000e-01 4.46900000e-01 6.85200000e-01 5.02000000e-01 5.19200000e-01 3.49800000e-01 6.58700000e-01 5.33800000e-01 - 7.78100000e-01 2.21800000e-01 1.11600000e+00 1.91000000e-01 1.17300000e+00 2.73200000e-01 1.71960000e+00 1.32000000e-02 1.89800000e+00 1.21800000e-01 -3.28000000e-02 4.00000000e-02 - 7.02400000e-01 4.12700000e-01 9.60500000e-01 5.41500000e-01 1.20650000e+00 3.90700000e-01 1.74380000e+00 -1.40000000e-02 2.02300000e+00 -2.90000000e-02 -2.23000000e-02 1.75800000e+00 - 1.09500000e+00 1.50000000e-02 1.15380000e+00 5.84100000e-01 9.46200000e-01 2.80800000e-01 1.09710000e+00 6.50300000e-01 8.45900000e-01 3.55600000e-01 1.20000000e+00 5.29300000e-01 - 3.18100000e-01 6.31800000e-01 5.52600000e-01 7.23300000e-01 7.90100000e-01 5.84300000e-01 1.85250000e+00 -1.41300000e-01 4.94000000e-02 1.94150000e+00 8.31000000e-02 -1.00100000e-01 - 1.99400000e-01 3.56600000e-01 3.53400000e-01 5.09300000e-01 3.60700000e-01 6.28800000e-01 4.03800000e-01 4.49700000e-01 4.06000000e-01 5.88900000e-01 3.56300000e-01 5.05300000e-01 - 1.12640000e+00 -2.37000000e-02 4.71700000e-01 1.25710000e+00 3.88300000e-01 9.44500000e-01 5.46100000e-01 1.16530000e+00 4.09800000e-01 8.62400000e-01 5.36400000e-01 1.17940000e+00 - 1.12690000e+00 -2.41000000e-02 6.46600000e-01 3.78300000e-01 5.30500000e-01 2.61700000e-01 5.29900000e-01 5.21000000e-01 4.83600000e-01 3.01400000e-01 6.91300000e-01 3.32700000e-01 - 4.99000000e-01 6.00800000e-01 2.30600000e-01 2.25800000e-01 2.13700000e-01 1.68400000e-01 2.63400000e-01 1.87500000e-01 1.28600000e-01 2.61200000e-01 2.88100000e-01 1.55100000e-01 - 4.12600000e-01 6.95500000e-01 7.60600000e-01 6.92900000e-01 8.89300000e-01 6.83800000e-01 1.68590000e+00 4.95000000e-02 2.65000000e-02 1.97220000e+00 1.45000000e-02 1.71630000e+00 - 6.41400000e-01 4.53100000e-01 2.97200000e-01 1.43540000e+00 3.34400000e-01 6.85400000e-01 4.34800000e-01 1.27630000e+00 2.37000000e-01 7.70500000e-01 2.94100000e-01 1.43320000e+00 - 5.25100000e-01 2.17400000e-01 8.26900000e-01 3.35000000e-01 9.21000000e-01 4.02300000e-01 7.76200000e-01 3.96500000e-01 9.32100000e-01 4.14800000e-01 7.23700000e-01 4.50500000e-01 - -1.67000000e-02 5.48600000e-01 1.17900000e-01 7.27400000e-01 1.95000000e-01 7.74900000e-01 9.96000000e-02 1.61290000e+00 -5.30000000e-02 6.50000000e-02 8.70000000e-03 -1.03000000e-02 - 5.54800000e-01 5.35000000e-01 8.73400000e-01 8.33600000e-01 9.03400000e-01 1.06050000e+00 6.63900000e-01 1.08010000e+00 9.40300000e-01 1.04430000e+00 7.20300000e-01 1.01430000e+00 - 3.61300000e-01 1.35200000e-01 7.55000000e-01 3.99000000e-02 8.74200000e-01 4.21000000e-02 1.66410000e+00 7.88000000e-02 1.19000000e-02 -1.42000000e-02 -9.60000000e-03 1.24000000e-02 - 1.13100000e+00 -3.30000000e-02 4.35500000e-01 7.17500000e-01 3.48600000e-01 5.21400000e-01 4.36900000e-01 7.16000000e-01 3.63900000e-01 4.82000000e-01 4.75100000e-01 6.69200000e-01 - 2.56000000e-01 7.26000000e-02 3.11600000e-01 2.17000000e-01 3.37800000e-01 2.72800000e-01 3.22700000e-01 2.05900000e-01 3.79000000e-01 2.35100000e-01 3.00700000e-01 2.30400000e-01 - 
-4.25000000e-02 5.96300000e-01 2.19200000e-01 6.31900000e-01 1.86500000e-01 8.15900000e-01 -4.04000000e-02 1.77720000e+00 -1.24000000e-02 1.70000000e-02 -1.75000000e-02 2.28000000e-02 - 2.67400000e-01 3.50900000e-01 4.58800000e-01 5.07800000e-01 5.02900000e-01 6.02500000e-01 4.89500000e-01 4.67800000e-01 5.26300000e-01 5.88800000e-01 3.13100000e-01 6.79400000e-01 - 5.52200000e-01 3.21600000e-01 8.82900000e-01 4.78500000e-01 1.07620000e+00 4.65500000e-01 8.91300000e-01 4.70100000e-01 1.12410000e+00 4.33100000e-01 8.69100000e-01 4.95100000e-01 - 9.90000000e-03 1.02220000e+00 3.63900000e-01 9.65200000e-01 3.87000000e-01 1.08180000e+00 9.28000000e-02 1.62240000e+00 -8.50000000e-02 2.10190000e+00 5.66000000e-02 -6.77000000e-02 - 1.08870000e+00 2.51000000e-02 6.74000000e-01 1.04210000e+00 5.02700000e-01 1.09310000e+00 6.37400000e-01 1.08670000e+00 3.36100000e-01 1.18830000e+00 6.21800000e-01 1.10640000e+00 - -3.04000000e-02 5.83900000e-01 3.53200000e-01 4.87700000e-01 3.42300000e-01 6.42700000e-01 2.88000000e-02 1.69960000e+00 2.71000000e-02 -3.24000000e-02 -3.11000000e-02 3.63000000e-02 - 3.52300000e-01 5.70800000e-01 5.44600000e-01 8.97600000e-01 6.71500000e-01 9.66200000e-01 5.43700000e-01 8.97400000e-01 7.43900000e-01 8.98400000e-01 5.96700000e-01 8.32900000e-01 - 4.78100000e-01 1.65900000e-01 8.60500000e-01 8.34000000e-02 9.21700000e-01 1.53400000e-01 1.78060000e+00 -5.61000000e-02 -2.75000000e-02 2.03320000e+00 1.10000000e-02 -1.51000000e-02 - 2.88500000e-01 1.03200000e-01 4.49000000e-01 1.68300000e-01 5.46000000e-01 1.49800000e-01 5.13300000e-01 9.02000000e-02 5.20400000e-01 1.91200000e-01 4.95900000e-01 1.16500000e-01 - 3.73500000e-01 7.35500000e-01 6.52900000e-01 1.08860000e+00 8.56900000e-01 1.08460000e+00 1.61940000e+00 1.33400000e-01 8.75000000e-02 1.90220000e+00 1.51000000e-02 1.71670000e+00 - 1.19500000e-01 4.65000000e-01 1.80600000e-01 7.41000000e-01 2.48500000e-01 7.84900000e-01 1.79100000e-01 7.41900000e-01 1.73100000e-01 8.95200000e-01 2.55200000e-01 6.48200000e-01 - -1.70000000e-02 1.13210000e+00 2.51000000e-02 5.43900000e-01 -1.42000000e-02 4.94100000e-01 7.82000000e-02 4.80800000e-01 -7.00000000e-04 4.70800000e-01 -6.00000000e-03 5.78500000e-01 - 4.65000000e-02 5.40000000e-01 9.13000000e-02 8.23300000e-01 8.31000000e-02 9.64600000e-01 1.48400000e-01 7.56600000e-01 1.65000000e-01 8.85100000e-01 7.72000000e-02 8.39600000e-01 - 5.32400000e-01 2.01200000e-01 7.85500000e-01 3.75300000e-01 8.15500000e-01 5.13800000e-01 7.01700000e-01 4.73100000e-01 8.92100000e-01 4.46100000e-01 7.58300000e-01 4.04700000e-01 - 5.31600000e-01 5.19600000e-01 7.91700000e-01 8.57800000e-01 9.66100000e-01 9.00800000e-01 8.63800000e-01 7.75600000e-01 1.09400000e+00 7.77700000e-01 7.81600000e-01 8.76600000e-01 - 4.77300000e-01 5.06700000e-01 8.70800000e-01 4.13600000e-01 9.19100000e-01 4.96000000e-01 1.73410000e+00 -1.90000000e-03 -2.43000000e-02 2.02620000e+00 5.77000000e-02 -6.72000000e-02 - 6.86000000e-02 1.04770000e+00 3.60000000e-02 5.68800000e-01 4.65000000e-02 4.55300000e-01 9.62000000e-02 4.98900000e-01 1.91000000e-02 4.78500000e-01 9.58000000e-02 4.98800000e-01 - 8.48600000e-01 2.42400000e-01 3.81500000e-01 1.34830000e+00 3.22800000e-01 9.00100000e-01 2.55900000e-01 1.49540000e+00 3.46900000e-01 8.24300000e-01 3.92400000e-01 1.33400000e+00 - 4.68400000e-01 3.02700000e-01 7.82100000e-01 4.20600000e-01 7.93400000e-01 5.95600000e-01 7.64400000e-01 4.41600000e-01 8.98100000e-01 4.94700000e-01 7.65000000e-01 4.37300000e-01 - 2.28200000e-01 5.31400000e-01 3.81600000e-01 7.97800000e-01 3.57400000e-01 
1.00180000e+00 3.13400000e-01 8.75800000e-01 4.48300000e-01 9.16700000e-01 3.78300000e-01 8.00900000e-01 - 5.18000000e-02 6.38200000e-01 2.56100000e-01 7.66400000e-01 4.51800000e-01 6.74600000e-01 1.45600000e-01 1.55750000e+00 -1.74000000e-02 2.02120000e+00 -5.87000000e-02 6.89000000e-02 - 3.58900000e-01 3.00000000e-01 2.14600000e-01 1.09500000e-01 1.15000000e-01 1.75000000e-01 2.26000000e-01 9.55000000e-02 2.73900000e-01 -1.69000000e-02 2.17100000e-01 1.07800000e-01 - -7.13000000e-02 1.01540000e+00 2.32700000e-01 1.01280000e+00 4.17200000e-01 9.35000000e-01 -7.29000000e-02 1.81700000e+00 2.50000000e-03 2.00190000e+00 9.10000000e-03 -1.04000000e-02 - 3.82600000e-01 3.45300000e-01 7.69600000e-01 3.37700000e-01 7.87600000e-01 4.90300000e-01 6.50300000e-01 4.82300000e-01 8.39800000e-01 4.51000000e-01 7.76800000e-01 3.30200000e-01 - 4.94000000e-01 4.21900000e-01 7.90100000e-01 4.42100000e-01 9.58900000e-01 3.83500000e-01 1.69380000e+00 4.75000000e-02 -9.18000000e-02 2.10500000e+00 2.30000000e-02 -2.59000000e-02 - 7.72800000e-01 1.04500000e-01 1.04480000e+00 3.61300000e-01 1.26900000e+00 3.18500000e-01 1.15430000e+00 2.37000000e-01 1.30200000e+00 3.03200000e-01 1.11000000e+00 2.82800000e-01 - 4.42500000e-01 4.64900000e-01 7.25600000e-01 4.98600000e-01 8.71400000e-01 4.68700000e-01 1.62030000e+00 1.33100000e-01 3.67000000e-02 1.95550000e+00 3.34000000e-02 -3.72000000e-02 - -6.76000000e-02 7.38700000e-01 2.81400000e-01 6.79700000e-01 3.12100000e-01 7.83700000e-01 -6.90000000e-03 1.74250000e+00 -4.61000000e-02 2.05330000e+00 -4.70000000e-03 6.50000000e-03 - 3.82000000e-01 3.75700000e-01 5.92500000e-01 4.97400000e-01 7.84100000e-01 4.11900000e-01 1.65670000e+00 9.31000000e-02 1.84000000e-01 1.78390000e+00 -1.66000000e-02 1.95000000e-02 - 9.22000000e-01 1.49800000e-01 1.07670000e+00 3.33900000e-01 1.26590000e+00 2.56400000e-01 1.75520000e+00 -2.36000000e-02 1.97270000e+00 3.68000000e-02 4.93000000e-02 1.67400000e+00 - 2.76500000e-01 8.33200000e-01 5.84500000e-01 1.04280000e+00 6.68300000e-01 1.09000000e+00 1.81400000e+00 -1.01000000e-01 -3.86000000e-02 2.04730000e+00 3.93000000e-02 1.68190000e+00 - 3.91200000e-01 7.30500000e-01 2.59700000e-01 2.91100000e-01 2.46600000e-01 2.09100000e-01 3.79300000e-01 1.46600000e-01 2.45800000e-01 2.01000000e-01 2.29300000e-01 3.29000000e-01 - 2.91600000e-01 8.07700000e-01 2.54200000e-01 1.46120000e+00 1.75200000e-01 1.22500000e+00 1.44600000e-01 1.58550000e+00 1.40200000e-01 1.20140000e+00 1.99600000e-01 1.52100000e+00 - 3.25000000e-01 2.22700000e-01 4.60400000e-01 4.04200000e-01 4.88700000e-01 5.01200000e-01 5.11700000e-01 3.46600000e-01 5.28200000e-01 4.68300000e-01 4.02400000e-01 4.68000000e-01 - 2.17900000e-01 8.89800000e-01 2.13800000e-01 5.80900000e-01 1.65900000e-01 4.75100000e-01 1.50600000e-01 6.59200000e-01 1.26100000e-01 5.14500000e-01 8.27000000e-02 7.37200000e-01 - 4.66800000e-01 2.37900000e-01 6.43000000e-01 4.73800000e-01 7.31000000e-01 5.44200000e-01 6.59200000e-01 4.57700000e-01 7.51400000e-01 5.38900000e-01 6.81500000e-01 4.29200000e-01 - 2.07900000e-01 1.79000000e-01 1.10100000e-01 1.11700000e-01 1.07200000e-01 8.13000000e-02 1.09300000e-01 1.10700000e-01 1.32100000e-01 4.79000000e-02 3.65000000e-02 1.98400000e-01 - 2.71800000e-01 4.74500000e-01 6.13300000e-01 4.43300000e-01 7.06500000e-01 4.69900000e-01 1.73530000e+00 -4.60000000e-03 -6.94000000e-02 2.07720000e+00 4.38000000e-02 -5.04000000e-02 - 2.99000000e-02 5.48600000e-01 4.09400000e-01 4.67900000e-01 5.26000000e-01 4.72900000e-01 1.81140000e+00 -9.13000000e-02 2.42000000e-02 1.97290000e+00 
 [... remaining rows of 12-column floating-point data deleted ...]
diff --git a/GPy/util/datasets/oil/DataVdnLbls.txt b/GPy/util/datasets/oil/DataVdnLbls.txt
deleted file mode 100644
index 3f9c3c23..00000000
--- a/GPy/util/datasets/oil/DataVdnLbls.txt
+++ /dev/null
@@ -1,1000 +0,0 @@
 [... 1000 rows of 3-column one-hot class labels deleted ...]
0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 0.00000000e+00 0.00000000e+00 1.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 1.00000000e+00 0.00000000e+00 0.00000000e+00 - 0.00000000e+00 1.00000000e+00 0.00000000e+00 diff --git a/GPy/util/datasets/olympics.mat b/GPy/util/datasets/olympics.mat deleted file mode 100644 index e16c35b9..00000000 Binary files a/GPy/util/datasets/olympics.mat and /dev/null differ diff --git a/GPy/util/datasets/pumadyn-32nm/Dataset.data.gz b/GPy/util/datasets/pumadyn-32nm/Dataset.data.gz deleted file mode 100644 index 27b07e7c..00000000 Binary files a/GPy/util/datasets/pumadyn-32nm/Dataset.data.gz and /dev/null differ diff --git a/GPy/util/datasets/pumadyn-32nm/Dataset.spec b/GPy/util/datasets/pumadyn-32nm/Dataset.spec deleted file mode 100644 index aa2de227..00000000 --- a/GPy/util/datasets/pumadyn-32nm/Dataset.spec +++ /dev/null @@ -1,44 +0,0 @@ -# -# Puma forward dynamics -- 32nm = 32 inputs, high nonlinearity, med noise -# -# -Origin: simulated - -Usage: assessment - -Order: uninformative - -Attributes: - 1 theta1 u [-3.1416,3.1416] # ang position of joint 1 in radians - 2 theta2 u [-3.1416,3.1416] # ang position of joint 2 in radians - 3 theta3 u [-3.1416,3.1416] # ang position of joint 3 in radians - 4 theta4 u [-3.1416,3.1416] # ang position of joint 4 in radians - 5 theta5 u [-3.1416,3.1416] # ang position of joint 5 in radians - 6 theta6 u [-3.1416,3.1416] # ang position of joint 6 in radians - 7 thetad1 u (-Inf,Inf) # ang vel of joint 1 in rad/sec - 8 thetad2 u (-Inf,Inf) # ang vel of joint 2 in rad/sec - 9 thetad3 u (-Inf,Inf) # 
ang vel of joint 3 in rad/sec - 10 thetad4 u (-Inf,Inf) # ang vel of joint 4 in rad/sec - 11 thetad5 u (-Inf,Inf) # ang vel of joint 5 in rad/sec - 12 thetad6 u (-Inf,Inf) # ang vel of joint 6 in rad/sec - 13 tau1 u (-Inf,Inf) # torque on jt 1 - 14 tau2 u (-Inf,Inf) # torque on jt 2 - 15 tau3 u (-Inf,Inf) # torque on jt 3 - 16 tau4 u (-Inf,Inf) # torque on jt 4 - 17 tau5 u (-Inf,Inf) # torque on jt 5 - 18 dm1 u [0,Inf) # proportion change in mass of link 1 - 19 dm2 u [0,Inf) # prop change in mass of link 2 - 20 dm3 u [0,Inf) # prop change in mass of link 3 - 21 dm4 u [0,Inf) # prop change in mass of link 4 - 22 dm5 u [0,Inf) # prop change in mass of link 5 - 23 da1 u [0,Inf) # prop change in length of link 1 - 24 da2 u [0,Inf) # prop change in length of link 2 - 25 da3 u [0,Inf) # prop change in length of link 3 - 26 da4 u [0,Inf) # prop change in length of link 4 - 27 da5 u [0,Inf) # prop change in length of link 5 - 28 db1 u [0,Inf) # prop change in visc friction of link 1 - 29 db2 u [0,Inf) # prop change in visc friction of link 2 - 30 db3 u [0,Inf) # prop change in visc friction of link 3 - 31 db4 u [0,Inf) # prop change in visc friction of link 4 - 32 db5 u [0,Inf) # prop change in visc friction of link 5 - 33 thetadd6 u (-Inf,Inf) # ang acceleration of joint 6 diff --git a/GPy/util/datasets/pumadyn-32nm/accel/Prototask.data.gz b/GPy/util/datasets/pumadyn-32nm/accel/Prototask.data.gz deleted file mode 100644 index 20b51bbd..00000000 Binary files a/GPy/util/datasets/pumadyn-32nm/accel/Prototask.data.gz and /dev/null differ diff --git a/GPy/util/datasets/pumadyn-32nm/accel/Prototask.spec b/GPy/util/datasets/pumadyn-32nm/accel/Prototask.spec deleted file mode 100644 index e92fcd15..00000000 --- a/GPy/util/datasets/pumadyn-32nm/accel/Prototask.spec +++ /dev/null @@ -1,12 +0,0 @@ -# -# Prototask.spec -# -Cases: all -Origin: simulated -Inputs: 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 -Order: retain -Targets: 33 -Test-Set-Size: 4096 -Training-Set-Sizes: 64 128 256 512 1024 -Test-Set-Selection: hierarchical -Maximum-Number-Of-Instances: 8 diff --git a/GPy/util/datasets/pumadyn-32nm/accel/std.prior b/GPy/util/datasets/pumadyn-32nm/accel/std.prior deleted file mode 100644 index d5cf2c7c..00000000 --- a/GPy/util/datasets/pumadyn-32nm/accel/std.prior +++ /dev/null @@ -1,33 +0,0 @@ - 1 NLMH real - 2 NLMH real - 3 NLMH real - 4 NLMH real - 5 NLMH real - 6 NLMH real - 7 NLMH real - 8 NLMH real - 9 NLMH real - 10 NLMH real - 11 NLMH real - 12 NLMH real - 13 NLMH real - 14 NLMH real - 15 NLMH real - 16 NLMH real - 17 NLMH real - 18 NLMH real - 19 NLMH real - 20 NLMH real - 21 NLMH real - 22 NLMH real - 23 NLMH real - 24 NLMH real - 25 NLMH real - 26 NLMH real - 27 NLMH real - 28 NLMH real - 29 NLMH real - 30 NLMH real - 31 NLMH real - 32 NLMH real - 33 NLMH real diff --git a/GPy/util/datasets/swiss_roll_data.mat b/GPy/util/datasets/swiss_roll_data.mat deleted file mode 100644 index c83a97aa..00000000 Binary files a/GPy/util/datasets/swiss_roll_data.mat and /dev/null differ diff --git a/GPy/util/datasets/synth.te b/GPy/util/datasets/synth.te deleted file mode 100644 index fe055bb2..00000000 --- a/GPy/util/datasets/synth.te +++ /dev/null @@ -1,1001 +0,0 @@ - xs ys yc - -0.970990139 0.429424950 0 - -0.631997027 0.251952852 0 - -0.773605760 0.690750778 0 - -0.606211523 0.175677956 0 - -0.539409005 0.376744239 0 - -0.960325850 0.110040710 0 - -1.041375608 0.328508085 0 - -0.822600536 0.175874200 0 - -0.943714771 -0.180633309 0 - -0.968763299 0.296070217 0 - 
diff --git a/GPy/util/datasets/synth.tr b/GPy/util/datasets/synth.tr
deleted file mode 100644
index b25d26e6..00000000
--- a/GPy/util/datasets/synth.tr
+++ /dev/null
@@ -1,251 +0,0 @@
-         xs          ys  yc
 [... 250 deleted rows of the matching training split, same three-column format, omitted ...]
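The two synth files above share one layout: a single header row naming the columns, followed by whitespace-separated rows holding two real-valued inputs (xs, ys) and a binary class label (yc). For reference, a minimal loading sketch with plain NumPy, assuming a local copy of one of the deleted files:

    import numpy as np

    # "synth.tr" is assumed to be a local copy of the training split deleted above
    data = np.loadtxt("synth.tr", skiprows=1)  # skip the "xs ys yc" header row
    X = data[:, :2]   # inputs xs, ys; shape (num_data, 2)
    Y = data[:, 2:3]  # class labels in {0, 1}, kept as a column vector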
diff --git a/GPy/util/erfcx.py b/GPy/util/erfcx.py
new file mode 100644
index 00000000..f42e49f3
--- /dev/null
+++ b/GPy/util/erfcx.py
@@ -0,0 +1,63 @@
+## Copyright (C) 2010 Soren Hauberg
+##
+## Copyright James Hensman 2011
+##
+## This program is free software; you can redistribute it and/or modify it
+## under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 3 of the License, or (at
+## your option) any later version.
+##
+## This program is distributed in the hope that it will be useful, but
+## WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+## General Public License for more details.
+##
+## You should have received a copy of the GNU General Public License
+## along with this program; see the file COPYING.  If not, see
+## <http://www.gnu.org/licenses/>.
+
+import numpy as np
+
+def erfcx(arg):
+    """Scaled complementary error function, erfcx(x) = exp(x**2) * erfc(x)."""
+    arg = np.atleast_1d(arg)
+    # Note: "assert(cond, msg)" asserts a non-empty tuple, which is always
+    # true; the comma form below actually performs the check.
+    assert np.all(np.isreal(arg)), "erfcx: input must be real"
+
+    # Precision dependent thresholds (double precision)
+    xneg = -26.628
+    xmax = 2.53e+307
+
+    # Allocate output
+    result = np.zeros(arg.shape)
+
+    # Find values where erfcx can be evaluated directly
+    idx_neg = (arg < xneg)
+    idx_max = (arg > xmax)
+    idx = ~(idx_neg | idx_max)
+
+    arg = arg[idx]
+
+    # Perform the actual computation (rational approximation)
+    t = 3.97886080735226 / (np.abs(arg) + 3.97886080735226)
+    u = t - 0.5
+    y = (((((((((u * 0.00127109764952614092 + 1.19314022838340944e-4) * u \
+        - 0.003963850973605135) * u - 8.70779635317295828e-4) * u + \
+        0.00773672528313526668) * u + 0.00383335126264887303) * u - \
+        0.0127223813782122755) * u - 0.0133823644533460069) * u + \
+        0.0161315329733252248) * u + 0.0390976845588484035) * u + \
+        0.00249367200053503304
+    y = ((((((((((((y * u - 0.0838864557023001992) * u - \
+        0.119463959964325415) * u + 0.0166207924969367356) * u + \
+        0.357524274449531043) * u + 0.805276408752910567) * u + \
+        1.18902982909273333) * u + 1.37040217682338167) * u + \
+        1.31314653831023098) * u + 1.07925515155856677) * u + \
+        0.774368199119538609) * u + 0.490165080585318424) * u + \
+        0.275374741597376782) * t
+
+    y[arg < 0] = 2 * np.exp(arg[arg < 0]**2) - y[arg < 0]
+
+    # Put the results back into an array with the same size as the original input
+    result[idx] = y
+    result[idx_neg] = np.inf
+    # result[idx_max] = 0  # not needed, as we initialise with zeros
+    return result
+
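erfcx above evaluates the scaled complementary error function, erfcx(x) = exp(x**2) * erfc(x), via a rational approximation that avoids the overflow of exp(x**2) (which exceeds double precision once x drops below roughly -26.6, hence the xneg threshold). A quick sanity check against the direct formula for moderate arguments (a sketch assuming SciPy and an importable GPy checkout):

    import numpy as np
    from scipy.special import erfc
    from GPy.util.erfcx import erfcx

    x = np.linspace(-5, 5, 101)
    direct = np.exp(x ** 2) * erfc(x)  # safe for |x| this small
    print np.max(np.abs(erfcx(x) - direct) / direct)  # should be tiny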
diff --git a/GPy/util/latent_space_visualizations/__init__.py b/GPy/util/latent_space_visualizations/__init__.py
new file mode 100644
index 00000000..ee595945
--- /dev/null
+++ b/GPy/util/latent_space_visualizations/__init__.py
@@ -0,0 +1 @@
+import controllers
diff --git a/GPy/util/latent_space_visualizations/controllers/__init__.py b/GPy/util/latent_space_visualizations/controllers/__init__.py
new file mode 100644
index 00000000..25f6535e
--- /dev/null
+++ b/GPy/util/latent_space_visualizations/controllers/__init__.py
@@ -0,0 +1 @@
+import axis_event_controller, imshow_controller
diff --git a/GPy/util/latent_space_visualizations/controllers/axis_event_controller.py b/GPy/util/latent_space_visualizations/controllers/axis_event_controller.py
new file mode 100644
index 00000000..acb1ac8d
--- /dev/null
+++ b/GPy/util/latent_space_visualizations/controllers/axis_event_controller.py
@@ -0,0 +1,142 @@
+'''
+Created on 24 Jul 2013
+
+@author: maxz
+'''
+import numpy
+
+class AxisEventController(object):
+    def __init__(self, ax):
+        self.ax = ax
+        self.activate()
+    def deactivate(self):
+        # disconnect every callback currently registered on the axis
+        for cb_class in self.ax.callbacks.callbacks.values():
+            for cb_num in cb_class.keys():
+                self.ax.callbacks.disconnect(cb_num)
+    def activate(self):
+        self.ax.callbacks.connect('xlim_changed', self.xlim_changed)
+        self.ax.callbacks.connect('ylim_changed', self.ylim_changed)
+    def xlim_changed(self, ax):
+        pass
+    def ylim_changed(self, ax):
+        pass
+
+
+class AxisChangedController(AxisEventController):
+    '''
+    Buffered control of axis limit changes
+    '''
+    _changing = False
+
+    def __init__(self, ax, update_lim=None):
+        '''
+        Constructor
+        '''
+        super(AxisChangedController, self).__init__(ax)
+        self._lim_ratio_threshold = update_lim or .8
+        self._x_lim = self.ax.get_xlim()
+        self._y_lim = self.ax.get_ylim()
+
+    def update(self, ax):
+        pass
+
+    def xlim_changed(self, ax):
+        super(AxisChangedController, self).xlim_changed(ax)
+        if not self._changing and self.lim_changed(ax.get_xlim(), self._x_lim):
+            self._changing = True
+            self._x_lim = ax.get_xlim()
+            self.update(ax)
+            self._changing = False
+
+    def ylim_changed(self, ax):
+        super(AxisChangedController, self).ylim_changed(ax)
+        if not self._changing and self.lim_changed(ax.get_ylim(), self._y_lim):
+            self._changing = True
+            self._y_lim = ax.get_ylim()
+            self.update(ax)
+            self._changing = False
+
+    def extent(self, lim):
+        return numpy.subtract(*lim)
+
+    def lim_changed(self, axlim, savedlim):
+        # compare the new limits against the saved ones; any sufficiently
+        # large zoom or pan (relative to the threshold) counts as a change
+        axextent = self.extent(axlim)
+        extent = self.extent(savedlim)
+        lim_changed = ((axextent / extent) < self._lim_ratio_threshold ** 2
+                       or (extent / axextent) < self._lim_ratio_threshold ** 2
+                       or ((1 - (self.extent((axlim[0], savedlim[0])) / self.extent((savedlim[0], axlim[1]))))
+                           < self._lim_ratio_threshold)
+                       or ((1 - (self.extent((savedlim[0], axlim[0])) / self.extent((axlim[0], savedlim[1]))))
+                           < self._lim_ratio_threshold)
+                       )
+        return lim_changed
+
+    def _buffer_lim(self, lim):
+        # buffer_size = 1 - self._lim_ratio_threshold
+        # extent = self.extent(lim)
+        return lim
+
+
+class BufferedAxisChangedController(AxisChangedController):
+    def __init__(self, ax, plot_function, plot_limits, resolution=50, update_lim=None, **kwargs):
+        """
+        :param plot_function:
+            function used to create the image for plotting (returns ndarray-like);
+            it gets called with a (2D!) Xtest grid whenever replotting is required
+        :type plot_function: function
+        :param plot_limits:
+            initial plot limits [xmin, ymin, xmax, ymax]
+
+        :param kwargs: additional kwargs are passed on to pyplot.imshow(**kwargs)
+        """
+        super(BufferedAxisChangedController, self).__init__(ax, update_lim=update_lim)
+        self.plot_function = plot_function
+        xmin, xmax = self._x_lim  # self._compute_buffered(*self._x_lim)
+        ymin, ymax = self._y_lim  # self._compute_buffered(*self._y_lim)
+        self.resolution = resolution
+        self._not_init = False  # becomes True once the initial view exists
+        self.view = self._init_view(self.ax, self.recompute_X(), xmin, xmax, ymin, ymax, **kwargs)
+        self._not_init = True
+
+    def update(self, ax):
+        super(BufferedAxisChangedController, self).update(ax)
+        if self._not_init:
+            xmin, xmax = self._compute_buffered(*self._x_lim)
+            ymin, ymax = self._compute_buffered(*self._y_lim)
+            self.update_view(self.view, self.recompute_X(), xmin, xmax, ymin, ymax)
+
+    def _init_view(self, ax, X, xmin, xmax, ymin, ymax):
+        raise NotImplementedError('return view for this controller')
+
+    def update_view(self, view, X, xmin, xmax, ymin, ymax):
+        raise NotImplementedError('update view given in here')
+
+    def get_grid(self):
+        xmin, xmax = self._compute_buffered(*self._x_lim)
+        ymin, ymax = self._compute_buffered(*self._y_lim)
+        x, y = numpy.mgrid[xmin:xmax:1j * self.resolution, ymin:ymax:1j * self.resolution]
+        return numpy.hstack((x.flatten()[:, None], y.flatten()[:, None]))
+
+    def recompute_X(self):
+        # reshape the flat plot_function output into image orientation
+        X = self.plot_function(self.get_grid())
+        if isinstance(X, (tuple, list)):
+            for x in X:
+                x.shape = [self.resolution, self.resolution]
+                x[:, :] = x.T[::-1, :]
+            return X
+        return X.reshape(self.resolution, self.resolution).T[::-1, :]
+
+    def _compute_buffered(self, mi, ma):
+        buffersize = self._buffersize()
+        size = ma - mi
+        return mi - (buffersize * size), ma + (buffersize * size)
+
+    def _buffersize(self):
+        try:
+            buffersize = 1. - self._lim_ratio_threshold
+        except AttributeError:  # was a bare "except:"; only this can fail here
+            buffersize = .4
+        return buffersize
+
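The controllers above wire themselves into matplotlib's xlim_changed/ylim_changed axis callbacks; a subclass only needs to override update(), which fires once the limits drift past the ratio threshold (0.8 by default). A minimal sketch, assuming an interactive matplotlib backend; PrintingController is an illustrative name, not part of the changeset:

    import numpy as np
    import matplotlib.pyplot as plt
    from GPy.util.latent_space_visualizations.controllers.axis_event_controller import AxisChangedController

    class PrintingController(AxisChangedController):
        def update(self, ax):
            # called only for sufficiently large limit changes
            print "new limits:", ax.get_xlim(), ax.get_ylim()

    fig = plt.figure()
    ax = fig.add_subplot(111)
    ax.plot(np.random.randn(50))
    controller = PrintingController(ax)  # callbacks are connected on construction
    plt.show()  # pan or zoom past the threshold to trigger update()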
Xtest grid if replotting required + :type plot_function: function + :param plot_limits: + beginning plot limits [xmin, ymin, xmax, ymax] + :param text_props: kwargs for pyplot.text(**text_props) + :param kwargs: additional kwargs are for pyplot.imshow(**kwargs) + """ + super(ImAnnotateController, self).__init__(ax, plot_function, plot_limits, resolution, update_lim, **kwargs) + + def _init_view(self, ax, X, xmin, xmax, ymin, ymax, text_props={}, **kwargs): + view = [super(ImAnnotateController, self)._init_view(ax, X[0], xmin, xmax, ymin, ymax, **kwargs)] + xoffset, yoffset = self._offsets(xmin, xmax, ymin, ymax) + xlin = numpy.linspace(xmin, xmax, self.resolution, endpoint=False) + ylin = numpy.linspace(ymin, ymax, self.resolution, endpoint=False) + for [i, x], [j, y] in itertools.product(enumerate(xlin), enumerate(ylin[::-1])): + view.append(ax.text(x + xoffset, y + yoffset, "{}".format(X[1][j, i]), ha='center', va='center', **text_props)) + return view + + def update_view(self, view, X, xmin, xmax, ymin, ymax): + super(ImAnnotateController, self).update_view(view[0], X[0], xmin, xmax, ymin, ymax) + xoffset, yoffset = self._offsets(xmin, xmax, ymin, ymax) + xlin = numpy.linspace(xmin, xmax, self.resolution, endpoint=False) + ylin = numpy.linspace(ymin, ymax, self.resolution, endpoint=False) + for [[i, x], [j, y]], text in itertools.izip(itertools.product(enumerate(xlin), enumerate(ylin[::-1])), view[1:]): + text.set_x(x + xoffset) + text.set_y(y + yoffset) + text.set_text("{}".format(X[1][j, i])) + return view + + def _offsets(self, xmin, xmax, ymin, ymax): + return (xmax - xmin) / (2 * self.resolution), (ymax - ymin) / (2 * self.resolution) diff --git a/GPy/util/linalg.py b/GPy/util/linalg.py index 19cf6545..e3e421f6 100644 --- a/GPy/util/linalg.py +++ b/GPy/util/linalg.py @@ -12,6 +12,7 @@ import ctypes from ctypes import byref, c_char, c_int, c_double # TODO # import scipy.lib.lapack import scipy +import warnings if np.all(np.float64((scipy.__version__).split('.')[:2]) >= np.array([0, 12])): import scipy.linalg.lapack as lapack @@ -21,54 +22,73 @@ else: try: _blaslib = ctypes.cdll.LoadLibrary(np.core._dotblas.__file__) # @UndefinedVariable _blas_available = True - assert hasattr('dsyrk_',_blaslib) - assert hasattr('dsyr_',_blaslib) -except: + assert hasattr(_blaslib, 'dsyrk_') + assert hasattr(_blaslib, 'dsyr_') +except AssertionError: _blas_available = False +except AttributeError as e: + _blas_available = False + warnings.warn("warning: caught this exception:" + str(e)) def dtrtrs(A, B, lower=0, trans=0, unitdiag=0): - """Wrapper for lapack dtrtrs function + """ + Wrapper for lapack dtrtrs function :param A: Matrix A :param B: Matrix B :param lower: is matrix lower (true) or upper (false) :returns: + """ return lapack.dtrtrs(A, B, lower=lower, trans=trans, unitdiag=unitdiag) def dpotrs(A, B, lower=0): - """Wrapper for lapack dpotrs function + """ + Wrapper for lapack dpotrs function :param A: Matrix A :param B: Matrix B :param lower: is matrix lower (true) or upper (false) :returns: + """ return lapack.dpotrs(A, B, lower=lower) def dpotri(A, lower=0): - """Wrapper for lapack dpotri function + """ + Wrapper for lapack dpotri function :param A: Matrix A :param lower: is matrix lower (true) or upper (false) - :returns: + :returns: A inverse + """ return lapack.dpotri(A, lower=lower) +def pddet(A): + """ + Determinant of a positive definite matrix, only symmetric matricies though + """ + L = jitchol(A) + logdetA = 2*sum(np.log(np.diag(L))) + return logdetA + def trace_dot(a, b): """ - 
efficiently compute the trace of the matrix product of a and b + Efficiently compute the trace of the matrix product of a and b """ return np.sum(a * b) def mdot(*args): - """Multiply all the arguments using matrix product rules. + """ + Multiply all the arguments using matrix product rules. The output is equivalent to multiplying the arguments one by one from left to right using dot(). Precedence can be controlled by creating tuples of arguments, for instance mdot(a,((b,c),d)) multiplies a (a*((b*c)*d)). Note that this means the output of dot(a,b) and mdot(a,b) will differ if a or b is a pure tuple of numbers. + """ if len(args) == 1: return args[0] @@ -115,14 +135,16 @@ def jitchol(A, maxtries=5): def jitchol_old(A, maxtries=5): """ - :param A : An almost pd square matrix + :param A: An almost pd square matrix :rval L: the Cholesky decomposition of A - .. Note: + .. note: + Adds jitter to K, to enforce positive-definiteness if stuff breaks, please check: np.allclose(sp.linalg.cholesky(XXT, lower = True), np.triu(sp.linalg.cho_factor(XXT)[0]).T) + """ try: return linalg.cholesky(A, lower=True) @@ -142,6 +164,7 @@ def jitchol_old(A, maxtries=5): def pdinv(A, *args): """ + :param A: A DxD pd numpy array :rval Ai: the inverse of A @@ -152,6 +175,7 @@ def pdinv(A, *args): :rtype Li: np.ndarray :rval logdet: the log of the determinant of A :rtype logdet: float64 + """ L = jitchol(A, *args) logdet = 2.*np.sum(np.log(np.diag(L))) @@ -177,14 +201,13 @@ def chol_inv(L): def multiple_pdinv(A): """ - Arguments - --------- :param A: A DxDxN numpy array (each A[:,:,i] is pd) - Returns - ------- - invs : the inverses of A - hld: 0.5* the log of the determinants of A + :rval invs: the inverses of A + :rtype invs: np.ndarray + :rval hld: 0.5* the log of the determinants of A + :rtype hld: np.array + """ N = A.shape[-1] chols = [jitchol(A[:, :, i]) for i in range(N)] @@ -198,15 +221,13 @@ def PCA(Y, input_dim): """ Principal component analysis: maximum likelihood solution by SVD - Arguments - --------- :param Y: NxD np.array of data :param input_dim: int, dimension of projection - Returns - ------- + :rval X: - Nxinput_dim np.array of dimensionality reduced data - W - input_dimxD mapping from X to Y + :rval W: - input_dimxD mapping from X to Y + """ if not np.allclose(Y.mean(axis=0), 0.0): print "Y is not zero mean, centering it locally (GPy.util.linalg.PCA)" @@ -273,11 +294,10 @@ def DSYR_blas(A, x, alpha=1.): Performs a symmetric rank-1 update operation: A <- A + alpha * np.dot(x,x.T) - Arguments - --------- :param A: Symmetric NxN np.array :param x: Nx1 np.array :param alpha: scalar + """ N = c_int(A.shape[0]) LDA = c_int(A.shape[0]) @@ -295,11 +315,10 @@ def DSYR_numpy(A, x, alpha=1.): Performs a symmetric rank-1 update operation: A <- A + alpha * np.dot(x,x.T) - Arguments - --------- :param A: Symmetric NxN np.array :param x: Nx1 np.array :param alpha: scalar + """ A += alpha * np.dot(x[:, None], x[None, :]) @@ -318,6 +337,7 @@ def symmetrify(A, upper=False): """ N, M = A.shape assert N == M + c_contig_code = """ int iN; for (int i=1; i @@ -395,4 +418,3 @@ def backsub_both_sides(L, X, transpose='left'): else: tmp, _ = lapack.dtrtrs(L, np.asfortranarray(X), lower=1, trans=0) return lapack.dtrtrs(L, np.asfortranarray(tmp.T), lower=1, trans=0)[0].T - diff --git a/GPy/util/ln_diff_erfs.py b/GPy/util/ln_diff_erfs.py new file mode 100644 index 00000000..bb9cfe03 --- /dev/null +++ b/GPy/util/ln_diff_erfs.py @@ -0,0 +1,110 @@ +# Copyright (c) 2013, GPy authors (see AUTHORS.txt). 
+# Licensed under the BSD 3-clause license (see LICENSE.txt)
+
+# Only works for scipy 0.12+
+try:
+    from scipy.special import erfcx, erf
+except ImportError:
+    from scipy.special import erf
+    from erfcx import erfcx
+
+import numpy as np
+
+def ln_diff_erfs(x1, x2, return_sign=False):
+    """
+    Compute the log of the difference of two erfs in a numerically stable manner.
+
+    :param x1: argument of the positive erf
+    :type x1: ndarray
+    :param x2: argument of the negative erf
+    :type x2: ndarray
+    :return: tuple containing (log(abs(erf(x1) - erf(x2))), sign(erf(x1) - erf(x2)))
+
+    Based on MATLAB code that was written by Antti Honkela and modified by David Luengo, originally derived from code by Neil Lawrence.
+    """
+    x1 = np.require(x1).real
+    x2 = np.require(x2).real
+    if x1.size==1:
+        x1 = np.reshape(x1, (1, 1))
+    if x2.size==1:
+        x2 = np.reshape(x2, (1, 1))
+
+    if x1.shape==x2.shape:
+        v = np.zeros_like(x1)
+    else:
+        if x1.size==1:
+            v = np.zeros(x2.shape)
+        elif x2.size==1:
+            v = np.zeros(x1.shape)
+        else:
+            raise ValueError("This function does not broadcast unless provided with a scalar.")
+
+    if x1.size == 1:
+        x1 = np.tile(x1, x2.shape)
+
+    if x2.size == 1:
+        x2 = np.tile(x2, x1.shape)
+
+    # ensure x1 >= x2 elementwise, remembering the sign of the difference
+    sign = np.sign(x1 - x2)
+    if x1.size == 1:
+        if sign == -1:
+            swap = x1
+            x1 = x2
+            x2 = swap
+    else:
+        I = sign == -1
+        swap = x1[I]
+        x1[I] = x2[I]
+        x2[I] = swap
+
+    with np.errstate(divide='ignore'):
+        # switch off log of zero warnings.
+
+        # Case 0: arguments of different sign, no problems with loss of accuracy
+        I0 = np.logical_or(np.logical_and(x1>0, x2<0), np.logical_and(x2>0, x1<0)) # I0 = (x1*x2) < 0
+
+        # Case 1: x1 = x2 so we have log of zero.
+        I1 = (x1 == x2)
+
+        # Case 2: Both arguments are non-negative
+        I2 = np.logical_and(x1 > 0, np.logical_and(np.logical_not(I0),
+                                                   np.logical_not(I1)))
+        # Case 3: Both arguments are non-positive
+        I3 = np.logical_and(np.logical_and(np.logical_not(I0),
+                                           np.logical_not(I1)),
+                            np.logical_not(I2))
+        _x2 = x2.flatten()
+        _x1 = x1.flatten()
+        for group, flags in zip((0, 1, 2, 3), (I0, I1, I2, I3)):
+            if np.any(flags):
+                if not x1.size==1:
+                    _x1 = x1[flags]
+                if not x2.size==1:
+                    _x2 = x2[flags]
+                if group==0:
+                    v[flags] = np.log( erf(_x1) - erf(_x2) )
+                elif group==1:
+                    v[flags] = -np.inf
+                elif group==2:
+                    v[flags] = np.log(erfcx(_x2)
+                                      -erfcx(_x1)*np.exp(_x2**2
+                                                         -_x1**2)) - _x2**2
+                elif group==3:
+                    v[flags] = np.log(erfcx(-_x1)
+                                      -erfcx(-_x2)*np.exp(_x1**2
+                                                          -_x2**2))-_x1**2
+
+    # TODO: switch back on log of zero warnings.
+
+    if return_sign:
+        return v, sign
+    else:
+        if v.size==1:
+            if sign==-1:
+                # the difference is negative, so add the complex part log(-1) = i*pi
+                v = v.astype('complex128')
+                v += np.pi*1j
+        else:
+            # Need to add in a complex part because the argument is negative.
+            v = v.astype('complex128')
+            v[I] += np.pi*1j
+
+    return v
diff --git a/GPy/util/misc.py b/GPy/util/misc.py
index e0f70703..1cb4c182 100644
--- a/GPy/util/misc.py
+++ b/GPy/util/misc.py
@@ -1,8 +1,36 @@
 # Copyright (c) 2012, GPy authors (see AUTHORS.txt).
 # Licensed under the BSD 3-clause license (see LICENSE.txt)
-
 import numpy as np
+from scipy import weave
+from config import *
+
+def chain_1(df_dg, dg_dx):
+    """
+    Generic chaining function for first derivative
+
+    .. math::
+        \\frac{d(f . g)}{dx} = \\frac{df}{dg} \\frac{dg}{dx}
+    """
+    return df_dg * dg_dx
+
+def chain_2(d2f_dg2, dg_dx, df_dg, d2g_dx2):
+    """
+    Generic chaining function for second derivative
+
+    .. math::
+        \\frac{d^{2}(f . 
g)}{dx^{2}} = \\frac{d^{2}f}{dg^{2}}(\\frac{dg}{dx})^{2} + \\frac{df}{dg}\\frac{d^{2}g}{dx^{2}} + """ + return d2f_dg2*(dg_dx**2) + df_dg*d2g_dx2 + +def chain_3(d3f_dg3, dg_dx, d2f_dg2, d2g_dx2, df_dg, d3g_dx3): + """ + Generic chaining function for third derivative + + .. math:: + \\frac{d^{3}(f . g)}{dx^{3}} = \\frac{d^{3}f}{dg^{3}}(\\frac{dg}{dx})^{3} + 3\\frac{d^{2}f}{dg^{2}}\\frac{dg}{dx}\\frac{d^{2}g}{dx^{2}} + \\frac{df}{dg}\\frac{d^{3}g}{dx^{3}} + """ + return d3f_dg3*(dg_dx**3) + 3*d2f_dg2*dg_dx*d2g_dx2 + df_dg*d3g_dx3 def opt_wrapper(m, **kwargs): """ @@ -17,12 +45,9 @@ def linear_grid(D, n = 100, min_max = (-100, 100)): """ Creates a D-dimensional grid of n linearly spaced points - Parameters: - - D: dimension of the grid - n: number of points - min_max: (min, max) list - + :param D: dimension of the grid + :param n: number of points + :param min_max: (min, max) list """ @@ -39,6 +64,7 @@ def kmm_init(X, m = 10): :param X: data :param m: number of inducing points + """ # compute the distances @@ -58,6 +84,97 @@ def kmm_init(X, m = 10): inducing = np.array(inducing) return X[inducing] +def fast_array_equal(A, B): + + + if config.getboolean('parallel', 'openmp'): + pragma_string = '#pragma omp parallel for private(i, j)' + else: + pragma_string = '' + + code2=""" + int i, j; + return_val = 1; + + %s + for(i=0;i + """ % pragma_string + + + weave_options_openmp = {'headers' : [''], + 'extra_compile_args': ['-fopenmp -O3'], + 'extra_link_args' : ['-lgomp'], + 'libraries': ['gomp']} + weave_options_noopenmp = {'extra_compile_args': ['-O3']} + + if config.getboolean('parallel', 'openmp'): + weave_options = weave_options_openmp + else: + weave_options = weave_options_noopenmp + + value = False + + + if (A == None) and (B == None): + return True + elif ((A == None) and (B != None)) or ((A != None) and (B == None)): + return False + elif A.shape == B.shape: + if A.ndim == 2: + N, D = [int(i) for i in A.shape] + value = weave.inline(code2, support_code=support_code, + arg_names=['A', 'B', 'N', 'D'], + type_converters=weave.converters.blitz, **weave_options) + elif A.ndim == 3: + N, D, Q = [int(i) for i in A.shape] + value = weave.inline(code3, support_code=support_code, + arg_names=['A', 'B', 'N', 'D', 'Q'], + type_converters=weave.converters.blitz, **weave_options) + else: + value = np.array_equal(A,B) + + return value + + if __name__ == '__main__': import pylab as plt X = np.linspace(1,10, 100)[:, None] diff --git a/GPy/util/mocap.py b/GPy/util/mocap.py index 4c9fa678..58662cf9 100644 --- a/GPy/util/mocap.py +++ b/GPy/util/mocap.py @@ -67,14 +67,14 @@ class tree: for i in range(len(self.vertices)): if self.vertices[i].id == id: return i - raise Error, 'Reverse look up of id failed.' + raise ValueError('Reverse look up of id failed.') def get_index_by_name(self, name): """Give the index associated with a given vertex name.""" for i in range(len(self.vertices)): if self.vertices[i].name == name: return i - raise Error, 'Reverse look up of name failed.' + raise ValueError('Reverse look up of name failed.') def order_vertices(self): """Order vertices in the graph such that parents always have a lower index than children.""" @@ -92,13 +92,15 @@ class tree: def swap_vertices(self, i, j): - """Swap two vertices in the tree structure array. + """ + Swap two vertices in the tree structure array. swap_vertex swaps the location of two vertices in a tree structure array. - ARG tree : the tree for which two vertices are to be swapped. - ARG i : the index of the first vertex to be swapped. 
- ARG j : the index of the second vertex to be swapped. - RETURN tree : the tree structure with the two vertex locations - swapped. + + :param tree: the tree for which two vertices are to be swapped. + :param i: the index of the first vertex to be swapped. + :param j: the index of the second vertex to be swapped. + :rval tree: the tree structure with the two vertex locations swapped. + """ store_vertex_i = self.vertices[i] store_vertex_j = self.vertices[j] @@ -117,12 +119,17 @@ class tree: def rotation_matrix(xangle, yangle, zangle, order='zxy', degrees=False): - """Compute the rotation matrix for an angle in each direction. + """ + + Compute the rotation matrix for an angle in each direction. This is a helper function for computing the rotation matrix for a given set of angles in a given order. - ARG xangle : rotation for x-axis. - ARG yangle : rotation for y-axis. - ARG zangle : rotation for z-axis. - ARG order : the order for the rotations.""" + + :param xangle: rotation for x-axis. + :param yangle: rotation for y-axis. + :param zangle: rotation for z-axis. + :param order: the order for the rotations. + + """ if degrees: xangle = math.radians(xangle) yangle = math.radians(yangle) @@ -301,10 +308,12 @@ class acclaim_skeleton(skeleton): def load_skel(self, file_name): - """Loads an ASF file into a skeleton structure. - loads skeleton structure from an acclaim skeleton file. - ARG file_name : the file name to load in. - RETURN skel : the skeleton for the file.""" + """ + Loads an ASF file into a skeleton structure. + + :param file_name: The file name to load in. + + """ fid = open(file_name, 'r') self.read_skel(fid) @@ -424,6 +433,8 @@ class acclaim_skeleton(skeleton): lin = self.read_line(fid) while lin != ':DEGREES': lin = self.read_line(fid) + if lin == '': + raise ValueError('Could not find :DEGREES in ' + fid.name) counter = 0 lin = self.read_line(fid) @@ -434,9 +445,9 @@ class acclaim_skeleton(skeleton): if frame_no: counter += 1 if counter != frame_no: - raise Error, 'Unexpected frame number.' + raise ValueError('Unexpected frame number.') else: - raise Error, 'Single bone name ...' + raise ValueError('Single bone name ...') else: ind = self.get_index_by_name(parts[0]) bones[ind].append(np.array([float(channel) for channel in parts[1:]])) @@ -564,7 +575,7 @@ class acclaim_skeleton(skeleton): return lin = self.read_line(fid) else: - raise Error, 'Unrecognised file format' + raise ValueError('Unrecognised file format') self.finalize() def read_units(self, fid): @@ -692,84 +703,3 @@ skel = acclaim_skeleton() - -def fetch_cmu(subj_motions, base_url = 'http://mocap.cs.cmu.edu:8080/subjects', skel_store_dir = '.', motion_store_dir = '.', store_motions = True, return_motions = True, messages = True): - ''' - Download and store the skel. and motions indicated in a tuple (A,B) where A is a list of skeletons and B - the corresponding 2-D list of motions, ie B_ij is the j-th motion to download for skeleton A_i - The method can optionally store the fetched data and / or return them as arrays. - If the data are already stored, they are not fetched but just retrieved. - - e.g. - # Download the data, do not return anything - GPy.util.mocap.fetch_cmu(subj_motions = ([35],[[1,2,3]]), return_motions = False) - # Fetch and return the data in a list. 
Do not store them anywhere - GPy.util.mocap.fetch_cmu(subj_motions = ([35],[[1,2,3]]), return_motions = True, store_motions = False) - - In both cases above, if the data do exist in the given skel_store_dir and motion_store_dir, they are just loaded from there. - ''' - - subjectsNum = subj_motions[0] - motionsNum = subj_motions[1] - - # Convert numbers to strings - subjects = [] - motions = [list() for _ in range(len(subjectsNum))] - for i in range(len(subjectsNum)): - curSubj = str(int(subjectsNum[i])) - if subjectsNum[i] < 10: - curSubj = '0' + curSubj - subjects.append(curSubj) - for j in range(len(motionsNum[i])): - curMot = str(int(motionsNum[i][j])) - if motionsNum[i][j] < 10: - curMot = '0' + curMot - motions[i].append(curMot) - - - all_skels = [] - - assert len(subjects) == len(motions) - - if return_motions: - all_motions = [list() for _ in range(len(subjects))] - else: - all_motions = [] - - for i in range(len(subjects)): - cur_skel_suffix = '/' + subjects[i] + '/' - cur_skel_dir = skel_store_dir + cur_skel_suffix - cur_skel_file = cur_skel_dir + subjects[i] + '.asf' - cur_skel_url = base_url + cur_skel_suffix + subjects[i] + '.asf' - - if os.path.isfile(cur_skel_file): - if return_motions: - with open(cur_skel_file, 'r') as f: - cur_skel_data = f.read() - else: - if store_motions: - if not os.path.isdir(cur_skel_dir): - os.mkdir(cur_skel_dir) - if not os.path.isdir(motion_store_dir + cur_skel_suffix): - os.mkdir(motion_store_dir + cur_skel_suffix) - cur_skel_data = dat.download_resource(cur_skel_url, cur_skel_file, store_motions, messages) - - if return_motions: - all_skels.append(cur_skel_data) - - for j in range(len(motions[i])): - cur_motion_url = base_url + cur_skel_suffix + subjects[i] + '_' + motions[i][j] + '.amc' - cur_motion_file = motion_store_dir + cur_skel_suffix + subjects[i] + '_' + motions[i][j] + '.amc' - if os.path.isfile(cur_motion_file): - with open(cur_motion_file, 'r') as f: - if return_motions: - cur_motion_data = f.read() - else: - cur_motion_data = dat.download_resource(cur_motion_url, cur_motion_file, store_motions, messages) - - if return_motions: - all_motions[i].append(cur_motion_data) - - return all_skels, all_motions - - diff --git a/GPy/util/multioutput.py b/GPy/util/multioutput.py new file mode 100644 index 00000000..a57593a7 --- /dev/null +++ b/GPy/util/multioutput.py @@ -0,0 +1,35 @@ +import numpy as np +import warnings +from .. import kern + +def build_lcm(input_dim, num_outputs, CK = [], NC = [], W_columns=1,W=None,kappa=None): + #TODO build_icm or build_lcm + """ + Builds a kernel for a linear coregionalization model + + :input_dim: Input dimensionality + :num_outputs: Number of outputs + :param CK: List of coregionalized kernels (i.e., this will be multiplied by a coregionalize kernel). 
+    :param NC: List of kernels that will be added up together with CK, but won't be multiplied by a coregionalize kernel
+    :param W_columns: number of columns of the coregionalization matrix W ('coregion_W')
+    :type W_columns: integer
+    """
+
+    for k in CK:
+        if k.input_dim != input_dim:
+            k.input_dim = input_dim
+            warnings.warn("kernel's input dimension overwritten to fit input_dim parameter.")
+
+    for k in NC:
+        if k.input_dim != input_dim + 1:
+            k.input_dim = input_dim + 1
+            warnings.warn("kernel's input dimension overwritten to fit input_dim parameter.")
+
+    kernel = CK[0].prod(kern.coregionalize(num_outputs,W_columns,W,kappa),tensor=True)
+    for k in CK[1:]:
+        k_coreg = kern.coregionalize(num_outputs,W_columns,W,kappa)
+        kernel += k.prod(k_coreg,tensor=True)
+    for k in NC:
+        kernel += k
+
+    return kernel
diff --git a/GPy/util/netpbmfile.py b/GPy/util/netpbmfile.py
new file mode 100644
index 00000000..030bd574
--- /dev/null
+++ b/GPy/util/netpbmfile.py
@@ -0,0 +1,331 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# netpbmfile.py
+
+# Copyright (c) 2011-2013, Christoph Gohlke
+# Copyright (c) 2011-2013, The Regents of the University of California
+# Produced at the Laboratory for Fluorescence Dynamics.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright
+#   notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above copyright
+#   notice, this list of conditions and the following disclaimer in the
+#   documentation and/or other materials provided with the distribution.
+# * Neither the name of the copyright holders nor the names of any
+#   contributors may be used to endorse or promote products derived
+#   from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+# POSSIBILITY OF SUCH DAMAGE.
+
+"""Read and write image data from and to Netpbm files.
+
+This implementation follows the Netpbm format specifications at
+http://netpbm.sourceforge.net/doc/. No gamma correction is performed.
+
+The following image formats are supported: PBM (bi-level), PGM (grayscale),
+PPM (color), PAM (arbitrary), XV thumbnail (RGB332, read-only). 
+ +:Author: + `Christoph Gohlke `_ + +:Organization: + Laboratory for Fluorescence Dynamics, University of California, Irvine + +:Version: 2013.01.18 + +Requirements +------------ +* `CPython 2.7, 3.2 or 3.3 `_ +* `Numpy 1.7 `_ +* `Matplotlib 1.2 `_ (optional for plotting) + +Examples +-------- +>>> im1 = numpy.array([[0, 1],[65534, 65535]], dtype=numpy.uint16) +>>> imsave('_tmp.pgm', im1) +>>> im2 = imread('_tmp.pgm') +>>> assert numpy.all(im1 == im2) + +""" + +from __future__ import division, print_function + +import sys +import re +import math +from copy import deepcopy + +import numpy + +__version__ = '2013.01.18' +__docformat__ = 'restructuredtext en' +__all__ = ['imread', 'imsave', 'NetpbmFile'] + + +def imread(filename, *args, **kwargs): + """Return image data from Netpbm file as numpy array. + + `args` and `kwargs` are arguments to NetpbmFile.asarray(). + + Examples + -------- + >>> image = imread('_tmp.pgm') + + """ + try: + netpbm = NetpbmFile(filename) + image = netpbm.asarray() + finally: + netpbm.close() + return image + + +def imsave(filename, data, maxval=None, pam=False): + """Write image data to Netpbm file. + + Examples + -------- + >>> image = numpy.array([[0, 1],[65534, 65535]], dtype=numpy.uint16) + >>> imsave('_tmp.pgm', image) + + """ + try: + netpbm = NetpbmFile(data, maxval=maxval) + netpbm.write(filename, pam=pam) + finally: + netpbm.close() + + +class NetpbmFile(object): + """Read and write Netpbm PAM, PBM, PGM, PPM, files.""" + + _types = {b'P1': b'BLACKANDWHITE', b'P2': b'GRAYSCALE', b'P3': b'RGB', + b'P4': b'BLACKANDWHITE', b'P5': b'GRAYSCALE', b'P6': b'RGB', + b'P7 332': b'RGB', b'P7': b'RGB_ALPHA'} + + def __init__(self, arg=None, **kwargs): + """Initialize instance from filename, open file, or numpy array.""" + for attr in ('header', 'magicnum', 'width', 'height', 'maxval', + 'depth', 'tupltypes', '_filename', '_fh', '_data'): + setattr(self, attr, None) + if arg is None: + self._fromdata([], **kwargs) + elif isinstance(arg, basestring): + self._fh = open(arg, 'rb') + self._filename = arg + self._fromfile(self._fh, **kwargs) + elif hasattr(arg, 'seek'): + self._fromfile(arg, **kwargs) + self._fh = arg + else: + self._fromdata(arg, **kwargs) + + def asarray(self, copy=True, cache=False, **kwargs): + """Return image data from file as numpy array.""" + data = self._data + if data is None: + data = self._read_data(self._fh, **kwargs) + if cache: + self._data = data + else: + return data + return deepcopy(data) if copy else data + + def write(self, arg, **kwargs): + """Write instance to file.""" + if hasattr(arg, 'seek'): + self._tofile(arg, **kwargs) + else: + with open(arg, 'wb') as fid: + self._tofile(fid, **kwargs) + + def close(self): + """Close open file. 
Future asarray calls might fail.""" + if self._filename and self._fh: + self._fh.close() + self._fh = None + + def __del__(self): + self.close() + + def _fromfile(self, fh): + """Initialize instance from open file.""" + fh.seek(0) + data = fh.read(4096) + if (len(data) < 7) or not (b'0' < data[1:2] < b'8'): + raise ValueError("Not a Netpbm file:\n%s" % data[:32]) + try: + self._read_pam_header(data) + except Exception: + try: + self._read_pnm_header(data) + except Exception: + raise ValueError("Not a Netpbm file:\n%s" % data[:32]) + + def _read_pam_header(self, data): + """Read PAM header and initialize instance.""" + regroups = re.search( + b"(^P7[\n\r]+(?:(?:[\n\r]+)|(?:#.*)|" + b"(HEIGHT\s+\d+)|(WIDTH\s+\d+)|(DEPTH\s+\d+)|(MAXVAL\s+\d+)|" + b"(?:TUPLTYPE\s+\w+))*ENDHDR\n)", data).groups() + self.header = regroups[0] + self.magicnum = b'P7' + for group in regroups[1:]: + key, value = group.split() + setattr(self, unicode(key).lower(), int(value)) + matches = re.findall(b"(TUPLTYPE\s+\w+)", self.header) + self.tupltypes = [s.split(None, 1)[1] for s in matches] + + def _read_pnm_header(self, data): + """Read PNM header and initialize instance.""" + bpm = data[1:2] in b"14" + regroups = re.search(b"".join(( + b"(^(P[123456]|P7 332)\s+(?:#.*[\r\n])*", + b"\s*(\d+)\s+(?:#.*[\r\n])*", + b"\s*(\d+)\s+(?:#.*[\r\n])*" * (not bpm), + b"\s*(\d+)\s(?:\s*#.*[\r\n]\s)*)")), data).groups() + (1, ) * bpm + self.header = regroups[0] + self.magicnum = regroups[1] + self.width = int(regroups[2]) + self.height = int(regroups[3]) + self.maxval = int(regroups[4]) + self.depth = 3 if self.magicnum in b"P3P6P7 332" else 1 + self.tupltypes = [self._types[self.magicnum]] + + def _read_data(self, fh, byteorder='>'): + """Return image data from open file as numpy array.""" + fh.seek(len(self.header)) + data = fh.read() + dtype = 'u1' if self.maxval < 256 else byteorder + 'u2' + depth = 1 if self.magicnum == b"P7 332" else self.depth + shape = [-1, self.height, self.width, depth] + size = numpy.prod(shape[1:]) + if self.magicnum in b"P1P2P3": + data = numpy.array(data.split(None, size)[:size], dtype) + data = data.reshape(shape) + elif self.maxval == 1: + shape[2] = int(math.ceil(self.width / 8)) + data = numpy.frombuffer(data, dtype).reshape(shape) + data = numpy.unpackbits(data, axis=-2)[:, :, :self.width, :] + else: + data = numpy.frombuffer(data, dtype) + data = data[:size * (data.size // size)].reshape(shape) + if data.shape[0] < 2: + data = data.reshape(data.shape[1:]) + if data.shape[-1] < 2: + data = data.reshape(data.shape[:-1]) + if self.magicnum == b"P7 332": + rgb332 = numpy.array(list(numpy.ndindex(8, 8, 4)), numpy.uint8) + rgb332 *= [36, 36, 85] + data = numpy.take(rgb332, data, axis=0) + return data + + def _fromdata(self, data, maxval=None): + """Initialize instance from numpy array.""" + data = numpy.array(data, ndmin=2, copy=True) + if data.dtype.kind not in "uib": + raise ValueError("not an integer type: %s" % data.dtype) + if data.dtype.kind == 'i' and numpy.min(data) < 0: + raise ValueError("data out of range: %i" % numpy.min(data)) + if maxval is None: + maxval = numpy.max(data) + maxval = 255 if maxval < 256 else 65535 + if maxval < 0 or maxval > 65535: + raise ValueError("data out of range: %i" % maxval) + data = data.astype('u1' if maxval < 256 else '>u2') + self._data = data + if data.ndim > 2 and data.shape[-1] in (3, 4): + self.depth = data.shape[-1] + self.width = data.shape[-2] + self.height = data.shape[-3] + self.magicnum = b'P7' if self.depth == 4 else b'P6' + else: + self.depth = 1 
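+            # single-channel image: grayscale PGM (P5), or bi-level PBM (P4)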
+            self.width = data.shape[-1]
+            self.height = data.shape[-2]
+            self.magicnum = b'P5' if maxval > 1 else b'P4'
+        self.maxval = maxval
+        self.tupltypes = [self._types[self.magicnum]]
+        self.header = self._header()
+
+    def _tofile(self, fh, pam=False):
+        """Write Netpbm file."""
+        fh.seek(0)
+        fh.write(self._header(pam))
+        data = self.asarray(copy=False)
+        if self.maxval == 1:
+            data = numpy.packbits(data, axis=-1)
+        data.tofile(fh)
+
+    def _header(self, pam=False):
+        """Return file header as byte string."""
+        if pam or self.magicnum == b'P7':
+            header = "\n".join((
+                "P7",
+                "HEIGHT %i" % self.height,
+                "WIDTH %i" % self.width,
+                "DEPTH %i" % self.depth,
+                "MAXVAL %i" % self.maxval,
+                "\n".join("TUPLTYPE %s" % unicode(i) for i in self.tupltypes),
+                "ENDHDR\n"))
+        elif self.maxval == 1:
+            header = "P4 %i %i\n" % (self.width, self.height)
+        elif self.depth == 1:
+            header = "P5 %i %i %i\n" % (self.width, self.height, self.maxval)
+        else:
+            header = "P6 %i %i %i\n" % (self.width, self.height, self.maxval)
+        if sys.version_info[0] > 2:
+            header = bytes(header, 'ascii')
+        return header
+
+    def __str__(self):
+        """Return information about instance."""
+        return unicode(self.header)
+
+
+if sys.version_info[0] > 2:
+    basestring = str
+    unicode = lambda x: str(x, 'ascii')
+
+if __name__ == "__main__":
+    # Show images specified on command line or all images in current directory
+    from glob import glob
+    from matplotlib import pyplot
+    files = sys.argv[1:] if len(sys.argv) > 1 else glob('*.p*m')
+    for fname in files:
+        try:
+            pam = NetpbmFile(fname)
+            img = pam.asarray(copy=False)
+            if False:
+                pam.write('_tmp.pgm.out', pam=True)
+                img2 = imread('_tmp.pgm.out')
+                assert numpy.all(img == img2)
+                imsave('_tmp.pgm.out', img)
+                img2 = imread('_tmp.pgm.out')
+                assert numpy.all(img == img2)
+            pam.close()
+        except ValueError as e:
+            print(fname, e)
+            continue
+        _shape = img.shape
+        if img.ndim > 3 or (img.ndim > 2 and img.shape[-1] not in (3, 4)):
+            img = img[0]
+        cmap = 'gray' if pam.maxval > 1 else 'binary'
+        pyplot.imshow(img, cmap, interpolation='nearest')
+        pyplot.title("%s %s %s %s" % (fname, unicode(pam.magicnum),
+                                      _shape, img.dtype))
+        pyplot.show()
diff --git a/GPy/util/plot.py b/GPy/util/plot.py
index 309c440e..f44864f3 100644
--- a/GPy/util/plot.py
+++ b/GPy/util/plot.py
@@ -70,6 +70,36 @@ def align_subplots(N,M,xlim=None, ylim=None):
         else:
             removeUpperTicks()
 
+def align_subplot_array(axes,xlim=None, ylim=None):
+    """make all of the axes in the array have the same limits, turn off unnecessary ticks
+
+    use pb.subplots() to get an array of axes
+    """
+    #find sensible xlim,ylim
+    if xlim is None:
+        xlim = [np.inf,-np.inf]
+        for ax in axes.flatten():
+            xlim[0] = min(xlim[0],ax.get_xlim()[0])
+            xlim[1] = max(xlim[1],ax.get_xlim()[1])
+    if ylim is None:
+        ylim = [np.inf,-np.inf]
+        for ax in axes.flatten():
+            ylim[0] = min(ylim[0],ax.get_ylim()[0])
+            ylim[1] = max(ylim[1],ax.get_ylim()[1])
+
+    N,M = axes.shape
+    for i,ax in enumerate(axes.flatten()):
+        ax.set_xlim(xlim)
+        ax.set_ylim(ylim)
+        if (i)%M:
+            ax.set_yticks([])
+        else:
+            removeRightTicks(ax)
+        if i<(M*(N-1)):
+            ax.set_xticks([])
+        else:
+            removeUpperTicks(ax)
+
 def x_frame1D(X,plot_limits=None,resolution=None):
     """
     Internal helper function for making plots, returns a set of input values to plot as well as lower and upper limits
diff --git a/GPy/util/plot_latent.py b/GPy/util/plot_latent.py
index c36c5e34..62442650 100644
--- a/GPy/util/plot_latent.py
+++ b/GPy/util/plot_latent.py
@@ -1,40 +1,59 @@
 import pylab as pb
 import numpy as np
 from .. 
import util +from GPy.util.latent_space_visualizations.controllers.imshow_controller import ImshowController +import itertools -def plot_latent(model, labels=None, which_indices=None, resolution=50, ax=None, marker='o', s=40): +def most_significant_input_dimensions(model, which_indices): + if which_indices is None: + if model.input_dim == 1: + input_1 = 0 + input_2 = None + if model.input_dim == 2: + input_1, input_2 = 0, 1 + else: + try: + input_1, input_2 = np.argsort(model.input_sensitivity())[::-1][:2] + except: + raise ValueError, "cannot automatically determine which dimensions to plot, please pass 'which_indices'" + else: + input_1, input_2 = which_indices + return input_1, input_2 + +def plot_latent(model, labels=None, which_indices=None, + resolution=50, ax=None, marker='o', s=40, + fignum=None, plot_inducing=False, legend=True, + aspect='auto', updates=False): """ :param labels: a np.array of size model.num_data containing labels for the points (can be number, strings, etc) :param resolution: the resolution of the grid on which to evaluate the predictive variance """ if ax is None: - ax = pb.gca() + fig = pb.figure(num=fignum) + ax = fig.add_subplot(111) util.plot.Tango.reset() if labels is None: labels = np.ones(model.num_data) - if which_indices is None: - if model.input_dim==1: - input_1 = 0 - input_2 = None - if model.input_dim==2: - input_1, input_2 = 0,1 - else: - try: - input_1, input_2 = np.argsort(model.input_sensitivity())[:2] - except: - raise ValueError, "cannot Atomatically determine which dimensions to plot, please pass 'which_indices'" - else: - input_1, input_2 = which_indices - #first, plot the output variance as a function of the latent space - Xtest, xx,yy,xmin,xmax = util.plot.x_frame2D(model.X[:,[input_1, input_2]],resolution=resolution) + input_1, input_2 = most_significant_input_dimensions(model, which_indices) + + # first, plot the output variance as a function of the latent space + Xtest, xx, yy, xmin, xmax = util.plot.x_frame2D(model.X[:, [input_1, input_2]], resolution=resolution) Xtest_full = np.zeros((Xtest.shape[0], model.X.shape[1])) - Xtest_full[:, :2] = Xtest - mu, var, low, up = model.predict(Xtest_full) - var = var[:, :1] - ax.imshow(var.reshape(resolution, resolution).T, - extent=[xmin[0], xmax[0], xmin[1], xmax[1]], cmap=pb.cm.binary,interpolation='bilinear',origin='lower') + + def plot_function(x): + Xtest_full[:, [input_1, input_2]] = x + mu, var, low, up = model.predict(Xtest_full) + var = var[:, :1] + return np.log(var) + view = ImshowController(ax, plot_function, + tuple(model.X.min(0)[:, [input_1, input_2]]) + tuple(model.X.max(0)[:, [input_1, input_2]]), + resolution, aspect=aspect, interpolation='bilinear', + cmap=pb.cm.binary) + +# ax.imshow(var.reshape(resolution, resolution).T, +# extent=[xmin[0], xmax[0], xmin[1], xmax[1]], cmap=pb.cm.binary, interpolation='bilinear', origin='lower') # make sure labels are in order of input: ulabels = [] @@ -42,50 +61,118 @@ def plot_latent(model, labels=None, which_indices=None, resolution=50, ax=None, if not lab in ulabels: ulabels.append(lab) + marker = itertools.cycle(list(marker)) + for i, ul in enumerate(ulabels): if type(ul) is np.string_: this_label = ul elif type(ul) is np.int64: - this_label = 'class %i'%ul + this_label = 'class %i' % ul else: - this_label = 'class %i'%i - if len(marker) == len(ulabels): - m = marker[i] - else: - m = marker + this_label = 'class %i' % i + m = marker.next() - index = np.nonzero(labels==ul)[0] - if model.input_dim==1: - x = model.X[index,input_1] + index = 
np.nonzero(labels == ul)[0] + if model.input_dim == 1: + x = model.X[index, input_1] y = np.zeros(index.size) else: - x = model.X[index,input_1] - y = model.X[index,input_2] + x = model.X[index, input_1] + y = model.X[index, input_2] ax.scatter(x, y, marker=m, s=s, color=util.plot.Tango.nextMedium(), label=this_label) - ax.set_xlabel('latent dimension %i'%input_1) - ax.set_ylabel('latent dimension %i'%input_2) + ax.set_xlabel('latent dimension %i' % input_1) + ax.set_ylabel('latent dimension %i' % input_2) - if not np.all(labels==1.): - ax.legend(loc=0,numpoints=1) + if not np.all(labels == 1.) and legend: + ax.legend(loc=0, numpoints=1) - ax.set_xlim(xmin[0],xmax[0]) - ax.set_ylim(xmin[1],xmax[1]) + ax.set_xlim(xmin[0], xmax[0]) + ax.set_ylim(xmin[1], xmax[1]) ax.grid(b=False) # remove the grid if present, it doesn't look good ax.set_aspect('auto') # set a nice aspect ratio + + if plot_inducing: + ax.plot(model.Z[:, input_1], model.Z[:, input_2], '^w') + + if updates: + ax.figure.canvas.show() + raw_input('Enter to continue') return ax +def plot_magnification(model, labels=None, which_indices=None, + resolution=60, ax=None, marker='o', s=40, + fignum=None, plot_inducing=False, legend=True, + aspect='auto', updates=False): + """ + :param labels: a np.array of size model.num_data containing labels for the points (can be number, strings, etc) + :param resolution: the resolution of the grid on which to evaluate the predictive variance + """ + if ax is None: + fig = pb.figure(num=fignum) + ax = fig.add_subplot(111) + util.plot.Tango.reset() -def plot_latent_indices(Model, which_indices=None, *args, **kwargs): + if labels is None: + labels = np.ones(model.num_data) - if which_indices is None: - try: - input_1, input_2 = np.argsort(Model.input_sensitivity())[:2] - except: - raise ValueError, "cannot Automatically determine which dimensions to plot, please pass 'which_indices'" - else: - input_1, input_2 = which_indices - ax = plot_latent(Model, which_indices=[input_1, input_2], *args, **kwargs) - # TODO: Here test if there are inducing points... 
- ax.plot(Model.Z[:, input_1], Model.Z[:, input_2], '^w') + input_1, input_2 = most_significant_input_dimensions(model, which_indices) + + # first, plot the output variance as a function of the latent space + Xtest, xx, yy, xmin, xmax = util.plot.x_frame2D(model.X[:, [input_1, input_2]], resolution=resolution) + Xtest_full = np.zeros((Xtest.shape[0], model.X.shape[1])) + def plot_function(x): + Xtest_full[:, [input_1, input_2]] = x + mf=model.magnification(Xtest_full) + return mf + view = ImshowController(ax, plot_function, + tuple(model.X.min(0)[:, [input_1, input_2]]) + tuple(model.X.max(0)[:, [input_1, input_2]]), + resolution, aspect=aspect, interpolation='bilinear', + cmap=pb.cm.gray) + + # make sure labels are in order of input: + ulabels = [] + for lab in labels: + if not lab in ulabels: + ulabels.append(lab) + + marker = itertools.cycle(list(marker)) + + for i, ul in enumerate(ulabels): + if type(ul) is np.string_: + this_label = ul + elif type(ul) is np.int64: + this_label = 'class %i' % ul + else: + this_label = 'class %i' % i + m = marker.next() + + index = np.nonzero(labels == ul)[0] + if model.input_dim == 1: + x = model.X[index, input_1] + y = np.zeros(index.size) + else: + x = model.X[index, input_1] + y = model.X[index, input_2] + ax.scatter(x, y, marker=m, s=s, color=util.plot.Tango.nextMedium(), label=this_label) + + ax.set_xlabel('latent dimension %i' % input_1) + ax.set_ylabel('latent dimension %i' % input_2) + + if not np.all(labels == 1.) and legend: + ax.legend(loc=0, numpoints=1) + + ax.set_xlim(xmin[0], xmax[0]) + ax.set_ylim(xmin[1], xmax[1]) + ax.grid(b=False) # remove the grid if present, it doesn't look good + ax.set_aspect('auto') # set a nice aspect ratio + + if plot_inducing: + ax.plot(model.Z[:, input_1], model.Z[:, input_2], '^w') + + if updates: + ax.figure.canvas.show() + raw_input('Enter to continue') + + pb.title('Magnification Factor') return ax diff --git a/GPy/util/symbolic.py b/GPy/util/symbolic.py new file mode 100644 index 00000000..49c8c33a --- /dev/null +++ b/GPy/util/symbolic.py @@ -0,0 +1,239 @@ +from sympy import Function, S, oo, I, cos, sin, asin, log, erf, pi, exp, sqrt, sign + + +class ln_diff_erf(Function): + nargs = 2 + + def fdiff(self, argindex=2): + if argindex == 2: + x0, x1 = self.args + return -2*exp(-x1**2)/(sqrt(pi)*(erf(x0)-erf(x1))) + elif argindex == 1: + x0, x1 = self.args + return 2.*exp(-x0**2)/(sqrt(pi)*(erf(x0)-erf(x1))) + else: + raise ArgumentIndexError(self, argindex) + + @classmethod + def eval(cls, x0, x1): + if x0.is_Number and x1.is_Number: + return log(erf(x0)-erf(x1)) + +class dh_dd_i(Function): + nargs = 5 + @classmethod + def eval(cls, t, tprime, d_i, d_j, l): + if (t.is_Number + and tprime.is_Number + and d_i.is_Number + and d_j.is_Number + and l.is_Number): + + diff_t = (t-tprime) + l2 = l*l + h = h(t, tprime, d_i, d_j, l) + half_l_di = 0.5*l*d_i + arg_1 = half_l_di + tprime/l + arg_2 = half_l_di - (t-tprime)/l + ln_part_1 = ln_diff_erf(arg_1, arg_2) + arg_1 = half_l_di + arg_2 = half_l_di - t/l + sign_val = sign(t/l) + ln_part_2 = ln_diff_erf(half_l_di, half_l_di - t/l) + + base = ((0.5*d_i*l2*(d_i+d_j)-1)*h + + (-diff_t*sign_val*exp(half_l_di*half_l_di + -d_i*diff_t + +ln_part_1) + +t*sign_val*exp(half_l_di*half_l_di + -d_i*t-d_j*tprime + +ln_part_2)) + + l/sqrt(pi)*(-exp(-diff_t*diff_t/l2) + +exp(-tprime*tprime/l2-d_i*t) + +exp(-t*t/l2-d_j*tprime) + -exp(-(d_i*t + d_j*tprime)))) + return base/(d_i+d_j) + +class dh_dd_j(Function): + nargs = 5 + @classmethod + def eval(cls, t, tprime, d_i, d_j, l): + if 
(t.is_Number + and tprime.is_Number + and d_i.is_Number + and d_j.is_Number + and l.is_Number): + diff_t = (t-tprime) + l2 = l*l + half_l_di = 0.5*l*d_i + h = h(t, tprime, d_i, d_j, l) + arg_1 = half_l_di + tprime/l + arg_2 = half_l_di - (t-tprime)/l + ln_part_1 = ln_diff_erf(arg_1, arg_2) + arg_1 = half_l_di + arg_2 = half_l_di - t/l + sign_val = sign(t/l) + ln_part_2 = ln_diff_erf(half_l_di, half_l_di - t/l) + sign_val = sign(t/l) + base = tprime*sign_val*exp(half_l_di*half_l_di-(d_i*t+d_j*tprime)+ln_part_2)-h + return base/(d_i+d_j) + +class dh_dl(Function): + nargs = 5 + @classmethod + def eval(cls, t, tprime, d_i, d_j, l): + if (t.is_Number + and tprime.is_Number + and d_i.is_Number + and d_j.is_Number + and l.is_Number): + + diff_t = (t-tprime) + l2 = l*l + h = h(t, tprime, d_i, d_j, l) + return 0.5*d_i*d_i*l*h + 2./(sqrt(pi)*(d_i+d_j))*((-diff_t/l2-d_i/2.)*exp(-diff_t*diff_t/l2)+(-tprime/l2+d_i/2.)*exp(-tprime*tprime/l2-d_i*t)-(-t/l2-d_i/2.)*exp(-t*t/l2-d_j*tprime)-d_i/2.*exp(-(d_i*t+d_j*tprime))) + +class dh_dt(Function): + nargs = 5 + @classmethod + def eval(cls, t, tprime, d_i, d_j, l): + if (t.is_Number + and tprime.is_Number + and d_i.is_Number + and d_j.is_Number + and l.is_Number): + if (t is S.NaN + or tprime is S.NaN + or d_i is S.NaN + or d_j is S.NaN + or l is S.NaN): + return S.NaN + else: + half_l_di = 0.5*l*d_i + arg_1 = half_l_di + tprime/l + arg_2 = half_l_di - (t-tprime)/l + ln_part_1 = ln_diff_erf(arg_1, arg_2) + arg_1 = half_l_di + arg_2 = half_l_di - t/l + sign_val = sign(t/l) + ln_part_2 = ln_diff_erf(half_l_di, half_l_di - t/l) + + + return (sign_val*exp(half_l_di*half_l_di + - d_i*(t-tprime) + + ln_part_1 + - log(d_i + d_j)) + - sign_val*exp(half_l_di*half_l_di + - d_i*t - d_j*tprime + + ln_part_2 + - log(d_i + d_j))).diff(t) + +class dh_dtprime(Function): + nargs = 5 + @classmethod + def eval(cls, t, tprime, d_i, d_j, l): + if (t.is_Number + and tprime.is_Number + and d_i.is_Number + and d_j.is_Number + and l.is_Number): + if (t is S.NaN + or tprime is S.NaN + or d_i is S.NaN + or d_j is S.NaN + or l is S.NaN): + return S.NaN + else: + half_l_di = 0.5*l*d_i + arg_1 = half_l_di + tprime/l + arg_2 = half_l_di - (t-tprime)/l + ln_part_1 = ln_diff_erf(arg_1, arg_2) + arg_1 = half_l_di + arg_2 = half_l_di - t/l + sign_val = sign(t/l) + ln_part_2 = ln_diff_erf(half_l_di, half_l_di - t/l) + + + return (sign_val*exp(half_l_di*half_l_di + - d_i*(t-tprime) + + ln_part_1 + - log(d_i + d_j)) + - sign_val*exp(half_l_di*half_l_di + - d_i*t - d_j*tprime + + ln_part_2 + - log(d_i + d_j))).diff(tprime) + + +class h(Function): + nargs = 5 + def fdiff(self, argindex=5): + t, tprime, d_i, d_j, l = self.args + if argindex == 1: + return dh_dt(t, tprime, d_i, d_j, l) + elif argindex == 2: + return dh_dtprime(t, tprime, d_i, d_j, l) + elif argindex == 3: + return dh_dd_i(t, tprime, d_i, d_j, l) + elif argindex == 4: + return dh_dd_j(t, tprime, d_i, d_j, l) + elif argindex == 5: + return dh_dl(t, tprime, d_i, d_j, l) + + + @classmethod + def eval(cls, t, tprime, d_i, d_j, l): + # putting in the is_Number stuff forces it to look for a fdiff method for derivative. If it's left out, then when asking for self.diff, it just does the diff on the eval symbolic terms directly. We want to avoid that because we are looking to ensure everything is numerically stable. Maybe it's because of the if statement that this happens? 
+ if (t.is_Number + and tprime.is_Number + and d_i.is_Number + and d_j.is_Number + and l.is_Number): + if (t is S.NaN + or tprime is S.NaN + or d_i is S.NaN + or d_j is S.NaN + or l is S.NaN): + return S.NaN + else: + half_l_di = 0.5*l*d_i + arg_1 = half_l_di + tprime/l + arg_2 = half_l_di - (t-tprime)/l + ln_part_1 = ln_diff_erf(arg_1, arg_2) + arg_1 = half_l_di + arg_2 = half_l_di - t/l + sign_val = sign(t/l) + ln_part_2 = ln_diff_erf(half_l_di, half_l_di - t/l) + + + return (sign_val*exp(half_l_di*half_l_di + - d_i*(t-tprime) + + ln_part_1 + - log(d_i + d_j)) + - sign_val*exp(half_l_di*half_l_di + - d_i*t - d_j*tprime + + ln_part_2 + - log(d_i + d_j))) + + + # return (exp((d_j/2.*l)**2)/(d_i+d_j) + # *(exp(-d_j*(tprime - t)) + # *(erf((tprime-t)/l - d_j/2.*l) + # + erf(t/l + d_j/2.*l)) + # - exp(-(d_j*tprime + d_i)) + # *(erf(tprime/l - d_j/2.*l) + # + erf(d_j/2.*l)))) + +class erfc(Function): + nargs = 1 + + @classmethod + def eval(cls, arg): + return 1-erf(arg) + +class erfcx(Function): + nargs = 1 + + @classmethod + def eval(cls, arg): + return erfc(arg)*exp(arg*arg) + diff --git a/GPy/util/univariate_Gaussian.py b/GPy/util/univariate_Gaussian.py index 28946894..702ab25c 100644 --- a/GPy/util/univariate_Gaussian.py +++ b/GPy/util/univariate_Gaussian.py @@ -13,23 +13,42 @@ def std_norm_cdf(x): Cumulative standard Gaussian distribution Based on Abramowitz, M. and Stegun, I. (1970) """ + #Generalize for many x + x = np.asarray(x).copy() + cdf_x = np.zeros_like(x) + N = x.size support_code = "#include " code = """ - double sign = 1.0; - if (x < 0.0){ - sign = -1.0; - x = -x; + double sign, t, erf; + for (int i=0; i 1 and nImg.is_integer(): # Show a mosaic of images - nImg = np.int(nImg) - self.vals = np.zeros((self.dimensions[0]*nImg, self.dimensions[1]*nImg)) - for iR in range(nImg): - for iC in range(nImg): - currImgId = iR*nImg + iC - currImg = np.reshape(vals[0,dim*currImgId+np.array(range(dim))], self.dimensions, order='F') - firstRow = iR*self.dimensions[0] - lastRow = (iR+1)*self.dimensions[0] - firstCol = iC*self.dimensions[1] - lastCol = (iC+1)*self.dimensions[1] - self.vals[firstRow:lastRow, firstCol:lastCol] = currImg + num_images = np.sqrt(vals[0,].size/dim) + if num_images > 1 and num_images.is_integer(): # Show a mosaic of images + num_images = np.int(num_images) + self.vals = np.zeros((self.dimensions[0]*num_images, self.dimensions[1]*num_images)) + for iR in range(num_images): + for iC in range(num_images): + cur_img_id = iR*num_images + iC + cur_img = np.reshape(vals[0,dim*cur_img_id+np.array(range(dim))], self.dimensions, order=self.order) + first_row = iR*self.dimensions[0] + last_row = (iR+1)*self.dimensions[0] + first_col = iC*self.dimensions[1] + last_col = (iC+1)*self.dimensions[1] + self.vals[first_row:last_row, first_col:last_col] = cur_img else: - self.vals = np.reshape(vals[0,dim*self.selectImage+np.array(range(dim))], self.dimensions, order='F') + self.vals = np.reshape(vals[0,dim*self.select_image+np.array(range(dim))], self.dimensions, order=self.order) if self.transpose: self.vals = self.vals.T # if not self.scale: @@ -290,8 +315,8 @@ class image_show(matplotlib_show): self.vals = -self.vals # un-normalizing, for visualisation purposes: - if self.presetSTD >= 0: # The Mean is assumed to be in the range (0,255) - self.vals = self.vals*self.presetSTD + self.presetMean + if self.preset_std >= 0: # The Mean is assumed to be in the range (0,255) + self.vals = self.vals*self.preset_std + self.preset_mean # Clipping the values: self.vals[self.vals < 0] = 0 
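+        # clip from above as well, so all pixel values stay in the displayable [0, 255] range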
self.vals[self.vals > 255] = 255
@@ -317,7 +342,7 @@ class mocap_data_show_vpython(vpython_show):
         for i in range(self.vals.shape[0]):
             self.spheres.append(visual.sphere(pos=(self.vals[i, 0], self.vals[i, 2], self.vals[i, 1]), radius=self.radius))
         self.scene.visible=True
-        
+
     def draw_edges(self):
         self.rods = []
         self.line_handle = []
@@ -434,7 +459,6 @@ class mocap_data_show(matplotlib_show):
         self.axes.set_ylim(self.y_lim)
         self.axes.set_zlim(self.z_lim)
 
-
 class stick_show(mocap_data_show_vpython):
     """Show a three dimensional point cloud as a figure. Connect elements of the figure together using the matrix connect."""
     def __init__(self, vals, connect=None, scene=None):
@@ -497,14 +521,17 @@ def data_play(Y, visualizer, frame_rate=30):
     This example loads in the CMU mocap database (http://mocap.cs.cmu.edu) subject number 35 motion number 01. It then plays it using the mocap_show visualize object.
 
-    data = GPy.util.datasets.cmu_mocap(subject='35', train_motions=['01'])
-    Y = data['Y']
-    Y[:, 0:3] = 0.   # Make figure walk in place
-    visualize = GPy.util.visualize.skeleton_show(Y[0, :], data['skel'])
-    GPy.util.visualize.data_play(Y, visualize)
+    .. code-block:: python
+
+        data = GPy.util.datasets.cmu_mocap(subject='35', train_motions=['01'])
+        Y = data['Y']
+        Y[:, 0:3] = 0.   # Make figure walk in place
+        visualize = GPy.util.visualize.skeleton_show(Y[0, :], data['skel'])
+        GPy.util.visualize.data_play(Y, visualize)
+
     """
     for y in Y:
-        visualizer.modify(y)
+        visualizer.modify(y[None, :])
         time.sleep(1./float(frame_rate))
diff --git a/GPy/util/warping_functions.py b/GPy/util/warping_functions.py
index 98d4d2b7..35ad3b80 100644
--- a/GPy/util/warping_functions.py
+++ b/GPy/util/warping_functions.py
@@ -53,9 +53,11 @@ class TanhWarpingFunction(WarpingFunction):
         self.num_parameters = 3 * self.n_terms
 
     def f(self,y,psi):
-        """transform y with f using parameter vector psi
+        """
+        transform y with f using parameter vector psi
         psi = [[a,b,c]]
-        f = \sum_{terms} a * tanh(b*(y+c))
+        :math:`f = \\sum_{terms} a * tanh(b*(y+c))`
+
         """
 
         #1. check that number of params is consistent
@@ -77,8 +79,7 @@ class TanhWarpingFunction(WarpingFunction):
         """
         calculate the numerical inverse of f
 
-        == input ==
-        iterations: number of N.R. iterations
+        :param iterations: number of Newton-Raphson iterations
         """
 
@@ -165,9 +166,11 @@ class TanhWarpingFunction_d(WarpingFunction):
         self.num_parameters = 3 * self.n_terms + 1
 
     def f(self,y,psi):
-        """transform y with f using parameter vector psi
+        """
+        Transform y with f using parameter vector psi
         psi = [[a,b,c]]
-        f = \sum_{terms} a * tanh(b*(y+c))
+
+        :math:`f = \\sum_{terms} a * tanh(b*(y+c))`
         """
 
         #1. check that number of params is consistent
@@ -185,17 +188,18 @@ class TanhWarpingFunction_d(WarpingFunction):
 
         return z
 
-    def f_inv(self, z, psi, max_iterations = 1000):
+    def f_inv(self, z, psi, max_iterations=1000, y=None):
         """
         calculate the numerical inverse of f
 
-        == input ==
-        iterations: number of N.R. iterations
+        :param max_iterations: maximum number of Newton-Raphson iterations
         """
 
         z = z.copy()
-        y = np.ones_like(z)
+        if y is None:
+            y = np.ones_like(z)
+
         it = 0
         update = np.inf
@@ -205,15 +209,16 @@ class TanhWarpingFunction_d(WarpingFunction):
             it += 1
             if it == max_iterations:
                 print "WARNING!!! Maximum number of iterations reached in f_inv"
-            
+
         return y
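+    # A usage sketch (illustrative only, assuming an instance `warp` of this
+    # class constructed with its number of tanh terms):
+    #     z = warp.f(y, psi)          # warp the observations
+    #     y_rec = warp.f_inv(z, psi)  # recover y by Newton-Raphson iteration
+    # f_inv can now be warm-started by passing an initial guess via `y`.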
 
     def fgrad_y(self, y, psi, return_precalc = False):
         """
         gradient of f w.r.t. y ([N x 1])
-        returns: Nx1 vector of derivatives, unless return_precalc is true,
-        then it also returns the precomputed stuff
+
+        :returns: Nx1 vector of derivatives, unless return_precalc is true, then it also returns the precomputed stuff
+
         """
 
@@ -240,7 +245,7 @@ class TanhWarpingFunction_d(WarpingFunction):
         """
         gradient of f w.r.t. y and psi
 
-        returns: NxIx4 tensor of partial derivatives
+        :returns: NxIx4 tensor of partial derivatives
 
         """
 
diff --git a/MANIFEST.in b/MANIFEST.in
index c89284cd..8d5b2304 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -2,3 +2,5 @@ include *.txt
 recursive-include doc *.txt
 include *.md
 recursive-include doc *.md
+include *.cfg
+recursive-include doc *.cfg
diff --git a/README.md b/README.md
index c027bb3a..0f25dd98 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,7 @@
 GPy
 ===
 
-A Gaussian processes framework in python
+A Gaussian processes framework in Python.
 
 * [User mailing list](https://lists.shef.ac.uk/sympa/subscribe/gpy-users)
 * [Online documentation](https://gpy.readthedocs.org/en/latest/)
@@ -9,3 +9,88 @@ A Gaussian processes framework in python
 Continuous integration status: ![CI status](https://travis-ci.org/SheffieldML/GPy.png)
+
+Getting started
+===============
+Installing with pip
+-------------------
+The simplest way to install GPy is using pip. Ubuntu users can do:
+
+    sudo apt-get install python-pip
+    pip install gpy
+
+If you'd like to install from source, or want to contribute to the project (e.g. by sending pull requests via GitHub), read on.
+
+Ubuntu
+------
+For the most part, the developers are using Ubuntu. To install the required packages:
+
+    sudo apt-get install python-numpy python-scipy python-matplotlib
+
+Clone this git repository and add it to your path:
+
+    git clone git@github.com:SheffieldML/GPy.git ~/SheffieldML
+    echo 'PYTHONPATH=$PYTHONPATH:~/SheffieldML' >> ~/.bashrc
+
+
+Windows
+-------
+On Windows, we recommend the [Anaconda Python distribution](http://continuum.io/downloads). We've also had luck with [Enthought](http://www.enthought.com). git clone or unzip the source to a suitable directory, and add an appropriate PYTHONPATH environment variable.
+
+On Windows 7 (and possibly earlier versions) there's a bug in scipy version 0.13 which tries to write very long filenames. Reverting to scipy 0.12 seems to do the trick:
+
+    conda install scipy=0.12
+
+OSX
+---
+Everything appears to work out of the box using [Enthought](http://www.enthought.com) on OS X Mavericks. Download/clone GPy, and then add GPy to your PYTHONPATH:
+
+    git clone git@github.com:SheffieldML/GPy.git ~/SheffieldML
+    echo 'PYTHONPATH=$PYTHONPATH:~/SheffieldML' >> ~/.profile
+
+
+Compiling documentation:
+========================
+
+The documentation is stored in doc/ and is compiled with the Sphinx Python documentation generator, and is written in the reStructuredText format.
+
+The Sphinx documentation is available here: http://sphinx-doc.org/latest/contents.html
+
+
+Installing dependencies:
+------------------------
+
+To compile the documentation, first ensure that Sphinx is installed. On Debian-based systems, this can be achieved as follows:
+
+    sudo apt-get install python-pip
+    sudo pip install sphinx
+
+A LaTeX distribution is also required to compile the equations. Note that the extra packages are necessary to install the unicode packages. 
To compile the equations to PNG format for use in HTML pages, the package *dvipng* must be installed. IPython is also required. On Debian-based systems, this can be achieved as follows: + + sudo apt-get install texlive texlive-latex-extra texlive-base texlive-recommended + sudo apt-get install dvipng + sudo apt-get install ipython + + +Compiling documentation: +------------------------ + +The documentation can be compiled as follows: + + cd doc + make html + +The HTML files are then stored in doc/_build/ + + +Running unit tests: +=================== + +Ensure nose is installed via pip: + + pip install nose + +Run nosetests from the root directory of the repository: + + nosetests -v + diff --git a/doc/GPy.core.rst b/doc/GPy.core.rst index e02aaa2a..d7f18192 100644 --- a/doc/GPy.core.rst +++ b/doc/GPy.core.rst @@ -9,6 +9,46 @@ core Package :undoc-members: :show-inheritance: +:mod:`domains` Module +--------------------- + +.. automodule:: GPy.core.domains + :members: + :undoc-members: + :show-inheritance: + +:mod:`fitc` Module +------------------ + +.. automodule:: GPy.core.fitc + :members: + :undoc-members: + :show-inheritance: + +:mod:`gp` Module +---------------- + +.. automodule:: GPy.core.gp + :members: + :undoc-members: + :show-inheritance: + +:mod:`gp_base` Module +--------------------- + +.. automodule:: GPy.core.gp_base + :members: + :undoc-members: + :show-inheritance: + +:mod:`mapping` Module +--------------------- + +.. automodule:: GPy.core.mapping + :members: + :undoc-members: + :show-inheritance: + :mod:`model` Module ------------------- @@ -17,10 +57,10 @@ core Package :undoc-members: :show-inheritance: -:mod:`parameterised` Module +:mod:`parameterized` Module --------------------------- -.. automodule:: GPy.core.parameterised +.. automodule:: GPy.core.parameterized :members: :undoc-members: :show-inheritance: @@ -33,3 +73,35 @@ core Package :undoc-members: :show-inheritance: +:mod:`sparse_gp` Module +----------------------- + +.. automodule:: GPy.core.sparse_gp + :members: + :undoc-members: + :show-inheritance: + +:mod:`svigp` Module +------------------- + +.. automodule:: GPy.core.svigp + :members: + :undoc-members: + :show-inheritance: + +:mod:`transformations` Module +----------------------------- + +.. automodule:: GPy.core.transformations + :members: + :undoc-members: + :show-inheritance: + +:mod:`variational` Module +------------------------- + +.. automodule:: GPy.core.variational + :members: + :undoc-members: + :show-inheritance: + diff --git a/doc/GPy.examples.rst b/doc/GPy.examples.rst index f17cf826..176ae396 100644 --- a/doc/GPy.examples.rst +++ b/doc/GPy.examples.rst @@ -25,10 +25,10 @@ examples Package :undoc-members: :show-inheritance: -:mod:`non_gaussian` Module --------------------------- +:mod:`laplace_approximations` Module +------------------------------------ -.. automodule:: GPy.examples.non_gaussian +.. automodule:: GPy.examples.laplace_approximations :members: :undoc-members: :show-inheritance: @@ -41,6 +41,14 @@ examples Package :undoc-members: :show-inheritance: +:mod:`stochastic` Module +------------------------ + +.. 
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
 :mod:`tutorials` Module
 -----------------------
diff --git a/doc/GPy.inference.rst b/doc/GPy.inference.rst
index f30e7d25..6a1bef4a 100644
--- a/doc/GPy.inference.rst
+++ b/doc/GPy.inference.rst
@@ -1,10 +1,18 @@
 inference Package
 =================
 
-:mod:`SGD` Module
------------------
+:mod:`conjugate_gradient_descent` Module
+----------------------------------------
 
-.. automodule:: GPy.inference.SGD
+.. automodule:: GPy.inference.conjugate_gradient_descent
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`gradient_descent_update_rules` Module
+-------------------------------------------
+
+.. automodule:: GPy.inference.gradient_descent_update_rules
     :members:
     :undoc-members:
     :show-inheritance:
@@ -25,3 +33,19 @@ inference Package
     :undoc-members:
     :show-inheritance:
 
+:mod:`scg` Module
+-----------------
+
+.. automodule:: GPy.inference.scg
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`sgd` Module
+-----------------
+
+.. automodule:: GPy.inference.sgd
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
diff --git a/doc/GPy.kern.parts.rst b/doc/GPy.kern.parts.rst
new file mode 100644
index 00000000..45d3e235
--- /dev/null
+++ b/doc/GPy.kern.parts.rst
@@ -0,0 +1,275 @@
+parts Package
+=============
+
+:mod:`parts` Package
+--------------------
+
+.. automodule:: GPy.kern.parts
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`Brownian` Module
+----------------------
+
+.. automodule:: GPy.kern.parts.Brownian
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`Matern32` Module
+----------------------
+
+.. automodule:: GPy.kern.parts.Matern32
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`Matern52` Module
+----------------------
+
+.. automodule:: GPy.kern.parts.Matern52
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`ODE_1` Module
+-------------------
+
+.. automodule:: GPy.kern.parts.ODE_1
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`ODE_UY` Module
+--------------------
+
+.. automodule:: GPy.kern.parts.ODE_UY
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`bias` Module
+------------------
+
+.. automodule:: GPy.kern.parts.bias
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`coregionalize` Module
+---------------------------
+
+.. automodule:: GPy.kern.parts.coregionalize
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`eq_ode1` Module
+---------------------
+
+.. automodule:: GPy.kern.parts.eq_ode1
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`exponential` Module
+-------------------------
+
+.. automodule:: GPy.kern.parts.exponential
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`finite_dimensional` Module
+--------------------------------
+
+.. automodule:: GPy.kern.parts.finite_dimensional
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`fixed` Module
+-------------------
+
+.. automodule:: GPy.kern.parts.fixed
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`gibbs` Module
+-------------------
+
+.. automodule:: GPy.kern.parts.gibbs
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`hetero` Module
+--------------------
+
+.. automodule:: GPy.kern.parts.hetero
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`hierarchical` Module
+--------------------------
+
+.. automodule:: GPy.kern.parts.hierarchical
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`independent_outputs` Module
+---------------------------------
+
+.. automodule:: GPy.kern.parts.independent_outputs
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`kernpart` Module
+----------------------
+
+.. automodule:: GPy.kern.parts.kernpart
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`linear` Module
+--------------------
+
+.. automodule:: GPy.kern.parts.linear
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`mlp` Module
+-----------------
+
+.. automodule:: GPy.kern.parts.mlp
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`periodic_Matern32` Module
+-------------------------------
+
+.. automodule:: GPy.kern.parts.periodic_Matern32
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`periodic_Matern52` Module
+-------------------------------
+
+.. automodule:: GPy.kern.parts.periodic_Matern52
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`periodic_exponential` Module
+----------------------------------
+
+.. automodule:: GPy.kern.parts.periodic_exponential
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`poly` Module
+------------------
+
+.. automodule:: GPy.kern.parts.poly
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`prod` Module
+------------------
+
+.. automodule:: GPy.kern.parts.prod
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`prod_orthogonal` Module
+-----------------------------
+
+.. automodule:: GPy.kern.parts.prod_orthogonal
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`rational_quadratic` Module
+--------------------------------
+
+.. automodule:: GPy.kern.parts.rational_quadratic
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`rbf` Module
+-----------------
+
+.. automodule:: GPy.kern.parts.rbf
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`rbf_inv` Module
+---------------------
+
+.. automodule:: GPy.kern.parts.rbf_inv
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`rbfcos` Module
+--------------------
+
+.. automodule:: GPy.kern.parts.rbfcos
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`spline` Module
+--------------------
+
+.. automodule:: GPy.kern.parts.spline
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`symmetric` Module
+-----------------------
+
+.. automodule:: GPy.kern.parts.symmetric
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`sympy_helpers` Module
+---------------------------
+
+.. automodule:: GPy.kern.parts.sympy_helpers
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`sympykern` Module
+-----------------------
+
+.. automodule:: GPy.kern.parts.sympykern
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`white` Module
+-------------------
+
+.. automodule:: GPy.kern.parts.white
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
diff --git a/doc/GPy.kern.rst b/doc/GPy.kern.rst
index aef712dc..35d9ec00 100644
--- a/doc/GPy.kern.rst
+++ b/doc/GPy.kern.rst
@@ -9,38 +9,6 @@ kern Package
     :undoc-members:
     :show-inheritance:
 
-:mod:`Brownian` Module
-----------------------
-
-.. automodule:: GPy.kern.Brownian
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
-:mod:`Matern32` Module
-----------------------
-
-.. automodule:: GPy.kern.Matern32
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
-:mod:`Matern52` Module
-----------------------
-
-.. automodule:: GPy.kern.Matern52
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
-:mod:`bias` Module
-------------------
-
-.. automodule:: GPy.kern.bias
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
 :mod:`constructors` Module
 --------------------------
 
@@ -49,30 +17,6 @@ kern Package
     :undoc-members:
     :show-inheritance:
 
-:mod:`coregionalise` Module
----------------------------
-
-.. automodule:: GPy.kern.coregionalise
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
-:mod:`exponential` Module
-------------------------
-
-.. automodule:: GPy.kern.exponential
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
-:mod:`finite_dimensional` Module
--------------------------------
-
-.. automodule:: GPy.kern.finite_dimensional
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
 :mod:`kern` Module
 ------------------
 
@@ -81,107 +25,10 @@ kern Package
     :undoc-members:
     :show-inheritance:
 
-:mod:`kernpart` Module
-----------------------
+Subpackages
+-----------
 
-.. automodule:: GPy.kern.kernpart
-    :members:
-    :undoc-members:
-    :show-inheritance:
+.. toctree::
 
-:mod:`linear` Module
--------------------
-
-.. automodule:: GPy.kern.linear
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
-:mod:`periodic_Matern32` Module
-------------------------------
-
-.. automodule:: GPy.kern.periodic_Matern32
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
-:mod:`periodic_Matern52` Module
-------------------------------
-
-.. automodule:: GPy.kern.periodic_Matern52
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
-:mod:`periodic_exponential` Module
----------------------------------
-
-.. automodule:: GPy.kern.periodic_exponential
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
-:mod:`prod` Module
------------------
-
-.. automodule:: GPy.kern.prod
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
-:mod:`prod_orthogonal` Module
-----------------------------
-
-.. automodule:: GPy.kern.prod_orthogonal
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
-:mod:`rational_quadratic` Module
--------------------------------
-
-.. automodule:: GPy.kern.rational_quadratic
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
-:mod:`rbf` Module
-----------------
-
-.. automodule:: GPy.kern.rbf
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
-:mod:`spline` Module
--------------------
-
-.. automodule:: GPy.kern.spline
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
-:mod:`symmetric` Module
-----------------------
-
-.. automodule:: GPy.kern.symmetric
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
-:mod:`sympykern` Module
-----------------------
-
-.. automodule:: GPy.kern.sympykern
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
-:mod:`white` Module
-------------------
-
-.. automodule:: GPy.kern.white
-    :members:
-    :undoc-members:
-    :show-inheritance:
+    GPy.kern.parts
 
diff --git a/doc/GPy.likelihoods.noise_models.rst b/doc/GPy.likelihoods.noise_models.rst
new file mode 100644
index 00000000..19e5e9fe
--- /dev/null
+++ b/doc/GPy.likelihoods.noise_models.rst
@@ -0,0 +1,75 @@
+noise_models Package
+====================
+
+:mod:`noise_models` Package
+---------------------------
+
+.. automodule:: GPy.likelihoods.noise_models
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`bernoulli_noise` Module
+-----------------------------
+
+.. automodule:: GPy.likelihoods.noise_models.bernoulli_noise
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`exponential_noise` Module
+-------------------------------
+
+.. automodule:: GPy.likelihoods.noise_models.exponential_noise
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`gamma_noise` Module
+-------------------------
+
+.. automodule:: GPy.likelihoods.noise_models.gamma_noise
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`gaussian_noise` Module
+----------------------------
+
+.. automodule:: GPy.likelihoods.noise_models.gaussian_noise
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`gp_transformations` Module
+--------------------------------
+
+.. automodule:: GPy.likelihoods.noise_models.gp_transformations
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`noise_distributions` Module
+---------------------------------
+
+.. automodule:: GPy.likelihoods.noise_models.noise_distributions
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`poisson_noise` Module
+---------------------------
+
+.. automodule:: GPy.likelihoods.noise_models.poisson_noise
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`student_t_noise` Module
+-----------------------------
+
+.. automodule:: GPy.likelihoods.noise_models.student_t_noise
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
diff --git a/doc/GPy.likelihoods.rst b/doc/GPy.likelihoods.rst
index 03c15a82..5dcabbd1 100644
--- a/doc/GPy.likelihoods.rst
+++ b/doc/GPy.likelihoods.rst
@@ -9,7 +9,7 @@ likelihoods Package
     :undoc-members:
     :show-inheritance:
 
-:mod:`EP` Module
+:mod:`ep` Module
 ----------------
 
 .. automodule:: GPy.likelihoods.ep
@@ -17,7 +17,15 @@ likelihoods Package
     :undoc-members:
     :show-inheritance:
 
-:mod:`Gaussian` Module
+:mod:`ep_mixed_noise` Module
+----------------------------
+
+.. automodule:: GPy.likelihoods.ep_mixed_noise
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`gaussian` Module
 ----------------------
 
 .. automodule:: GPy.likelihoods.gaussian
@@ -25,6 +33,22 @@ likelihoods Package
     :undoc-members:
     :show-inheritance:
 
+:mod:`gaussian_mixed_noise` Module
+----------------------------------
+
+.. automodule:: GPy.likelihoods.gaussian_mixed_noise
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`laplace` Module
+---------------------
+
+.. automodule:: GPy.likelihoods.laplace
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
 :mod:`likelihood` Module
 ------------------------
 
@@ -33,11 +57,18 @@ likelihoods Package
     :undoc-members:
     :show-inheritance:
 
-:mod:`likelihood_functions` Module
-----------------------------------
+:mod:`noise_model_constructors` Module
+--------------------------------------
 
-.. automodule:: GPy.likelihoods.likelihood_functions
+.. automodule:: GPy.likelihoods.noise_model_constructors
     :members:
     :undoc-members:
     :show-inheritance:
 
+Subpackages
+-----------
+
+.. toctree::
+
+    GPy.likelihoods.noise_models
+
diff --git a/doc/GPy.mappings.rst b/doc/GPy.mappings.rst
new file mode 100644
index 00000000..b7444808
--- /dev/null
+++ b/doc/GPy.mappings.rst
@@ -0,0 +1,35 @@
+mappings Package
+================
+
+:mod:`mappings` Package
+-----------------------
+
+.. automodule:: GPy.mappings
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`kernel` Module
+--------------------
+
+.. automodule:: GPy.mappings.kernel
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`linear` Module
+--------------------
+
+.. automodule:: GPy.mappings.linear
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`mlp` Module
+-----------------
+
+.. automodule:: GPy.mappings.mlp
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
diff --git a/doc/GPy.models.rst b/doc/GPy.models.rst
deleted file mode 100644
index f4ae6a59..00000000
--- a/doc/GPy.models.rst
+++ /dev/null
@@ -1,83 +0,0 @@
-models Package
-==============
-
-:mod:`models` Package
----------------------
-
-.. automodule:: GPy.models
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
-:mod:`Bayesian_GPLVM` Module
-----------------------------
-
-.. automodule:: GPy.models.bayesian_gplvm
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
-:mod:`gp` Module
-----------------
-
-.. automodule:: GPy.models.gp
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
-:mod:`gplvm` Module
--------------------
-
-.. automodule:: GPy.models.gplvm
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
-:mod:`gp_regression` Module
----------------------------
-
-.. automodule:: GPy.models.gp_regression
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
-:mod:`sparse_gp` Module
------------------------
-
-.. automodule:: GPy.models.sparse_gp
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
-:mod:`SparseGPLVM` Module
--------------------------
-
-.. automodule:: GPy.models.sparse_gplvm
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
-:mod:`sparse_gp_regression` Module
-----------------------------------
-
-.. automodule:: GPy.models.sparse_gp_regression
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
-.. :mod:`uncollapsed_sparse_GP` Module
-.. -----------------------------------
-
-.. .. automodule:: GPy.models.uncollapsed_sparse_GP
-..     :members:
-..     :undoc-members:
-..     :show-inheritance:
-
-:mod:`warped_gp` Module
------------------------
-
-.. automodule:: GPy.models.warped_gp
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
diff --git a/doc/GPy.models_modules.rst b/doc/GPy.models_modules.rst
new file mode 100644
index 00000000..4169ec3a
--- /dev/null
+++ b/doc/GPy.models_modules.rst
@@ -0,0 +1,131 @@
+models_modules Package
+======================
+
+:mod:`models_modules` Package
+-----------------------------
+
+.. automodule:: GPy.models_modules
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`bayesian_gplvm` Module
+----------------------------
+
+.. automodule:: GPy.models_modules.bayesian_gplvm
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`bcgplvm` Module
+---------------------
+
+.. automodule:: GPy.models_modules.bcgplvm
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`fitc_classification` Module
+---------------------------------
+
+.. automodule:: GPy.models_modules.fitc_classification
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`gp_classification` Module
+-------------------------------
+
+.. automodule:: GPy.models_modules.gp_classification
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`gp_multioutput_regression` Module
+---------------------------------------
+
+.. automodule:: GPy.models_modules.gp_multioutput_regression
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`gp_regression` Module
+---------------------------
+
+.. automodule:: GPy.models_modules.gp_regression
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`gplvm` Module
+-------------------
+
+.. automodule:: GPy.models_modules.gplvm
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`gradient_checker` Module
+------------------------------
+
+.. automodule:: GPy.models_modules.gradient_checker
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`mrd` Module
+-----------------
+
+.. automodule:: GPy.models_modules.mrd
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`sparse_gp_classification` Module
+--------------------------------------
+
+.. automodule:: GPy.models_modules.sparse_gp_classification
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`sparse_gp_multioutput_regression` Module
+----------------------------------------------
+
+.. automodule:: GPy.models_modules.sparse_gp_multioutput_regression
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`sparse_gp_regression` Module
+----------------------------------
+
+.. automodule:: GPy.models_modules.sparse_gp_regression
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`sparse_gplvm` Module
+--------------------------
+
+.. automodule:: GPy.models_modules.sparse_gplvm
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`svigp_regression` Module
+------------------------------
+
+.. automodule:: GPy.models_modules.svigp_regression
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`warped_gp` Module
+-----------------------
+
+.. automodule:: GPy.models_modules.warped_gp
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
diff --git a/doc/GPy.rst b/doc/GPy.rst
index e56e48e1..31ec3562 100644
--- a/doc/GPy.rst
+++ b/doc/GPy.rst
@@ -9,6 +9,14 @@ GPy Package
     :undoc-members:
     :show-inheritance:
 
+:mod:`models` Module
+--------------------
+
+.. automodule:: GPy.models
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
 Subpackages
 -----------
 
@@ -19,7 +27,8 @@ Subpackages
     GPy.inference
     GPy.kern
     GPy.likelihoods
-    GPy.models
+    GPy.mappings
+    GPy.models_modules
     GPy.testing
     GPy.util
 
diff --git a/doc/GPy.testing.rst b/doc/GPy.testing.rst
index 5b32558b..15b0cc79 100644
--- a/doc/GPy.testing.rst
+++ b/doc/GPy.testing.rst
@@ -1,6 +1,22 @@
 testing Package
 ===============
 
+:mod:`testing` Package
+----------------------
+
+.. automodule:: GPy.testing
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`bcgplvm_tests` Module
+---------------------------
+
+.. automodule:: GPy.testing.bcgplvm_tests
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
 :mod:`bgplvm_tests` Module
 --------------------------
 
@@ -9,6 +25,14 @@ testing Package
     :undoc-members:
     :show-inheritance:
 
+:mod:`cgd_tests` Module
+-----------------------
+
+.. automodule:: GPy.testing.cgd_tests
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
 :mod:`examples_tests` Module
 ----------------------------
 
@@ -17,6 +41,14 @@ testing Package
     :undoc-members:
     :show-inheritance:
 
+:mod:`gp_transformation_tests` Module
+-------------------------------------
+
+.. automodule:: GPy.testing.gp_transformation_tests
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
 :mod:`gplvm_tests` Module
 -------------------------
 
@@ -33,6 +65,30 @@ testing Package
     :undoc-members:
     :show-inheritance:
 
+:mod:`likelihoods_tests` Module
+-------------------------------
+
+.. automodule:: GPy.testing.likelihoods_tests
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`mapping_tests` Module
+---------------------------
+
+.. automodule:: GPy.testing.mapping_tests
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`mrd_tests` Module
+-----------------------
+
+.. automodule:: GPy.testing.mrd_tests
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
 :mod:`prior_tests` Module
 -------------------------
 
@@ -41,6 +97,22 @@ testing Package
     :undoc-members:
     :show-inheritance:
 
+:mod:`psi_stat_expectation_tests` Module
+----------------------------------------
+
+.. automodule:: GPy.testing.psi_stat_expectation_tests
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`psi_stat_gradient_tests` Module
+-------------------------------------
+
+.. automodule:: GPy.testing.psi_stat_gradient_tests
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
 :mod:`sparse_gplvm_tests` Module
 --------------------------------
 
diff --git a/doc/GPy.util.latent_space_visualizations.controllers.rst b/doc/GPy.util.latent_space_visualizations.controllers.rst
new file mode 100644
index 00000000..e78ade7b
--- /dev/null
+++ b/doc/GPy.util.latent_space_visualizations.controllers.rst
@@ -0,0 +1,27 @@
+controllers Package
+===================
+
+:mod:`controllers` Package
+--------------------------
+
+.. automodule:: GPy.util.latent_space_visualizations.controllers
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`axis_event_controller` Module
+-----------------------------------
+
+.. automodule:: GPy.util.latent_space_visualizations.controllers.axis_event_controller
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`imshow_controller` Module
+-------------------------------
+
+.. automodule:: GPy.util.latent_space_visualizations.controllers.imshow_controller
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
diff --git a/doc/GPy.util.latent_space_visualizations.rst b/doc/GPy.util.latent_space_visualizations.rst
new file mode 100644
index 00000000..4b440f61
--- /dev/null
+++ b/doc/GPy.util.latent_space_visualizations.rst
@@ -0,0 +1,19 @@
+latent_space_visualizations Package
+===================================
+
+:mod:`latent_space_visualizations` Package
+------------------------------------------
+
+.. automodule:: GPy.util.latent_space_visualizations
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+    GPy.util.latent_space_visualizations.controllers
+    GPy.util.latent_space_visualizations.views
+
diff --git a/doc/GPy.util.rst b/doc/GPy.util.rst
index 5bec990b..2e20c006 100644
--- a/doc/GPy.util.rst
+++ b/doc/GPy.util.rst
@@ -17,6 +17,30 @@ util Package
     :undoc-members:
     :show-inheritance:
 
+:mod:`block_matrices` Module
+----------------------------
+
+.. automodule:: GPy.util.block_matrices
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`classification` Module
+----------------------------
+
+.. automodule:: GPy.util.classification
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`config` Module
+--------------------
+
+.. automodule:: GPy.util.config
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
 :mod:`datasets` Module
 ----------------------
 
@@ -25,6 +49,22 @@ util Package
     :undoc-members:
     :show-inheritance:
 
+:mod:`decorators` Module
+------------------------
+
+.. automodule:: GPy.util.decorators
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`erfcx` Module
+-------------------
+
+.. automodule:: GPy.util.erfcx
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
 :mod:`linalg` Module
 --------------------
 
@@ -33,6 +73,14 @@ util Package
     :undoc-members:
     :show-inheritance:
 
+:mod:`ln_diff_erfs` Module
+--------------------------
+
+.. automodule:: GPy.util.ln_diff_erfs
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
 :mod:`misc` Module
 ------------------
 
@@ -41,6 +89,38 @@ util Package
     :undoc-members:
     :show-inheritance:
 
+:mod:`mocap` Module
+-------------------
+
+.. automodule:: GPy.util.mocap
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`multioutput` Module
+-------------------------
+
+.. automodule:: GPy.util.multioutput
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`netpbmfile` Module
+------------------------
+
+.. automodule:: GPy.util.netpbmfile
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`pca` Module
+-----------------
+
+.. automodule:: GPy.util.pca
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
 :mod:`plot` Module
 ------------------
 
@@ -49,6 +129,14 @@ util Package
     :undoc-members:
     :show-inheritance:
 
+:mod:`plot_latent` Module
+-------------------------
+
+.. automodule:: GPy.util.plot_latent
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
 :mod:`squashers` Module
 -----------------------
 
@@ -57,6 +145,30 @@ util Package
     :undoc-members:
     :show-inheritance:
 
+:mod:`symbolic` Module
+----------------------
+
+.. automodule:: GPy.util.symbolic
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`univariate_Gaussian` Module
+---------------------------------
+
+.. automodule:: GPy.util.univariate_Gaussian
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+:mod:`visualize` Module
+-----------------------
+
+.. automodule:: GPy.util.visualize
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
 :mod:`warping_functions` Module
 -------------------------------
 
@@ -65,3 +177,10 @@ util Package
     :undoc-members:
     :show-inheritance:
 
+Subpackages
+-----------
+
+.. toctree::
+
+    GPy.util.latent_space_visualizations
+
diff --git a/doc/Makefile b/doc/Makefile
index 95018f47..546113b3 100644
--- a/doc/Makefile
+++ b/doc/Makefile
@@ -2,7 +2,7 @@
 #
 # You can set these variables from the command line.
-SPHINXOPTS    =
+SPHINXOPTS    = -a -w log.txt -E
 SPHINXBUILD   = sphinx-build
 PAPER         =
 BUILDDIR      = _build
diff --git a/doc/conf.py b/doc/conf.py
index 8a05f386..7b71a897 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -83,6 +83,7 @@ print "finished importing"
 #############################################################################
 class Mock(object):
+    __all__ = []
     def __init__(self, *args, **kwargs):
         pass
@@ -103,8 +104,10 @@ class Mock(object):
 #import mock
 print "Mocking"
-MOCK_MODULES = ['pylab', 'sympy', 'sympy.utilities', 'sympy.utilities.codegen', 'sympy.core.cache', 'sympy.core', 'sympy.parsing', 'sympy.parsing.sympy_parser', 'matplotlib']
-#'matplotlib', 'matplotlib.color', 'matplotlib.pyplot', 'pylab' ]
+MOCK_MODULES = ['sympy',
+                'sympy.utilities', 'sympy.utilities.codegen', 'sympy.core.cache',
+                'sympy.core', 'sympy.parsing', 'sympy.parsing.sympy_parser', 'Tango', 'numdifftools'
+                ]
 for mod_name in MOCK_MODULES:
     sys.modules[mod_name] = Mock()
@@ -288,7 +291,7 @@ latex_elements = {
 #'pointsize': '10pt',
 
 # Additional stuff for the LaTeX preamble.
-#'preamble': '',
+'preamble': '\\usepackage{MnSymbol}',
 }
 
 # Grouping the document tree into LaTeX files. List of tuples
diff --git a/doc/index.rst b/doc/index.rst
index a7b68c16..f6207963 100644
--- a/doc/index.rst
+++ b/doc/index.rst
@@ -11,9 +11,13 @@
 For a quick start, you can have a look at one of the tutorials:
 
 * `Interacting with models `_
 * `A kernel overview `_
 * `Writing new kernels `_
+* `Writing new models `_
 
 You may also be interested by some examples in the GPy/examples folder.
 
+The detailed developer documentation is listed below
+====================================================
+
 Contents:
 
 .. toctree::
diff --git a/doc/tuto_creating_new_models.rst b/doc/tuto_creating_new_models.rst
new file mode 100644
index 00000000..021b4950
--- /dev/null
+++ b/doc/tuto_creating_new_models.rst
@@ -0,0 +1,64 @@
+.. _creating_new_models:
+
+*******************
+Creating new Models
+*******************
+
+In GPy all models inherit from the base class :py:class:`~GPy.core.parameterized.Parameterized`. :py:class:`~GPy.core.parameterized.Parameterized` is a class which allows for parameterization of objects. It holds the functionality for tying, bounding and fixing of parameters, and it also provides the functionality for searching and manipulating parameters by regular expression syntax. See :py:class:`~GPy.core.parameterized.Parameterized` for more information.
+
+The :py:class:`~GPy.core.model.Model` class provides parameter introspection, an objective function and optimization.
+
+To use the full functionality of :py:class:`~GPy.core.model.Model`, some methods need to be implemented or overridden. To explain the functionality of those methods we will use a wrapper around the scipy ``rosen`` function (``scipy.optimize.rosen``), which holds the input parameters :math:`\mathbf{X}`, where :math:`\mathbf{X}\in\mathbb{R}^{N}`.
+
+Obligatory methods
+==================
+
+:py:meth:`~GPy.core.model.Model.__init__` :
+    Initialize the model with the given parameters. In our example we have to store shape information of :math:`\mathbf X` and the parameters themselves::
+
+        self.X = X
+        self.num_inputs = self.X.shape[0]
+        assert self.X.ndim == 1, "only vector inputs allowed"
+
+:py:meth:`~GPy.core.model.Model._get_params` :
+    Return the parameters of the model as a flattened numpy array-like. In our example we have to return the input parameters::
+
+        return self.X.flatten()
+
+:py:meth:`~GPy.core.model.Model._set_params` :
+    Set the parameters which have been fetched through :py:meth:`~GPy.core.model.Model._get_params`; in other words, "invert" the functionality of :py:meth:`~GPy.core.model.Model._get_params`. As ``X`` is a vector of length ``num_inputs``::
+
+        self.X = params[:self.num_inputs]
+
+:py:meth:`~GPy.core.model.Model.log_likelihood` :
+    Return the log-likelihood of the model. Optimization maximizes the log-likelihood, so for our example we return the negative of ``rosen``::
+
+        return -scipy.optimize.rosen(self.X)
+
+:py:meth:`~GPy.core.model.Model._log_likelihood_gradients` :
+    Return the gradients of the log-likelihood with respect to all parameters::
+
+        return -scipy.optimize.rosen_der(self.X)
+
+
+Optional methods
+================
+
+If you want some special functionality, provide the following methods:
+
+Using the pickle functionality
+------------------------------
+
+To be able to use the pickle functionality ``m.pickle()``, the methods ``getstate(self)`` and ``setstate(self, state)`` have to be provided. The convention for a ``state`` in ``GPy`` is a list of all parameters which are needed to restore the model. All classes provided in ``GPy`` follow this convention, thus you can just append to the state of the inherited class and call the inherited class' ``setstate`` with the appropriate state.
+
+:py:meth:`~GPy.core.model.Model.getstate` :
+    This method returns the state of the model, following the memento pattern. As we are inheriting from :py:class:`~GPy.core.model.Model`, we have to return the state of Model as well. In our example we have ``X`` and ``num_inputs`` as state::
+
+        return Model.getstate(self) + [self.X, self.num_inputs]
+
+:py:meth:`~GPy.core.model.Model.setstate` :
+    This method restores the model from the given ``state``, popping entries in the reverse order in which they were appended::
+
+        self.num_inputs = state.pop()
+        self.X = state.pop()
+        return Model.setstate(self, state)
\ No newline at end of file
diff --git a/doc/tuto_interacting_with_models.rst b/doc/tuto_interacting_with_models.rst
index 3cea7fb7..5bd0511e 100644
--- a/doc/tuto_interacting_with_models.rst
+++ b/doc/tuto_interacting_with_models.rst
@@ -1,3 +1,5 @@
+.. _interacting_with_models:
+
 *************************************
 Interacting with models
 *************************************
@@ -18,7 +20,7 @@ All of the examples included in GPy return an instance of a model class,
 and therefore they can be called in the following way: ::
 
-	import numpy as np 
+	import numpy as np
 	import pylab as pb
 	pb.ion()
 	import GPy
@@ -105,7 +107,7 @@ inputs: ::
 
 	m['iip'] = np.arange(-5,0)
 
 Getting the model's likelihood and gradients
-===========================================
+=============================================
 
 Appart form the printing the model, the marginal log-likelihood can be obtained
 by using the function ``log_likelihood()``. Also, the log-likelihood gradients
@@ -210,6 +212,6 @@ white_variance and noise_variance are tied together.::
 Further Reading
 ===============
 
-All of the mechansiams for dealing with parameters are baked right into GPy.core.model, from which all of the classes in GPy.models inherrit. To learn how to construct your own model, you might want to read ??link?? creating_new_models.
+All of the mechanisms for dealing with parameters are baked right into GPy.core.model, from which all of the classes in GPy.models inherit. To learn how to construct your own model, you might want to read :ref:`creating_new_models`.
 
-By deafult, GPy uses the tnc optimizer (from scipy.optimize.tnc). To use other optimisers, and to control the setting of those optimisers, as well as other funky features like automated restarts and diagnostics, you can read the optimization tutorial ??link??.
+By default, GPy uses the scg optimizer. To use other optimisers, and to control the settings of those optimisers, as well as other funky features like automated restarts and diagnostics, you can read the optimization tutorial ??link??.
diff --git a/setup.py b/setup.py
index 90645e71..3b493022 100644
--- a/setup.py
+++ b/setup.py
@@ -18,9 +18,9 @@ setup(name = 'GPy',
       license = "BSD 3-clause",
       keywords = "machine-learning gaussian-processes kernels",
       url = "http://sheffieldml.github.com/GPy/",
-      packages = ['GPy', 'GPy.core', 'GPy.kern', 'GPy.util', 'GPy.models', 'GPy.inference', 'GPy.examples', 'GPy.likelihoods', 'GPy.testing'],
+      packages = ['GPy', 'GPy.core', 'GPy.kern', 'GPy.util', 'GPy.models_modules', 'GPy.inference', 'GPy.examples', 'GPy.likelihoods', 'GPy.testing', 'GPy.util.latent_space_visualizations', 'GPy.util.latent_space_visualizations.controllers', 'GPy.likelihoods.noise_models', 'GPy.kern.parts', 'GPy.mappings'],
       package_dir={'GPy': 'GPy'},
-      package_data = {'GPy': ['GPy/examples']},
+      package_data = {'GPy': ['GPy/examples', 'gpy_config.cfg']},
       py_modules = ['GPy.__init__'],
       long_description=read('README.md'),
       install_requires=['numpy>=1.6', 'scipy>=0.9','matplotlib>=1.1', 'nose'],
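Whichever install route from the README above is taken, a quick way to verify the result is to fit a small regression model. The snippet below is only a sketch against this era's API (lowercase kernel constructors, Python 2 ``print``); the data are synthetic, and ``ensure_default_constraints`` may be redundant for models that already constrain their parameters::

    import numpy as np
    import GPy

    # a toy 1-D regression problem; arrays are num_data x dim, per the coding style guide
    X = np.linspace(0., 10., 50)[:, None]
    Y = np.sin(X) + 0.1 * np.random.randn(50, 1)

    kernel = GPy.kern.rbf(1)                   # RBF kernel over a 1-D input
    m = GPy.models.GPRegression(X, Y, kernel)
    m.ensure_default_constraints()             # keep variances and lengthscales positive
    m.optimize()                               # maximize the marginal log-likelihood
    print m                                    # show the fitted parameters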
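For reference, the pieces from the "Creating new Models" tutorial above assemble into one small class. This is a sketch only: it assumes this era's ``GPy.core.Model`` API, the name ``RosenbrockModel`` is illustrative, and ``_get_param_names`` (needed so the model can be printed) is an assumption not covered by the tutorial::

    import numpy as np
    import scipy.optimize
    from GPy.core import Model

    class RosenbrockModel(Model):
        """Toy model whose 'parameters' are the inputs X of the Rosenbrock function."""
        def __init__(self, X):
            self.X = np.asarray(X).flatten()
            self.num_inputs = self.X.shape[0]
            assert self.X.ndim == 1, "only vector inputs allowed"
            Model.__init__(self)

        def _get_param_names(self):
            # one name per entry of X, so the model can be printed
            return ['X_%i' % i for i in range(self.num_inputs)]

        def _get_params(self):
            return self.X.flatten()

        def _set_params(self, params):
            self.X = params[:self.num_inputs]

        def log_likelihood(self):
            # optimization maximizes the log-likelihood, hence the negation
            return -scipy.optimize.rosen(self.X)

        def _log_likelihood_gradients(self):
            return -scipy.optimize.rosen_der(self.X)

        def getstate(self):
            return Model.getstate(self) + [self.X, self.num_inputs]

        def setstate(self, state):
            self.num_inputs = state.pop()
            self.X = state.pop()
            Model.setstate(self, state)

    m = RosenbrockModel(np.zeros(4))
    m.checkgrad()  # analytic gradients should match numerical differences
    m.optimize()   # drives X towards the optimum at (1, ..., 1)

``checkgrad`` compares ``_log_likelihood_gradients`` against finite differences and is the quickest way to catch a sign or shape error in a new model.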