Minor edits to reading Lee data in datasets.py

This commit is contained in:
Neil Lawrence 2014-06-04 11:21:25 +01:00
parent dd6f7c1f4c
commit 730e229238
63 changed files with 6761 additions and 5 deletions

12
GPy/Youter.txt Normal file
View file

@ -0,0 +1,12 @@
models/generalized_FITC.py: self.Youter = np.dot(self.mu_tilde,self.mu_tilde.T)
models/generalized_FITC.py: B = -.5*np.sum(self.Qi*self.Youter)
models/GP_regression.py: # then it's more efficient to store Youter
models/GP_regression.py: self.Youter = np.dot(self.Y, self.Y.T)
models/GP_regression.py: self.Youter = None
models/GP_regression.py: Computes the model fit using Youter if it's available
models/GP_regression.py: if self.Youter is None:
models/GP_regression.py: return -0.5*np.sum(np.multiply(self.Ki, self.Youter))
models/GP_regression.py: if self.Youter is None:
models/GP_regression.py: dL_dK = 0.5*(mdot(self.Ki, self.Youter, self.Ki) - self.D*self.Ki)
models/warped_GP.py: self.Youter = np.dot(self.Y, self.Y.T)
models/warped_GP.py: self.Youter = None

View file

@ -0,0 +1,137 @@
# Copyright (c) 2012, GPy authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt)
__updated__ = '2014-03-31'
import numpy as np
from parameter_core import Observable, Pickleable
class ObsAr(np.ndarray, Pickleable, Observable):
    """
    An ndarray which reports changes to its observers.

    The observers can add themselves with a callable, which
    will be called every time this array changes. The callable
    takes exactly one argument, which is this array itself.
    """
    __array_priority__ = -1 # Never give back ObsAr: ufunc results decay to plain ndarray
    def __new__(cls, input_array, *a, **kw):
        # Coerce the input to a writable, C-contiguous float64 array viewed as
        # ObsAr; an existing ObsAr is passed through untouched (no copy).
        if not isinstance(input_array, ObsAr):
            obj = np.atleast_1d(np.require(input_array, dtype=np.float64, requirements=['W', 'C'])).view(cls)
        else: obj = input_array
        #cls.__name__ = "ObsAr" # because of fixed printing of `array` in np printing
        super(ObsAr, obj).__init__(*a, **kw)
        return obj
    def __array_finalize__(self, obj):
        # see InfoArray.__array_finalize__ for comments
        # Propagate the observer list onto views/slices created from `obj`.
        if obj is None: return
        self._observer_callables_ = getattr(obj, '_observer_callables_', None)
    def __array_wrap__(self, out_arr, context=None):
        # Results of numpy operations are returned as plain ndarrays, not ObsAr.
        return out_arr.view(np.ndarray)
    def copy(self):
        # Memo trick: register self so the deepcopy below does not recurse
        # into this array while copying the instance __dict__.
        memo = {}
        memo[id(self)] = self
        return self.__deepcopy__(memo)
    def __deepcopy__(self, memo):
        # Copy the raw data, then deep-copy the attached attributes
        # (observer bookkeeping etc.) onto the new instance.
        s = self.__new__(self.__class__, input_array=self.view(np.ndarray).copy())
        memo[id(self)] = s
        import copy
        s.__dict__.update(copy.deepcopy(self.__dict__, memo))
        return s
    def __reduce__(self):
        # Pickle as an ndarray, appending the Pickleable mixin state.
        func, args, state = super(ObsAr, self).__reduce__()
        return func, args, (state, Pickleable.__getstate__(self))
    def __setstate__(self, state):
        # Restore the ndarray part first, then the mixin part.
        np.ndarray.__setstate__(self, state[0])
        Pickleable.__setstate__(self, state[1])
    def __setitem__(self, s, val):
        # Any item assignment counts as a change and notifies observers.
        super(ObsAr, self).__setitem__(s, val)
        self.notify_observers()
    def __getslice__(self, start, stop):
        # Python 2 slice protocol: route through __getitem__/__setitem__ so
        # slice writes also trigger notification.
        return self.__getitem__(slice(start, stop))
    def __setslice__(self, start, stop, val):
        return self.__setitem__(slice(start, stop), val)
    # Every in-place operator below mutates the array, so each delegates to
    # the ndarray implementation and then notifies the observers.
    def __ilshift__(self, *args, **kwargs):
        r = np.ndarray.__ilshift__(self, *args, **kwargs)
        self.notify_observers()
        return r
    def __irshift__(self, *args, **kwargs):
        r = np.ndarray.__irshift__(self, *args, **kwargs)
        self.notify_observers()
        return r
    def __ixor__(self, *args, **kwargs):
        r = np.ndarray.__ixor__(self, *args, **kwargs)
        self.notify_observers()
        return r
    def __ipow__(self, *args, **kwargs):
        r = np.ndarray.__ipow__(self, *args, **kwargs)
        self.notify_observers()
        return r
    def __ifloordiv__(self, *args, **kwargs):
        r = np.ndarray.__ifloordiv__(self, *args, **kwargs)
        self.notify_observers()
        return r
    def __isub__(self, *args, **kwargs):
        r = np.ndarray.__isub__(self, *args, **kwargs)
        self.notify_observers()
        return r
    def __ior__(self, *args, **kwargs):
        r = np.ndarray.__ior__(self, *args, **kwargs)
        self.notify_observers()
        return r
    def __itruediv__(self, *args, **kwargs):
        r = np.ndarray.__itruediv__(self, *args, **kwargs)
        self.notify_observers()
        return r
    def __idiv__(self, *args, **kwargs):
        # Python 2 classic division counterpart of __itruediv__.
        r = np.ndarray.__idiv__(self, *args, **kwargs)
        self.notify_observers()
        return r
    def __iand__(self, *args, **kwargs):
        r = np.ndarray.__iand__(self, *args, **kwargs)
        self.notify_observers()
        return r
    def __imod__(self, *args, **kwargs):
        r = np.ndarray.__imod__(self, *args, **kwargs)
        self.notify_observers()
        return r
    def __iadd__(self, *args, **kwargs):
        r = np.ndarray.__iadd__(self, *args, **kwargs)
        self.notify_observers()
        return r
    def __imul__(self, *args, **kwargs):
        r = np.ndarray.__imul__(self, *args, **kwargs)
        self.notify_observers()
        return r

26
GPy/core/tmp/domains.py Normal file
View file

@ -0,0 +1,26 @@
'''
Created on 4 Jun 2013
@author: maxz
(Hyper-)Parameter domains defined for :py:mod:`~GPy.core.priors` and :py:mod:`~GPy.kern`.
These domains specify the legitimate realm of the parameters to live in.
:const:`~GPy.core.domains.REAL` :
real domain, all values in the real numbers are allowed
:const:`~GPy.core.domains.POSITIVE`:
positive domain, only positive real values are allowed
:const:`~GPy.core.domains.NEGATIVE`:
same as :const:`~GPy.core.domains.POSITIVE`, but only negative values are allowed
:const:`~GPy.core.domains.BOUNDED`:
only values within the bounded range are allowed,
the bounds are specified within the object with the bounded range
'''
# Identifiers for the admissible parameter domains used by priors and kernels.
REAL = 'real'          # any real value is allowed
POSITIVE = 'positive'  # strictly positive reals only
NEGATIVE = 'negative'  # strictly negative reals only
BOUNDED = 'bounded'    # values restricted to a range held by the constraining object

248
GPy/core/tmp/fitc.py Normal file
View file

@ -0,0 +1,248 @@
# Copyright (c) 2012, GPy authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt)
import numpy as np
import pylab as pb
from ..util.linalg import mdot, jitchol, chol_inv, tdot, symmetrify, pdinv, dtrtrs
from ..util.plot import gpplot
from .. import kern
from scipy import stats
from sparse_gp import SparseGP
class FITC(SparseGP):
    """
    Sparse FITC approximation

    :param X: inputs
    :type X: np.ndarray (num_data x Q)
    :param likelihood: a likelihood instance, containing the observed data
    :type likelihood: GPy.likelihood.(Gaussian | EP)
    :param kernel: the kernel (covariance function). See link kernels
    :type kernel: a GPy.kern.kern instance
    :param Z: inducing inputs (optional, see note)
    :type Z: np.ndarray (M x Q) | None
    :param normalize_(X|Y): whether to normalize the data before computing (predictions will be in original scales)
    :type normalize_(X|Y): bool
    """
    def __init__(self, X, likelihood, kernel, Z, normalize_X=False):
        # NOTE(review): the normalize_X argument is accepted but ignored;
        # False is always forwarded to SparseGP — confirm this is intended.
        SparseGP.__init__(self, X, likelihood, kernel, Z, X_variance=None, normalize_X=False)
        assert self.output_dim == 1, "FITC model is not defined for handling multiple outputs"
    def update_likelihood_approximation(self, **kwargs):
        """
        Approximates a non-Gaussian likelihood using Expectation Propagation

        For a Gaussian likelihood, no iteration is required:
        this function does nothing
        """
        self.likelihood.restart()
        self.likelihood.fit_FITC(self.Kmm,self.psi1,self.psi0, **kwargs)
        self._set_params(self._get_params())
    def _compute_kernel_matrices(self):
        # kernel computations, using BGPLVM notation
        self.Kmm = self.kern.K(self.Z)           # inducing-point covariance (M x M)
        self.psi0 = self.kern.Kdiag(self.X)      # diagonal of K(X, X)
        self.psi1 = self.kern.K(self.Z, self.X)  # cross covariance (M x N)
        self.psi2 = None                         # not required by FITC
    def _computations(self):
        # Precompute factorizations and the gradient terms used by
        # log_likelihood / dL_dtheta / dL_dZ below.
        #factor Kmm
        self.Lm = jitchol(self.Kmm)
        self.Lmi,info = dtrtrs(self.Lm,np.eye(self.num_inducing),lower=1)
        Lmipsi1 = np.dot(self.Lmi,self.psi1)
        self.Qnn = np.dot(Lmipsi1.T,Lmipsi1).copy()  # low-rank approximation of Knn
        self.Diag0 = self.psi0 - np.diag(self.Qnn)   # FITC diagonal correction
        self.beta_star = self.likelihood.precision/(1. + self.likelihood.precision*self.Diag0[:,None]) #NOTE: beta_star contains Diag0 and the precision
        self.V_star = self.beta_star * self.likelihood.Y
        # The rather complex computations of self.A
        tmp = self.psi1 * (np.sqrt(self.beta_star.flatten().reshape(1, self.num_data)))
        tmp, _ = dtrtrs(self.Lm, np.asfortranarray(tmp), lower=1)
        self.A = tdot(tmp)
        # factor B
        self.B = np.eye(self.num_inducing) + self.A
        self.LB = jitchol(self.B)
        self.LBi = chol_inv(self.LB)
        self.psi1V = np.dot(self.psi1, self.V_star)
        Lmi_psi1V, info = dtrtrs(self.Lm, np.asfortranarray(self.psi1V), lower=1, trans=0)
        self._LBi_Lmi_psi1V, _ = dtrtrs(self.LB, np.asfortranarray(Lmi_psi1V), lower=1, trans=0)
        # Intermediate products shared by the psi0/psi1/Kmm gradient terms.
        Kmmipsi1 = np.dot(self.Lmi.T,Lmipsi1)
        b_psi1_Ki = self.beta_star * Kmmipsi1.T
        Ki_pbp_Ki = np.dot(Kmmipsi1,b_psi1_Ki)
        Kmmi = np.dot(self.Lmi.T,self.Lmi)
        LBiLmi = np.dot(self.LBi,self.Lmi)
        LBL_inv = np.dot(LBiLmi.T,LBiLmi)
        VVT = np.outer(self.V_star,self.V_star)
        VV_p_Ki = np.dot(VVT,Kmmipsi1.T)
        Ki_pVVp_Ki = np.dot(Kmmipsi1,VV_p_Ki)
        psi1beta = self.psi1*self.beta_star.T
        H = self.Kmm + mdot(self.psi1,psi1beta.T)
        LH = jitchol(H)
        LHi = chol_inv(LH)
        Hi = np.dot(LHi.T,LHi)
        betapsi1TLmiLBi = np.dot(psi1beta.T,LBiLmi.T)
        alpha = np.array([np.dot(a.T,a) for a in betapsi1TLmiLBi])[:,None]
        gamma_1 = mdot(VVT,self.psi1.T,Hi)
        pHip = mdot(self.psi1.T,Hi,self.psi1)
        gamma_2 = mdot(self.beta_star*pHip,self.V_star)
        gamma_3 = self.V_star * gamma_2
        # Gradients of the bound (A + C + D terms of log_likelihood) w.r.t.
        # psi0, psi1 and Kmm; each contribution is labelled by its term.
        self._dL_dpsi0 = -0.5 * self.beta_star#dA_dpsi0: logdet(self.beta_star)
        self._dL_dpsi0 += .5 * self.V_star**2 #dA_psi0: yT*beta_star*y
        self._dL_dpsi0 += .5 *alpha #dC_dpsi0
        self._dL_dpsi0 += 0.5*mdot(self.beta_star*pHip,self.V_star)**2 - self.V_star * mdot(self.V_star.T,pHip*self.beta_star).T #dD_dpsi0
        self._dL_dpsi1 = b_psi1_Ki.copy() #dA_dpsi1: logdet(self.beta_star)
        self._dL_dpsi1 += -np.dot(psi1beta.T,LBL_inv) #dC_dpsi1
        self._dL_dpsi1 += gamma_1 - mdot(psi1beta.T,Hi,self.psi1,gamma_1) #dD_dpsi1
        self._dL_dKmm = -0.5 * np.dot(Kmmipsi1,b_psi1_Ki) #dA_dKmm: logdet(self.beta_star)
        self._dL_dKmm += .5*(LBL_inv - Kmmi) + mdot(LBL_inv,psi1beta,Kmmipsi1.T) #dC_dKmm
        self._dL_dKmm += -.5 * mdot(Hi,self.psi1,gamma_1) #dD_dKmm
        # Per-datum contributions accumulated over the training set: these
        # carry the diagonal-correction terms of the FITC bound.
        self._dpsi1_dtheta = 0
        self._dpsi1_dX = 0
        self._dKmm_dtheta = 0
        self._dKmm_dX = 0
        self._dpsi1_dX_jkj = 0
        self._dpsi1_dtheta_jkj = 0
        for i,V_n,alpha_n,gamma_n,gamma_k in zip(range(self.num_data),self.V_star,alpha,gamma_2,gamma_3):
            K_pp_K = np.dot(Kmmipsi1[:,i:(i+1)],Kmmipsi1[:,i:(i+1)].T)
            _dpsi1 = (-V_n**2 - alpha_n + 2.*gamma_k - gamma_n**2) * Kmmipsi1.T[i:(i+1),:]
            _dKmm = .5*(V_n**2 + alpha_n + gamma_n**2 - 2.*gamma_k) * K_pp_K #Diag_dD_dKmm
            self._dpsi1_dtheta += self.kern.dK_dtheta(_dpsi1,self.X[i:i+1,:],self.Z)
            self._dKmm_dtheta += self.kern.dK_dtheta(_dKmm,self.Z)
            self._dKmm_dX += self.kern.dK_dX(_dKmm ,self.Z)
            self._dpsi1_dX += self.kern.dK_dX(_dpsi1.T,self.Z,self.X[i:i+1,:])
        # the partial derivative vector for the likelihood
        if self.likelihood.num_params == 0:
            # save computation here.
            self.partial_for_likelihood = None
        elif self.likelihood.is_heteroscedastic:
            raise NotImplementedError, "heteroscedatic derivates not implemented."
        else:
            # likelihood is not heterscedatic
            dbstar_dnoise = self.likelihood.precision * (self.beta_star**2 * self.Diag0[:,None] - self.beta_star)
            Lmi_psi1 = mdot(self.Lmi,self.psi1)
            LBiLmipsi1 = np.dot(self.LBi,Lmi_psi1)
            aux_0 = np.dot(self._LBi_Lmi_psi1V.T,LBiLmipsi1)
            aux_1 = self.likelihood.Y.T * np.dot(self._LBi_Lmi_psi1V.T,LBiLmipsi1)
            aux_2 = np.dot(LBiLmipsi1.T,self._LBi_Lmi_psi1V)
            dA_dnoise = 0.5 * self.input_dim * (dbstar_dnoise/self.beta_star).sum() - 0.5 * self.input_dim * np.sum(self.likelihood.Y**2 * dbstar_dnoise)
            dC_dnoise = -0.5 * np.sum(mdot(self.LBi.T,self.LBi,Lmi_psi1) * Lmi_psi1 * dbstar_dnoise.T)
            dD_dnoise_1 = mdot(self.V_star*LBiLmipsi1.T,LBiLmipsi1*dbstar_dnoise.T*self.likelihood.Y.T)
            alpha = mdot(LBiLmipsi1,self.V_star)
            alpha_ = mdot(LBiLmipsi1.T,alpha)
            dD_dnoise_2 = -0.5 * self.input_dim * np.sum(alpha_**2 * dbstar_dnoise )
            # NOTE(review): the two assignments below overwrite the
            # dD_dnoise_1/dD_dnoise_2 values just computed above — the first
            # versions (and aux_0/aux_1/aux_2) are dead code. Confirm which
            # formulation is the intended one.
            dD_dnoise_1 = mdot(self.V_star.T,self.psi1.T,self.Lmi.T,self.LBi.T,self.LBi,self.Lmi,self.psi1,dbstar_dnoise*self.likelihood.Y)
            dD_dnoise_2 = 0.5*mdot(self.V_star.T,self.psi1.T,Hi,self.psi1,dbstar_dnoise*self.psi1.T,Hi,self.psi1,self.V_star)
            dD_dnoise = dD_dnoise_1 + dD_dnoise_2
            self.partial_for_likelihood = dA_dnoise + dC_dnoise + dD_dnoise
    def log_likelihood(self):
        """ Compute the (lower bound on the) log marginal likelihood """
        # A: Gaussian normalization + data fit, C: log-determinant term,
        # D: quadratic term; self.likelihood.Z is presumably the EP
        # normalization constant — TODO confirm against the likelihood class.
        A = -0.5 * self.num_data * self.output_dim * np.log(2.*np.pi) + 0.5 * np.sum(np.log(self.beta_star)) - 0.5 * np.sum(self.V_star * self.likelihood.Y)
        C = -self.output_dim * (np.sum(np.log(np.diag(self.LB))))
        D = 0.5 * np.sum(np.square(self._LBi_Lmi_psi1V))
        return A + C + D + self.likelihood.Z
    def _log_likelihood_gradients(self):
        # NOTE(review): the stray `pass` below is dead code preceding the
        # return — likely a leftover; the return statement still executes.
        pass
        return np.hstack((self.dL_dZ().flatten(), self.dL_dtheta(), self.likelihood._gradients(partial=self.partial_for_likelihood)))
    def dL_dtheta(self):
        # Gradient of the bound w.r.t. the kernel hyperparameters:
        # chain rule through psi0, psi1 and Kmm plus the per-datum terms.
        dL_dtheta = self.kern.dKdiag_dtheta(self._dL_dpsi0,self.X)
        dL_dtheta += self.kern.dK_dtheta(self._dL_dpsi1,self.X,self.Z)
        dL_dtheta += self.kern.dK_dtheta(self._dL_dKmm,X=self.Z)
        dL_dtheta += self._dKmm_dtheta
        dL_dtheta += self._dpsi1_dtheta
        return dL_dtheta
    def dL_dZ(self):
        # Gradient of the bound w.r.t. the inducing inputs Z.
        dL_dZ = self.kern.dK_dX(self._dL_dpsi1.T,self.Z,self.X)
        dL_dZ += self.kern.dK_dX(self._dL_dKmm,X=self.Z)
        dL_dZ += self._dpsi1_dX
        dL_dZ += self._dKmm_dX
        return dL_dZ
    def _raw_predict(self, Xnew, X_variance_new=None, which_parts='all', full_cov=False):
        # Predictive mean/variance at Xnew; uncertain inputs are unsupported.
        assert X_variance_new is None, "FITC model is not defined for handling uncertain inputs."
        # NOTE(review): the branch condition and the error message below look
        # swapped — the implemented branch runs when is_heteroscedastic is
        # True, while the else raises "Heteroscedastic case not implemented."
        if self.likelihood.is_heteroscedastic:
            Iplus_Dprod_i = 1./(1.+ self.Diag0 * self.likelihood.precision.flatten())
            self.Diag = self.Diag0 * Iplus_Dprod_i
            self.P = Iplus_Dprod_i[:,None] * self.psi1.T
            self.RPT0 = np.dot(self.Lmi,self.psi1)
            self.L = np.linalg.cholesky(np.eye(self.num_inducing) + np.dot(self.RPT0,((1. - Iplus_Dprod_i)/self.Diag0)[:,None]*self.RPT0.T))
            self.R,info = dtrtrs(self.L,self.Lmi,lower=1)
            self.RPT = np.dot(self.R,self.P.T)
            self.Sigma = np.diag(self.Diag) + np.dot(self.RPT.T,self.RPT)
            self.w = self.Diag * self.likelihood.v_tilde
            self.Gamma = np.dot(self.R.T, np.dot(self.RPT,self.likelihood.v_tilde))
            self.mu = self.w + np.dot(self.P,self.Gamma)
            """
            Make a prediction for the generalized FITC model

            Arguments
            ---------
            X : Input prediction data - Nx1 numpy array (floats)
            """
            # q(u|f) = N(u| R0i*mu_u*f, R0i*C*R0i.T)
            # Ci = I + (RPT0)Di(RPT0).T
            # C = I - [RPT0] * (input_dim+[RPT0].T*[RPT0])^-1*[RPT0].T
            # = I - [RPT0] * (input_dim + self.Qnn)^-1 * [RPT0].T
            # = I - [RPT0] * (U*U.T)^-1 * [RPT0].T
            # = I - V.T * V
            U = np.linalg.cholesky(np.diag(self.Diag0) + self.Qnn)
            V,info = dtrtrs(U,self.RPT0.T,lower=1)
            C = np.eye(self.num_inducing) - np.dot(V.T,V)
            mu_u = np.dot(C,self.RPT0)*(1./self.Diag0[None,:])
            #self.C = C
            #self.RPT0 = np.dot(self.R0,self.Knm.T) P0.T
            #self.mu_u = mu_u
            #self.U = U
            # q(u|y) = N(u| R0i*mu_H,R0i*Sigma_H*R0i.T)
            mu_H = np.dot(mu_u,self.mu)
            self.mu_H = mu_H
            Sigma_H = C + np.dot(mu_u,np.dot(self.Sigma,mu_u.T))
            # q(f_star|y) = N(f_star|mu_star,sigma2_star)
            Kx = self.kern.K(self.Z, Xnew, which_parts=which_parts)
            KR0T = np.dot(Kx.T,self.Lmi.T)
            mu_star = np.dot(KR0T,mu_H)
            if full_cov:
                Kxx = self.kern.K(Xnew,which_parts=which_parts)
                var = Kxx + np.dot(KR0T,np.dot(Sigma_H - np.eye(self.num_inducing),KR0T.T))
            else:
                Kxx = self.kern.Kdiag(Xnew,which_parts=which_parts)
                var = (Kxx + np.sum(KR0T.T*np.dot(Sigma_H - np.eye(self.num_inducing),KR0T.T),0))[:,None]
            return mu_star[:,None],var
        else:
            raise NotImplementedError, "Heteroscedastic case not implemented."
        # The string literal below is unreachable dead code kept from an
        # earlier implementation.
        """
        Kx = self.kern.K(self.Z, Xnew)
        mu = mdot(Kx.T, self.C/self.scale_factor, self.psi1V)
        if full_cov:
            Kxx = self.kern.K(Xnew)
            var = Kxx - mdot(Kx.T, (self.Kmmi - self.C/self.scale_factor**2), Kx) #NOTE this won't work for plotting
        else:
            Kxx = self.kern.Kdiag(Xnew)
            var = Kxx - np.sum(Kx*np.dot(self.Kmmi - self.C/self.scale_factor**2, Kx),0)
        return mu,var[:,None]
        """

275
GPy/core/tmp/gp_base.py Normal file
View file

@ -0,0 +1,275 @@
import numpy as np
from .. import kern
from ..util.plot import gpplot, Tango, x_frame1D, x_frame2D
import pylab as pb
from GPy.core.model import Model
import warnings
from ..likelihoods import Gaussian, Gaussian_Mixed_Noise
class GPBase(Model):
    """
    Gaussian process base model for holding shared behaviour between
    sparse_GP and GP models, and potentially other models in the future.

    Here we define some functions that are use
    """
    def __init__(self, X, likelihood, kernel, normalize_X=False):
        # Accept a 1-D X by promoting it to a column vector.
        if len(X.shape)==1:
            X = X.reshape(-1,1)
            warnings.warn("One dimension output (N,) being reshaped to (N,1)")
        self.X = X
        assert len(self.X.shape) == 2, "too many dimensions for X input"
        self.num_data, self.input_dim = self.X.shape
        assert isinstance(kernel, kern.kern)
        self.kern = kernel
        self.likelihood = likelihood
        # X and the likelihood's observed data must agree on the number of rows.
        assert self.X.shape[0] == self.likelihood.data.shape[0]
        self.num_data, self.output_dim = self.likelihood.data.shape
        if normalize_X:
            # Store offset/scale so predictions can be mapped back to the
            # original input scale.
            self._Xoffset = X.mean(0)[None, :]
            self._Xscale = X.std(0)[None, :]
            self.X = (X.copy() - self._Xoffset) / self._Xscale
        else:
            # Identity normalization: offset 0, scale 1.
            self._Xoffset = np.zeros((1, self.input_dim))
            self._Xscale = np.ones((1, self.input_dim))
        super(GPBase, self).__init__()
        # Model.__init__(self)
        # All leaf nodes should call self._set_params(self._get_params()) at
        # the end
    def posterior_samples_f(self,X,size=10,which_parts='all'):
        """
        Samples the posterior GP at the points X.

        :param X: The points at which to take the samples.
        :type X: np.ndarray, Nnew x self.input_dim.
        :param size: the number of a posteriori samples to plot.
        :type size: int.
        :param which_parts: which of the kernel functions to plot (additively).
        :type which_parts: 'all', or list of bools.
        :returns: Ysim: set of simulations, a Numpy array (N x samples).
        """
        m, v = self._raw_predict(X, which_parts=which_parts, full_cov=True)
        # Flatten a possible third axis of the covariance before sampling.
        v = v.reshape(m.size,-1) if len(v.shape)==3 else v
        Ysim = np.random.multivariate_normal(m.flatten(), v, size).T
        return Ysim
    def posterior_samples(self,X,size=10,which_parts='all',noise_model=None):
        """
        Samples the posterior GP at the points X, adding observation noise.

        :param X: the points at which to take the samples.
        :type X: np.ndarray, Nnew x self.input_dim.
        :param size: the number of a posteriori samples to plot.
        :type size: int.
        :param which_parts: which of the kernel functions to plot (additively).
        :type which_parts: 'all', or list of bools.
        :param noise_model: for mixed noise likelihood, the noise model to use in the samples.
        :type noise_model: integer.
        :returns: Ysim: set of simulations, a Numpy array (N x samples).
        """
        Ysim = self.posterior_samples_f(X, size, which_parts=which_parts)
        if isinstance(self.likelihood,Gaussian):
            # Gaussian noise: perturb the latent samples directly.
            noise_std = np.sqrt(self.likelihood._get_params())
            Ysim += np.random.normal(0,noise_std,Ysim.shape)
        elif isinstance(self.likelihood,Gaussian_Mixed_Noise):
            assert noise_model is not None, "A noise model must be specified."
            noise_std = np.sqrt(self.likelihood._get_params()[noise_model])
            Ysim += np.random.normal(0,noise_std,Ysim.shape)
        else:
            # Non-Gaussian likelihood: delegate sampling to the noise model.
            Ysim = self.likelihood.noise_model.samples(Ysim)
        return Ysim
    def plot_f(self, *args, **kwargs):
        """
        Plot the GP's view of the world, where the data is normalized and before applying a likelihood.

        This is a convenience function: we simply call self.plot with the
        argument use_raw_predict set True. All args and kwargs are passed on to
        plot.

        see also: gp_base.plot
        """
        kwargs['plot_raw'] = True
        self.plot(*args, **kwargs)
    def plot(self, plot_limits=None, which_data_rows='all',
            which_data_ycols='all', which_parts='all', fixed_inputs=[],
            levels=20, samples=0, fignum=None, ax=None, resolution=None,
            plot_raw=False,
            linecol=Tango.colorsHex['darkBlue'],fillcol=Tango.colorsHex['lightBlue']):
        """
        Plot the posterior of the GP.
          - In one dimension, the function is plotted with a shaded region identifying two standard deviations.
          - In two dimensions, a contour-plot shows the mean predicted function
          - In higher dimensions, use fixed_inputs to plot the GP with some of the inputs fixed.

        Can plot only part of the data and part of the posterior functions
        using which_data_rows, which_data_ycols and which_parts

        :param plot_limits: The limits of the plot. If 1D [xmin,xmax], if 2D [[xmin,ymin],[xmax,ymax]]. Defaults to data limits
        :type plot_limits: np.array
        :param which_data_rows: which of the training data to plot (default all)
        :type which_data_rows: 'all' or a slice object to slice self.X, self.Y
        :param which_data_ycols: when the data has several columns (independent outputs), only plot these
        :type which_data_ycols: 'all' or a list of integers
        :param which_parts: which of the kernel functions to plot (additively)
        :type which_parts: 'all', or list of bools
        :param fixed_inputs: a list of tuple [(i,v), (i,v)...], specifying that input index i should be set to value v.
        :type fixed_inputs: a list of tuples
        :param resolution: the number of intervals to sample the GP on. Defaults to 200 in 1D and 50 (a 50x50 grid) in 2D
        :type resolution: int
        :param levels: for 2D plotting, the number of contour levels to use
        :type levels: int
        :param samples: the number of a posteriori samples to plot
        :type samples: int
        :param fignum: figure to plot on.
        :type fignum: figure number
        :param ax: axes to plot on; if ax is None, create a new figure
        :type ax: axes handle
        :param linecol: color of line to plot.
        :param fillcol: color of fill
        """
        #deal with optional arguments
        if which_data_rows == 'all':
            which_data_rows = slice(None)
        if which_data_ycols == 'all':
            which_data_ycols = np.arange(self.output_dim)
        if len(which_data_ycols)==0:
            raise ValueError('No data selected for plotting')
        if ax is None:
            fig = pb.figure(num=fignum)
            ax = fig.add_subplot(111)
        #work out what the inputs are for plotting (1D or 2D)
        fixed_dims = np.array([i for i,v in fixed_inputs])
        free_dims = np.setdiff1d(np.arange(self.input_dim),fixed_dims)
        #one dimensional plotting
        if len(free_dims) == 1:
            #define the frame on which to plot
            resolution = resolution or 200
            Xu = self.X * self._Xscale + self._Xoffset #NOTE self.X are the normalized values now
            Xnew, xmin, xmax = x_frame1D(Xu[:,free_dims], plot_limits=plot_limits)
            Xgrid = np.empty((Xnew.shape[0],self.input_dim))
            Xgrid[:,free_dims] = Xnew
            for i,v in fixed_inputs:
                Xgrid[:,i] = v
            #make a prediction on the frame and plot it
            if plot_raw:
                m, v = self._raw_predict(Xgrid, which_parts=which_parts)
                lower = m - 2*np.sqrt(v)
                upper = m + 2*np.sqrt(v)
                Y = self.likelihood.Y
            else:
                # NOTE(review): the first predict's lower/upper are overwritten
                # by the sampling-based call on the next line; only the exact
                # mean m from the first call is used.
                m, v, lower, upper = self.predict(Xgrid, which_parts=which_parts, sampling=False) #Compute the exact mean
                m_, v_, lower, upper = self.predict(Xgrid, which_parts=which_parts, sampling=True, num_samples=15000) #Apporximate the percentiles
                Y = self.likelihood.data
            for d in which_data_ycols:
                gpplot(Xnew, m[:, d], lower[:, d], upper[:, d], axes=ax, edgecol=linecol, fillcol=fillcol)
                ax.plot(Xu[which_data_rows,free_dims], Y[which_data_rows, d], 'kx', mew=1.5)
            #optionally plot some samples
            if samples: #NOTE not tested with fixed_inputs
                Ysim = self.posterior_samples(Xgrid, samples, which_parts=which_parts)
                for yi in Ysim.T:
                    ax.plot(Xnew, yi[:,None], Tango.colorsHex['darkBlue'], linewidth=0.25)
                    #ax.plot(Xnew, yi[:,None], marker='x', linestyle='--',color=Tango.colorsHex['darkBlue']) #TODO apply this line for discrete outputs.
            #set the limits of the plot to some sensible values
            ymin, ymax = min(np.append(Y[which_data_rows, which_data_ycols].flatten(), lower)), max(np.append(Y[which_data_rows, which_data_ycols].flatten(), upper))
            ymin, ymax = ymin - 0.1 * (ymax - ymin), ymax + 0.1 * (ymax - ymin)
            ax.set_xlim(xmin, xmax)
            ax.set_ylim(ymin, ymax)
        #2D plotting
        elif len(free_dims) == 2:
            #define the frame for plotting on
            resolution = resolution or 50
            Xu = self.X * self._Xscale + self._Xoffset #NOTE self.X are the normalized values now
            Xnew, _, _, xmin, xmax = x_frame2D(Xu[:,free_dims], plot_limits, resolution)
            Xgrid = np.empty((Xnew.shape[0],self.input_dim))
            Xgrid[:,free_dims] = Xnew
            for i,v in fixed_inputs:
                Xgrid[:,i] = v
            x, y = np.linspace(xmin[0], xmax[0], resolution), np.linspace(xmin[1], xmax[1], resolution)
            #predict on the frame and plot
            if plot_raw:
                m, _ = self._raw_predict(Xgrid, which_parts=which_parts)
                Y = self.likelihood.Y
            else:
                m, _, _, _ = self.predict(Xgrid, which_parts=which_parts,sampling=False)
                Y = self.likelihood.data
            for d in which_data_ycols:
                m_d = m[:,d].reshape(resolution, resolution).T
                contour = ax.contour(x, y, m_d, levels, vmin=m.min(), vmax=m.max(), cmap=pb.cm.jet)
                scatter = ax.scatter(self.X[which_data_rows, free_dims[0]], self.X[which_data_rows, free_dims[1]], 40, Y[which_data_rows, d], cmap=pb.cm.jet, vmin=m.min(), vmax=m.max(), linewidth=0.)
            #set the limits of the plot to some sensible values
            ax.set_xlim(xmin[0], xmax[0])
            ax.set_ylim(xmin[1], xmax[1])
            if samples:
                warnings.warn("Samples are rather difficult to plot for 2D inputs...")
            return contour, scatter
        else:
            raise NotImplementedError, "Cannot define a frame with more than two input dimensions"
    def getstate(self):
        """
        Get the curent state of the class. This is only used to efficiently
        pickle the model. See also self.setstate
        """
        return Model.getstate(self) + [self.X,
                self.num_data,
                self.input_dim,
                self.kern,
                self.likelihood,
                self.output_dim,
                self._Xoffset,
                self._Xscale]
    def setstate(self, state):
        """
        Set the state of the model. Used for efficient pickling
        """
        # Pop in reverse order of getstate's append order.
        self._Xscale = state.pop()
        self._Xoffset = state.pop()
        self.output_dim = state.pop()
        self.likelihood = state.pop()
        self.kern = state.pop()
        self.input_dim = state.pop()
        self.num_data = state.pop()
        self.X = state.pop()
        Model.setstate(self, state)
    def log_predictive_density(self, x_test, y_test):
        """
        Calculation of the log predictive density

        .. math:
            p(y_{*}|D) = p(y_{*}|f_{*})p(f_{*}|\mu_{*}\\sigma^{2}_{*})

        :param x_test: test observations (x_{*})
        :type x_test: (Nx1) array
        :param y_test: test observations (y_{*})
        :type y_test: (Nx1) array
        """
        mu_star, var_star = self._raw_predict(x_test)
        return self.likelihood.log_predictive_density(y_test, mu_star, var_star)

View file

@ -0,0 +1,465 @@
# Copyright (c) 2012, GPy authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt)
import numpy as np
import re
import copy
import cPickle
import warnings
import transformations
class Parameterized(object):
def __init__(self):
"""
This is the base class for model and kernel. Mostly just handles tieing and constraining of parameters
"""
self.tied_indices = []
self.fixed_indices = []
self.fixed_values = []
self.constrained_indices = []
self.constraints = []
def _get_params(self):
raise NotImplementedError, "this needs to be implemented to use the Parameterized class"
def _set_params(self, x):
raise NotImplementedError, "this needs to be implemented to use the Parameterized class"
def _get_param_names(self):
raise NotImplementedError, "this needs to be implemented to use the Parameterized class"
#def _get_print_names(self):
# """ Override for which names to print out, when using print m """
# return self._get_param_names()
def pickle(self, filename, protocol=-1):
with open(filename, 'wb') as f:
cPickle.dump(self, f, protocol=protocol)
def copy(self):
"""Returns a (deep) copy of the current model """
return copy.deepcopy(self)
def __getstate__(self):
if self._has_get_set_state():
return self.getstate()
return self.__dict__
def __setstate__(self, state):
if self._has_get_set_state():
self.setstate(state) # set state
self._set_params(self._get_params()) # restore all values
return
self.__dict__ = state
def _has_get_set_state(self):
return 'getstate' in vars(self.__class__) and 'setstate' in vars(self.__class__)
def getstate(self):
"""
Get the current state of the class,
here just all the indices, rest can get recomputed
For inheriting from Parameterized:
Allways append the state of the inherited object
and call down to the inherited object in setstate!!
"""
return [self.tied_indices,
self.fixed_indices,
self.fixed_values,
self.constrained_indices,
self.constraints]
def setstate(self, state):
self.constraints = state.pop()
self.constrained_indices = state.pop()
self.fixed_values = state.pop()
self.fixed_indices = state.pop()
self.tied_indices = state.pop()
def __getitem__(self, regexp, return_names=False):
"""
Get a model parameter by name. The name is applied as a regular
expression and all parameters that match that regular expression are
returned.
"""
matches = self.grep_param_names(regexp)
if len(matches):
if return_names:
return self._get_params()[matches], np.asarray(self._get_param_names())[matches].tolist()
else:
return self._get_params()[matches]
else:
raise AttributeError, "no parameter matches %s" % regexp
def __setitem__(self, name, val):
"""
Set model parameter(s) by name. The name is provided as a regular
expression. All parameters matching that regular expression are set to
the given value.
"""
matches = self.grep_param_names(name)
if len(matches):
val = np.array(val)
assert (val.size == 1) or val.size == len(matches), "Shape mismatch: {}:({},)".format(val.size, len(matches))
x = self._get_params()
x[matches] = val
self._set_params(x)
else:
raise AttributeError, "no parameter matches %s" % name
def tie_params(self, regexp):
"""
Tie (all!) parameters matching the regular expression `regexp`.
"""
matches = self.grep_param_names(regexp)
assert matches.size > 0, "need at least something to tie together"
if len(self.tied_indices):
assert not np.any(matches[:, None] == np.hstack(self.tied_indices)), "Some indices are already tied!"
self.tied_indices.append(matches)
# TODO only one of the priors will be evaluated. Give a warning message if the priors are not identical
if hasattr(self, 'prior'):
pass
self._set_params_transformed(self._get_params_transformed()) # sets tied parameters to single value
def untie_everything(self):
"""Unties all parameters by setting tied_indices to an empty list."""
self.tied_indices = []
def grep_param_names(self, regexp, transformed=False, search=False):
"""
:param regexp: regular expression to select parameter names
:type regexp: re | str | int
:rtype: the indices of self._get_param_names which match the regular expression.
Note:-
Other objects are passed through - i.e. integers which weren't meant for grepping
"""
if transformed:
names = self._get_param_names_transformed()
else:
names = self._get_param_names()
if type(regexp) in [str, np.string_, np.str]:
regexp = re.compile(regexp)
elif type(regexp) is re._pattern_type:
pass
else:
return regexp
if search:
return np.nonzero([regexp.search(name) for name in names])[0]
else:
return np.nonzero([regexp.match(name) for name in names])[0]
def num_params_transformed(self):
removed = 0
for tie in self.tied_indices:
removed += tie.size - 1
for fix in self.fixed_indices:
removed += fix.size
return len(self._get_params()) - removed
def unconstrain(self, regexp):
"""Unconstrain matching parameters. Does not untie parameters"""
matches = self.grep_param_names(regexp)
# tranformed contraints:
for match in matches:
self.constrained_indices = [i[i <> match] for i in self.constrained_indices]
# remove empty constraints
tmp = zip(*[(i, t) for i, t in zip(self.constrained_indices, self.constraints) if len(i)])
if tmp:
self.constrained_indices, self.constraints = zip(*[(i, t) for i, t in zip(self.constrained_indices, self.constraints) if len(i)])
self.constrained_indices, self.constraints = list(self.constrained_indices), list(self.constraints)
# fixed:
self.fixed_values = [np.delete(values, np.nonzero(np.sum(indices[:, None] == matches[None, :], 1))[0]) for indices, values in zip(self.fixed_indices, self.fixed_values)]
self.fixed_indices = [np.delete(indices, np.nonzero(np.sum(indices[:, None] == matches[None, :], 1))[0]) for indices in self.fixed_indices]
# remove empty elements
tmp = [(i, v) for i, v in zip(self.fixed_indices, self.fixed_values) if len(i)]
if tmp:
self.fixed_indices, self.fixed_values = zip(*tmp)
self.fixed_indices, self.fixed_values = list(self.fixed_indices), list(self.fixed_values)
else:
self.fixed_indices, self.fixed_values = [], []
def constrain_negative(self, regexp, warning=True):
""" Set negative constraints. """
self.constrain(regexp, transformations.negative_logexp(), warning=warning)
def constrain_positive(self, regexp, warning=True):
""" Set positive constraints. """
self.constrain(regexp, transformations.logexp(), warning=warning)
def constrain_bounded(self, regexp, lower, upper, warning=True):
""" Set bounded constraints. """
self.constrain(regexp, transformations.logistic(lower, upper), warning=warning)
def all_constrained_indices(self):
    """Return one flat array of every constrained or fixed parameter index."""
    groups = self.constrained_indices + self.fixed_indices
    if not groups:
        # np.hstack refuses an empty sequence, so return an empty array
        return np.empty(shape=(0,))
    return np.hstack(groups)
def constrain(self, regexp, transform, warning=True):
    """Constrain matching parameters with the given transformation.

    :param regexp: which parameters to constrain (regexp, string or index array)
    :param transform: the transformation to apply (e.g. transformations.logexp())
    :param warning: if True, warn (and first unconstrain) when a matched
        parameter is already constrained or fixed
    """
    assert isinstance(transform, transformations.transformation)
    matches = self.grep_param_names(regexp)
    if warning:
        overlap = set(matches).intersection(set(self.all_constrained_indices()))
        if overlap:
            # re-constraining: remove the previous constraints first
            self.unconstrain(np.asarray(list(overlap)))
            # single-arg print() form: identical output on Python 2 and 3
            print('Warning: re-constraining these parameters')
            pn = self._get_param_names()
            for i in overlap:
                print(pn[i])
    self.constrained_indices.append(matches)
    self.constraints.append(transform)
    # move the current values into the transform's valid range
    x = self._get_params()
    x[matches] = transform.initialize(x[matches])
    self._set_params(x)
def constrain_fixed(self, regexp, value=None, warning=True):
    """
    Fix matching parameters, removing them from the optimiser's view.

    :param regexp: which parameters need to be fixed.
    :type regexp: ndarray(dtype=int) or regular expression object or string
    :param value: the value to fix the parameters to. If the value is not specified,
        the parameter is fixed to the current value
    :type value: float

    **Notes**

    Fixing a parameter which is tied to another, or constrained in some way will result in an error.
    To fix multiple parameters to the same value, simply pass a regular expression which matches both parameter names, or pass both of the indexes.
    """
    matches = self.grep_param_names(regexp)
    if warning:
        overlap = set(matches).intersection(set(self.all_constrained_indices()))
        if overlap:
            self.unconstrain(np.asarray(list(overlap)))
            print('Warning: re-constraining these parameters')
            pn = self._get_param_names()
            for i in overlap:
                print(pn[i])
    self.fixed_indices.append(matches)
    # identity test: '!= None' misbehaves when value is an ndarray
    if value is not None:
        self.fixed_values.append(value)
    else:
        # no explicit value given: freeze the parameters at their current values
        self.fixed_values.append(self._get_params()[self.fixed_indices[-1]])
    # rebuild the transformed parameter vector so the fix takes effect
    self._set_params_transformed(self._get_params_transformed())
def _get_params_transformed(self):
    """use self._get_params to get the 'true' parameters of the model, which are then tied, constrained and fixed"""
    x = self._get_params()
    # map constrained values into the optimiser's unconstrained space
    for idx, transform in zip(self.constrained_indices, self.constraints):
        np.put(x, idx, transform.finv(x[idx]))
    # drop fixed parameters and all but the first member of each tie
    to_remove = self.fixed_indices + [tie[1:] for tie in self.tied_indices]
    if not len(to_remove):
        return x
    return np.delete(x, np.hstack(to_remove))
def _set_params_transformed(self, x):
    """ takes the vector x, which is then modified (by untying, reparameterising or inserting fixed values), and then call self._set_params"""
    # expand the optimiser's vector back to the full parameter vector, then apply
    full_x = self._untransform_params(x)
    self._set_params(full_x)
def _untransform_params(self, x):
    """
    The transformation required for _set_params_transformed.

    This moves the vector x seen by the optimiser (unconstrained) to the
    valid parameter vector seen by the model.

    Note:
      - This function is separate from _set_params_transformed for downstream flexibility
    """
    # work out how many places are fixed, and where they are. tricky logic!
    fix_places = self.fixed_indices + [t[1:] for t in self.tied_indices]
    if len(fix_places):
        fix_places = np.hstack(fix_places)
        Nfix_places = fix_places.size
    else:
        Nfix_places = 0
    # 'np.int' was removed from NumPy; the builtin int is the same dtype here
    free_places = np.setdiff1d(np.arange(Nfix_places + x.size, dtype=int), fix_places)
    # put the models values in the vector xx
    xx = np.zeros(Nfix_places + free_places.size, dtype=np.float64)
    xx[free_places] = x
    # re-insert fixed values, copy tied values from their representative,
    # then map constrained values back through each transformation (in that order)
    [np.put(xx, i, v) for i, v in zip(self.fixed_indices, self.fixed_values)]
    [np.put(xx, i, v) for i, v in [(t[1:], xx[t[0]]) for t in self.tied_indices] ]
    [np.put(xx, i, t.f(xx[i])) for i, t in zip(self.constrained_indices, self.constraints)]
    if hasattr(self, 'debug'):
        # intentional NameError: acts as a crude breakpoint when self.debug is set
        stop # @UndefinedVariable
    return xx
def _get_param_names_transformed(self):
    """
    Returns the parameter names as propagated after constraining,
    tying or fixing, i.e. a list of the same length as _get_params_transformed()
    """
    n = self._get_param_names()
    # remove/concatenate the tied parameter names
    if len(self.tied_indices):
        for t in self.tied_indices:
            n[t[0]] = "<tie>".join([n[tt] for tt in t])
        remove = np.hstack([t[1:] for t in self.tied_indices])
    else:
        # 'np.int' was removed from NumPy; the builtin int is equivalent here
        remove = np.empty(shape=(0,), dtype=int)
    # also remove the fixed params
    if len(self.fixed_indices):
        remove = np.hstack((remove, np.hstack(self.fixed_indices)))
    # add markers to show that some variables are constrained
    for i, t in zip(self.constrained_indices, self.constraints):
        for ii in i:
            n[ii] = n[ii] + t.__str__()
    n = [nn for i, nn in enumerate(n) if not i in remove]
    return n
#@property
#def all(self):
# return self.__str__(self._get_param_names())
#def __str__(self, names=None, nw=30):
def __str__(self, nw=30):
    """
    Return a string describing the parameter names and their ties and constraints
    """
    names = self._get_param_names()
    N = len(names)
    if not N:
        return "This object has no free parameters."
    header = ['Name', 'Value', 'Constraints', 'Ties']
    values = self._get_params()
    # sort out the constraints: one label per parameter row
    constraints = [''] * len(names)
    for i, t in zip(self.constrained_indices, self.constraints):
        for ii in i:
            constraints[ii] = t.__str__()
    for i in self.fixed_indices:
        for ii in i:
            constraints[ii] = 'Fixed'
    # sort out the ties: members of tie group k are marked '(k)'
    ties = [''] * len(names)
    for i, tie in enumerate(self.tied_indices):
        for j in tie:
            ties[j] = '(' + str(i) + ')'
    if values.size == 1:
        values = ['%.4f' % float(values)]
    else:
        values = ['%.4f' % float(v) for v in values]
    # column widths: widest entry (or header) plus padding
    max_names = max([len(names[i]) for i in range(len(names))] + [len(header[0])])
    max_values = max([len(values[i]) for i in range(len(values))] + [len(header[1])])
    max_constraint = max([len(constraints[i]) for i in range(len(constraints))] + [len(header[2])])
    max_ties = max([len(ties[i]) for i in range(len(ties))] + [len(header[3])])
    cols = np.array([max_names, max_values, max_constraint, max_ties]) + 4
    # join directly: the old map() over a one-element list breaks subscripting
    # on Python 3, where map returns an iterator
    header_string = '|'.join(["{h:^{col}}".format(h=header[i], col=cols[i]) for i in range(len(cols))])
    separator = '-' * len(header_string)
    param_string = ["{n:^{c0}}|{v:^{c1}}|{c:^{c2}}|{t:^{c3}}".format(n=names[i], v=values[i], c=constraints[i], t=ties[i], c0=cols[0], c1=cols[1], c2=cols[2], c3=cols[3]) for i in range(len(values))]
    return ('\n'.join([header_string, separator] + param_string)) + '\n'
def grep_model(self, regexp):
    """Print a parameter table (name, value, constraint, tie) for the
    parameters whose names match regexp.

    :param regexp: pattern passed to self.grep_param_names
    :returns: "Match not found." when nothing matches, otherwise None
        (the table is printed)
    """
    regexp_indices = self.grep_param_names(regexp)
    all_names = self._get_param_names()
    names = [all_names[pj] for pj in regexp_indices]
    N = len(names)
    if not N:
        return "Match not found."
    header = ['Name', 'Value', 'Constraints', 'Ties']
    all_values = self._get_params()
    values = np.array([all_values[pj] for pj in regexp_indices])
    constraints = [''] * len(names)
    _constrained_indices, aux = self._pick_elements(regexp_indices, self.constrained_indices)
    _constraints = [self.constraints[pj] for pj in aux]
    for i, t in zip(_constrained_indices, _constraints):
        for ii in i:
            iii = regexp_indices.tolist().index(ii)
            constraints[iii] = t.__str__()
    _fixed_indices, aux = self._pick_elements(regexp_indices, self.fixed_indices)
    for i in _fixed_indices:
        for ii in i:
            iii = regexp_indices.tolist().index(ii)
            # bug fix: mark the *local* row (iii); indexing with the global
            # parameter index (ii) wrote to the wrong row or out of range
            constraints[iii] = 'Fixed'
    _tied_indices, aux = self._pick_elements(regexp_indices, self.tied_indices)
    ties = [''] * len(names)
    for i, ti in zip(_tied_indices, aux):
        for ii in i:
            iii = regexp_indices.tolist().index(ii)
            ties[iii] = '(' + str(ti) + ')'
    if values.size == 1:
        values = ['%.4f' % float(values)]
    else:
        values = ['%.4f' % float(v) for v in values]
    max_names = max([len(names[i]) for i in range(len(names))] + [len(header[0])])
    max_values = max([len(values[i]) for i in range(len(values))] + [len(header[1])])
    max_constraint = max([len(constraints[i]) for i in range(len(constraints))] + [len(header[2])])
    max_ties = max([len(ties[i]) for i in range(len(ties))] + [len(header[3])])
    cols = np.array([max_names, max_values, max_constraint, max_ties]) + 4
    # join directly instead of mapping over a one-element list (Python 3 safe)
    header_string = '|'.join(["{h:^{col}}".format(h=header[i], col=cols[i]) for i in range(len(cols))])
    separator = '-' * len(header_string)
    param_string = ["{n:^{c0}}|{v:^{c1}}|{c:^{c2}}|{t:^{c3}}".format(n=names[i], v=values[i], c=constraints[i], t=ties[i], c0=cols[0], c1=cols[1], c2=cols[2], c3=cols[3]) for i in range(len(values))]
    print(header_string)
    print(separator)
    for string in param_string:
        print(string)
def _pick_elements(self, regexp_ind, array_list):
    """Removes from array_list the elements different from regexp_ind"""
    kept = []       # arrays restricted to the indices present in regexp_ind
    positions = []  # positions in array_list of the arrays that survived
    for pos, array in enumerate(array_list):
        selected = [ai for ai in array if ai in regexp_ind]
        if len(selected):
            kept.append(np.array(selected))
            positions.append(pos)
    return kept, positions

217
GPy/core/tmp/priors.py Normal file
View file

@ -0,0 +1,217 @@
# Copyright (c) 2012, GPy authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt)
import numpy as np
import pylab as pb
from scipy.special import gammaln, digamma
from ..util.linalg import pdinv
from GPy.core.domains import REAL, POSITIVE
import warnings
class Prior:
    """Base class for priors: subclasses provide lnpdf, lnpdf_grad and rvs."""
    domain = None
    def pdf(self, x):
        """Density at x, obtained by exponentiating the subclass's lnpdf."""
        log_density = self.lnpdf(x)
        return np.exp(log_density)
    def plot(self):
        """Overlay the analytic pdf on a histogram of 1000 samples."""
        samples = self.rvs(1000)
        pb.hist(samples, 100, normed=True)
        lo, hi = pb.xlim()
        grid = np.linspace(lo, hi, 1000)
        pb.plot(grid, self.pdf(grid), 'r', linewidth=2)
class Gaussian(Prior):
    """
    Implementation of the univariate Gaussian probability function, coupled with random variables.

    :param mu: mean
    :param sigma: standard deviation

    .. Note:: Bishop 2006 notation is used throughout the code
    """
    domain = REAL
    def __init__(self, mu, sigma):
        self.mu = float(mu)
        self.sigma = float(sigma)
        self.sigma2 = np.square(self.sigma)
        # log normalising constant, computed once
        self.constant = -0.5 * np.log(2 * np.pi * self.sigma2)
    def __str__(self):
        return "N(" + str(np.round(self.mu)) + ', ' + str(np.round(self.sigma2)) + ')'
    def lnpdf(self, x):
        # log N(x | mu, sigma2)
        residual = x - self.mu
        return self.constant - 0.5 * np.square(residual) / self.sigma2
    def lnpdf_grad(self, x):
        # d lnpdf / dx
        return -(x - self.mu) / self.sigma2
    def rvs(self, n):
        # reparameterised standard-normal draws
        return self.mu + self.sigma * np.random.randn(n)
class LogGaussian(Prior):
    """
    Implementation of the univariate *log*-Gaussian probability function, coupled with random variables.

    :param mu: mean
    :param sigma: standard deviation

    .. Note:: Bishop 2006 notation is used throughout the code
    """
    domain = POSITIVE
    def __init__(self, mu, sigma):
        self.mu = float(mu)
        self.sigma = float(sigma)
        self.sigma2 = np.square(self.sigma)
        # normalising constant of the Gaussian on log(x)
        self.constant = -0.5 * np.log(2 * np.pi * self.sigma2)
    def __str__(self):
        return "lnN(" + str(np.round(self.mu)) + ', ' + str(np.round(self.sigma2)) + ')'
    def lnpdf(self, x):
        # Gaussian density of log(x) plus the -log(x) Jacobian term
        z = np.log(x) - self.mu
        return self.constant - 0.5 * np.square(z) / self.sigma2 - np.log(x)
    def lnpdf_grad(self, x):
        # derivative with respect to x (not log x)
        return -((np.log(x) - self.mu) / self.sigma2 + 1.) / x
    def rvs(self, n):
        # exponentiate reparameterised normal draws
        return np.exp(self.mu + self.sigma * np.random.randn(n))
class MultivariateGaussian:
    """
    Implementation of the multivariate Gaussian probability function, coupled with random variables.

    :param mu: mean (N-dimensional array)
    :param var: covariance matrix (NxN)

    .. Note:: Bishop 2006 notation is used throughout the code
    """
    domain = REAL
    def __init__(self, mu, var):
        self.mu = np.array(mu).flatten()
        self.var = np.array(var)
        # covariance must be square and conformant with the mean
        assert len(self.var.shape) == 2
        assert self.var.shape[0] == self.var.shape[1]
        assert self.var.shape[0] == self.mu.size
        self.input_dim = self.mu.size
        # pdinv presumably returns the inverse and half-log-determinant of a
        # positive-definite matrix -- TODO confirm against GPy.util.linalg
        self.inv, self.hld = pdinv(self.var)
        self.constant = -0.5 * self.input_dim * np.log(2 * np.pi) - self.hld
    def summary(self):
        raise NotImplementedError
    def pdf(self, x):
        # density via the log density, for numerical stability
        return np.exp(self.lnpdf(x))
    def lnpdf(self, x):
        # row-wise log density: assumes x has one sample per row -- TODO confirm
        d = x - self.mu
        return self.constant - 0.5 * np.sum(d * np.dot(d, self.inv), 1)
    def lnpdf_grad(self, x):
        # gradient of the log density with respect to x
        d = x - self.mu
        return -np.dot(self.inv, d)
    def rvs(self, n):
        return np.random.multivariate_normal(self.mu, self.var, n)
    def plot(self):
        # only a 2D contour-plus-samples plot is supported; other dims: no-op
        if self.input_dim == 2:
            rvs = self.rvs(200)
            pb.plot(rvs[:, 0], rvs[:, 1], 'kx', mew=1.5)
            xmin, xmax = pb.xlim()
            ymin, ymax = pb.ylim()
            xx, yy = np.mgrid[xmin:xmax:100j, ymin:ymax:100j]
            xflat = np.vstack((xx.flatten(), yy.flatten())).T
            zz = self.pdf(xflat).reshape(100, 100)
            pb.contour(xx, yy, zz, linewidths=2)
def gamma_from_EV(E, V):
    # Deprecated shim kept for backwards compatibility; use Gamma.from_EV instead.
    warnings.warn("use Gamma.from_EV to create Gamma Prior", FutureWarning)
    return Gamma.from_EV(E, V)
class Gamma(Prior):
    """
    Implementation of the Gamma probability function, coupled with random variables.

    :param a: shape parameter
    :param b: rate parameter (warning: it's the *inverse* of the scale)

    .. Note:: Bishop 2006 notation is used throughout the code
    """
    domain = POSITIVE
    def __init__(self, a, b):
        self.a = float(a)
        self.b = float(b)
        # log normaliser: a*log(b) - gammaln(a)
        self.constant = -gammaln(self.a) + a * np.log(b)
    def __str__(self):
        return "Ga(" + str(np.round(self.a)) + ', ' + str(np.round(self.b)) + ')'
    def summary(self):
        """Return a dict of standard moments and the entropy."""
        stats = {"E[x]": self.a / self.b,
                 "E[ln x]": digamma(self.a) - np.log(self.b),
                 "var[x]": self.a / self.b / self.b,
                 "Entropy": gammaln(self.a) - (self.a - 1.) * digamma(self.a) - np.log(self.b) + self.a}
        if self.a > 1:
            stats['Mode'] = (self.a - 1.) / self.b
        else:
            # the mode is at zero / undefined for a <= 1
            stats['mode'] = np.nan
        return stats
    def lnpdf(self, x):
        return self.constant + (self.a - 1) * np.log(x) - self.b * x
    def lnpdf_grad(self, x):
        return (self.a - 1.) / x - self.b
    def rvs(self, n):
        # numpy parameterises by scale = 1/rate
        return np.random.gamma(shape=self.a, scale=1. / self.b, size=n)
    @staticmethod
    def from_EV(E, V):
        """
        Creates an instance of a Gamma Prior by specifying the Expected value(s)
        and Variance(s) of the distribution.

        :param E: expected value
        :param V: variance
        """
        shape = np.square(E) / V
        rate = E / V
        return Gamma(shape, rate)
class inverse_gamma(Prior):
    """
    Implementation of the inverse-Gamma probability function, coupled with random variables.

    :param a: shape parameter
    :param b: rate parameter (warning: it's the *inverse* of the scale)

    .. Note:: Bishop 2006 notation is used throughout the code
    """
    domain = POSITIVE
    def __init__(self, a, b):
        self.a = float(a)
        self.b = float(b)
        # log normaliser, shared with the Gamma form
        self.constant = -gammaln(self.a) + a * np.log(b)
    def __str__(self):
        return "iGa(" + str(np.round(self.a)) + ', ' + str(np.round(self.b)) + ')'
    def lnpdf(self, x):
        return self.constant - (self.a + 1) * np.log(x) - self.b / x
    def lnpdf_grad(self, x):
        return -(self.a + 1.) / x + self.b / x ** 2
    def rvs(self, n):
        # reciprocal of Gamma draws
        draws = np.random.gamma(shape=self.a, scale=1. / self.b, size=n)
        return 1. / draws

View file

@ -0,0 +1,143 @@
# Copyright (c) 2012, GPy authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt)
import numpy as np
from GPy.core.domains import POSITIVE, NEGATIVE, BOUNDED
import sys
lim_val = -np.log(sys.float_info.epsilon)
class transformation(object):
    """Abstract interface mapping an unconstrained optimiser variable x to a
    constrained model parameter f = f(x), and back. Subclasses implement all
    methods; the base class only raises NotImplementedError."""
    # the constrained domain this transform maps into (set by subclasses)
    domain = None
    def f(self, x):
        # optimiser space -> constrained parameter space
        raise NotImplementedError
    def finv(self, x):
        # inverse of f: constrained parameter space -> optimiser space
        raise NotImplementedError
    def gradfactor(self, f):
        """ df_dx evaluated at self.f(x)=f"""
        raise NotImplementedError
    def initialize(self, f):
        """ produce a sensible initial value for f(x)"""
        raise NotImplementedError
    def __str__(self):
        raise NotImplementedError
class logexp(transformation):
    """Positive constraint via the softplus map f(x) = log(1 + e^x)."""
    domain = POSITIVE
    def f(self, x):
        # clip at +/- lim_val to avoid overflow/underflow in exp
        return np.where(x<-lim_val, np.log(1+np.exp(-lim_val)), np.where(x>lim_val, x, np.log(1. + np.exp(x))))
    def finv(self, f):
        # inverse softplus; for large f the map is effectively the identity
        return np.where(f>lim_val, f, np.log(np.exp(f) - 1.))
    def gradfactor(self, f):
        return np.where(f>lim_val, 1., 1 - np.exp(-f))
    def initialize(self, f):
        if np.any(f < 0.):
            # single-arg print() form: identical output on Python 2 and 3
            # (the bare print statement is a syntax error on Python 3)
            print("Warning: changing parameters to satisfy constraints")
        return np.abs(f)
    def __str__(self):
        return '(+ve)'
class negative_logexp(transformation):
    """Negative constraint: the mirror image of logexp, f(x) = -log(1 + e^x)."""
    domain = NEGATIVE
    def f(self, x):
        # delegate to a logexp instance: the original unbound-method calls
        # (logexp.f(x)) need an instance in Python 2 and bind x as self in
        # Python 3, so they never worked as written
        return -logexp().f(x)
    def finv(self, f):
        return logexp().finv(-f)
    def gradfactor(self, f):
        return -logexp().gradfactor(-f)
    def initialize(self, f):
        return -logexp().initialize(f)
    def __str__(self):
        return '(-ve)'
class logexp_clipped(logexp):
    """Softplus positive constraint with hard clipping of the output range."""
    max_bound = 1e100
    min_bound = 1e-10
    log_max_bound = np.log(max_bound)
    log_min_bound = np.log(min_bound)
    domain = POSITIVE
    def __init__(self, lower=1e-6):
        self.lower = lower
    def f(self, x):
        exp = np.exp(np.clip(x, self.log_min_bound, self.log_max_bound))
        f = np.log(1. + exp)
        return np.clip(f, self.min_bound, self.max_bound)
    def finv(self, f):
        # bug fix: the inverse of f = log(1 + e^x) is log(e^f - 1); the old
        # expression log(exp(f - 1.)) reduced to f - 1 (cf. logexp.finv)
        return np.log(np.exp(f) - 1.)
    def gradfactor(self, f):
        ef = np.exp(f)
        gf = (ef - 1.) / ef
        return gf
    def initialize(self, f):
        if np.any(f < 0.):
            # single-arg print() form: identical output on Python 2 and 3
            print("Warning: changing parameters to satisfy constraints")
        return np.abs(f)
    def __str__(self):
        return '(+ve_c)'
class exponent(transformation):
    """Positive constraint via the exponential map f(x) = e^x."""
    domain = POSITIVE
    def f(self, x):
        # clamp the argument to +/- lim_val to avoid overflow/underflow
        return np.where(x<lim_val, np.where(x>-lim_val, np.exp(x), np.exp(-lim_val)), np.exp(lim_val))
    def finv(self, x):
        return np.log(x)
    def gradfactor(self, f):
        # d(e^x)/dx evaluated at f = e^x is f itself
        return f
    def initialize(self, f):
        if np.any(f < 0.):
            # single-arg print() form: identical output on Python 2 and 3
            print("Warning: changing parameters to satisfy constraints")
        return np.abs(f)
    def __str__(self):
        return '(+ve)'
class negative_exponent(exponent):
    """Negative constraint: mirror image of exponent, f(x) = -e^x."""
    domain = NEGATIVE
    def f(self, x):
        # delegate to an exponent instance: the original unbound-method calls
        # (exponent.f(x)) need an instance in Python 2 and bind x as self in
        # Python 3, so they never worked as written
        return -exponent().f(x)
    def finv(self, f):
        return exponent().finv(-f)
    def gradfactor(self, f):
        # df/dx = -e^x = f (f is already negative)
        return f
    def initialize(self, f):
        return -exponent().initialize(f)
    def __str__(self):
        return '(-ve)'
class square(transformation):
    """Positive constraint via squaring: f(x) = x^2."""
    domain = POSITIVE
    def f(self, x):
        return x ** 2
    def finv(self, x):
        # principal (non-negative) square root
        return np.sqrt(x)
    def gradfactor(self, f):
        # df/dx = 2x = 2*sqrt(f) at f = x^2
        return 2 * np.sqrt(f)
    def initialize(self, f):
        return np.abs(f)
    def __str__(self):
        return '(+sq)'
class logistic(transformation):
    """Bounded constraint mapping x onto (lower, upper) via a sigmoid."""
    domain = BOUNDED
    def __init__(self, lower, upper):
        assert lower < upper
        self.lower, self.upper = float(lower), float(upper)
        self.difference = self.upper - self.lower
    def f(self, x):
        return self.lower + self.difference / (1. + np.exp(-x))
    def finv(self, f):
        # clip both margins away from zero to keep the log finite at the bounds
        return np.log(np.clip(f - self.lower, 1e-10, np.inf) / np.clip(self.upper - f, 1e-10, np.inf))
    def gradfactor(self, f):
        return (f - self.lower) * (self.upper - f) / self.difference
    def initialize(self, f):
        if np.any(np.logical_or(f < self.lower, f > self.upper)):
            # single-arg print() form: identical output on Python 2 and 3
            print("Warning: changing parameters to satisfy constraints")
        # out-of-range values are reset to the midpoint f(0)
        return np.where(np.logical_or(f < self.lower, f > self.upper), self.f(f * 0.), f)
    def __str__(self):
        return '({},{})'.format(self.lower, self.upper)

View file

@ -0,0 +1,28 @@
# Copyright (c) 2012, GPy authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt)
import numpy as np
import pylab as pb
import GPy
# Demo: fit a GPLVM with an RBF (+ jitter white) kernel to data sampled
# from a known 1-D latent space, and compare plots before/after optimisation.
np.random.seed(1)
print "GPLVM with RBF kernel"
N = 100
Q = 1
D = 2
# sample a latent space and draw D correlated outputs from the GP prior
X = np.random.rand(N, Q)
k = GPy.kern.rbf(Q, 1.0, 2.0) + GPy.kern.white(Q, 0.00001)
K = k.K(X)
Y = np.random.multivariate_normal(np.zeros(N),K,D).T
m = GPy.models.GPLVM(Y, Q)
m.constrain_positive('(rbf|bias|white)')
# plot the PCA-initialised latent space, then optimise and plot again
pb.figure()
m.plot()
pb.title('PCA initialisation')
pb.figure()
m.optimize(messages = 1)
m.plot()
pb.title('After optimisation')

View file

@ -0,0 +1,51 @@
# Copyright (c) 2012, GPy authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt)
"""
Simple Gaussian Processes regression with an RBF kernel
"""
import pylab as pb
import numpy as np
import GPy
pb.ion()
pb.close('all')
######################################
## 1 dimensional example

# sample inputs and outputs: noisy sine on [-3, 3]
X = np.random.uniform(-3.,3.,(20,1))
Y = np.sin(X)+np.random.randn(20,1)*0.05
# create simple GP model (default kernel)
m = GPy.models.GP_regression(X,Y)
# constrain all parameters to be positive
m.constrain_positive('')
# optimize and plot
m.optimize('tnc', max_f_eval = 1000)
m.plot()
print(m)

######################################
## 2 dimensional example

# sample inputs and outputs: noisy product of sines
X = np.random.uniform(-3.,3.,(40,2))
Y = np.sin(X[:,0:1]) * np.sin(X[:,1:2])+np.random.randn(40,1)*0.05
# create simple GP model
m = GPy.models.GP_regression(X,Y)
# constrain all parameters to be positive
m.constrain_positive('')
# optimize and plot
pb.figure()
m.optimize('tnc', max_f_eval = 1000)
m.plot()
print(m)

View file

@ -0,0 +1,33 @@
# Copyright (c) 2012, GPy authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt)
"""
Simple one-dimensional Gaussian Processes with assorted kernel functions
"""
import pylab as pb
import numpy as np
import GPy
# sample inputs and outputs
D = 1
# NOTE(review): this first X is immediately overwritten below (dead assignment)
X = np.random.randn(10,D)*2
X = np.linspace(-1.5,1.5,5)[:,None]
X = np.append(X,[[5]],0)
Y = np.sin(np.pi*X/2) #+np.random.randn(X.shape[0],1)*0.05
# fit the same data with six different kernels for visual comparison
models = [GPy.models.GP_regression(X,Y, k) for k in (GPy.kern.rbf(D), GPy.kern.Matern52(D), GPy.kern.Matern32(D), GPy.kern.exponential(D), GPy.kern.linear(D) + GPy.kern.white(D), GPy.kern.bias(D) + GPy.kern.white(D))]
pb.figure(figsize=(12,8))
for i,m in enumerate(models):
    m.constrain_positive('')
    m.optimize()
    pb.subplot(3,2,i+1)
    m.plot()
    #pb.title(m.kern.parts[0].name)
GPy.util.plot.align_subplots(3,2,(-3,6),(-2.5,2.5))
pb.show()

View file

@ -0,0 +1,53 @@
# Copyright (c) 2012, GPy authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt)
import cPickle as pickle
import numpy as np
import pylab as pb
import GPy
import pylab as plt
np.random.seed(1)
def plot_oil(X, theta, labels, label):
    # Scatter two latent dimensions of X, coloured by the three one-hot
    # flow-type columns of `labels` (red/green/blue crosses).
    # NOTE(review): np.argsort(theta)[:2] selects the dimensions with the
    # SMALLEST theta values -- confirm this matches the intended relevance
    # ordering of the kernel parameters.
    plt.figure()
    X = X[:,np.argsort(theta)[:2]]
    flow_type = (X[labels[:,0]==1])
    plt.plot(flow_type[:,0], flow_type[:,1], 'rx')
    flow_type = (X[labels[:,1]==1])
    plt.plot(flow_type[:,0], flow_type[:,1], 'gx')
    flow_type = (X[labels[:,2]==1])
    plt.plot(flow_type[:,0], flow_type[:,1], 'bx')
    plt.title(label)
# Demo: compare sparse GPLVM and full GPLVM on a 200-point subsample of the
# three-class oil-flow dataset.
data = pickle.load(open('../util/datasets/oil_flow_3classes.pickle', 'r'))
Y = data['DataTrn']
N, D = Y.shape
# subsample 200 rows to keep the optimisation quick
selected = np.random.permutation(N)[:200]
labels = data['DataTrnLbls'][selected]
Y = Y[selected]
N, D = Y.shape
# standardise each output dimension
Y -= Y.mean(axis=0)
Y /= Y.std(axis=0)
Q = 2
m1 = GPy.models.sparse_GPLVM(Y, Q, M = 15)
m1.constrain_positive('(rbf|bias|noise)')
m1.constrain_bounded('white', 1e-6, 1.0)
plot_oil(m1.X, np.array([1,1]), labels, 'PCA initialization')
# m.optimize(messages = True)
m1.optimize('bfgs', messages = True)
plot_oil(m1.X, np.array([1,1]), labels, 'sparse GPLVM')
# pb.figure()
# m.plot()
# pb.title('PCA initialisation')
# pb.figure()
# m.optimize(messages = 1)
# m.plot()
# pb.title('After optimisation')
# full (non-sparse) GPLVM for comparison
m = GPy.models.GPLVM(Y, Q)
m.constrain_positive('(white|rbf|bias|noise)')
m.optimize()
plot_oil(m.X, np.array([1,1]), labels, 'GPLVM')

View file

@ -0,0 +1,31 @@
# Copyright (c) 2012, GPy authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt)
import numpy as np
import pylab as pb
import GPy
# Demo: sparse GPLVM with 10 inducing points on synthetic GPLVM-like data.
np.random.seed(1)
print "sparse GPLVM with RBF kernel"
N = 100
Q = 1
D = 2
#generate GPLVM-like data
X = np.random.rand(N, Q)
k = GPy.kern.rbf(Q, 1.0, 2.0) + GPy.kern.white(Q, 0.00001)
K = k.K(X)
Y = np.random.multivariate_normal(np.zeros(N),K,D).T
m = GPy.models.sparse_GPLVM(Y, Q, M = 10)
m.constrain_positive('(rbf|bias|noise)')
# keep the white-noise variance in a narrow band for stability
m.constrain_bounded('white', 1e-3, 0.1)
# m.plot()
pb.figure()
m.plot()
pb.title('PCA initialisation')
pb.figure()
m.optimize(messages = 1)
m.plot()
pb.title('After optimisation')

View file

@ -0,0 +1,70 @@
# Copyright (c) 2012, GPy authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt)
import numpy as np
"""
Sparse Gaussian Processes regression with an RBF kernel
"""
import pylab as pb
import numpy as np
import GPy
# Demo: sparse GP regression (10 inducing points) on a noisy 1-D sine;
# the 2-D variant below is kept commented out.
np.random.seed(2)
pb.ion()
N = 500

######################################
## 1 dimensional example

# sample inputs and outputs
X = np.random.uniform(-3.,3.,(N,1))
Y = np.sin(X)+np.random.randn(N,1)*0.05
# construct kernel
rbf = GPy.kern.rbf(1)
noise = GPy.kern.white(1)
kernel = rbf + noise
# create simple GP model
m1 = GPy.models.sparse_GP_regression(X,Y,kernel, M = 10)
# constrain all parameters to be positive
m1.constrain_positive('(variance|lengthscale|precision)')
#m1.constrain_positive('(variance|lengthscale)')
#m1.constrain_fixed('prec',10.)
#check gradient FIXME unit test please
m1.checkgrad()
# optimize and plot
m1.optimize('tnc', messages = 1)
m1.plot()
# print(m1)

######################################
## 2 dimensional example

# # sample inputs and outputs
# X = np.random.uniform(-3.,3.,(N,2))
# Y = np.sin(X[:,0:1]) * np.sin(X[:,1:2])+np.random.randn(N,1)*0.05
# # construct kernel
# rbf = GPy.kern.rbf(2)
# noise = GPy.kern.white(2)
# kernel = rbf + noise
# # create simple GP model
# m2 = GPy.models.sparse_GP_regression(X,Y,kernel, M = 50)
# create simple GP model
# # contrain all parameters to be positive (but not inducing inputs)
# m2.constrain_positive('(variance|lengthscale|precision)')
# #check gradient FIXME unit test please
# m2.checkgrad()
# # optimize and plot
# pb.figure()
# m2.optimize('tnc', messages = 1)
# m2.plot()
# print(m2)

View file

@ -0,0 +1,31 @@
# Copyright (c) 2012, GPy authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt)
import pylab as pb
import numpy as np
import GPy
pb.ion()
pb.close('all')

######################################
## 1 dimensional example

# sample inputs and outputs; S holds the (unit) input variances
S = np.ones((20,1))
X = np.random.uniform(-3.,3.,(20,1))
Y = np.sin(X)+np.random.randn(20,1)*0.05
k = GPy.kern.bias(1) + GPy.kern.white(1)
# create a GP regression model with uncertain (noisy) inputs
m = GPy.models.uncertain_input_GP_regression(X,Y,S,kernel=k)
# constrain all parameters to be positive
m.constrain_positive('(variance|prec)')
# optimize and plot
m.optimize('tnc', max_f_eval = 1000, messages=1)
m.plot()
print(m)

View file

@ -0,0 +1,45 @@
# Copyright (c) 2012, GPy authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt)
import numpy as np
import scipy as sp
import pdb, sys, pickle
import matplotlib.pylab as plt
import GPy
# Demo: fit a warped GP to targets passed through a nonlinear (exp) warp and
# compare against a plain GP regression on the same data.
np.random.seed(1)
N = 100
# sample inputs and outputs
X = np.random.uniform(-np.pi,np.pi,(N,1))
Y = np.sin(X)+np.random.randn(N,1)*0.05
# Y += np.abs(Y.min()) + 0.5
# warp the targets through exp, then rescale to [-0.5, 0.5]
Z = np.exp(Y)# Y**(1/3.0)

# rescaling targets?
Zmax = Z.max()
Zmin = Z.min()
Z = (Z-Zmin)/(Zmax-Zmin) - 0.5

m = GPy.models.warpedGP(X, Z, warping_terms = 2)
m.constrain_positive('(tanh_a|tanh_b|rbf|white|bias)')
m.randomize()

# compare the learned warp against the true one, before and after training
plt.figure()
plt.xlabel('predicted f(Z)')
plt.ylabel('actual f(Z)')
plt.plot(m.Y, Y, 'o', alpha = 0.5, label = 'before training')
m.optimize(messages = True)
plt.plot(m.Y, Y, 'o', alpha = 0.5, label = 'after training')
plt.legend(loc = 0)
m.plot_warping()
plt.figure()
plt.title('warped GP fit')
m.plot()

# baseline: ordinary GP regression on the warped targets
m1 = GPy.models.GP_regression(X, Z)
m1.constrain_positive('(rbf|white|bias)')
m1.randomize()
m1.optimize(messages = True)
plt.figure()
plt.title('GP fit')
m1.plot()

14
GPy/kern/__init__.py~ Normal file
View file

@ -0,0 +1,14 @@
from _src.kern import Kern
from _src.rbf import RBF
from _src.linear import Linear, LinearFull
from _src.static import Bias, White
from _src.brownian import Brownian
from _src.sympykern import Sympykern
from _src.stationary import Exponential, Matern32, Matern52, ExpQuad, RatQuad, Cosine
from _src.mlp import MLP
from _src.periodic import PeriodicExponential, PeriodicMatern32, PeriodicMatern52
from _src.independent_outputs import IndependentOutputs, Hierarchical
from _src.coregionalize import Coregionalize
from _src.ssrbf import SSRBF # TODO: ZD: did you remove this?
from _src.ODE_UY import ODE_UY

5
GPy/kern/_src/notes.txt Normal file
View file

@ -0,0 +1,5 @@
Combination covariances, with a particular flag (partition?) allow interrogation of the individual covariance contribution. So if we have k_3 = k_1 + k_2 because f_3 = f_1 + f_2 then we allow a multi-output covariance over f_1, f_2 and f_3 jointly. Then users can make observations of f_3 and f_1 and build a posterior over f_2 using multiple-output encoding. When this flag is set, an extra input is added to deal with it. We also allow for a new 'combination covariance' of the scale-and-add form, so that
f_3 = f_1*a + f_2
Where a can also be negative.

View file

@ -0,0 +1,52 @@
try:
import sympy as sym
sympy_available=True
except ImportError:
sympy_available=False
import numpy as np
from GPy.util.symbolic import differfln
from symbolic import Symbolic
from sympy import Function, S, oo, I, cos, sin, asin, log, erf, pi, exp, sqrt, sign, gamma, polygamma
class Ode1_eq_lfm(Symbolic):
    """
    A symbolic covariance based on a first order differential equation being driven by a latent force that is an exponentiated quadratic.
    """
    def __init__(self, output_dim=1, param=None, name='Ode1_eq_lfm'):
        # one real input (time) plus one index input for the output dimension
        input_dim = 2
        x_0, z_0, decay_i, decay_j, lengthscale = sym.symbols('x_0, z_0, decay_i, decay_j, lengthscale', positive=True)
        scale_i, scale_j = sym.symbols('scale_i, scale_j')
        # note that covariance only valid for positive time.
        class sim_h(Function):
            # h(t, t', d_i, d_j, l): one half of the cross-covariance,
            # written with log-space differences of erf terms for stability
            nargs = 5
            @classmethod
            def eval(cls, t, tprime, d_i, d_j, l):
                half_l_di = 0.5*l*d_i
                arg_1 = half_l_di + tprime/l
                arg_2 = half_l_di - (t-tprime)/l
                ln_part_1 = differfln(arg_1, arg_2)
                arg_1 = half_l_di
                arg_2 = half_l_di - t/l
                ln_part_2 = differfln(half_l_di, half_l_di - t/l)
                return (exp(half_l_di*half_l_di
                            - d_i*(t-tprime)
                            + ln_part_1
                            - log(d_i + d_j))
                        - exp(half_l_di*half_l_di
                              - d_i*t - d_j*tprime
                              + ln_part_2
                              - log(d_i + d_j)))
        # full covariance is the symmetrised sum of the two h terms
        f = scale_i*scale_j*(sim_h(x_0, z_0, decay_i, decay_j, lengthscale)
                             + sim_h(z_0, x_0, decay_j, decay_i, lengthscale))
        # extra input dim is to signify the output dimension.
        super(Ode1_eq_lfm, self).__init__(input_dim, k=f, output_dim=output_dim, name=name)
        # presumably self.lengthscale/self.decay are Params created by the
        # Symbolic base from the symbol names -- TODO confirm
        self.lengthscale.constrain_positive()
        self.decay.constrain_positive()

View file

@ -0,0 +1,445 @@
# Check Matthew Rocklin's blog post.
try:
import sympy as sym
sympy_available=True
from sympy.utilities.lambdify import lambdify
from GPy.util.symbolic import stabilise
except ImportError:
sympy_available=False
import numpy as np
from kern import Kern
from scipy.special import gammaln, gamma, erf, erfc, erfcx, polygamma
from GPy.util.functions import normcdf, normcdfln, logistic, logisticln, differfln
from ...core.parameterization import Param
class Symbolic(Kern):
    """
    A kernel object, where all the hard work is done by sympy.

    :param k: the covariance function
    :type k: a positive definite sympy function of x_0, z_0, x_1, z_1, x_2, z_2...

    To construct a new sympy kernel, you'll need to define:

    - a kernel function using a sympy object. Ensure that the kernel is of the form k(x,z).
    - that's it! we'll extract the variables from the function k.

    Note:

    - to handle multiple inputs, call them x_1, z_1, etc
    - to handle multiple correlated outputs, you'll need to add parameters with an index, such as lengthscale_i and lengthscale_j.
    """
    def __init__(self, input_dim, k=None, output_dim=1, name='symbolic', param=None, active_dims=None, operators=None, func_modules=None):
        """
        :param input_dim: number of input columns (the x variables, plus one
            index column when output_dim > 1).
        :param k: the sympy covariance expression (required).
        :param output_dim: number of correlated outputs; parameters suffixed
            _i/_j are replicated per output when > 1.
        :param param: optional dict of initial values for shared parameters,
            keyed by parameter name.
        :param func_modules: extra lambdify module mappings, tried before the
            built-in special-function table and numpy.
        :raises ValueError: if no covariance expression k is supplied.
        """
        if k is None:
            # Python-3-compatible raise (was the Python 2 `raise E, msg` form).
            raise ValueError("You must provide an argument for the covariance function.")
        # BUG FIX: func_modules used to default to a shared mutable list ([])
        # which was then extended in place below -- leaking state between
        # instances and mutating any list the caller passed in. Copy instead.
        self.func_modules = [] if func_modules is None else list(func_modules)
        self.func_modules += [{'gamma':gamma,
                               'gammaln':gammaln,
                               'erf':erf, 'erfc':erfc,
                               'erfcx':erfcx,
                               'polygamma':polygamma,
                               'differfln':differfln,
                               'normcdf':normcdf,
                               'normcdfln':normcdfln,
                               'logistic':logistic,
                               'logisticln':logisticln},
                              'numpy']
        super(Symbolic, self).__init__(input_dim, active_dims, name)
        self._sym_k = k
        # pull the variable names out of the symbolic covariance function.
        sym_vars = [e for e in k.atoms() if e.is_Symbol]
        self._sym_x = sorted([e for e in sym_vars if e.name[0:2]=='x_'], key=lambda x: int(x.name[2:]))
        self._sym_z = sorted([e for e in sym_vars if e.name[0:2]=='z_'], key=lambda z: int(z.name[2:]))
        # Check that variable names make sense.
        assert all([x.name=='x_%i'%i for i, x in enumerate(self._sym_x)])
        assert all([z.name=='z_%i'%i for i, z in enumerate(self._sym_z)])
        assert len(self._sym_x)==len(self._sym_z)
        x_dim = len(self._sym_x)
        # The diagonal is k(x, x): substitute each z variable by its x partner.
        self._sym_kdiag = k
        for x, z in zip(self._sym_x, self._sym_z):
            self._sym_kdiag = self._sym_kdiag.subs(z, x)
        # If it is a multi-output covariance, add an input for indexing the outputs.
        self._real_input_dim = x_dim
        # Check input dim is number of xs + 1 if output_dim is >1
        assert self.input_dim == x_dim + int(output_dim > 1)
        self.output_dim = output_dim
        # extract parameter names from the covariance
        thetas = sorted([e for e in sym_vars if not (e.name[0:2]=='x_' or e.name[0:2]=='z_')], key=lambda e: e.name)
        # Look for parameters with index (subscripts), they are associated with different outputs.
        if self.output_dim>1:
            self._sym_theta_i = sorted([e for e in thetas if (e.name[-2:]=='_i')], key=lambda e: e.name)
            self._sym_theta_j = sorted([e for e in thetas if (e.name[-2:]=='_j')], key=lambda e: e.name)
            # Make sure parameter appears with both indices!
            assert len(self._sym_theta_i)==len(self._sym_theta_j)
            assert all([theta_i.name[:-2]==theta_j.name[:-2] for theta_i, theta_j in zip(self._sym_theta_i, self._sym_theta_j)])
            # Extract names of shared parameters (those without a subscript)
            self._sym_theta = [theta for theta in thetas if theta not in self._sym_theta_i and theta not in self._sym_theta_j]
            self.num_split_params = len(self._sym_theta_i)
            self._split_theta_names = ["%s"%theta.name[:-2] for theta in self._sym_theta_i]
            # Add split parameters to the model.
            for theta in self._split_theta_names:
                # TODO: what if user has passed a parameter vector, how should that be stored and interpreted?
                setattr(self, theta, Param(theta, np.ones(self.output_dim), None))
                self.add_parameter(getattr(self, theta))
            self.num_shared_params = len(self._sym_theta)
            # On the diagonal both outputs coincide, so theta_j reduces to theta_i.
            for theta_i, theta_j in zip(self._sym_theta_i, self._sym_theta_j):
                self._sym_kdiag = self._sym_kdiag.subs(theta_j, theta_i)
        else:
            self.num_split_params = 0
            self._split_theta_names = []
            self._sym_theta = thetas
            self.num_shared_params = len(self._sym_theta)
        # Add parameters to the model.
        for theta in self._sym_theta:
            val = 1.0
            # TODO: what if user has passed a parameter vector, how should that be stored and interpreted? This is the old way before params class.
            if param is not None:
                # Python-3-compatible membership test (was dict.has_key).
                if theta.name in param:
                    val = param[theta.name]
            setattr(self, theta.name, Param(theta.name, val, None))
            self.add_parameters(getattr(self, theta.name))
        # Differentiate with respect to parameters.
        derivative_arguments = self._sym_x + self._sym_theta
        if self.output_dim > 1:
            derivative_arguments += self._sym_theta_i
        self.derivatives = {theta.name: stabilise(sym.diff(self._sym_k, theta)) for theta in derivative_arguments}
        self.diag_derivatives = {theta.name: stabilise(sym.diff(self._sym_kdiag, theta)) for theta in derivative_arguments}
        # This gives the parameters for the arg list.
        self.arg_list = self._sym_x + self._sym_z + self._sym_theta
        self.diag_arg_list = self._sym_x + self._sym_theta
        if self.output_dim > 1:
            self.arg_list += self._sym_theta_i + self._sym_theta_j
            self.diag_arg_list += self._sym_theta_i
        # Check if there are additional linear operators on the covariance.
        self._sym_operators = operators
        # TODO: Deal with linear operators
        #if self._sym_operators:
        #    for operator in self._sym_operators:
        # psi_stats aren't yet implemented.
        if False:
            self.compute_psi_stats()
        # generate the code for the covariance functions
        self._gen_code()

    def __add__(self, other):
        """Sum of two symbolic kernels (sum of their covariance expressions)."""
        # BUG FIX: this used to return spkern(...), a name not defined in this
        # module (NameError), and spkern's constructor takes input_dim first
        # anyway. Build a Symbolic kernel from the summed expression instead.
        return Symbolic(self.input_dim, k=self._sym_k + other._sym_k, output_dim=self.output_dim)

    def _gen_code(self):
        """Lambdify the covariance, its diagonal, and all their derivatives."""
        #fn_theano = theano_function([self.arg_lists], [self._sym_k + self.derivatives], dims={x: 1}, dtypes={x_0: 'float64', z_0: 'float64'})
        self._K_function = lambdify(self.arg_list, self._sym_k, self.func_modules)
        self._K_derivatives_code = {key: lambdify(self.arg_list, self.derivatives[key], self.func_modules) for key in self.derivatives.keys()}
        self._Kdiag_function = lambdify(self.diag_arg_list, self._sym_kdiag, self.func_modules)
        self._Kdiag_derivatives_code = {key: lambdify(self.diag_arg_list, self.diag_derivatives[key], self.func_modules) for key in self.diag_derivatives.keys()}

    def K(self, X, X2=None):
        """Covariance matrix between X and X2 (or X with itself if X2 is None)."""
        self._K_computations(X, X2)
        return self._K_function(**self._arguments)

    def Kdiag(self, X):
        """Diagonal of the covariance matrix evaluated at X."""
        self._K_computations(X)
        return self._Kdiag_function(**self._diag_arguments)

    def _param_grad_helper(self, partial, X, Z, target):
        # Intentionally a no-op: gradients are handled by update_gradients_*.
        pass

    def gradients_X(self, dL_dK, X, X2=None):
        """Gradient of the objective with respect to the inputs X."""
        #if self._X is None or X.base is not self._X.base or X2 is not None:
        self._K_computations(X, X2)
        gradients_X = np.zeros_like(X)
        for i, x in enumerate(self._sym_x):
            gf = self._K_derivatives_code[x.name]
            gradients_X[:, i] = (gf(**self._arguments)*dL_dK).sum(1)
        if X2 is None:
            # Each input appears as both arguments in the symmetric case.
            gradients_X *= 2
        return gradients_X

    def gradients_X_diag(self, dL_dK, X):
        """Gradient of the objective w.r.t. X through the covariance diagonal."""
        self._K_computations(X)
        dX = np.zeros_like(X)
        for i, x in enumerate(self._sym_x):
            gf = self._Kdiag_derivatives_code[x.name]
            dX[:, i] = gf(**self._diag_arguments)*dL_dK
        return dX

    def update_gradients_full(self, dL_dK, X, X2=None):
        """Accumulate parameter gradients given dL/dK for the full covariance."""
        # Need to extract parameters to local variables first
        self._K_computations(X, X2)
        for theta in self._sym_theta:
            parameter = getattr(self, theta.name)
            gf = self._K_derivatives_code[theta.name]
            gradient = (gf(**self._arguments)*dL_dK).sum()
            if X2 is not None:
                # Asymmetric case: add the contribution with arguments swapped.
                gradient += (gf(**self._reverse_arguments)*dL_dK).sum()
            setattr(parameter, 'gradient', gradient)
        if self.output_dim>1:
            for theta in self._sym_theta_i:
                parameter = getattr(self, theta.name[:-2])
                gf = self._K_derivatives_code[theta.name]
                A = gf(**self._arguments)*dL_dK
                # One gradient entry per output, summed over that output's rows.
                gradient = np.asarray([A[np.where(self._output_ind==i)].T.sum()
                                       for i in np.arange(self.output_dim)])
                if X2 is None:
                    gradient *= 2
                else:
                    A = gf(**self._reverse_arguments)*dL_dK.T
                    gradient += np.asarray([A[np.where(self._output_ind2==i)].T.sum()
                                            for i in np.arange(self.output_dim)])
                setattr(parameter, 'gradient', gradient)

    def update_gradients_diag(self, dL_dKdiag, X):
        """Accumulate parameter gradients given dL/dKdiag for the diagonal."""
        self._K_computations(X)
        for theta in self._sym_theta:
            parameter = getattr(self, theta.name)
            gf = self._Kdiag_derivatives_code[theta.name]
            setattr(parameter, 'gradient', (gf(**self._diag_arguments)*dL_dKdiag).sum())
        if self.output_dim>1:
            for theta in self._sym_theta_i:
                parameter = getattr(self, theta.name[:-2])
                gf = self._Kdiag_derivatives_code[theta.name]
                a = gf(**self._diag_arguments)*dL_dKdiag
                setattr(parameter, 'gradient',
                        np.asarray([a[np.where(self._output_ind==i)].sum()
                                    for i in np.arange(self.output_dim)]))

    def _K_computations(self, X, X2=None):
        """Set up argument lists for the derivatives."""
        # Could check if this needs doing or not, there could
        # definitely be some computational savings by checking for
        # parameter updates here.
        self._arguments = {}
        self._diag_arguments = {}
        for i, x in enumerate(self._sym_x):
            self._arguments[x.name] = X[:, i][:, None]
            self._diag_arguments[x.name] = X[:, i][:, None]
        if self.output_dim > 1:
            # Last column of X indexes the output each row belongs to.
            self._output_ind = np.asarray(X[:, -1], dtype='int')
            for i, theta in enumerate(self._sym_theta_i):
                self._arguments[theta.name] = np.asarray(getattr(self, theta.name[:-2])[self._output_ind])[:, None]
                self._diag_arguments[theta.name] = self._arguments[theta.name]
        for theta in self._sym_theta:
            self._arguments[theta.name] = np.asarray(getattr(self, theta.name))
            self._diag_arguments[theta.name] = self._arguments[theta.name]
        if X2 is not None:
            for i, z in enumerate(self._sym_z):
                self._arguments[z.name] = X2[:, i][None, :]
            if self.output_dim > 1:
                self._output_ind2 = np.asarray(X2[:, -1], dtype='int')
                for i, theta in enumerate(self._sym_theta_j):
                    self._arguments[theta.name] = np.asarray(getattr(self, theta.name[:-2])[self._output_ind2])[None, :]
        else:
            for z in self._sym_z:
                self._arguments[z.name] = self._arguments['x_'+z.name[2:]].T
            if self.output_dim > 1:
                self._output_ind2 = self._output_ind
                for theta in self._sym_theta_j:
                    self._arguments[theta.name] = self._arguments[theta.name[:-2] + '_i'].T
        if X2 is not None:
            # These arguments are needed in gradients when X2 is not equal to X.
            # BUG FIX: this used to alias self._arguments directly (no copy), so
            # the swap assignments below overwrote the forward arguments as they
            # were being read, corrupting both dictionaries.
            self._reverse_arguments = dict(self._arguments)
            for x, z in zip(self._sym_x, self._sym_z):
                self._reverse_arguments[x.name] = self._arguments[z.name].T
                self._reverse_arguments[z.name] = self._arguments[x.name].T
            if self.output_dim > 1:
                for theta_i, theta_j in zip(self._sym_theta_i, self._sym_theta_j):
                    self._reverse_arguments[theta_i.name] = self._arguments[theta_j.name].T
                    self._reverse_arguments[theta_j.name] = self._arguments[theta_i.name].T
# NOTE(review): everything below is dead code -- the `if False:` guard means it
# never executes.  It also references names not defined in this module
# (CombinationKernel, Cache_this, itertools, Add, and `reduce`, which is not a
# builtin on Python 3).  It appears to be a sketch for combining sympy kernels;
# kept verbatim, annotated only.
if False:
    class Symcombine(CombinationKernel):
        """
        Combine list of given sympy covariances together with the provided operations.
        """
        def __init__(self, subkerns, operations, name='sympy_combine'):
            super(Symcombine, self).__init__(subkerns, name)
            # Accumulate each sub-kernel's symbolic covariance under its operator.
            # NOTE(review): self._sym_k is read before ever being assigned here,
            # and _k_double_operate is not defined anywhere visible -- confirm.
            for subkern, operation in zip(subkerns, operations):
                self._sym_k += self._k_double_operate(subkern._sym_k, operation)

        #def _double_operate(self, k, operation):

        @Cache_this(limit=2, force_kwargs=['which_parts'])
        def K(self, X, X2=None, which_parts=None):
            """
            Combine covariances with a linear operator.
            """
            assert X.shape[1] == self.input_dim
            if which_parts is None:
                which_parts = self.parts
            elif not isinstance(which_parts, (list, tuple)):
                # if only one part is given
                which_parts = [which_parts]
            return reduce(np.add, (p.K(X, X2) for p in which_parts))

        @Cache_this(limit=2, force_kwargs=['which_parts'])
        def Kdiag(self, X, which_parts=None):
            # Sum of the sub-kernels' diagonals.
            assert X.shape[1] == self.input_dim
            if which_parts is None:
                which_parts = self.parts
            elif not isinstance(which_parts, (list, tuple)):
                # if only one part is given
                which_parts = [which_parts]
            return reduce(np.add, (p.Kdiag(X) for p in which_parts))

        def update_gradients_full(self, dL_dK, X, X2=None):
            # Delegate the same dL/dK to every part.
            [p.update_gradients_full(dL_dK, X, X2) for p in self.parts]

        def update_gradients_diag(self, dL_dK, X):
            [p.update_gradients_diag(dL_dK, X) for p in self.parts]

        def gradients_X(self, dL_dK, X, X2=None):
            """Compute the gradient of the objective function with respect to X.

            :param dL_dK: An array of gradients of the objective function with respect to the covariance function.
            :type dL_dK: np.ndarray (num_samples x num_inducing)
            :param X: Observed data inputs
            :type X: np.ndarray (num_samples x input_dim)
            :param X2: Observed data inputs (optional, defaults to X)
            :type X2: np.ndarray (num_inducing x input_dim)"""
            target = np.zeros(X.shape)
            [target.__iadd__(p.gradients_X(dL_dK, X, X2)) for p in self.parts]
            return target

        def gradients_X_diag(self, dL_dKdiag, X):
            # Sum of each part's diagonal X-gradient.
            target = np.zeros(X.shape)
            [target.__iadd__(p.gradients_X_diag(dL_dKdiag, X)) for p in self.parts]
            return target

        def psi0(self, Z, variational_posterior):
            return reduce(np.add, (p.psi0(Z, variational_posterior) for p in self.parts))

        def psi1(self, Z, variational_posterior):
            return reduce(np.add, (p.psi1(Z, variational_posterior) for p in self.parts))

        def psi2(self, Z, variational_posterior):
            # Sum of the parts' psi2 statistics plus pairwise cross terms.
            psi2 = reduce(np.add, (p.psi2(Z, variational_posterior) for p in self.parts))
            #return psi2
            # compute the "cross" terms
            from static import White, Bias
            from rbf import RBF
            #from rbf_inv import RBFInv
            from linear import Linear
            #ffrom fixed import Fixed
            for p1, p2 in itertools.combinations(self.parts, 2):
                # i1, i2 = p1.active_dims, p2.active_dims
                # white doesn;t combine with anything
                if isinstance(p1, White) or isinstance(p2, White):
                    pass
                # rbf X bias
                #elif isinstance(p1, (Bias, Fixed)) and isinstance(p2, (RBF, RBFInv)):
                elif isinstance(p1, Bias) and isinstance(p2, (RBF, Linear)):
                    tmp = p2.psi1(Z, variational_posterior)
                    psi2 += p1.variance * (tmp[:, :, None] + tmp[:, None, :])
                #elif isinstance(p2, (Bias, Fixed)) and isinstance(p1, (RBF, RBFInv)):
                elif isinstance(p2, Bias) and isinstance(p1, (RBF, Linear)):
                    tmp = p1.psi1(Z, variational_posterior)
                    psi2 += p2.variance * (tmp[:, :, None] + tmp[:, None, :])
                elif isinstance(p2, (RBF, Linear)) and isinstance(p1, (RBF, Linear)):
                    assert np.intersect1d(p1.active_dims, p2.active_dims).size == 0, "only non overlapping kernel dimensions allowed so far"
                    tmp1 = p1.psi1(Z, variational_posterior)
                    tmp2 = p2.psi1(Z, variational_posterior)
                    psi2 += (tmp1[:, :, None] * tmp2[:, None, :]) + (tmp2[:, :, None] * tmp1[:, None, :])
                else:
                    # NOTE(review): Python-2-only raise syntax (dead code).
                    raise NotImplementedError, "psi2 cannot be computed for this kernel"
            return psi2

        def update_gradients_expectations(self, dL_dpsi0, dL_dpsi1, dL_dpsi2, Z, variational_posterior):
            from static import White, Bias
            for p1 in self.parts:
                #compute the effective dL_dpsi1. Extra terms appear becaue of the cross terms in psi2!
                eff_dL_dpsi1 = dL_dpsi1.copy()
                for p2 in self.parts:
                    if p2 is p1:
                        continue
                    if isinstance(p2, White):
                        continue
                    elif isinstance(p2, Bias):
                        eff_dL_dpsi1 += dL_dpsi2.sum(1) * p2.variance * 2.
                    else:# np.setdiff1d(p1.active_dims, ar2, assume_unique): # TODO: Careful, not correct for overlapping active_dims
                        eff_dL_dpsi1 += dL_dpsi2.sum(1) * p2.psi1(Z, variational_posterior) * 2.
                p1.update_gradients_expectations(dL_dpsi0, eff_dL_dpsi1, dL_dpsi2, Z, variational_posterior)

        def gradients_Z_expectations(self, dL_dpsi1, dL_dpsi2, Z, variational_posterior):
            from static import White, Bias
            target = np.zeros(Z.shape)
            for p1 in self.parts:
                #compute the effective dL_dpsi1. extra terms appear becaue of the cross terms in psi2!
                eff_dL_dpsi1 = dL_dpsi1.copy()
                for p2 in self.parts:
                    if p2 is p1:
                        continue
                    if isinstance(p2, White):
                        continue
                    elif isinstance(p2, Bias):
                        eff_dL_dpsi1 += dL_dpsi2.sum(1) * p2.variance * 2.
                    else:
                        eff_dL_dpsi1 += dL_dpsi2.sum(1) * p2.psi1(Z, variational_posterior) * 2.
                target += p1.gradients_Z_expectations(eff_dL_dpsi1, dL_dpsi2, Z, variational_posterior)
            return target

        def gradients_qX_expectations(self, dL_dpsi0, dL_dpsi1, dL_dpsi2, Z, variational_posterior):
            from static import White, Bias
            target_mu = np.zeros(variational_posterior.shape)
            target_S = np.zeros(variational_posterior.shape)
            # NOTE(review): iterates self._parameters_ where the sibling methods
            # iterate self.parts -- looks inconsistent; confirm intended.
            for p1 in self._parameters_:
                #compute the effective dL_dpsi1. extra terms appear becaue of the cross terms in psi2!
                eff_dL_dpsi1 = dL_dpsi1.copy()
                for p2 in self._parameters_:
                    if p2 is p1:
                        continue
                    if isinstance(p2, White):
                        continue
                    elif isinstance(p2, Bias):
                        eff_dL_dpsi1 += dL_dpsi2.sum(1) * p2.variance * 2.
                    else:
                        eff_dL_dpsi1 += dL_dpsi2.sum(1) * p2.psi1(Z, variational_posterior) * 2.
                a, b = p1.gradients_qX_expectations(dL_dpsi0, eff_dL_dpsi1, dL_dpsi2, Z, variational_posterior)
                target_mu += a
                target_S += b
            return target_mu, target_S

        def _getstate(self):
            """
            Get the current state of the class,
            here just all the indices, rest can get recomputed
            """
            # NOTE(review): `Add` is undefined in this module (dead code);
            # presumably copied from a kern Add class -- confirm.
            return super(Add, self)._getstate()

        def _setstate(self, state):
            super(Add, self)._setstate(state)

        def add(self, other, name='sum'):
            # Absorb another Add's parameters, or link a single kernel.
            if isinstance(other, Add):
                other_params = other._parameters_.copy()
                for p in other_params:
                    other.remove_parameter(p)
                self.add_parameters(*other_params)
            else: self.add_parameter(other)
            return self

# ---- commit-view artifact: start of a NEW FILE (weave/sympy spkern kernel);
# ---- original diff header: @@ -0,0 +1,564 @@
# Check Matthew Rocklin's blog post.
try:
import sympy as sp
sympy_available=True
from sympy.utilities.autowrap import ufuncify
except ImportError:
sympy_available=False
exit()
from sympy.core.cache import clear_cache
from sympy.utilities.codegen import codegen
try:
from scipy import weave
weave_available = True
except ImportError:
weave_available = False
import os
current_dir = os.path.dirname(os.path.abspath(os.path.dirname(__file__)))
import sys
import numpy as np
import re
import tempfile
import pdb
import ast
from kernpart import Kernpart
from ...core.parameterization import Param
from ...core.parameterization.transformations import Logexp
# TODO have this set up in a set up file!
user_code_storage = tempfile.gettempdir()
class spkern(Kern):
    """
    A kernel object, where all the hard work is done by sympy.

    :param k: the covariance function
    :type k: a positive definite sympy function of x_0, z_0, x_1, z_1, x_2, z_2...

    To construct a new sympy kernel, you'll need to define:

    - a kernel function using a sympy object. Ensure that the kernel is of the form k(x,z).
    - that's it! we'll extract the variables from the function k.

    Note:

    - to handle multiple inputs, call them x_1, z_1, etc
    - to handle multiple correlated outputs, you'll need to add parameters with an index, such as lengthscale_i and lengthscale_j.
    """
    # NOTE(review): the module imports Kernpart (from kernpart) but the base
    # class here is Kern, which is not visibly imported -- confirm the intended
    # base class / import against the rest of the package.
def __init__(self, input_dim, k=None, output_dim=1, name=None, param=None):
if name is None:
name='sympykern'
if k is None:
raise ValueError, "You must provide an argument for the covariance function."
super(spkern, self).__init__(input_dim, name)
self._sp_k = k
# pull the variable names out of the symbolic covariance function.
sp_vars = [e for e in k.atoms() if e.is_Symbol]
self._sp_x= sorted([e for e in sp_vars if e.name[0:2]=='x_'],key=lambda x:int(x.name[2:]))
self._sp_z= sorted([e for e in sp_vars if e.name[0:2]=='z_'],key=lambda z:int(z.name[2:]))
# Check that variable names make sense.
assert all([x.name=='x_%i'%i for i,x in enumerate(self._sp_x)])
assert all([z.name=='z_%i'%i for i,z in enumerate(self._sp_z)])
assert len(self._sp_x)==len(self._sp_z)
x_dim=len(self._sp_x)
# If it is a multi-output covariance, add an input for indexing the outputs.
self._real_input_dim = x_dim
# Check input dim is number of xs + 1 if output_dim is >1
assert self.input_dim == x_dim + int(output_dim > 1)
self.output_dim = output_dim
# extract parameter names from the covariance
thetas = sorted([e for e in sp_vars if not (e.name[0:2]=='x_' or e.name[0:2]=='z_')],key=lambda e:e.name)
# Look for parameters with index (subscripts), they are associated with different outputs.
if self.output_dim>1:
self._sp_theta_i = sorted([e for e in thetas if (e.name[-2:]=='_i')], key=lambda e:e.name)
self._sp_theta_j = sorted([e for e in thetas if (e.name[-2:]=='_j')], key=lambda e:e.name)
# Make sure parameter appears with both indices!
assert len(self._sp_theta_i)==len(self._sp_theta_j)
assert all([theta_i.name[:-2]==theta_j.name[:-2] for theta_i, theta_j in zip(self._sp_theta_i, self._sp_theta_j)])
# Extract names of shared parameters (those without a subscript)
self._sp_theta = [theta for theta in thetas if theta not in self._sp_theta_i and theta not in self._sp_theta_j]
self.num_split_params = len(self._sp_theta_i)
self._split_theta_names = ["%s"%theta.name[:-2] for theta in self._sp_theta_i]
for theta in self._split_theta_names:
setattr(self, theta, Param(theta, np.ones(self.output_dim), None))
self.add_parameters(getattr(self, theta))
#setattr(self, theta, np.ones(self.output_dim))
self.num_shared_params = len(self._sp_theta)
#self.num_params = self.num_shared_params+self.num_split_params*self.output_dim
else:
self.num_split_params = 0
self._split_theta_names = []
self._sp_theta = thetas
self.num_shared_params = len(self._sp_theta)
#self.num_params = self.num_shared_params
# Add parameters to the model.
for theta in self._sp_theta:
val = 1.0
if param is not None:
if param.has_key(theta):
val = param[theta]
#setattr(self, theta.name, val)
setattr(self, theta.name, Param(theta.name, val, None))
self.add_parameters(getattr(self, theta.name))
#deal with param
#self._set_params(self._get_params())
# Differentiate with respect to parameters.
self._sp_dk_dtheta = [sp.diff(k,theta).simplify() for theta in self._sp_theta]
if self.output_dim > 1:
self._sp_dk_dtheta_i = [sp.diff(k,theta).simplify() for theta in self._sp_theta_i]
# differentiate with respect to input variables.
self._sp_dk_dx = [sp.diff(k,xi).simplify() for xi in self._sp_x]
# psi_stats aren't yet implemented.
if False:
self.compute_psi_stats()
self._code = {}
# generate the code for the covariance functions
self._gen_code()
if weave_available:
if False:
extra_compile_args = ['-ftree-vectorize', '-mssse3', '-ftree-vectorizer-verbose=5']
else:
extra_compile_args = []
self.weave_kwargs = {
'support_code': None, #self._function_code,
'include_dirs':[user_code_storage, os.path.join(current_dir,'parts/')],
'headers':['"sympy_helpers.h"', '"'+self.name+'.h"'],
'sources':[os.path.join(current_dir,"parts/sympy_helpers.cpp"), os.path.join(user_code_storage, self.name+'.cpp')],
'extra_compile_args':extra_compile_args,
'extra_link_args':['-lgomp'],
'verbose':True}
self.parameters_changed() # initializes caches
def __add__(self,other):
return spkern(self._sp_k+other._sp_k)
    def _gen_code(self):
        """Generate source for the covariance and all its gradients.

        Uses sympy's codegen to emit C (when weave is available, otherwise
        Python) implementations of k and its derivatives, writes the
        header/source pair into user_code_storage, and assembles the
        weave.inline driver snippets into self._code keyed by operation name
        ('K', 'K_X', 'Kdiag', 'dK_d<param>', 'dK_dX', 'dKdiag_d<param>', ...).
        """
        argument_sequence = self._sp_x+self._sp_z+self._sp_theta
        code_list = [('k',self._sp_k)]
        # gradients with respect to covariance input
        code_list += [('dk_d%s'%x.name,dx) for x,dx in zip(self._sp_x,self._sp_dk_dx)]
        # gradient with respect to parameters
        code_list += [('dk_d%s'%theta.name,dtheta) for theta,dtheta in zip(self._sp_theta,self._sp_dk_dtheta)]
        # gradient with respect to multiple output parameters
        if self.output_dim > 1:
            argument_sequence += self._sp_theta_i + self._sp_theta_j
            code_list += [('dk_d%s'%theta.name,dtheta) for theta,dtheta in zip(self._sp_theta_i,self._sp_dk_dtheta_i)]
        # generate c functions from sympy objects
        if weave_available:
            code_type = "C"
        else:
            code_type = "PYTHON"
        # Need to add the sympy_helpers header in here.
        (foo_c,self._function_code), (foo_h,self._function_header) = \
            codegen(code_list,
                    code_type,
                    self.name,
                    argument_sequence=argument_sequence)
        # Use weave to compute the underlying functions.
        if weave_available:
            # put the header file where we can find it
            f = file(os.path.join(user_code_storage, self.name + '.h'),'w')
            f.write(self._function_header)
            f.close()
        if weave_available:
            # Substitute any known derivatives which sympy doesn't compute
            self._function_code = re.sub('DiracDelta\(.+?,.+?\)','0.0',self._function_code)
            # put the cpp file in user code storage (defaults to temp file location)
            f = file(os.path.join(user_code_storage, self.name + '.cpp'),'w')
        else:
            # put the python file in user code storage
            f = file(os.path.join(user_code_storage, self.name + '.py'),'w')
        f.write(self._function_code)
        f.close()
        if weave_available:
            # arg_list will store the arguments required for the C code.
            input_arg_list = (["X2(i, %s)"%x.name[2:] for x in self._sp_x]
                              + ["Z2(j, %s)"%z.name[2:] for z in self._sp_z])
            # for multiple outputs reverse argument list is also required
            if self.output_dim>1:
                reverse_input_arg_list = list(input_arg_list)
                reverse_input_arg_list.reverse()
            # This gives the parameters for the arg list.
            param_arg_list = [shared_params.name for shared_params in self._sp_theta]
            arg_list = input_arg_list + param_arg_list
            precompute_list=[]
            if self.output_dim > 1:
                reverse_arg_list= reverse_input_arg_list + list(param_arg_list)
                # For multiple outputs, also need the split parameters.
                split_param_arg_list = ["%s1(%s)"%(theta.name[:-2].upper(),index) for index in ['ii', 'jj'] for theta in self._sp_theta_i]
                split_param_reverse_arg_list = ["%s1(%s)"%(theta.name[:-2].upper(),index) for index in ['jj', 'ii'] for theta in self._sp_theta_i]
                arg_list += split_param_arg_list
                reverse_arg_list += split_param_reverse_arg_list
                # Extract the right output indices from the inputs.
                c_define_output_indices = [' '*16 + "int %s=(int)%s(%s, %i);"%(index, var, index2, self.input_dim-1) for index, var, index2 in zip(['ii', 'jj'], ['X2', 'Z2'], ['i', 'j'])]
                precompute_list += c_define_output_indices
                reverse_arg_string = ", ".join(reverse_arg_list)
            arg_string = ", ".join(arg_list)
            precompute_string = "\n".join(precompute_list)
            # Now we use the arguments in code that computes the separate parts.
            # Any precomputations will be done here eventually.
            self._precompute = \
            """
            // Precompute code would go here. It will be called when parameters are updated.
            """
            # Here's the code to do the looping for K
            self._code['K'] =\
            """
            // _K_code
            // Code for computing the covariance function.
            int i;
            int j;
            int n = target_array->dimensions[0];
            int num_inducing = target_array->dimensions[1];
            int input_dim = X_array->dimensions[1];
            //#pragma omp parallel for private(j)
            for (i=0;i<n;i++){
                for (j=0;j<num_inducing;j++){
                    %s
                    //target[i*num_inducing+j] =
                    TARGET2(i, j) += k(%s);
                }
            }
            %s
            """%(precompute_string,arg_string,"/*"+str(self._sp_k)+"*/")
            # adding a string representation of the function in the
            # comment forces recompile when needed
            self._code['K_X'] = self._code['K'].replace('Z2(', 'X2(')
            # Code to compute diagonal of covariance.
            # NOTE(review): these broad re.sub('j','i', ...) rewrites hit every
            # 'j' in the string, including ones inside parameter names --
            # presumably safe for the names used so far, but worth confirming.
            diag_arg_string = re.sub('Z','X',arg_string)
            diag_arg_string = re.sub('int jj','//int jj',diag_arg_string)
            diag_arg_string = re.sub('j','i',diag_arg_string)
            diag_precompute_string = re.sub('int jj','//int jj',precompute_string)
            diag_precompute_string = re.sub('Z','X',diag_precompute_string)
            diag_precompute_string = re.sub('j','i',diag_precompute_string)
            # Code to do the looping for Kdiag
            self._code['Kdiag'] =\
            """
            // _code['Kdiag']
            // Code for computing diagonal of covariance function.
            int i;
            int n = target_array->dimensions[0];
            int input_dim = X_array->dimensions[1];
            //#pragma omp parallel for
            for (i=0;i<n;i++){
                    %s
                    //target[i] =
                    TARGET1(i)=k(%s);
            }
            %s
            """%(diag_precompute_string,diag_arg_string,"/*"+str(self._sp_k)+"*/") #adding a string representation forces recompile when needed
            # Code to compute gradients
            if self.output_dim>1:
                # One gradient snippet per split (per-output) parameter; the
                # forward and reverse argument orders cover both index roles.
                for i, theta in enumerate(self._sp_theta_i):
                    grad_func_list = [' '*26 + 'TARGET1(ii) += PARTIAL2(i, j)*dk_d%s(%s);'%(theta.name, arg_string)]
                    grad_func_list += [' '*26 + 'TARGET1(jj) += PARTIAL2(i, j)*dk_d%s(%s);'%(theta.name, reverse_arg_string)]
                    grad_func_list = c_define_output_indices+grad_func_list
                    grad_func_string = '\n'.join(grad_func_list)
                    self._code['dK_d' + theta.name] =\
                    """
                    int i;
                    int j;
                    int n = partial_array->dimensions[0];
                    int num_inducing = partial_array->dimensions[1];
                    int input_dim = X_array->dimensions[1];
                    //#pragma omp parallel for private(j)
                    for (i=0;i<n;i++){
                        for (j=0;j<num_inducing;j++){
                            %s
                        }
                    }
                    %s
                    """%(grad_func_string,"/*"+str(self._sp_k)+"*/") # adding a string representation forces recompile when needed
                    self._code['dK_d' +theta.name + '_X'] = self._code['dK_d' + theta.name].replace('Z2(', 'X2(')
                    # Code to compute gradients for Kdiag TODO: needs clean up
                    diag_grad_func_string = re.sub('Z','X',grad_func_string,count=0)
                    diag_grad_func_string = re.sub('int jj','//int jj',diag_grad_func_string)
                    diag_grad_func_string = re.sub('j','i',diag_grad_func_string)
                    diag_grad_func_string = re.sub('PARTIAL2\(i, i\)','PARTIAL(i)',diag_grad_func_string)
                    self._code['dKdiag_d' + theta.name] =\
                    """
                    // _dKdiag_dtheta_code
                    // Code for computing gradient of diagonal with respect to parameters.
                    int i;
                    int n = partial_array->dimensions[0];
                    int input_dim = X_array->dimensions[1];
                    for (i=0;i<n;i++){
                            %s
                    }
                    %s
                    """%(diag_grad_func_string,"/*"+str(self._sp_k)+"*/") #adding a string representation forces recompile when needed
            # One gradient snippet per shared parameter; TARGET1(i) indexes the
            # parameter's slot in the gradient vector.
            for i, theta in enumerate(self._sp_theta):
                grad_func_list = [' '*26 + 'TARGET1(%i) += PARTIAL2(i, j)*dk_d%s(%s);'%(i,theta.name,arg_string)]
                grad_func_string = '\n'.join(grad_func_list)
                self._code['dK_d' + theta.name] =\
                """
                // _dK_dtheta_code
                // Code for computing gradient of covariance with respect to parameters.
                int i;
                int j;
                int n = partial_array->dimensions[0];
                int num_inducing = partial_array->dimensions[1];
                int input_dim = X_array->dimensions[1];
                //#pragma omp parallel for private(j)
                for (i=0;i<n;i++){
                    for (j=0;j<num_inducing;j++){
                        %s
                    }
                }
                %s
                """%(grad_func_string,"/*"+str(self._sp_k)+"*/") # adding a string representation forces recompile when needed
                self._code['dK_d' + theta.name +'_X'] = self._code['dK_d' + theta.name].replace('Z2(', 'X2(')
                # Code to compute gradients for Kdiag TODO: needs clean up
                diag_grad_func_string = re.sub('Z','X',grad_func_string,count=0)
                diag_grad_func_string = re.sub('int jj','//int jj',diag_grad_func_string)
                diag_grad_func_string = re.sub('j','i',diag_grad_func_string)
                diag_grad_func_string = re.sub('PARTIAL2\(i, i\)','PARTIAL(i)',diag_grad_func_string)
                self._code['dKdiag_d' + theta.name] =\
                """
                // _dKdiag_dtheta_code
                // Code for computing gradient of diagonal with respect to parameters.
                int i;
                int n = partial_array->dimensions[0];
                int input_dim = X_array->dimensions[1];
                for (i=0;i<n;i++){
                        %s
                }
                %s
                """%(diag_grad_func_string,"/*"+str(self._sp_k)+"*/") #adding a string representation forces recompile when needed
            # Code for gradients wrt X, TODO: may need to deal with special case where one input is actually an output.
            gradX_func_list = []
            if self.output_dim>1:
                gradX_func_list += c_define_output_indices
            gradX_func_list += ["TARGET2(i, %i) += partial[i*num_inducing+j]*dk_dx_%i(%s);"%(q,q,arg_string) for q in range(self._real_input_dim)]
            gradX_func_string = "\n".join(gradX_func_list)
            self._code['dK_dX'] = \
            """
            // _dK_dX_code
            // Code for computing gradient of covariance with respect to inputs.
            int i;
            int j;
            int n = partial_array->dimensions[0];
            int num_inducing = partial_array->dimensions[1];
            int input_dim = X_array->dimensions[1];
            //#pragma omp parallel for private(j)
            for (i=0;i<n; i++){
                for (j=0; j<num_inducing; j++){
                    %s
                }
            }
            %s
            """%(gradX_func_string,"/*"+str(self._sp_k)+"*/") #adding a string representation forces recompile when needed
            self._code['dK_dX_X'] = self._code['dK_dX'].replace('Z2(', 'X2(')
            diag_gradX_func_string = re.sub('Z','X',gradX_func_string,count=0)
            diag_gradX_func_string = re.sub('int jj','//int jj',diag_gradX_func_string)
            diag_gradX_func_string = re.sub('j','i',diag_gradX_func_string)
            diag_gradX_func_string = re.sub('PARTIAL2\(i\, i\)','2*PARTIAL(i)',diag_gradX_func_string)
            # Code for gradients of Kdiag wrt X
            self._code['dKdiag_dX'] = \
            """
            // _dKdiag_dX_code
            // Code for computing gradient of diagonal with respect to inputs.
            int n = partial_array->dimensions[0];
            int input_dim = X_array->dimensions[1];
            for (int i=0;i<n; i++){
                    %s
            }
            %s
            """%(diag_gradX_func_string,"/*"+str(self._sp_k)+"*/") #adding a
            # string representation forces recompile when needed Get rid
            # of Zs in argument for diagonal. TODO: Why wasn't
            # diag_func_string called here? Need to check that.
        #TODO: insert multiple functions here via string manipulation
        #TODO: similar functions for psi_stats
        #TODO: similar functions when cython available.
        #TODO: similar functions when only python available.
def _get_arg_names(self, target=None, Z=None, partial=None):
arg_names = ['X']
if target is not None:
arg_names += ['target']
for shared_params in self._sp_theta:
arg_names += [shared_params.name]
if Z is not None:
arg_names += ['Z']
if partial is not None:
arg_names += ['partial']
if self.output_dim>1:
arg_names += self._split_theta_names
arg_names += ['output_dim']
return arg_names
    def _generate_inline(self, code, X, target=None, Z=None, partial=None):
        """Run one of the generated C snippets via weave.inline.

        :param code: a C snippet from self._code.
        :param X: input array; target/Z/partial are passed through when given.
        :raises RuntimeError: if weave is not available (no fallback yet).
        """
        output_dim = self.output_dim
        # Need to extract parameters to local variables first
        # NOTE(review): writing into locals() inside a function is not a
        # supported way to create local variables in CPython; this appears to
        # rely on weave reading the calling frame's f_locals dict by the names
        # in arg_names -- fragile, confirm it still works before relying on it.
        for shared_params in self._sp_theta:
            locals()[shared_params.name] = getattr(self, shared_params.name)
        for split_params in self._split_theta_names:
            locals()[split_params] = np.asarray(getattr(self, split_params))
        arg_names = self._get_arg_names(target, Z, partial)
        if weave_available:
            return weave.inline(code=code, arg_names=arg_names,**self.weave_kwargs)
        else:
            raise RuntimeError('Weave not available and other variants of sympy covariance not yet implemented')
def K(self,X,Z,target):
if Z is None:
self._generate_inline(self._code['K_X'], X, target)
else:
self._generate_inline(self._code['K'], X, target, Z)
def Kdiag(self,X,target):
self._generate_inline(self._code['Kdiag'], X, target)
    def _param_grad_helper(self,partial,X,Z,target):
        # Accumulate gradients of the covariance w.r.t. the parameters.
        # NOTE(review): _gen_code only stores per-parameter keys
        # ('dK_d<name>' / 'dK_d<name>_X'); the aggregate 'dK_dtheta' /
        # 'dK_dtheta_X' keys used here are never generated, so this raises
        # KeyError as written -- it probably needs to loop over self._sp_theta
        # like update_gradients_full does.  Left unchanged pending confirmation.
        if Z is None:
            self._generate_inline(self._code['dK_dtheta_X'], X, target, Z, partial)
        else:
            self._generate_inline(self._code['dK_dtheta'], X, target, Z, partial)
def dKdiag_dtheta(self, partial, X, target):
    """Accumulate gradients of the covariance diagonal w.r.t. the kernel
    parameters into target.

    :param partial: partial derivatives of the objective w.r.t. the diagonal.
    :param X: input locations.
    :param target: array the gradient contributions are added to.
    """
    # Bug fix: the original line had a stray pasted fragment appended
    # (".namelocals()[shared_params.name] = ...") which made the statement
    # invalid. The diagonal has no second input, so Z is explicitly None.
    self._generate_inline(self._code['dKdiag_dtheta'], X, target, Z=None, partial=partial)
def gradients_X(self, partial, X, Z, target):
    """Accumulate gradients of the covariance w.r.t. the inputs X into target.

    Uses the symmetric snippet when Z is None; Z is forwarded as-is.
    """
    key = 'dK_dX_X' if Z is None else 'dK_dX'
    self._generate_inline(self._code[key], X, target, Z, partial)
def dKdiag_dX(self, partial, X, target):
    """Accumulate gradients of the covariance diagonal w.r.t. X into target.

    :param partial: partial derivatives of the objective w.r.t. the diagonal.
    :param X: input locations.
    :param target: array the gradient contributions are added to.
    """
    # Bug fix: the original passed the undefined name `Z` (NameError at
    # call time). The diagonal has no second input, so None is passed,
    # matching dKdiag_dtheta above.
    self._generate_inline(self._code['dKdiag_dX'], X, target, None, partial)
def compute_psi_stats(self):
    """Symbolically derive the psi statistics (psi1 and psi2) of the kernel
    by integrating the covariance expression against Gaussian input
    densities, one input dimension at a time.

    Results are stored on self as `_sp_psi1` and `_sp_psi2` (sympy
    expressions). psi0 is not computed here (see TODO below).
    """
    #define some normal distributions
    # One N(mu_i, S_i) factor per input dimension.
    mus = [sp.var('mu_%i'%i,real=True) for i in range(self.input_dim)]
    Ss = [sp.var('S_%i'%i,positive=True) for i in range(self.input_dim)]
    normals = [(2*sp.pi*Si)**(-0.5)*sp.exp(-0.5*(xi-mui)**2/Si) for xi, mui, Si in zip(self._sp_x, mus, Ss)]
    #do some integration!
    #self._sp_psi0 = ??
    # psi1: expectation of k(x, z) under the Gaussian over x, taken
    # dimension by dimension.
    self._sp_psi1 = self._sp_k
    for i in range(self.input_dim):
        print 'perfoming integrals %i of %i'%(i+1,2*self.input_dim)
        sys.stdout.flush()
        self._sp_psi1 *= normals[i]
        self._sp_psi1 = sp.integrate(self._sp_psi1,(self._sp_x[i],-sp.oo,sp.oo))
        # Keep sympy's cache from growing across the repeated integrals.
        clear_cache()
    self._sp_psi1 = self._sp_psi1.simplify()
    #and here's psi2 (eek!)
    # psi2 needs k(x, z) * k(x, z'), so clone the kernel with a second
    # set of inducing variables zp_i.
    zprime = [sp.Symbol('zp%i'%i) for i in range(self.input_dim)]
    self._sp_psi2 = self._sp_k.copy()*self._sp_k.copy().subs(zip(self._sp_z,zprime))
    for i in range(self.input_dim):
        print 'perfoming integrals %i of %i'%(self.input_dim+i+1,2*self.input_dim)
        sys.stdout.flush()
        self._sp_psi2 *= normals[i]
        self._sp_psi2 = sp.integrate(self._sp_psi2,(self._sp_x[i],-sp.oo,sp.oo))
        clear_cache()
    self._sp_psi2 = self._sp_psi2.simplify()
def parameters_changed(self):
    """Invalidate cached intermediate results after a parameter update.

    Each cache slot is rebound to a fresh, uninitialised length-1 array
    (rows unpacked from a (k, 1) empty array).
    """
    fresh = np.empty(shape=(2, 1))
    self._cache, self._cache2 = fresh
    fresh = np.empty(shape=(3, 1))
    self._cache3, self._cache4, self._cache5 = fresh
def update_gradients_full(self, dL_dK, X):
    """Set the gradient attribute of every kernel parameter given the
    partial derivatives dL/dK of the objective w.r.t. the full covariance.

    :param dL_dK: partial derivatives w.r.t. the covariance matrix.
    :param X: input locations.
    """
    # Need to extract parameters to local variables first
    self._K_computations(X, None)
    # Shared parameters are stored as objects carrying a `.name` attribute.
    for shared_params in self._sp_theta:
        parameter = getattr(self, shared_params.name)
        code = self._code['dK_d' + shared_params.name]
        setattr(parameter, 'gradient', self._generate_inline(code, X, target=None, Z=None, partial=dL_dK))
    # Bug fix: entries of _split_theta_names are plain name strings (see
    # _generate_inline and _get_arg_names), so the original
    # `split_params.name` raised AttributeError on str.
    for split_params in self._split_theta_names:
        parameter = getattr(self, split_params)
        code = self._code['dK_d' + split_params]
        setattr(parameter, 'gradient', self._generate_inline(code, X, target=None, Z=None, partial=dL_dK))
# def update_gradients_sparse(self, dL_dKmm, dL_dKnm, dL_dKdiag, X, Z):
# #contributions from Kdiag
# self.variance.gradient = np.sum(dL_dKdiag)
# #from Knm
# self._K_computations(X, Z)
# self.variance.gradient += np.sum(dL_dKnm * self._K_dvar)
# if self.ARD:
# self.lengthscale.gradient = self._dL_dlengthscales_via_K(dL_dKnm, X, Z)
# else:
# self.lengthscale.gradient = (self.variance / self.lengthscale) * np.sum(self._K_dvar * self._K_dist2 * dL_dKnm)
# #from Kmm
# self._K_computations(Z, None)
# self.variance.gradient += np.sum(dL_dKmm * self._K_dvar)
# if self.ARD:
# self.lengthscale.gradient += self._dL_dlengthscales_via_K(dL_dKmm, Z, None)
# else:
# self.lengthscale.gradient += (self.variance / self.lengthscale) * np.sum(self._K_dvar * self._K_dist2 * dL_dKmm)
#---------------------------------------#
# Precomputations #
#---------------------------------------#
def _K_computations(self, X, Z):
if Z is None:
self._generate_inline(self._precompute, X)
else:
self._generate_inline(self._precompute, X, Z=Z)

134
GPy/kern/_src/poly_tmp.py Normal file
View file

@ -0,0 +1,134 @@
# Copyright (c) 2013, GPy authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt)
from kernpart import Kernpart
import numpy as np
four_over_tau = 2./np.pi
class POLY(Kernpart):
    """
    Polynomial kernel. Included for completeness, but generally not recommended, is the polynomial kernel:

    .. math::

       k(x, y) = \\sigma^2(\\sigma_w^2 x'y + \\sigma_b^2)^d

    The kernel parameters are :math:`\\sigma^2` (variance), :math:`\\sigma^2_w`
    (weight_variance), :math:`\\sigma^2_b` (bias_variance) and d
    (degree). Only gradients of the first three are provided for
    kernel optimisation, it is assumed that polynomial degree would
    be set by hand.

    The kernel is not recommended as it is badly behaved when the
    :math:`\\sigma^2_w x'y + \\sigma^2_b` term has a magnitude greater than one. For
    completeness there is an automatic relevance determination version of this
    kernel provided (NOT YET IMPLEMENTED!).

    :param input_dim: the number of input dimensions
    :type input_dim: int
    :param variance: the variance :math:`\\sigma^2`
    :type variance: float
    :param weight_variance: the vector of the variances of the prior over input weights in the neural network :math:`\\sigma^2_w`
    :type weight_variance: array or list of the appropriate size (or float if there is only one weight variance parameter)
    :param bias_variance: the variance of the prior over bias parameters :math:`\\sigma^2_b`
    :param degree: the degree of the polynomial.
    :type degree: int
    :param ARD: Auto Relevance Determination. If equal to "False", the kernel is isotropic (ie. one weight variance parameter :math:`\\sigma^2_w`), otherwise there is one weight variance parameter per dimension.
    :type ARD: Boolean
    :rtype: Kernpart object
    """
    def __init__(self, input_dim, variance=1., weight_variance=None, bias_variance=1., degree=2, ARD=False):
        self.input_dim = input_dim
        self.ARD = ARD
        if not ARD:
            self.num_params = 3
            if weight_variance is not None:
                weight_variance = np.asarray(weight_variance)
                assert weight_variance.size == 1, "Only one weight variance needed for non-ARD kernel"
            else:
                weight_variance = 1.*np.ones(1)
        else:
            self.num_params = self.input_dim + 2
            if weight_variance is not None:
                weight_variance = np.asarray(weight_variance)
                assert weight_variance.size == self.input_dim, "bad number of weight variances"
            else:
                weight_variance = np.ones(self.input_dim)
            # The ARD variant is documented but not yet implemented.
            raise NotImplementedError
        self.degree = degree
        self.name = 'poly_deg' + str(self.degree)
        self._set_params(np.hstack((variance, weight_variance.flatten(), bias_variance)))

    def _get_params(self):
        """Return the parameter vector [variance, weight_variance..., bias_variance]."""
        return np.hstack((self.variance, self.weight_variance.flatten(), self.bias_variance))

    def _set_params(self, x):
        """Unpack the parameter vector into the kernel's attributes."""
        assert x.size == (self.num_params)
        self.variance = x[0]
        self.weight_variance = x[1:-1]
        self.weight_std = np.sqrt(self.weight_variance)
        self.bias_variance = x[-1]

    def _get_param_names(self):
        """Return names for each element of the parameter vector."""
        if self.num_params == 3:
            return ['variance', 'weight_variance', 'bias_variance']
        else:
            # Bug fix: the original indexed self.lengthscale, which this
            # kernel does not define; the per-dimension parameters are
            # the weight variances.
            return ['variance'] + ['weight_variance_%i' % i for i in range(self.weight_variance.size)] + ['bias_variance']

    def K(self, X, X2, target):
        """Return covariance between X and X2 (added into target)."""
        self._K_computations(X, X2)
        target += self.variance*self._K_dvar

    def Kdiag(self, X, target):
        """Compute the diagonal of the covariance matrix for X (added into target)."""
        self._K_diag_computations(X)
        target += self.variance*self._K_diag_dvar

    def dK_dtheta(self, dL_dK, X, X2, target):
        """Derivative of the covariance with respect to the parameters.

        target receives [d/dvariance, d/dweight_variance, d/dbias_variance].
        """
        self._K_computations(X, X2)
        # Common factor d * sigma^2 * arg^(d-1) from the chain rule.
        base = self.variance*self.degree*self._K_poly_arg**(self.degree-1)
        base_cov_grad = base*dL_dK
        target[0] += np.sum(self._K_dvar*dL_dK)
        target[1] += (self._K_inner_prod*base_cov_grad).sum()
        target[2] += base_cov_grad.sum()

    def dK_dX(self, dL_dK, X, X2, target):
        """Derivative of the covariance matrix with respect to X."""
        self._K_computations(X, X2)
        arg = self._K_poly_arg
        if X2 is None:
            # Symmetric case: both occurrences of X contribute, hence the 2.
            target += 2*self.weight_variance*self.degree*self.variance*(((X[None,:, :])) *(arg**(self.degree-1))[:, :, None]*dL_dK[:, :, None]).sum(1)
        else:
            target += self.weight_variance*self.degree*self.variance*(((X2[None,:, :])) *(arg**(self.degree-1))[:, :, None]*dL_dK[:, :, None]).sum(1)

    def dKdiag_dX(self, dL_dKdiag, X, target):
        """Gradient of diagonal of covariance with respect to X."""
        self._K_diag_computations(X)
        arg = self._K_diag_poly_arg
        target += 2.*self.weight_variance*self.degree*self.variance*X*dL_dKdiag[:, None]*(arg**(self.degree-1))[:, None]

    def _K_computations(self, X, X2):
        """Cache the inner products, polynomial argument and k/variance."""
        if self.ARD:
            # ARD variant not implemented (see __init__).
            pass
        else:
            if X2 is None:
                self._K_inner_prod = np.dot(X,X.T)
            else:
                self._K_inner_prod = np.dot(X,X2.T)
            self._K_poly_arg = self._K_inner_prod*self.weight_variance + self.bias_variance
            self._K_dvar = self._K_poly_arg**self.degree

    def _K_diag_computations(self, X):
        """Cache the diagonal polynomial argument and k/variance."""
        if self.ARD:
            pass
        else:
            self._K_diag_poly_arg = (X*X).sum(1)*self.weight_variance + self.bias_variance
            self._K_diag_dvar = self._K_diag_poly_arg**self.degree

View file

@ -0,0 +1,70 @@
# Check Matthew Rocklin's blog post.
import sympy as sym
import numpy as np
from kern import Kern
from ...core.symbolic import Symbolic_core
class Symbolic(Kern, Symbolic_core):
    """
    A kernel whose covariance function is supplied as a sympy expression.

    :param input_dim: number of input dimensions.
    :param k: sympy expression for the covariance k(x, z); required.
    :param output_dim: number of outputs (default 1).
    :param name: kernel name.
    :param parameters: parameter specification passed to Symbolic_core.
    :param func_modules: extra function modules for sympy lambdification.
    """
    def __init__(self, input_dim, k=None, output_dim=1, name='symbolic', parameters=None, active_dims=None, operators=None, func_modules=None):
        if k is None:
            # Bug fix: the original used the Python-2-only statement form
            # `raise ValueError, "..."`; the call form works on 2 and 3.
            raise ValueError("You must provide an argument for the covariance function.")
        Kern.__init__(self, input_dim, active_dims, name=name)
        # kdiag starts as a copy of k; z is substituted by x in
        # _set_expressions below.
        kdiag = k
        self.cacheable = ['X', 'Z']
        # Bug fix: avoid the shared mutable default argument `func_modules=[]`.
        if func_modules is None:
            func_modules = []
        Symbolic_core.__init__(self, {'k':k,'kdiag':kdiag}, cacheable=self.cacheable, derivatives = ['X', 'theta'], parameters=parameters, func_modules=func_modules)
        self.output_dim = output_dim

    def __add__(self, other):
        # NOTE(review): `spkern` is not defined in this module, so adding
        # two symbolic kernels raises NameError -- confirm the intended
        # constructor.
        return spkern(self._sym_k+other._sym_k)

    def _set_expressions(self, expressions):
        """This method is overwritten because we need to modify kdiag by substituting z for x. We do this by calling the parent expression method to extract variables from expressions, then subsitute the z variables that are present with x."""
        Symbolic_core._set_expressions(self, expressions)
        Symbolic_core._set_variables(self, self.cacheable)
        # Substitute z with x to obtain kdiag.
        for x, z in zip(self.variables['X'], self.variables['Z']):
            expressions['kdiag'] = expressions['kdiag'].subs(z, x)
        Symbolic_core._set_expressions(self, expressions)

    def K(self, X, X2=None):
        """Evaluate the covariance between X and X2 (X with itself when X2 is None)."""
        if X2 is None:
            return self.eval_function('k', X=X, Z=X)
        else:
            return self.eval_function('k', X=X, Z=X2)

    def Kdiag(self, X):
        """Evaluate the covariance diagonal at X, broadcasting constant results to len(X)."""
        d = self.eval_function('kdiag', X=X)
        if not d.shape[0] == X.shape[0]:
            d = np.tile(d, (X.shape[0], 1))
        return d

    def gradients_X(self, dL_dK, X, X2=None):
        """Gradients of the objective w.r.t. X."""
        #if self._X is None or X.base is not self._X.base or X2 is not None:
        g = self.eval_gradients_X('k', dL_dK, X=X, Z=X2)
        if X2 is None:
            # Symmetric case: X appears on both sides of k, so double.
            g *= 2
        return g

    def gradients_X_diag(self, dL_dK, X):
        """Gradients of the objective w.r.t. X through the diagonal only."""
        return self.eval_gradients_X('kdiag', dL_dK, X=X)

    def update_gradients_full(self, dL_dK, X, X2=None):
        """Update parameter gradients given dL/dK over the full covariance."""
        # Need to extract parameters to local variables first
        if X2 is None:
            # need to double this inside ...
            self.eval_update_gradients('k', dL_dK, X=X)
        else:
            self.eval_update_gradients('k', dL_dK, X=X, Z=X2)

    def update_gradients_diag(self, dL_dKdiag, X):
        """Update parameter gradients given dL/dKdiag over the diagonal."""
        # Consistency fix: pass X by keyword like every other eval_* call.
        self.eval_update_gradients('kdiag', dL_dKdiag, X=X)

View file

@ -0,0 +1,17 @@
# Copyright (c) 2013, GPy authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt)
# Construct covariance functions from matlab saves.
import numpy as np
from kern import kern
import parts
import scipy.io
def read_matlab(mat_data)
mat_data = scipy.io.loadmat(os.path.join(data_path, data_set, 'frey_rawface.mat'))
if mat_data['type']=='cmpnd':
# cmpnd kernel
types = []
for i in range(mat_data['comp'][0][0]):
types.append(mat_data['comp'][0][0][i]

318
GPy/kern/parts/eq.py Normal file
View file

@ -0,0 +1,318 @@
# Copyright (c) 2012, GPy authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt)
from kernpart import Kernpart
import numpy as np
from scipy import weave
from ...util.linalg import tdot
from ...util.misc import fast_array_equal
class EQ(Kernpart_stationary):
    """
    Exponentiated Quadratic covariance function, also known as radial basis function, squared-exponential or Gaussian kernel.

    .. math::

       k(r) = \sigma^2 \exp \\bigg(- \\frac{1}{2} r^2 \\bigg) \ \ \ \ \ \\text{ where } r^2 = \sum_{i=1}^d \\frac{ (x_i-x^\prime_i)^2}{\ell_i^2}

    where \ell_i is the lengthscale, \sigma^2 the variance and d the dimensionality of the input.

    :param input_dim: the number of input dimensions
    :type input_dim: int
    :param variance: the variance of the kernel
    :type variance: float
    :param lengthscale: the vector of lengthscale of the kernel
    :type lengthscale: array or list of the appropriate size (or float if there is only one lengthscale parameter)
    :param ARD: Auto Relevance Determination. If equal to "False", the kernel is isotropic (ie. one single lengthscale parameter \ell), otherwise there is one lengthscale parameter per dimension.
    :type ARD: Boolean
    :rtype: kernel object

    .. Note: this object implements both the ARD and 'spherical' version of the function
    """
    # NOTE(review): this module imports `Kernpart`, not
    # `Kernpart_stationary` -- confirm the intended base-class import.
    def __init__(self, input_dim, variance=1., lengthscale=None, ARD=False):
        Kernpart_stationary.__init__(self, input_dim, lengthscale, ARD)
        self.name = 'rbf'
        self._set_params(np.hstack((variance, self.lengthscale.flatten())))
        # a set of optional args to pass to weave
        self.weave_options = {'headers' : ['<omp.h>'],
                              'extra_compile_args': ['-fopenmp -O3'], # -march=native'],
                              'extra_link_args' : ['-lgomp']}

    def _get_params(self):
        # Parameter vector layout: [variance, lengthscale...].
        return np.hstack((self.variance, self.lengthscale))

    def _set_params(self, x):
        # First entry is the variance; the rest is handled by the base class.
        assert x.size == (self.num_params)
        self.variance = x[0]
        Kernpart_stationary._set_params(self, x[1:])

    def _get_param_names(self):
        # One lengthscale name, or one per dimension in the ARD case.
        if self.num_params == 2:
            return ['variance', 'lengthscale']
        else:
            return ['variance'] + ['lengthscale_%i' % i for i in range(self.lengthscale.size)]

    def K(self, X, X2, target):
        # Add variance * exp(-r^2/2) to target (cached in _K_dvar).
        self._K_computations(X, X2)
        target += self.variance * self._K_dvar

    def Kdiag(self, X, target):
        # The diagonal of a stationary kernel is the constant variance.
        np.add(target, self.variance, target)

    def dK_dtheta(self, dL_dK, X, X2, target):
        # Accumulate gradients w.r.t. [variance, lengthscale(s)] into target.
        self._K_computations(X, X2)
        target[0] += np.sum(self._K_dvar * dL_dK)
        if self.ARD:
            dvardLdK = self._K_dvar * dL_dK
            var_len3 = self.variance / np.power(self.lengthscale, 3)
            if X2 is None:
                # save computation for the symmetrical case
                dvardLdK = dvardLdK + dvardLdK.T
                # C loop only visits the strict lower triangle (j < i).
                code = """
                int q,i,j;
                double tmp;
                for(q=0; q<input_dim; q++){
                  tmp = 0;
                  for(i=0; i<num_data; i++){
                    for(j=0; j<i; j++){
                      tmp += (X(i,q)-X(j,q))*(X(i,q)-X(j,q))*dvardLdK(i,j);
                    }
                  }
                  target(q+1) += var_len3(q)*tmp;
                }
                """
                num_data, num_inducing, input_dim = X.shape[0], X.shape[0], self.input_dim
                weave.inline(code, arg_names=['num_data',
                                              'num_inducing',
                                              'input_dim',
                                              'X', 'X2',
                                              'target',
                                              'dvardLdK',
                                              'var_len3'],
                             type_converters=weave.converters.blitz,
                             **self.weave_options)
            else:
                code = """
                int q,i,j;
                double tmp;
                for(q=0; q<input_dim; q++){
                  tmp = 0;
                  for(i=0; i<num_data; i++){
                    for(j=0; j<num_inducing; j++){
                      tmp += (X(i,q)-X2(j,q))*(X(i,q)-X2(j,q))*dvardLdK(i,j);
                    }
                  }
                  target(q+1) += var_len3(q)*tmp;
                }
                """
                num_data, num_inducing, input_dim = X.shape[0], X2.shape[0], self.input_dim
                # [np.add(target[1+q:2+q],var_len3[q]*np.sum(dvardLdK*np.square(X[:,q][:,None]-X2[:,q][None,:])),target[1+q:2+q]) for q in range(self.input_dim)]
                weave.inline(code, arg_names=['num_data',
                                              'num_inducing',
                                              'input_dim',
                                              'X', 'X2',
                                              'target',
                                              'dvardLdK',
                                              'var_len3'],
                             type_converters=weave.converters.blitz,
                             **self.weave_options)
        else:
            # Isotropic case: a single lengthscale gradient entry.
            target[1] += (self.variance / self.lengthscale) * np.sum(self._K_dvar * self._K_dist2 * dL_dK)

    def dK_dX(self, dL_dK, X, X2, target):
        # Gradient of the covariance w.r.t. the inputs X.
        # NOTE(review): indexes X2 directly, so this assumes X2 is not
        # None -- confirm against callers.
        self._K_computations(X, X2)
        _K_dist = X[:, None, :] - X2[None, :, :] # don't cache this in _K_computations because it is high memory. If this function is being called, chances are we're not in the high memory arena.
        dK_dX = (-self.variance / self.lengthscale2) * np.transpose(self._K_dvar[:, :, np.newaxis] * _K_dist, (1, 0, 2))
        target += np.sum(dK_dX * dL_dK.T[:, :, None], 0)

    def dKdiag_dX(self, dL_dKdiag, X, target):
        # Diagonal is constant in X, so there is no gradient contribution.
        pass

    #---------------------------------------#
    #             PSI statistics            #
    #---------------------------------------#
    def psi0(self, Z, mu, S, target):
        # psi0 = E[k(x,x)] = variance for a stationary kernel.
        target += self.variance

    def dpsi0_dtheta(self, dL_dpsi0, Z, mu, S, target):
        # Only the variance (first parameter) enters psi0.
        target[0] += np.sum(dL_dpsi0)

    def dpsi0_dmuS(self, dL_dpsi0, Z, mu, S, target_mu, target_S):
        # psi0 is independent of mu and S.
        pass

    def psi1(self, Z, mu, S, target):
        self._psi_computations(Z, mu, S)
        target += self._psi1

    def dpsi1_dtheta(self, dL_dpsi1, Z, mu, S, target):
        # Gradients of psi1 w.r.t. [variance, lengthscale(s)].
        self._psi_computations(Z, mu, S)
        target[0] += np.sum(dL_dpsi1 * self._psi1 / self.variance)
        d_length = self._psi1[:,:,None] * ((self._psi1_dist_sq - 1.)/(self.lengthscale*self._psi1_denom) +1./self.lengthscale)
        dpsi1_dlength = d_length * dL_dpsi1[:, :, None]
        if not self.ARD:
            target[1] += dpsi1_dlength.sum()
        else:
            target[1:] += dpsi1_dlength.sum(0).sum(0)

    def dpsi1_dZ(self, dL_dpsi1, Z, mu, S, target):
        self._psi_computations(Z, mu, S)
        denominator = (self.lengthscale2 * (self._psi1_denom))
        dpsi1_dZ = -self._psi1[:, :, None] * ((self._psi1_dist / denominator))
        target += np.sum(dL_dpsi1[:, :, None] * dpsi1_dZ, 0)

    def dpsi1_dmuS(self, dL_dpsi1, Z, mu, S, target_mu, target_S):
        self._psi_computations(Z, mu, S)
        tmp = self._psi1[:, :, None] / self.lengthscale2 / self._psi1_denom
        target_mu += np.sum(dL_dpsi1[:, :, None] * tmp * self._psi1_dist, 1)
        target_S += np.sum(dL_dpsi1[:, :, None] * 0.5 * tmp * (self._psi1_dist_sq - 1), 1)

    def psi2(self, Z, mu, S, target):
        self._psi_computations(Z, mu, S)
        target += self._psi2

    def dpsi2_dtheta(self, dL_dpsi2, Z, mu, S, target):
        """Shape N,num_inducing,num_inducing,Ntheta"""
        self._psi_computations(Z, mu, S)
        d_var = 2.*self._psi2 / self.variance
        d_length = 2.*self._psi2[:, :, :, None] * (self._psi2_Zdist_sq * self._psi2_denom + self._psi2_mudist_sq + S[:, None, None, :] / self.lengthscale2) / (self.lengthscale * self._psi2_denom)
        target[0] += np.sum(dL_dpsi2 * d_var)
        dpsi2_dlength = d_length * dL_dpsi2[:, :, :, None]
        if not self.ARD:
            target[1] += dpsi2_dlength.sum()
        else:
            target[1:] += dpsi2_dlength.sum(0).sum(0).sum(0)

    def dpsi2_dZ(self, dL_dpsi2, Z, mu, S, target):
        self._psi_computations(Z, mu, S)
        term1 = self._psi2_Zdist / self.lengthscale2 # num_inducing, num_inducing, input_dim
        term2 = self._psi2_mudist / self._psi2_denom / self.lengthscale2 # N, num_inducing, num_inducing, input_dim
        dZ = self._psi2[:, :, :, None] * (term1[None] + term2)
        target += (dL_dpsi2[:, :, :, None] * dZ).sum(0).sum(0)

    def dpsi2_dmuS(self, dL_dpsi2, Z, mu, S, target_mu, target_S):
        """Think N,num_inducing,num_inducing,input_dim """
        self._psi_computations(Z, mu, S)
        tmp = self._psi2[:, :, :, None] / self.lengthscale2 / self._psi2_denom
        target_mu += -2.*(dL_dpsi2[:, :, :, None] * tmp * self._psi2_mudist).sum(1).sum(1)
        target_S += (dL_dpsi2[:, :, :, None] * tmp * (2.*self._psi2_mudist_sq - 1)).sum(1).sum(1)

    #---------------------------------------#
    #            Precomputations            #
    #---------------------------------------#
    def _K_computations(self, X, X2):
        # Recompute the scaled squared distances (_K_dist2) and the
        # exponential term (_K_dvar) only when X, X2 or the parameters
        # have changed since the last call.
        params = self._get_params()
        if not (fast_array_equal(X, self._X) and fast_array_equal(X2, self._X2) and fast_array_equal(self._params , params)):
            self._X = X.copy()
            self._params = params.copy()
            if X2 is None:
                self._X2 = None
                X = X / self.lengthscale
                Xsquare = np.sum(np.square(X), 1)
                self._K_dist2 = -2.*tdot(X) + (Xsquare[:, None] + Xsquare[None, :])
            else:
                self._X2 = X2.copy()
                X = X / self.lengthscale
                X2 = X2 / self.lengthscale
                self._K_dist2 = -2.*np.dot(X, X2.T) + (np.sum(np.square(X), 1)[:, None] + np.sum(np.square(X2), 1)[None, :])
            self._K_dvar = np.exp(-0.5 * self._K_dist2)

    def _psi_computations(self, Z, mu, S):
        # here are the "statistics" for psi1 and psi2
        if not fast_array_equal(Z, self._Z):
            # Z has changed, compute Z specific stuff
            self._psi2_Zhat = 0.5 * (Z[:, None, :] + Z[None, :, :]) # M,M,Q
            self._psi2_Zdist = 0.5 * (Z[:, None, :] - Z[None, :, :]) # M,M,Q
            self._psi2_Zdist_sq = np.square(self._psi2_Zdist / self.lengthscale) # M,M,Q
        if not fast_array_equal(Z, self._Z) or not fast_array_equal(mu, self._mu) or not fast_array_equal(S, self._S):
            # something's changed. recompute EVERYTHING
            # psi1
            self._psi1_denom = S[:, None, :] / self.lengthscale2 + 1.
            self._psi1_dist = Z[None, :, :] - mu[:, None, :]
            self._psi1_dist_sq = np.square(self._psi1_dist) / self.lengthscale2 / self._psi1_denom
            self._psi1_exponent = -0.5 * np.sum(self._psi1_dist_sq + np.log(self._psi1_denom), -1)
            self._psi1 = self.variance * np.exp(self._psi1_exponent)
            # psi2
            self._psi2_denom = 2.*S[:, None, None, :] / self.lengthscale2 + 1. # N,M,M,Q
            self._psi2_mudist, self._psi2_mudist_sq, self._psi2_exponent, _ = self.weave_psi2(mu, self._psi2_Zhat)
            # self._psi2_mudist = mu[:,None,None,:]-self._psi2_Zhat #N,M,M,Q
            # self._psi2_mudist_sq = np.square(self._psi2_mudist)/(self.lengthscale2*self._psi2_denom)
            # self._psi2_exponent = np.sum(-self._psi2_Zdist_sq -self._psi2_mudist_sq -0.5*np.log(self._psi2_denom),-1) #N,M,M,Q
            self._psi2 = np.square(self.variance) * np.exp(self._psi2_exponent) # N,M,M,Q
            # store matrices for caching
            self._Z, self._mu, self._S = Z, mu, S

    def weave_psi2(self, mu, Zhat):
        # OpenMP C implementation of the psi2 mu-distance statistics.
        # Returns (mudist, mudist_sq, psi2_exponent, psi2); note the C code
        # leaves `psi2` itself unfilled (see comment in the snippet) and
        # the caller discards that fourth value.
        N, input_dim = mu.shape
        num_inducing = Zhat.shape[0]
        mudist = np.empty((N, num_inducing, num_inducing, input_dim))
        mudist_sq = np.empty((N, num_inducing, num_inducing, input_dim))
        psi2_exponent = np.zeros((N, num_inducing, num_inducing))
        psi2 = np.empty((N, num_inducing, num_inducing))
        psi2_Zdist_sq = self._psi2_Zdist_sq
        _psi2_denom = self._psi2_denom.squeeze().reshape(N, self.input_dim)
        half_log_psi2_denom = 0.5 * np.log(self._psi2_denom).squeeze().reshape(N, self.input_dim)
        variance_sq = float(np.square(self.variance))
        if self.ARD:
            lengthscale2 = self.lengthscale2
        else:
            # Broadcast the single lengthscale so the C code can index per-q.
            lengthscale2 = np.ones(input_dim) * self.lengthscale2
        code = """
        double tmp;
        #pragma omp parallel for private(tmp)
        for (int n=0; n<N; n++){
          for (int m=0; m<num_inducing; m++){
            for (int mm=0; mm<(m+1); mm++){
              for (int q=0; q<input_dim; q++){
                //compute mudist
                tmp = mu(n,q) - Zhat(m,mm,q);
                mudist(n,m,mm,q) = tmp;
                mudist(n,mm,m,q) = tmp;
                //now mudist_sq
                tmp = tmp*tmp/lengthscale2(q)/_psi2_denom(n,q);
                mudist_sq(n,m,mm,q) = tmp;
                mudist_sq(n,mm,m,q) = tmp;
                //now psi2_exponent
                tmp = -psi2_Zdist_sq(m,mm,q) - tmp - half_log_psi2_denom(n,q);
                psi2_exponent(n,mm,m) += tmp;
                if (m !=mm){
                  psi2_exponent(n,m,mm) += tmp;
                }
                //psi2 would be computed like this, but np is faster
                //tmp = variance_sq*exp(psi2_exponent(n,m,mm));
                //psi2(n,m,mm) = tmp;
                //psi2(n,mm,m) = tmp;
              }
            }
          }
        }
        """
        support_code = """
        #include <omp.h>
        #include <math.h>
        """
        weave.inline(code, support_code=support_code, libraries=['gomp'],
                     arg_names=['N', 'num_inducing', 'input_dim', 'mu', 'Zhat', 'mudist_sq', 'mudist', 'lengthscale2', '_psi2_denom', 'psi2_Zdist_sq', 'psi2_exponent', 'half_log_psi2_denom', 'psi2', 'variance_sq'],
                     type_converters=weave.converters.blitz, **self.weave_options)
        return mudist, mudist_sq, psi2_exponent, psi2

428
GPy/kern/parts/ode1.py Normal file
View file

@ -0,0 +1,428 @@
# Copyright (c) 2012, James Hensman and Ricardo Andrade
# Licensed under the BSD 3-clause license (see LICENSE.txt)
from kernpart import Kernpart
import numpy as np
from GPy.util.linalg import mdot, pdinv
from GPy.util import ln_diff_erfs
import pdb
from scipy import weave
class Ode1(Kernpart):
"""
Covariance function for first order differential equation driven by an exponetiated quadratic covariance.
This kernel has the form
.. math::
:param num_outputs: number of outputs driven by latent function.
:type num_outputs: int
.. Note: see first order differential equation examples in GPy.examples.regression for some usage.
"""
def __init__(self,num_outputs, W=None, rank=1, delay=None, kappa=None):
self.rank = rank
self.input_dim = 1
self.name = 'ode1'
self.num_outputs = num_outputs
self.num_params = self.num_outputs*(1. + self.rank) + 1
if kappa is not None:
self.num_params+=num_outputs
if delay is not None:
self.num_params+=num_outputs
self.rank = rank
if W is None:
self.W = 0.5*np.random.randn(self.num_outputs,self.rank)/np.sqrt(self.rank)
else:
assert W.shape==(self.num_outputs,self.rank)
self.W = W
if kappa is not None:
assert kappa.shape==(self.num_outputs,)
self.kappa = kappa
if delay is not None:
assert delay.shape==(self.num_outputs,)
self.delay = delay
self._set_params(self._get_params())
def _get_params(self):
param_list = [self.W.flatten()]
if self.kappa is not None:
param_list.append(self.kappa)
param_list.append(self.decay)
if self.delay is not None:
param_list.append(self.delay)
param_list.append(self.length_scale)
return np.hstack(param_list)
def _set_params(self,x):
assert x.size == self.num_params
end = self.num_outputs*self.rank
self.W = x[:end].reshape(self.num_outputs,self.rank)
start = end
self.B = np.dot(self.W,self.W.T)
if self.kappa is not None:
end+=self.num_outputs
self.kappa = x[start:end]
self.B += np.diag(self.kappa)
start=end
end+=num_outputs
self.decay = x[start:end]
start=end
if self.delay is not None:
end+=num_outputs
self.delay = x[start:end]
start=end
end+=1
self.length_scale = x[start]
self.sigma = np.sqrt(2)*self.length_scale
def _get_param_names(self):
param_names = sum([['W%i_%i'%(i,j) for j in range(self.rank)] for i in range(self.num_outputs)],[])
if self.kappa is not None:
param_names += ['kappa_%i'%i for i in range(self.num_outputs)]
param_names += ['decay_%i'%i for i in range(self.num_outputs)]
if self.delay is not None:
param_names += ['delay_%i'%i for i in range(self.num_outputs)]
param_names+= ['length_scale']
return param_names
def K(self,X,X2,target):
if X.shape[1] > 2:
raise ValueError('Input matrix for ode1 covariance should have at most two columns, one containing times, the other output indices')
self._K_computations()
target += self._scales*self._dK_dvar
if self.gaussian_initial:
# Add covariance associated with initial condition.
t1_mat = self._t[self._rorder, None]
t2_mat = self._t2[None, self._rorder2]
target+=self.initial_variance * np.exp(- self.decay * (t1_mat + t2_mat))
def Kdiag(self,index,target):
#target += np.diag(self.B)[np.asarray(index,dtype=np.int).flatten()]
pass
def dK_dtheta(self,dL_dK,index,index2,target):
pass
def dKdiag_dtheta(self,dL_dKdiag,index,target):
pass
def dK_dX(self,dL_dK,X,X2,target):
pass
def _extract_t_indices(X, X2=None):
"""Extract times and output indices from the input matrix X. Times are ordered according to their index for convenience of computation, this ordering is stored in self._order and self.order2. These orderings are then mapped back to the original ordering (in X) using self._rorder and self._rorder2. """
# TODO: some fast checking here to see if this needs recomputing?
self._t = X[:, 0]
if X.shape[1]==1:
# No index passed, assume single output of ode model.
self._index = np.ones_like(X[:, 1],dtype=np.int)
self._index = np.asarray(X[:, 1],dtype=np.int)
# Sort indices so that outputs are in blocks for computational
# convenience.
self._order = self._index.argsort()
self._index = self._index[self._order]
self._t = self._t[self._order]
self._rorder = self._order.argsort() # rorder is for reversing the order
if X2 is None:
self._t2 = None
self._index2 = None
self._rorder2 = self._rorder
else:
if X2.shape[1] > 2:
raise ValueError('Input matrix for ode1 covariance should have at most two columns, one containing times, the other output indices')
self._t2 = X2[:, 0]
if X.shape[1]==1:
# No index passed, assume single output of ode model.
self._index2 = np.ones_like(X2[:, 1],dtype=np.int)
self._index2 = np.asarray(X2[:, 1],dtype=np.int)
self._order2 = self._index2.argsort()
slef._index2 = self._index2[self._order2]
self._t2 = self._t2[self._order2]
self._rorder2 = self._order2.argsort() # rorder2 is for reversing order
def _K_computations(X, X2):
"""Perform main body of computations for the ode1 covariance function."""
# First extract times and indices.
self._extract_t_indices(X, X2)
self._K_compute_eq()
self._K_compute_eq_x_ode()
if X2 is None:
self._K_ode_eq = self._K_ode_eq.T
else:
self._K_compute_eq_x_ode(transpose=True)
self._K_compute_ode()
# Reorder values of blocks for placing back into _K_dvar.
self._K_dvar[self._rorder, :] = np.vstack((
np.hstack((self._K_eq, self._Keq_ode))
np.hstack((self._K_ode_eq, self.K_ode))))[:, self._rorder2]
def _K_compute_eq():
"""Compute covariance for latent covariance."""
t_eq = self._t[self._index==0]
if t_eq.shape[0]==0:
self._K_eq = np.zeros((0, 0))
return
if self._t2 is None:
self._dist2 = np.square(t_eq[:, None] - t_eq[None, :])
else:
t2_eq = self._t2[self._index2==0]
if t2_eq.shape[0]==0:
self._K_eq_eq = np.zeros((0, 0))
return
self._dist2 = np.square(t_eq[:, None] - t2_eq[None, :])
self._K_eq = np.exp(-self._dist2/(2*self.length_scale*self.length_scale))
if self.is_normalise:
self._K_eq/=(np.sqrt(2*np.pi)*self.length_scale)
def _K_compute_ode_eq(transpose=False):
"""Compute the cross covariances between latent exponentiated quadratic and observed ordinary differential equations.
:param transpose: if set to false the exponentiated quadratic is on the rows of the matrix and is computed according to self._t, if set to true it is on the columns and is computed according to self._t2 (default=False).
:type transpose: bool"""
if transpose:
t_ode = self._t2[self._index>0]
index_ode = self._index2[self._index>0]-1
if t_ode.shape[0]==0:
self._K_ode = np.zeros((0, 0))
return
if self._t2 is not None:
t2_ode = self._t2[self._index2>0]
index2_ode = self._index2[self._index2>0]-1
if t2_eq.shape[0]==0:
self._K_ode = np.zeros((0, 0))
return
else:
# Matrix giving scales of each output
self._scale = np.zeros((t_ode.shape[0], t_eq.shape[0]))
code="""
for(int i=0;i<N; i++){
for(int j=0; j<N2; j++){
scale_mat[i+j*N] = self.W[index_sim[i]+index_eq[j]*num_outputs];
}
}
"""
scale_mat, B = self._scale, self._B
N, N2, num_outputs = index_ode.size, index_eq.size, self.num_outputs
weave.inline(code,['index_ode', 'index_eq',
'scale_mat', 'B',
'N', 'N2', 'num_outputs'])
else:
self._scale = np.zeros((t_ode.shape[0], t2_ode.shape[0]))
code = """
for(int i=0; i<N; i++){
for(int j=0; j<N2; j++){
scale_mat[i+j*N] = B[index_ode[i]+num_outputs*index2_ode[j]]
}
}
"""
scale_mat, B = self._scale, self._B
N, N2, num_outputs = index_ode.size, index2.size, self.num_outputs
weave.inline(code, ['index_ode', 'index2_ode',
'scale_mat', 'B',
'N', 'N2', 'num_outputs'])
if transpose:
t_ode = t2 - self.delay
t_eq_mat = t1[None, :]
else:
t_ode = t1 - self.delay
t_eq_mat = t2[None, :]
t_ode_mat = t_ode[:, None]
diff_t = (t_ode_mat - t_eq_mat)
sigma = sqrt(2/self.inverseWidth)
invSigmaDiffT = 1/sigma*diff_t
halfSigmaD_i = 0.5*sigma*self.decay
if self.isStationary == false
[ln_part, signs] = ln_diff_erfs(halfSigmaD_i + t2Mat/sigma, halfSigmaD_i - invSigmaDiffT)
else
[ln_part, signs] = ln_diff_erfs(inf, halfSigmaD_i - invSigmaDiffT)
end
sK = signs .* exp(halfSigmaD_i*halfSigmaD_i - self.decay*diff_t + ln_part)
sK *= 0.5
if not self.is_normalised:
sK *= sqrt(pi)*self.sigma
if transpose:
self._K_eq_ode =
else:
self._K_ode_eq = sK
return K
def _K_compute_ode():
# Compute covariances between outputs of the ODE models.
t_ode = self._t[self._index>0]
index_ode = self._index[self._index>0]-1
if t_ode.shape[0]==0:
self._K_ode = np.zeros((0, 0))
return
if self._t2 is not None:
t2_ode = self._t2[self._index2>0]
index2_ode = self._index2[self._index2>0]-1
if t2_eq.shape[0]==0:
self._K_ode = np.zeros((0, 0))
return
if self._index2 is None:
# Matrix giving scales of each output
self._scale = np.zeros((t_ode.shape[0], t_ode.shape[0]))
code="""
for(int i=0;i<N; i++){
scale_mat[i+i*N] = B[index_ode[i]+num_outputs*(index_ode[i])];
for(int j=0; j<i; j++){
scale_mat[j+i*N] = B[index_ode[i]+num_outputs*index_ode[j]];
scale_mat[i+j*N] = scale_mat[j+i*N];
}
}
"""
scale_mat, B = self._scale, self._B
N, num_outputs = index_ode.size, self.num_outputs
weave.inline(code,['index_ode',
'scale_mat', 'B',
'N', 'num_outputs'])
else:
self._scale = np.zeros((t_ode.shape[0], t2_ode.shape[0]))
code = """
for(int i=0; i<N; i++){
for(int j=0; j<N2; j++){
scale_mat[i+j*N] = B[index_ode[i]+num_outputs*index2_ode[j]]
}
}
"""
scale_mat, B = self._scale, self._B
N, N2, num_outputs = index_ode.size, index2.size, self.num_outputs
weave.inline(code, ['index_ode', 'index2_ode',
'scale_mat', 'B',
'N', 'N2', 'num_outputs'])
# When index is identical
if self.is_stationary:
h = self._compute_H_stat(t_ode, index_ode, t2_ode, index2_ode)
else:
h = self._compute_H(t_ode, index_ode, t2_ode, index2_ode)
if t2 is None:
self._K_ode = 0.5 * (h + h.T)
else:
if self.is_stationary:
h2 = self._compute_H_stat(t2, index2, t, index)
else:
h2 = self._compute_H(t2, index2, t, index)
self._K_ode += 0.5 * (h + h2.T)
if not self.is_normalized:
self._K_ode *= np.sqrt(np.pi)*sigma
def _compute_H(self, t, index, t2, index2, update_derivatives=False):
    """Helper function for computing part of the ode1 covariance function.

    :param t: first time input (column vector, shape (N, 1)).
    :type t: array
    :param index: Indices of first output.
    :type index: array of int
    :param t2: second time input (column vector, shape (N2, 1)).
    :type t2: array
    :param index2: Indices of second output.
    :type index2: array of int
    :param update_derivatives: whether to update derivatives (default is False).
        NOTE(review): the derivative updates from the MATLAB implementation
        have not been ported yet, so this flag is currently a no-op.
    :return h: result of this subcomponent of the kernel for the given values.
    :rtype: ndarray
    """
    # BUGFIX: `self` was missing from the signature although the body uses it.
    if t.shape[1] > 1 or t2.shape[1] > 1:
        # BUGFIX: was `raise error(...)`; `error` is not a defined name.
        raise ValueError('Input can only have one column')
    # Vectors of decays and delays associated with each output.
    Decay = np.zeros(t.shape[0])
    Delay = np.zeros(t.shape[0])
    Decay2 = np.zeros(t2.shape[0])
    Delay2 = np.zeros(t2.shape[0])
    # BUGFIX: the original C code filled Delay from `decays` and Decay from
    # `delays` (swapped).
    code = """
    for(int i=0; i<N; i++){
      Delay[i] = delays[index[i]];
      Decay[i] = decays[index[i]];
    }
    for(int i=0; i<N2; i++){
      Delay2[i] = delays[index2[i]];
      Decay2[i] = decays[index2[i]];
    }
    """
    delays, decays = self.delays, self.decays
    N, N2 = index.size, index2.size
    # BUGFIX: the argument list previously passed 'index_ode', which does not
    # exist in this scope; pass the names the C code actually uses.
    weave.inline(code, ['index', 'index2',
                        'Delay', 'Decay',
                        'Delay2', 'Decay2',
                        'delays', 'decays',
                        'N', 'N2'])
    # Times arrive as (N, 1) columns (checked above); take the single column
    # so broadcasting yields an (N, N2) matrix. BUGFIX: `t[:, None]` on a 2-d
    # input produced a (N, 1, 1) array.
    t_mat = t[:, 0][:, None] - Delay[:, None]
    t2_mat = t2[:, 0][None, :] - Delay2[None, :]
    diff_t = (t_mat - t2_mat)
    inv_sigma_diff_t = 1. / self.sigma * diff_t
    half_sigma_decay_i = 0.5 * self.sigma * Decay[:, None]
    ln_part_1, sign1 = ln_diff_erfs(half_sigma_decay_i + t2_mat / self.sigma,
                                    half_sigma_decay_i - inv_sigma_diff_t)
    ln_part_2, sign2 = ln_diff_erfs(half_sigma_decay_i,
                                    half_sigma_decay_i - t_mat / self.sigma)
    # Assemble the two log-domain terms of the H function.
    h = sign1 * np.exp(half_sigma_decay_i
                       * half_sigma_decay_i
                       - Decay[:, None] * diff_t + ln_part_1
                       - np.log(Decay[:, None] + Decay2[None, :]))
    h -= sign2 * np.exp(half_sigma_decay_i * half_sigma_decay_i
                        - Decay[:, None] * t_mat - Decay2[None, :] * t2_mat + ln_part_2
                        - np.log(Decay[:, None] + Decay2[None, :]))
    # TODO: port the decay/sigma gradient updates (dh_ddecay, dh_dsigma) from
    # the MATLAB implementation and honour `update_derivatives`.
    # BUGFIX: the function previously fell off the end without returning h,
    # although callers assign its result.
    return h

View file

@ -0,0 +1,461 @@
import numpy as np
import sympy as sp
from sympy.utilities.codegen import codegen
from sympy.core.cache import clear_cache
from scipy import weave
import re
import os
import sys
current_dir = os.path.dirname(os.path.abspath(os.path.dirname(__file__)))
import tempfile
import pdb
import ast
from kernpart import Kernpart
from ...util.config import config
class spkern(Kernpart):
    """
    A kernel object, where all the hard work is done by sympy.

    :param k: the covariance function
    :type k: a positive definite sympy function of x_0, z_0, x_1, z_1, x_2, z_2...

    To construct a new sympy kernel, you'll need to define:
     - a kernel function using a sympy object. Ensure that the kernel is of the form k(x,z).
     - that's it! we'll extract the variables from the function k.

    Note:
     - to handle multiple inputs, call them x_1, z_1, etc
     - to handle multiple correlated outputs, you'll need to add parameters with an index, such as lengthscale_i and lengthscale_j.
    """
    def __init__(self, input_dim, k=None, output_dim=1, name=None, param=None):
        if name is None:
            self.name = 'sympykern'
        else:
            self.name = name
        if k is None:
            # Cross-version raise form (the original used `raise E, msg`).
            raise ValueError("You must provide an argument for the covariance function.")
        self._sp_k = k
        # Pull the free symbols out of the expression and classify them.
        sp_vars = [e for e in k.atoms() if e.is_Symbol]
        self._sp_x = sorted([e for e in sp_vars if e.name[0:2] == 'x_'], key=lambda x: int(x.name[2:]))
        self._sp_z = sorted([e for e in sp_vars if e.name[0:2] == 'z_'], key=lambda z: int(z.name[2:]))
        # Check that variable names make sense.
        assert all([x.name == 'x_%i' % i for i, x in enumerate(self._sp_x)])
        assert all([z.name == 'z_%i' % i for i, z in enumerate(self._sp_z)])
        assert len(self._sp_x) == len(self._sp_z)
        self.input_dim = len(self._sp_x)
        self._real_input_dim = self.input_dim
        # For multiple outputs an extra input column carries the output index.
        if output_dim > 1:
            self.input_dim += 1
        assert self.input_dim == input_dim
        self.output_dim = output_dim
        # extract parameter names
        thetas = sorted([e for e in sp_vars if not (e.name[0:2] == 'x_' or e.name[0:2] == 'z_')], key=lambda e: e.name)
        # Look for parameters with index.
        if self.output_dim > 1:
            self._sp_theta_i = sorted([e for e in thetas if (e.name[-2:] == '_i')], key=lambda e: e.name)
            self._sp_theta_j = sorted([e for e in thetas if (e.name[-2:] == '_j')], key=lambda e: e.name)
            # Make sure each parameter appears with both indices!
            assert len(self._sp_theta_i) == len(self._sp_theta_j)
            assert all([theta_i.name[:-2] == theta_j.name[:-2] for theta_i, theta_j in zip(self._sp_theta_i, self._sp_theta_j)])
            # Extract names of shared parameters
            self._sp_theta = [theta for theta in thetas if theta not in self._sp_theta_i and theta not in self._sp_theta_j]
            self.num_split_params = len(self._sp_theta_i)
            self._split_theta_names = ["%s" % theta.name[:-2] for theta in self._sp_theta_i]
            for theta in self._split_theta_names:
                setattr(self, theta, np.ones(self.output_dim))
            self.num_shared_params = len(self._sp_theta)
            self.num_params = self.num_shared_params + self.num_split_params * self.output_dim
        else:
            self.num_split_params = 0
            self._split_theta_names = []
            self._sp_theta = thetas
            self.num_shared_params = len(self._sp_theta)
            self.num_params = self.num_shared_params
        for theta in self._sp_theta:
            val = 1.0
            if param is not None:
                # `in` replaces Python-2-only dict.has_key().
                if theta in param:
                    val = param[theta]
            setattr(self, theta.name, val)
        # deal with param
        self._set_params(self._get_params())
        # Differentiate!
        self._sp_dk_dtheta = [sp.diff(k, theta).simplify() for theta in self._sp_theta]
        if self.output_dim > 1:
            self._sp_dk_dtheta_i = [sp.diff(k, theta).simplify() for theta in self._sp_theta_i]
        self._sp_dk_dx = [sp.diff(k, xi).simplify() for xi in self._sp_x]
        if False:
            self.compute_psi_stats()
        self._gen_code()
        if False:
            extra_compile_args = ['-ftree-vectorize', '-mssse3', '-ftree-vectorizer-verbose=5']
        else:
            extra_compile_args = []
        self.weave_kwargs = {
            'support_code': self._function_code,
            'include_dirs': [tempfile.gettempdir(), os.path.join(current_dir, 'parts/')],
            'headers': ['"sympy_helpers.h"'],
            'sources': [os.path.join(current_dir, "parts/sympy_helpers.cpp")],
            'extra_compile_args': extra_compile_args,
            'extra_link_args': [],
            'verbose': True}
        # BUGFIX: self.weave_kwargs is a dict, which has no .append(); the
        # OpenMP linker flag belongs in the extra_link_args list.
        if config.getboolean('parallel', 'openmp'):
            self.weave_kwargs['extra_link_args'].append('-lgomp')
def __add__(self,other):
return spkern(self._sp_k+other._sp_k)
    def _gen_code(self):
        """Generates the C functions necessary for computing the covariance function using the sympy objects as input.

        Produces, via sympy's codegen plus string templating, the C snippets
        stored on self as _K_code, _K_code_X, _Kdiag_code, _dK_dtheta_code,
        _dKdiag_dtheta_code, _dK_dX_code, _dKdiag_dX_code and their X-only
        variants; these are later executed with weave.inline.
        """
        #TODO: maybe generate one C function only to save compile time? Also easier to take that as a basis and hand craft other covariances??
        #generate c functions from sympy objects
        argument_sequence = self._sp_x+self._sp_z+self._sp_theta
        code_list = [('k',self._sp_k)]
        # gradients with respect to covariance input
        code_list += [('dk_d%s'%x.name,dx) for x,dx in zip(self._sp_x,self._sp_dk_dx)]
        # gradient with respect to parameters
        code_list += [('dk_d%s'%theta.name,dtheta) for theta,dtheta in zip(self._sp_theta,self._sp_dk_dtheta)]
        # gradient with respect to multiple output parameters
        if self.output_dim > 1:
            argument_sequence += self._sp_theta_i + self._sp_theta_j
            code_list += [('dk_d%s'%theta.name,dtheta) for theta,dtheta in zip(self._sp_theta_i,self._sp_dk_dtheta_i)]
        # codegen returns ((c_name, c_code), (h_name, h_code)); the file names
        # (foo_c/foo_h) are unused.
        (foo_c,self._function_code), (foo_h,self._function_header) = \
            codegen(code_list, "C",'kernel_code',argument_sequence=argument_sequence)
        #put the header file where we can find it
        f = file(os.path.join(tempfile.gettempdir(),'kernel_code.h'),'w')
        f.write(self._function_header)
        f.close()
        # Substitute any known derivatives which sympy doesn't compute
        self._function_code = re.sub('DiracDelta\(.+?,.+?\)','0.0',self._function_code)
        ############################################################
        # This is the basic argument construction for the C code. #
        ############################################################
        # X2(i, q)/Z2(j, q) are the weave 2-d accessor macros for X and Z.
        arg_list = (["X2(i, %s)"%x.name[2:] for x in self._sp_x]
                    + ["Z2(j, %s)"%z.name[2:] for z in self._sp_z])
        # for multiple outputs need to also provide these arguments reversed.
        if self.output_dim>1:
            reverse_arg_list = list(arg_list)
            reverse_arg_list.reverse()
        # Add in any 'shared' parameters to the list.
        param_arg_list = [shared_params.name for shared_params in self._sp_theta]
        arg_list += param_arg_list
        precompute_list=[]
        if self.output_dim > 1:
            reverse_arg_list+=list(param_arg_list)
            # Per-output parameters are indexed by the output index of each row
            # (ii) and column (jj); the reversed list swaps the two.
            split_param_arg_list = ["%s1(%s)"%(theta.name[:-2].upper(),index) for index in ['ii', 'jj'] for theta in self._sp_theta_i]
            split_param_reverse_arg_list = ["%s1(%s)"%(theta.name[:-2].upper(),index) for index in ['jj', 'ii'] for theta in self._sp_theta_i]
            arg_list += split_param_arg_list
            reverse_arg_list += split_param_reverse_arg_list
            # Extract the right output indices from the inputs.
            c_define_output_indices = [' '*16 + "int %s=(int)%s(%s, %i);"%(index, var, index2, self.input_dim-1) for index, var, index2 in zip(['ii', 'jj'], ['X2', 'Z2'], ['i', 'j'])]
            precompute_list += c_define_output_indices
            reverse_arg_string = ", ".join(reverse_arg_list)
        arg_string = ", ".join(arg_list)
        precompute_string = "\n".join(precompute_list)
        # Code to compute argments string needed when only X is provided.
        X_arg_string = re.sub('Z','X',arg_string)
        # Code to compute argument string when only diagonal is required.
        diag_arg_string = re.sub('int jj','//int jj',X_arg_string)
        diag_arg_string = re.sub('j','i',diag_arg_string)
        if precompute_string == '':
            # if it's not multioutput, the precompute strings are set to zero
            diag_precompute_string = ''
            diag_precompute_replace = ''
        else:
            # for multioutput we need to extract the index of the output form the input.
            diag_precompute_string = precompute_list[0]
            diag_precompute_replace = precompute_list[1]
        # Here's the code to do the looping for K
        self._K_code =\
        """
        // _K_code
        // Code for computing the covariance function.
        int i;
        int j;
        int N = target_array->dimensions[0];
        int num_inducing = target_array->dimensions[1];
        int input_dim = X_array->dimensions[1];
        //#pragma omp parallel for private(j)
        for (i=0;i<N;i++){
          for (j=0;j<num_inducing;j++){
            %s
            //target[i*num_inducing+j] =
            TARGET2(i, j) += k(%s);
          }
        }
        %s
        """%(precompute_string,arg_string,"/*"+str(self._sp_k)+"*/") #adding a string representation forces recompile when needed
        # Symmetric K(X, X): fill the diagonal, then mirror the strict lower
        # triangle into the upper one to halve the number of k() calls.
        self._K_code_X = """
        // _K_code_X
        // Code for computing the covariance function.
        int i;
        int j;
        int N = target_array->dimensions[0];
        int num_inducing = target_array->dimensions[1];
        int input_dim = X_array->dimensions[1];
        //#pragma omp parallel for private(j)
        for (i=0;i<N;i++){
          %s // int ii=(int)X2(i, 1);
          TARGET2(i, i) += k(%s);
          for (j=0;j<i;j++){
            %s //int jj=(int)X2(j, 1);
            double kval = k(%s); //double kval = k(X2(i, 0), shared_lengthscale, LENGTHSCALE1(ii), SCALE1(ii));
            TARGET2(i, j) += kval;
            TARGET2(j, i) += kval;
          }
        }
        /*%s*/
        """%(diag_precompute_string, diag_arg_string, re.sub('Z2', 'X2', diag_precompute_replace), X_arg_string,str(self._sp_k)) #adding a string representation forces recompile when needed
        # Code to do the looping for Kdiag
        self._Kdiag_code =\
        """
        // _Kdiag_code
        // Code for computing diagonal of covariance function.
        int i;
        int N = target_array->dimensions[0];
        int input_dim = X_array->dimensions[1];
        //#pragma omp parallel for
        for (i=0;i<N;i++){
          %s
          //target[i] =
          TARGET1(i)=k(%s);
        }
        %s
        """%(diag_precompute_string,diag_arg_string,"/*"+str(self._sp_k)+"*/") #adding a string representation forces recompile when needed
        # Code to compute gradients
        grad_func_list = []
        if self.output_dim>1:
            grad_func_list += c_define_output_indices
            # Per-output parameter gradients: each output's slot in target is
            # offset by its output index (ii for rows, jj for columns).
            grad_func_list += [' '*16 + 'TARGET1(%i+ii) += PARTIAL2(i, j)*dk_d%s(%s);'%(self.num_shared_params+i*self.output_dim, theta.name, arg_string) for i, theta in enumerate(self._sp_theta_i)]
            grad_func_list += [' '*16 + 'TARGET1(%i+jj) += PARTIAL2(i, j)*dk_d%s(%s);'%(self.num_shared_params+i*self.output_dim, theta.name, reverse_arg_string) for i, theta in enumerate(self._sp_theta_i)]
        grad_func_list += ([' '*16 + 'TARGET1(%i) += PARTIAL2(i, j)*dk_d%s(%s);'%(i,theta.name,arg_string) for i,theta in enumerate(self._sp_theta)])
        grad_func_string = '\n'.join(grad_func_list)
        self._dK_dtheta_code =\
        """
        // _dK_dtheta_code
        // Code for computing gradient of covariance with respect to parameters.
        int i;
        int j;
        int N = partial_array->dimensions[0];
        int num_inducing = partial_array->dimensions[1];
        int input_dim = X_array->dimensions[1];
        //#pragma omp parallel for private(j)
        for (i=0;i<N;i++){
          for (j=0;j<num_inducing;j++){
            %s
          }
        }
        %s
        """%(grad_func_string,"/*"+str(self._sp_k)+"*/") # adding a string representation forces recompile when needed
        # Code to compute gradients for Kdiag TODO: needs clean up
        diag_grad_func_string = re.sub('Z','X',grad_func_string,count=0)
        diag_grad_func_string = re.sub('int jj','//int jj',diag_grad_func_string)
        diag_grad_func_string = re.sub('j','i',diag_grad_func_string)
        diag_grad_func_string = re.sub('PARTIAL2\(i, i\)','PARTIAL1(i)',diag_grad_func_string)
        self._dKdiag_dtheta_code =\
        """
        // _dKdiag_dtheta_code
        // Code for computing gradient of diagonal with respect to parameters.
        int i;
        int N = partial_array->dimensions[0];
        int input_dim = X_array->dimensions[1];
        for (i=0;i<N;i++){
          %s
        }
        %s
        """%(diag_grad_func_string,"/*"+str(self._sp_k)+"*/") #adding a string representation forces recompile when needed
        # Code for gradients wrt X, TODO: may need to deal with special case where one input is actually an output.
        gradX_func_list = []
        if self.output_dim>1:
            gradX_func_list += c_define_output_indices
        gradX_func_list += ["TARGET2(i, %i) += PARTIAL2(i, j)*dk_dx_%i(%s);"%(q,q,arg_string) for q in range(self._real_input_dim)]
        gradX_func_string = "\n".join(gradX_func_list)
        self._dK_dX_code = \
        """
        // _dK_dX_code
        // Code for computing gradient of covariance with respect to inputs.
        int i;
        int j;
        int N = partial_array->dimensions[0];
        int num_inducing = partial_array->dimensions[1];
        int input_dim = X_array->dimensions[1];
        //#pragma omp parallel for private(j)
        for (i=0;i<N; i++){
          for (j=0; j<num_inducing; j++){
            %s
          }
        }
        %s
        """%(gradX_func_string,"/*"+str(self._sp_k)+"*/") #adding a string representation forces recompile when needed
        diag_gradX_func_string = re.sub('Z','X',gradX_func_string,count=0)
        diag_gradX_func_string = re.sub('int jj','//int jj',diag_gradX_func_string)
        diag_gradX_func_string = re.sub('j','i',diag_gradX_func_string)
        # The diagonal picks up a factor of two because both arguments of
        # k(x, x') move together.
        diag_gradX_func_string = re.sub('PARTIAL2\(i, i\)','2*PARTIAL1(i)',diag_gradX_func_string)
        # Code for gradients of Kdiag wrt X
        self._dKdiag_dX_code= \
        """
        // _dKdiag_dX_code
        // Code for computing gradient of diagonal with respect to inputs.
        int N = partial_array->dimensions[0];
        int input_dim = X_array->dimensions[1];
        for (int i=0;i<N; i++){
          %s
        }
        %s
        """%(diag_gradX_func_string,"/*"+str(self._sp_k)+"*/") #adding a
        # string representation forces recompile when needed Get rid
        # of Zs in argument for diagonal. TODO: Why wasn't
        # diag_func_string called here? Need to check that.
        #self._dKdiag_dX_code = self._dKdiag_dX_code.replace('Z[j', 'X[i')
        # Code to use when only X is provided.
        self._dK_dtheta_code_X = self._dK_dtheta_code.replace('Z[', 'X[')
        self._dK_dX_code_X = self._dK_dX_code.replace('Z[', 'X[').replace('+= PARTIAL2(', '+= 2*PARTIAL2(')
        self._dK_dtheta_code_X = self._dK_dtheta_code_X.replace('Z2(', 'X2(')
        self._dK_dX_code_X = self._dK_dX_code_X.replace('Z2(', 'X2(')
        #TODO: insert multiple functions here via string manipulation
        #TODO: similar functions for psi_stats
def _get_arg_names(self, Z=None, partial=None):
arg_names = ['target','X']
for shared_params in self._sp_theta:
arg_names += [shared_params.name]
if Z is not None:
arg_names += ['Z']
if partial is not None:
arg_names += ['partial']
if self.output_dim>1:
arg_names += self._split_theta_names
arg_names += ['output_dim']
return arg_names
    def _weave_inline(self, code, X, target, Z=None, partial=None):
        """Execute one of the generated C snippets with weave.inline.

        The kernel parameters are injected into this frame's locals() so that
        weave.inline can pick them up by name via arg_names.
        NOTE(review): mutating locals() inside a function is not guaranteed to
        create real local variables in CPython -- this relies on weave reading
        the frame's locals dict directly; confirm it still works before
        restructuring.
        """
        # output_dim must be a local for arg_names lookup in the C code.
        output_dim = self.output_dim
        for shared_params in self._sp_theta:
            locals()[shared_params.name] = getattr(self, shared_params.name)
        # Need to extract parameters first
        for split_params in self._split_theta_names:
            locals()[split_params] = getattr(self, split_params)
        arg_names = self._get_arg_names(Z, partial)
        weave.inline(code=code, arg_names=arg_names,**self.weave_kwargs)
def K(self,X,Z,target):
if Z is None:
self._weave_inline(self._K_code_X, X, target)
else:
self._weave_inline(self._K_code, X, target, Z)
    def Kdiag(self, X, target):
        """Accumulate the diagonal of the covariance of X into target."""
        self._weave_inline(self._Kdiag_code, X, target)
def dK_dtheta(self,partial,X,Z,target):
if Z is None:
self._weave_inline(self._dK_dtheta_code_X, X, target, Z, partial)
else:
self._weave_inline(self._dK_dtheta_code, X, target, Z, partial)
    def dKdiag_dtheta(self, partial, X, target):
        """Accumulate gradients of the covariance diagonal w.r.t. the
        parameters, weighted by partial, into target."""
        self._weave_inline(self._dKdiag_dtheta_code, X, target, Z=None, partial=partial)
def dK_dX(self,partial,X,Z,target):
if Z is None:
self._weave_inline(self._dK_dX_code_X, X, target, Z, partial)
else:
self._weave_inline(self._dK_dX_code, X, target, Z, partial)
    def dKdiag_dX(self, partial, X, target):
        """Accumulate gradients of the covariance diagonal w.r.t. the inputs,
        weighted by partial, into target."""
        self._weave_inline(self._dKdiag_dX_code, X, target, Z=None, partial=partial)
    def compute_psi_stats(self):
        """Symbolically integrate the kernel against Gaussian densities to
        obtain the psi statistics (psi1 and psi2) used in variational sparse
        GP approximations. Results are stored as sympy expressions in
        self._sp_psi1 and self._sp_psi2; psi0 is not yet implemented.

        NOTE(review): these integrals can be extremely slow or fail to
        converge for general kernels -- this routine is currently disabled in
        __init__ (guarded by `if False:`).
        """
        #define some normal distributions
        mus = [sp.var('mu_%i'%i,real=True) for i in range(self.input_dim)]
        Ss = [sp.var('S_%i'%i,positive=True) for i in range(self.input_dim)]
        normals = [(2*sp.pi*Si)**(-0.5)*sp.exp(-0.5*(xi-mui)**2/Si) for xi, mui, Si in zip(self._sp_x, mus, Ss)]
        #do some integration!
        #self._sp_psi0 = ??
        self._sp_psi1 = self._sp_k
        for i in range(self.input_dim):
            print 'perfoming integrals %i of %i'%(i+1,2*self.input_dim)
            sys.stdout.flush()
            self._sp_psi1 *= normals[i]
            self._sp_psi1 = sp.integrate(self._sp_psi1,(self._sp_x[i],-sp.oo,sp.oo))
            # sympy's cache grows unboundedly during repeated integration.
            clear_cache()
        self._sp_psi1 = self._sp_psi1.simplify()
        #and here's psi2 (eek!)
        zprime = [sp.Symbol('zp%i'%i) for i in range(self.input_dim)]
        self._sp_psi2 = self._sp_k.copy()*self._sp_k.copy().subs(zip(self._sp_z,zprime))
        for i in range(self.input_dim):
            print 'perfoming integrals %i of %i'%(self.input_dim+i+1,2*self.input_dim)
            sys.stdout.flush()
            self._sp_psi2 *= normals[i]
            self._sp_psi2 = sp.integrate(self._sp_psi2,(self._sp_x[i],-sp.oo,sp.oo))
            clear_cache()
        self._sp_psi2 = self._sp_psi2.simplify()
def _set_params(self,param):
assert param.size == (self.num_params)
for i, shared_params in enumerate(self._sp_theta):
setattr(self, shared_params.name, param[i])
if self.output_dim>1:
for i, split_params in enumerate(self._split_theta_names):
start = self.num_shared_params + i*self.output_dim
end = self.num_shared_params + (i+1)*self.output_dim
setattr(self, split_params, param[start:end])
def _get_params(self):
params = np.zeros(0)
for shared_params in self._sp_theta:
params = np.hstack((params, getattr(self, shared_params.name)))
if self.output_dim>1:
for split_params in self._split_theta_names:
params = np.hstack((params, getattr(self, split_params).flatten()))
return params
def _get_param_names(self):
if self.output_dim>1:
return [x.name for x in self._sp_theta] + [x.name[:-2] + str(i) for x in self._sp_theta_i for i in range(self.output_dim)]
else:
return [x.name for x in self._sp_theta]

179
GPy/kern/ratquad.py Normal file
View file

@ -0,0 +1,179 @@
# Copyright (c) 2012, GPy authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt)
from kernpart import kernpart
import numpy as np
import hashlib
class rbf(kernpart):
    """
    Rational quadratic kernel.

    NOTE(review): despite the class name `rbf` (kept so existing callers keep
    working), this implements the rational quadratic covariance
    (self.name == 'ratquad'); it should probably be renamed.

    .. math::

       k(r) = \sigma^2 \\left(\\frac{(\ell^{-2} + r^2)}{2\\alpha}\\right)^{-\\alpha} \qquad \qquad \\text{ where } r = \sqrt{\\frac{\sum_{i=1}^d (x_i-x^\prime_i)^2}{\ell^2}}

    where \ell is the lengthscale, \\alpha the smoothness, \sigma^2 the variance and d the dimensionality of the input.

    :param D: the number of input dimensions
    :type D: int
    :param variance: the variance of the kernel
    :type variance: float
    :param lengthscale: the lengthscale of the kernel (l)
    :type lengthscale: float
    :param smoothness: the smoothness parameter of the kernel
    :type smoothness: float

    .. Note: for rational quadratic with different lengthscale on each dimension, see ratquad_ARD
    """
    def __init__(self, D, variance=1., lengthscale=1., smoothness=1.):
        # BUGFIX: the signature previously read `smoothness)=1.:`, a syntax error.
        self.D = D
        self.Nparam = 3
        self.name = 'ratquad'
        self._set_params(np.hstack((variance, lengthscale, smoothness)))
        # initialize cache (three (1,) arrays each; real inputs will not
        # compare equal, forcing the first computation)
        self._Z, self._mu, self._S = np.empty(shape=(3, 1))
        self._X, self._X2, self._params = np.empty(shape=(3, 1))

    def _get_params(self):
        """Return the parameter vector (variance, lengthscale, smoothness)."""
        return np.hstack((self.variance, self.lengthscale, self.smoothness))

    def _set_params(self, x):
        """Unpack (variance, lengthscale, smoothness) and reset caches."""
        self.variance, self.lengthscale, self.smoothness = x
        self.lengthscale2 = np.square(self.lengthscale)
        # reset cached results
        self._X, self._X2, self._params = np.empty(shape=(3, 1))
        self._Z, self._mu, self._S = np.empty(shape=(3, 1))  # cached versions of Z,mu,S

    def _get_param_names(self):
        return ['variance', 'lengthscale', 'smoothness']

    def K(self, X, X2, target):
        """Accumulate the covariance between X and X2 (or X with itself)."""
        if X2 is None:
            X2 = X
        self._K_computations(X, X2)
        np.add(self.variance*self._K_dvar, target, target)

    def Kdiag(self, X, target):
        """Diagonal of the covariance: constant, equal to the variance."""
        np.add(target, self.variance, target)

    def dK_dtheta(self, partial, X, X2, target):
        """Accumulate gradients of K w.r.t. (variance, lengthscale, smoothness),
        weighted by partial, into target."""
        self._K_computations(X, X2)
        target[0] += np.sum(self._K_dvar*partial)
        # NOTE(review): the lengthscale gradient below is kept from the
        # original; it should be checked against the analytic derivative of
        # the rational quadratic before release.
        target[1] += np.sum(self._K_dvar*self.variance*self._K_dist2/self.lengthscale*partial)
        # BUGFIX: the smoothness gradient was left unfinished (`target[2] +=`).
        # With A = (lengthscale^-2 + dist2)/(2*smoothness) and
        # K = variance*A**(-smoothness):
        #   dK/dsmoothness = K*(1 - log(A)).
        A = 0.5/self.smoothness*(1./self.lengthscale2 + self._K_dist2)
        target[2] += np.sum(self.variance*self._K_dvar*(1. - np.log(A))*partial)

    def dKdiag_dtheta(self, partial, X, target):
        # NB: derivative of diagonal elements wrt lengthscale (and smoothness) is 0
        target[0] += np.sum(partial)

    def dK_dX(self, partial, X, X2, target):
        """Accumulate gradients of K w.r.t. X, weighted by partial, into target."""
        self._K_computations(X, X2)
        if X2 is None:
            # NOTE(review): kept as in the original; check whether this should
            # be 2*(X[:, None, :] - X[None, :, :]) (missing parentheses?).
            _K_dist = 2*X[:, None, :] - X[None, :, :]
        else:
            _K_dist = X[:, None, :]-X2[None, :, :]
        dK_dX = np.transpose(-self.variance*self._K_dvar[:, :, np.newaxis]*_K_dist/self.lengthscale2, (1, 0, 2))
        target += np.sum(dK_dX*partial.T[:, :, None], 0)

    def dKdiag_dX(self, partial, X, target):
        # Diagonal does not depend on X.
        pass

    def _K_computations(self, X, X2):
        """Cache the squared distances and the unscaled covariance for (X, X2)."""
        if not (np.all(X == self._X) and np.all(X2 == self._X2)):
            self._X = X
            self._X2 = X2
            if X2 is None: X2 = X
            XXT = np.dot(X, X2.T)
            if X is X2:
                self._K_dist2 = (-2.*XXT + np.diag(XXT)[:, np.newaxis] + np.diag(XXT)[np.newaxis, :])/self.lengthscale2
            else:
                self._K_dist2 = (-2.*XXT + np.sum(np.square(X), 1)[:, np.newaxis] + np.sum(np.square(X2), 1)[np.newaxis, :])/self.lengthscale2
            # Unit-variance rational quadratic.
            self._K_dvar = (0.5/self.smoothness*(1./self.lengthscale2 + self._K_dist2))**(-self.smoothness)

    def psi0(self, Z, mu, S, target):
        target += self.variance

    def dpsi0_dtheta(self, partial, Z, mu, S, target):
        target[0] += 1.

    def dpsi0_dmuS(self, Z, mu, S, target_mu, target_S):
        # psi0 does not depend on mu or S.
        pass

    def psi1(self, Z, mu, S, target):
        self._psi_computations(Z, mu, S)
        target += self._psi1

    def dpsi1_dtheta(self, partial, Z, mu, S, target):
        # NOTE(review): these psi-statistic expressions appear to be carried
        # over from the RBF kernel; verify they hold for the rational quadratic.
        self._psi_computations(Z, mu, S)
        denom_deriv = S[:, None, :]/(self.lengthscale**3+self.lengthscale*S[:, None, :])
        d_length = self._psi1[:, :, None]*(self.lengthscale*np.square(self._psi1_dist/(self.lengthscale2+S[:, None, :])) + denom_deriv)
        target[0] += np.sum(partial*self._psi1/self.variance)
        target[1] += np.sum(d_length*partial[:, :, None])

    def dpsi1_dZ(self, partial, Z, mu, S, target):
        self._psi_computations(Z, mu, S)
        target += np.sum(partial[:, :, None]*-self._psi1[:, :, None]*self._psi1_dist/self.lengthscale2/self._psi1_denom, 0)

    def dpsi1_dmuS(self, partial, Z, mu, S, target_mu, target_S):
        self._psi_computations(Z, mu, S)
        tmp = self._psi1[:, :, None]/self.lengthscale2/self._psi1_denom
        target_mu += np.sum(partial*tmp*self._psi1_dist, 1)
        target_S += np.sum(partial*0.5*tmp*(self._psi1_dist_sq-1), 1)

    def psi2(self, Z, mu, S, target):
        self._psi_computations(Z, mu, S)
        target += self._psi2.sum(0)  # TODO: psi2 should be NxMxM (for het. noise)

    def dpsi2_dtheta(self, partial, Z, mu, S, target):
        """Shape N,M,M,Ntheta"""
        self._psi_computations(Z, mu, S)
        d_var = np.sum(2.*self._psi2/self.variance, 0)
        d_length = self._psi2[:, :, :, None]*(0.5*self._psi2_Zdist_sq*self._psi2_denom + 2.*self._psi2_mudist_sq + 2.*S[:, None, None, :]/self.lengthscale2)/(self.lengthscale*self._psi2_denom)
        d_length = d_length.sum(0)
        target[0] += np.sum(partial*d_var)
        target[1] += np.sum(d_length*partial)

    def dpsi2_dZ(self, partial, Z, mu, S, target):
        """Returns shape N,M,M,Q"""
        self._psi_computations(Z, mu, S)
        dZ = self._psi2[:, :, :, None]/self.lengthscale2*(-0.5*self._psi2_Zdist + self._psi2_mudist/self._psi2_denom)
        target += np.sum(partial[None, :, :, None]*dZ, 0).sum(1)

    def dpsi2_dmuS(self, partial, Z, mu, S, target_mu, target_S):
        """Think N,M,M,Q """
        # BUGFIX: the body uses `partial`, but it was missing from the
        # signature (always raising NameError); the parameter is added first,
        # matching the dpsi1_dmuS / dpsi2_dtheta convention.
        self._psi_computations(Z, mu, S)
        tmp = self._psi2[:, :, :, None]/self.lengthscale2/self._psi2_denom
        target_mu += (partial*-tmp*2.*self._psi2_mudist).sum(1).sum(1)
        target_S += (partial*tmp*(2.*self._psi2_mudist_sq-1)).sum(1).sum(1)

    def _psi_computations(self, Z, mu, S):
        """Cache the "statistics" shared by psi1/psi2 and their gradients."""
        if not np.all(Z == self._Z):
            # Z has changed, compute Z specific stuff
            self._psi2_Zhat = 0.5*(Z[:, None, :] + Z[None, :, :])  # M,M,Q
            self._psi2_Zdist = Z[:, None, :]-Z[None, :, :]  # M,M,Q
            self._psi2_Zdist_sq = np.square(self._psi2_Zdist)/self.lengthscale2  # M,M,Q
            self._Z = Z
        if not (np.all(Z == self._Z) and np.all(mu == self._mu) and np.all(S == self._S)):
            # something's changed. recompute EVERYTHING
            # TODO: make more efficient for large Q (using NDL's dot product trick)
            # psi1
            self._psi1_denom = S[:, None, :]/self.lengthscale2 + 1.
            self._psi1_dist = Z[None, :, :]-mu[:, None, :]
            self._psi1_dist_sq = np.square(self._psi1_dist)/self.lengthscale2/self._psi1_denom
            self._psi1_exponent = -0.5*np.sum(self._psi1_dist_sq+np.log(self._psi1_denom), -1)
            self._psi1 = self.variance*np.exp(self._psi1_exponent)
            # psi2
            self._psi2_denom = 2.*S[:, None, None, :]/self.lengthscale2+1.  # N,M,M,Q
            self._psi2_mudist = mu[:, None, None, :]-self._psi2_Zhat  # N,M,M,Q
            self._psi2_mudist_sq = np.square(self._psi2_mudist)/(self.lengthscale2*self._psi2_denom)
            self._psi2_exponent = np.sum(-self._psi2_Zdist_sq/4. - self._psi2_mudist_sq - 0.5*np.log(self._psi2_denom), -1)  # N,M,M
            self._psi2 = np.square(self.variance)*np.exp(self._psi2_exponent)  # N,M,M
            self._Z, self._mu, self._S = Z, mu, S

View file

View file

@ -0,0 +1,31 @@
# Copyright (c) 2014 The GPy authors (see AUTHORS.txt)
# Licensed under the BSD 3-clause license (see LICENSE.txt)
import numpy as np
from ..util.univariate_Gaussian import std_norm_pdf, std_norm_cdf
import link_functions
from likelihood import Likelihood
from scipy import stats
class Negative_binomial(Symbolic):
    """
    Negative binomial likelihood (stub).

    NOTE(review): the body of this class was copied from the Bernoulli
    likelihood and does not yet implement a negative binomial -- it still
    installs a probit link and checks for Bernoulli-style links. Also note
    that `Symbolic` is not imported in this module (only `Likelihood` is),
    so instantiation will fail until the import is added -- confirm the
    intended base class.

    .. See also::
        likelihood.py, for the parent class
    """
    def __init__(self, gp_link=None):
        if gp_link is None:
            gp_link = link_functions.Probit()
        # BUGFIX: super() was previously called with `Bernoulli` (a name not
        # defined here) and passed the name 'Bernoulli'; use this class.
        super(Negative_binomial, self).__init__(gp_link, 'Negative_binomial')
        # Probit/Heaviside links give a log-concave likelihood.
        if isinstance(gp_link, (link_functions.Heaviside, link_functions.Probit)):
            self.log_concave = True

View file

@ -0,0 +1,53 @@
# Copyright (c) 2014 The GPy authors (see AUTHORS.txt)
# Licensed under the BSD 3-clause license (see LICENSE.txt)
try:
import sympy as sym
sympy_available=True
from sympy.utilities.lambdify import lambdify
from GPy.util.symbolic import gammaln, normcdfln, normcdf
from sympy.functions.elementary.piecewise import Piecewise
except ImportError:
sympy_available=False
import numpy as np
from ..util.univariate_Gaussian import std_norm_pdf, std_norm_cdf
import link_functions
from symbolic import Symbolic
from scipy import stats
if sympy_available:
    class Null_category(Symbolic):
        """
        Null category noise model.

        A classification likelihood with a "null" region of width `width`
        around the decision boundary; missing labels are handled with a
        mixture weighted by the prior positive-class probability `p`.

        .. Note::
            Y takes -1, 0 or 1.

        .. See also::
            symbolic.py, for the parent class
        """
        def __init__(self, gp_link=None):
            if gp_link is None:
                gp_link = link_functions.Identity()
            # width of the null category.
            width = sym.Symbol('width', positive=True, real=True)
            # prior probability of positive class
            p = sym.Symbol('p', positive=True, real=True)
            y = sym.Symbol('y', binary=True)
            f = sym.Symbol('f', positive=True, real=True)
            # Log-likelihood of a missing label: mixture of the two classes,
            # each pushed outside the null region of half-width width/2.
            log_pdf_missing = sym.log((1-p)*normcdf(-f-width/2)
                                      +p*normcdf(f+width/2))
            # Observed-label log-likelihood in terms of normal log-cdfs.
            log_pdf = (y-1)*normcdfln(-f-width/2)+y*normcdfln(f+width/2)
            super(Null_category, self).__init__(log_pdf=log_pdf, missing_log_pdf=log_pdf_missing, gp_link=gp_link, name='Null_category')
            self.p=0.5
            # NOTE(review): self.p / self.width are assigned plain floats here,
            # so the constrain_* calls below will raise AttributeError unless
            # the Symbolic base class replaces these attributes with Param
            # objects during its __init__ -- confirm.
            self.p.constrain_bounded(0., 1.)
            self.width = 1.
            self.width.constrain_fixed()
            self.log_concave = False

View file

@ -0,0 +1,46 @@
# Copyright (c) 2014 The GPy authors (see AUTHORS.txt)
# Licensed under the BSD 3-clause license (see LICENSE.txt)
try:
import sympy as sym
sympy_available=True
from sympy.utilities.lambdify import lambdify
from GPy.util.symbolic import gammaln, ln_cum_gaussian, cum_gaussian
except ImportError:
sympy_available=False
import numpy as np
from ..util.univariate_Gaussian import std_norm_pdf, std_norm_cdf
import link_functions
from symbolic import Symbolic
from scipy import stats
if sympy_available:
    class Negative_binomial(Symbolic):
        """
        Negative binomial

        .. math::
            p(y_{i}|\pi(f_{i})) = \left(\\frac{r}{r+f_i}\\right)^r \\frac{\Gamma(r+y_i)}{y!\Gamma(r)}\left(\\frac{f_i}{r+f_i}\\right)^{y_i}

        .. Note::
            Y takes non-negative integer values.
            link function should have a positive domain, e.g. log (default).

        .. See also::
            symbolic.py, for the parent class
        """
        def __init__(self, gp_link=None):
            if gp_link is None:
                gp_link = link_functions.Log()
            # Symbolic variables: r (dispersion), observed count y, and the
            # (positive) mean f of the negative binomial.
            dispersion = sym.Symbol('dispersion', positive=True, real=True)
            y = sym.Symbol('y', nonnegative=True, integer=True)
            f = sym.Symbol('f', positive=True, real=True)
            # Log-pmf of the negative binomial in its mean/dispersion
            # parameterisation, written with gammaln for numerical stability.
            log_pdf=dispersion*sym.log(dispersion) - (dispersion+y)*sym.log(dispersion+f) + gammaln(y+dispersion) - gammaln(y+1) - gammaln(dispersion) + y*sym.log(f)
            super(Negative_binomial, self).__init__(log_pdf=log_pdf, gp_link=gp_link, name='Negative_binomial')
            # TODO: Check this.
            self.log_concave = False

View file

@ -0,0 +1,316 @@
# Copyright (c) 2014 GPy Authors
# Licensed under the BSD 3-clause license (see LICENSE.txt)
try:
import sympy as sym
sympy_available=True
from sympy.utilities.lambdify import lambdify
except ImportError:
sympy_available=False
import numpy as np
import link_functions
from scipy import stats, integrate
from scipy.special import gammaln, gamma, erf, erfc, erfcx, polygamma
from GPy.util.functions import normcdf, normcdfln, logistic, logisticln
from likelihood import Likelihood
from ..core.parameterization import Param
if sympy_available:
class Symbolic(Likelihood):
    """
    Symbolic likelihood.

    Likelihood where the form of the log likelihood is provided as a sympy
    expression. The expression must contain a symbol named ``f`` (the latent
    function) and a symbol named ``y`` (the data); every remaining symbol is
    treated as a likelihood parameter and exposed as a ``Param``.
    """
    def __init__(self, log_pdf=None, logZ=None, missing_log_pdf=None, gp_link=None, name='symbolic', log_concave=False, param=None, func_modules=None):
        """
        :param log_pdf: sympy expression for the log pdf (required).
        :param logZ: sympy expression for the log partition (currently unused; reserved for EP, see TODO in _gen_code).
        :param missing_log_pdf: optional sympy expression used where y is NaN.
        :param gp_link: link function; defaults to the identity link.
        :param name: name of the likelihood.
        :param log_concave: user assertion that the pdf is log concave (not checked).
        :param param: optional dict mapping parameter names to initial values.
        :param func_modules: extra lambdify module mappings, tried before the defaults.
        """
        if gp_link is None:
            gp_link = link_functions.Identity()
        if log_pdf is None:
            raise ValueError("You must provide an argument for the log pdf.")
        # Copy the caller's list. The original code used a mutable default
        # argument ([]) and extended it in place, so the shared default grew
        # on every instantiation and caller-supplied lists were mutated.
        self.func_modules = [] if func_modules is None else list(func_modules)
        self.func_modules += [{'gamma': gamma,
                               'gammaln': gammaln,
                               'erf': erf, 'erfc': erfc,
                               'erfcx': erfcx,
                               'polygamma': polygamma,
                               'normcdf': normcdf,
                               'normcdfln': normcdfln,
                               'logistic': logistic,
                               'logisticln': logisticln},
                              'numpy']
        super(Symbolic, self).__init__(gp_link, name=name)
        self.missing_data = False
        self._sym_log_pdf = log_pdf
        if missing_log_pdf:
            self.missing_data = True
            self._sym_missing_log_pdf = missing_log_pdf
        # pull the variable names out of the symbolic pdf
        sym_vars = [e for e in self._sym_log_pdf.atoms() if e.is_Symbol]
        self._sym_f = [e for e in sym_vars if e.name == 'f']
        if not self._sym_f:
            raise ValueError('No variable f in log pdf.')
        self._sym_y = [e for e in sym_vars if e.name == 'y']
        if not self._sym_y:
            raise ValueError('No variable y in log pdf.')
        self._sym_theta = sorted([e for e in sym_vars if not (e.name == 'f' or e.name == 'y')], key=lambda e: e.name)
        theta_names = [theta.name for theta in self._sym_theta]
        if self.missing_data:
            # pull the variable names out of the missing data expression
            sym_vars = [e for e in self._sym_missing_log_pdf.atoms() if e.is_Symbol]
            sym_f = [e for e in sym_vars if e.name == 'f']
            if not sym_f:
                raise ValueError('No variable f in missing data log pdf.')
            sym_y = [e for e in sym_vars if e.name == 'y']
            if sym_y:
                raise ValueError('Data is present in missing data portion of likelihood.')
            # additional missing data parameters
            missing_theta = sorted([e for e in sym_vars if not (e.name == 'f' or e.name == 'missing' or e.name in theta_names)], key=lambda e: e.name)
            self._sym_theta += missing_theta
            self._sym_theta = sorted(self._sym_theta, key=lambda e: e.name)
        # These are all the arguments needed to compute likelihoods.
        self.arg_list = self._sym_y + self._sym_f + self._sym_theta
        # these are arguments for computing derivatives.
        derivative_arguments = self._sym_f + self._sym_theta
        # Do symbolic work to compute derivatives.
        # NOTE(review): `stabilise` is not imported in this module's visible
        # import block -- presumably GPy.util.symbolic.stabilise; confirm.
        self._log_pdf_derivatives = {theta.name: stabilise(sym.diff(self._sym_log_pdf, theta)) for theta in derivative_arguments}
        self._log_pdf_second_derivatives = {theta.name: stabilise(sym.diff(self._log_pdf_derivatives['f'], theta)) for theta in derivative_arguments}
        self._log_pdf_third_derivatives = {theta.name: stabilise(sym.diff(self._log_pdf_second_derivatives['f'], theta)) for theta in derivative_arguments}
        if self.missing_data:
            # Do symbolic work to compute derivatives of the missing-data part.
            self._missing_log_pdf_derivatives = {theta.name: stabilise(sym.diff(self._sym_missing_log_pdf, theta)) for theta in derivative_arguments}
            self._missing_log_pdf_second_derivatives = {theta.name: stabilise(sym.diff(self._missing_log_pdf_derivatives['f'], theta)) for theta in derivative_arguments}
            self._missing_log_pdf_third_derivatives = {theta.name: stabilise(sym.diff(self._missing_log_pdf_second_derivatives['f'], theta)) for theta in derivative_arguments}
        # Add parameters to the model.
        for theta in self._sym_theta:
            val = 1.0
            # TODO: need to decide how to handle user passing values for the se parameter vectors.
            # `in` replaces dict.has_key, which is removed in Python 3.
            if param is not None and theta.name in param:
                val = param[theta.name]
            setattr(self, theta.name, Param(theta.name, val, None))
            self.add_parameters(getattr(self, theta.name))
        # TODO: Is there an easy way to check whether the pdf is log
        # concave? For the moment, need user to specify.
        self.log_concave = log_concave
        # initialise code arguments
        self._arguments = {}
        # generate the code for the pdf and derivatives
        self._gen_code()

    def list_functions(self):
        """Return a list of all symbolic functions in the model and their names."""
        # TODO: not implemented yet; currently returns None.

    def _gen_code(self):
        """Generate the code from the symbolic parts that will be used for likelihood computation."""
        # TODO: Check here whether theano is available and set up
        # functions accordingly.
        symbolic_functions = [self._sym_log_pdf]
        deriv_list = [self._log_pdf_derivatives, self._log_pdf_second_derivatives, self._log_pdf_third_derivatives]
        # The original comprehension evaluated sorted(deriv.keys()) before
        # `deriv` was bound (NameError); the loop order is corrected here.
        symbolic_functions += [deriv[key] for deriv in deriv_list for key in sorted(deriv.keys())]
        if self.missing_data:
            symbolic_functions += [self._sym_missing_log_pdf]
            deriv_list = [self._missing_log_pdf_derivatives, self._missing_log_pdf_second_derivatives, self._missing_log_pdf_third_derivatives]
            symbolic_functions += [deriv[key] for deriv in deriv_list for key in sorted(deriv.keys())]
        # The lambdify calls below were commented out in the original, which
        # left self._log_pdf_function and the *_derivative_code dicts
        # undefined and crashed every likelihood evaluation (AttributeError).
        self._log_pdf_function = lambdify(self.arg_list, self._sym_log_pdf, self.func_modules)
        # compute code for derivatives
        self._derivative_code = {key: lambdify(self.arg_list, self._log_pdf_derivatives[key], self.func_modules) for key in self._log_pdf_derivatives.keys()}
        self._second_derivative_code = {key: lambdify(self.arg_list, self._log_pdf_second_derivatives[key], self.func_modules) for key in self._log_pdf_second_derivatives.keys()}
        self._third_derivative_code = {key: lambdify(self.arg_list, self._log_pdf_third_derivatives[key], self.func_modules) for key in self._log_pdf_third_derivatives.keys()}
        if self.missing_data:
            # logpdf_link also calls self._missing_log_pdf_function, which the
            # original never created anywhere; define it here as well.
            self._missing_log_pdf_function = lambdify(self.arg_list, self._sym_missing_log_pdf, self.func_modules)
            self._missing_derivative_code = {key: lambdify(self.arg_list, self._missing_log_pdf_derivatives[key], self.func_modules) for key in self._missing_log_pdf_derivatives.keys()}
            self._missing_second_derivative_code = {key: lambdify(self.arg_list, self._missing_log_pdf_second_derivatives[key], self.func_modules) for key in self._missing_log_pdf_second_derivatives.keys()}
            self._missing_third_derivative_code = {key: lambdify(self.arg_list, self._missing_log_pdf_third_derivatives[key], self.func_modules) for key in self._missing_log_pdf_third_derivatives.keys()}
        # TODO: compute EP code parts based on logZ. We need dlogZ/dmu, d2logZ/dmu2 and dlogZ/dtheta

    def parameters_changed(self):
        # Arguments are rebuilt on every call in _arguments_update, so there
        # is nothing cached to invalidate here.
        pass

    def update_gradients(self, grads):
        """
        Pull out the gradients, be careful as the order must match the order
        in which the parameters are added
        """
        # The way the Laplace approximation is run requires the
        # covariance function to compute the true gradient (because it
        # is dependent on the mode). This means we actually compute
        # the gradient outside this object. This function would
        # normally ask the object to update its gradients internally,
        # but here it provides them externally, because they are
        # computed in the inference code. TODO: Thought: How does this
        # effect EP? Shouldn't this be done by a separate
        # Laplace-approximation specific call?
        for grad, theta in zip(grads, self._sym_theta):
            parameter = getattr(self, theta.name)
            setattr(parameter, 'gradient', grad)

    def _arguments_update(self, f, y):
        """Set up argument lists for the derivatives."""
        # If we do make use of Theano, then at this point we would
        # need to do a lot of precomputation to ensure that the
        # likelihoods and gradients are computed together, then check
        # for parameter changes before updating.
        for fvar in self._sym_f:
            self._arguments[fvar.name] = f
        for yvar in self._sym_y:
            self._arguments[yvar.name] = y
        for theta in self._sym_theta:
            self._arguments[theta.name] = np.asarray(getattr(self, theta.name))

    def pdf_link(self, inv_link_f, y, Y_metadata=None):
        """
        Likelihood function given inverse link of f.

        :param inv_link_f: inverse link of latent variables.
        :type inv_link_f: Nx1 array
        :param y: data
        :type y: Nx1 array
        :param Y_metadata: Y_metadata, forwarded to logpdf_link
        :returns: likelihood evaluated for this point
        :rtype: float
        """
        # Forward the caller's Y_metadata (the original hard-coded None).
        return np.exp(self.logpdf_link(inv_link_f, y, Y_metadata=Y_metadata))

    def logpdf_link(self, inv_link_f, y, Y_metadata=None):
        """
        Log Likelihood Function given inverse link of latent variables.

        :param inv_link_f: latent variables (inverse link of f)
        :type inv_link_f: Nx1 array
        :param y: data
        :type y: Nx1 array
        :param Y_metadata: Y_metadata
        :returns: log likelihood summed over all points
        :rtype: float
        """
        assert np.atleast_1d(inv_link_f).shape == np.atleast_1d(y).shape
        self._arguments_update(inv_link_f, y)
        if self.missing_data:
            # Original referenced self._missing_arguments, which never
            # existed; both branches draw from the shared argument dict.
            ll = np.where(np.isnan(y), self._missing_log_pdf_function(**self._arguments), self._log_pdf_function(**self._arguments))
        else:
            ll = np.where(np.isnan(y), 0., self._log_pdf_function(**self._arguments))
        return np.sum(ll)

    def dlogpdf_dlink(self, inv_link_f, y, Y_metadata=None):
        """
        Gradient of log likelihood with respect to the inverse link function.

        :param inv_link_f: latent variables (inverse link of f)
        :type inv_link_f: Nx1 array
        :param y: data
        :type y: Nx1 array
        :param Y_metadata: Y_metadata
        :returns: gradient of likelihood with respect to each point.
        :rtype: Nx1 array
        """
        assert np.atleast_1d(inv_link_f).shape == np.atleast_1d(y).shape
        self._arguments_update(inv_link_f, y)
        if self.missing_data:
            # Typo fix: original used self._missing_argments / self._argments.
            return np.where(np.isnan(y), self._missing_derivative_code['f'](**self._arguments), self._derivative_code['f'](**self._arguments))
        else:
            return np.where(np.isnan(y), 0., self._derivative_code['f'](**self._arguments))

    def d2logpdf_dlink2(self, inv_link_f, y, Y_metadata=None):
        """
        Hessian of log likelihood given inverse link of latent variables with respect to that inverse link.
        i.e. second derivative logpdf at y given inv_link(f_i) and inv_link(f_j) w.r.t inv_link(f_i) and inv_link(f_j).

        :param inv_link_f: inverse link of the latent variables.
        :type inv_link_f: Nx1 array
        :param y: data
        :type y: Nx1 array
        :param Y_metadata: Y_metadata
        :returns: Diagonal of Hessian matrix (second derivative of likelihood evaluated at points f)
        :rtype: Nx1 array

        .. Note::
            Returns diagonal of Hessian, since every where else it is
            0, as the likelihood factorizes over cases (the
            distribution for y_i depends only on link(f_i) not on
            link(f_(j!=i))
        """
        assert np.atleast_1d(inv_link_f).shape == np.atleast_1d(y).shape
        self._arguments_update(inv_link_f, y)
        if self.missing_data:
            # Typo fix: original used self._missing_argments / self._argments.
            return np.where(np.isnan(y), self._missing_second_derivative_code['f'](**self._arguments), self._second_derivative_code['f'](**self._arguments))
        else:
            return np.where(np.isnan(y), 0., self._second_derivative_code['f'](**self._arguments))

    def d3logpdf_dlink3(self, inv_link_f, y, Y_metadata=None):
        """Third derivative of the log pdf w.r.t. the inverse link, per point (Nx1)."""
        assert np.atleast_1d(inv_link_f).shape == np.atleast_1d(y).shape
        self._arguments_update(inv_link_f, y)
        if self.missing_data:
            # Typo fix: original used self._missing_argments / self._argments.
            return np.where(np.isnan(y), self._missing_third_derivative_code['f'](**self._arguments), self._third_derivative_code['f'](**self._arguments))
        else:
            return np.where(np.isnan(y), 0., self._third_derivative_code['f'](**self._arguments))

    def dlogpdf_link_dtheta(self, inv_link_f, y, Y_metadata=None):
        """Gradient of the summed log pdf w.r.t. each likelihood parameter (length n_theta)."""
        assert np.atleast_1d(inv_link_f).shape == np.atleast_1d(y).shape
        self._arguments_update(inv_link_f, y)
        g = np.zeros((np.atleast_1d(y).shape[0], len(self._sym_theta)))
        for i, theta in enumerate(self._sym_theta):
            if self.missing_data:
                g[:, i:i+1] = np.where(np.isnan(y), self._missing_derivative_code[theta.name](**self._arguments), self._derivative_code[theta.name](**self._arguments))
            else:
                g[:, i:i+1] = np.where(np.isnan(y), 0., self._derivative_code[theta.name](**self._arguments))
        return g.sum(0)

    def dlogpdf_dlink_dtheta(self, inv_link_f, y, Y_metadata=None):
        """Mixed derivative d/dtheta of dlogpdf_dlink, per point (N x n_theta)."""
        assert np.atleast_1d(inv_link_f).shape == np.atleast_1d(y).shape
        self._arguments_update(inv_link_f, y)
        g = np.zeros((np.atleast_1d(y).shape[0], len(self._sym_theta)))
        for i, theta in enumerate(self._sym_theta):
            if self.missing_data:
                g[:, i:i+1] = np.where(np.isnan(y), self._missing_second_derivative_code[theta.name](**self._arguments), self._second_derivative_code[theta.name](**self._arguments))
            else:
                g[:, i:i+1] = np.where(np.isnan(y), 0., self._second_derivative_code[theta.name](**self._arguments))
        return g

    def d2logpdf_dlink2_dtheta(self, inv_link_f, y, Y_metadata=None):
        """Mixed derivative d/dtheta of d2logpdf_dlink2, per point (N x n_theta)."""
        assert np.atleast_1d(inv_link_f).shape == np.atleast_1d(y).shape
        self._arguments_update(inv_link_f, y)
        g = np.zeros((np.atleast_1d(y).shape[0], len(self._sym_theta)))
        for i, theta in enumerate(self._sym_theta):
            if self.missing_data:
                g[:, i:i+1] = np.where(np.isnan(y), self._missing_third_derivative_code[theta.name](**self._arguments), self._third_derivative_code[theta.name](**self._arguments))
            else:
                g[:, i:i+1] = np.where(np.isnan(y), 0., self._third_derivative_code[theta.name](**self._arguments))
        return g

    def predictive_mean(self, mu, sigma, Y_metadata=None):
        raise NotImplementedError

    def predictive_variance(self, mu, variance, predictive_mean=None, Y_metadata=None):
        raise NotImplementedError

    def conditional_mean(self, gp):
        raise NotImplementedError

    def conditional_variance(self, gp):
        raise NotImplementedError

    def samples(self, gp, Y_metadata=None):
        raise NotImplementedError

View file

@ -0,0 +1,234 @@
# Copyright (c) 2014 GPy Authors
# Licensed under the BSD 3-clause license (see LICENSE.txt)
import numpy as np
import sympy as sp
from scipy import stats, special
import scipy as sp
import link_functions
from scipy import stats, integrate
from scipy.special import gammaln, gamma
from likelihood import Likelihood
from ..core.parameterization import Param
from ..core.parameterization.transformations import Logexp
class Symbolic(Likelihood):
    """
    Symbolic likelihood.
    Likelihood where the form of the likelihood is provided by a sympy expression.
    """
    # NOTE(review): this class appears to be an abandoned draft. Several names
    # it uses are undefined in this file (derivative_arguments, param,
    # lambdify, self.derivatives, self.v, self.sigma2, StudentT), so it cannot
    # run as written -- comments below flag each site.
    def __init__(self, likelihood=None, log_likelihood=None, gp_link=None, name='symbolic', log_concave=False):
        # Default link is the identity.
        if gp_link is None:
            gp_link = link_functions.Identity()
        if likelihood is None and log_likelihood is None:
            raise ValueError, "You must provide an argument for the likelihood or the log likelihood."
        super(Symbolic, self).__init__(gp_link, name=name)
        # Derive whichever of likelihood / log likelihood was not supplied.
        # NOTE(review): at module top `import scipy as sp` shadows
        # `import sympy as sp`, so sp.exp/sp.log here are presumably meant to
        # be sympy's but resolve to scipy's -- confirm intended module.
        if likelihood is None:
            self._sp_likelihood = sp.exp(log_likelihood).simplify()
            self._sp_log_likelihood = log_likelihood
        if log_likelihood is None:
            self._sp_likelihood = likelihood
            self._sp_log_likelihood = sp.log(likelihood).simplify()
        # extract parameter names from the covariance
        # pull the variable names out of the symbolic covariance function.
        sp_vars = [e for e in self._sp_likelihood.atoms() if e.is_Symbol]
        # f subscript allows the likelihood of y to be dependent on multiple functions of f. The index of these functions would need to be specified in y_meta.
        self._sp_f = sorted([e for e in sp_vars if e.name[0:2]=='f_'],key=lambda x:int(x.name[2:]))
        # NOTE(review): the key int(x.name[2:]) is applied to the symbol named
        # exactly 'y', whose name[2:] is '' -- int('') raises ValueError.
        self._sp_y = sorted([e for e in sp_vars if e.name=='y'],key=lambda x:int(x.name[2:]))
        self._sp_theta = sorted([e for e in sp_vars if not (e.name[0:2]=='f_' or e.name=='y')],key=lambda e:e.name)
        self.arg_list = self._sp_y + self._sp_f + self._sp_theta
        # this gives us the arguments for first derivative
        first_derivative_arguments = self._sp_f + self._sp_theta
        second_derivative_arguments = {}
        for arg in first_derivative_arguments:
            if arg.name[0:2] == 'f_':
                # take all second derivatives with respect to everything
                second_derivative_arguments[arg.name] = first_derivative_arguments
        # NOTE(review): the dict built just above is immediately discarded by
        # this rebinding.
        second_derivative_arguments = self._sp_f + self._sp_theta
        third_derivative_arguments = self._sp_f + self._sp_theta
        # NOTE(review): `derivative_arguments` is never defined (NameError);
        # presumably first_derivative_arguments was intended.
        self._likelihood_derivatives = {theta.name : sp.diff(self._sp_likelihood,theta).simplify() for theta in derivative_arguments}
        self._log_likelihood_derivatives = {theta.name : sp.diff(self._sp_log_likelihood,theta).simplify() for theta in derivative_arguments}
        # Add parameters to the model.
        for theta in self._sp_theta:
            val = 1.0
            # TODO: what if user has passed a parameter vector, how should that be stored and interpreted? This is the old way before params class.
            # NOTE(review): `param` is not a parameter of this constructor
            # (NameError when reached).
            if param is not None:
                if param.has_key(theta):
                    val = param[theta]
            setattr(self, theta.name, Param(theta.name, val, None))
            self.add_parameters(getattr(self, theta.name))
        # By default it won't be log concave. It would be nice to check for this somehow though!
        self.log_concave = log_concave
        # initialise code arguments
        self._arguments = {}
        # generate the code for the likelihood and derivatives
        self._gen_code()
    def _gen_code(self):
        # Potentially run theano here as an option.
        # NOTE(review): `lambdify` is not imported in this module's visible
        # imports, and `self.derivatives` is never assigned -- both NameError/
        # AttributeError when reached.
        self._likelihood_function = lambdify(self.arg_list, self._sp_likelihood, 'numpy')
        self._log_likelihood_function = lambdify(self.arg_list, self._sp_log_likelihood, 'numpy')
        # for derivatives we need gradient of the likelihood, gradient of log likelihood
        for key in self.derivatives.keys():
            setattr(self, '_log_likelihood_diff_' + key, lambdify(self.arg_list, self.derivatives[key], 'numpy'))
        pass
    def parameters_changed(self):
        # No cached state depends on parameter values here.
        pass
    def update_gradients(self, grads):
        """
        Pull out the gradients, be careful as the order must match the order
        in which the parameters are added
        """
        for grad, theta in zip(grads, self._sp_theta):
            parameter = getattr(self, theta.name)
            setattr(parameter, 'gradient', grad)
    def _arguments_update(self, f, y):
        """Set up argument lists for the derivatives."""
        # Could check if this needs doing or not, there could
        # definitely be some computational savings by checking for
        # parameter updates here.
        # NOTE(review): the loop variables rebind the f and y arrays passed in,
        # so f[:, i] / y[:, i] index the sympy Symbol, not the data -- broken.
        for i, f in enumerate(self._sp_f):
            self._arguments[f.name] = f[:, i][:, None]
        for i, y in enumerate(self._sp_y):
            self._arguments[y.name] = y[:, i][:, None]
        for theta in self._sp_theta:
            self._arguments[theta.name] = np.asarray(getattr(self, theta.name))
    def pdf_link(self, inv_link_f, y, Y_metadata=None):
        """
        Likelihood function given inverse link of f.
        :param inv_link_f: inverse link of latent variables.
        :type inv_link_f: Nx1 array
        :param y: data
        :type y: Nx1 array
        :param Y_metadata: Y_metadata which is not used in student t distribution
        :returns: likelihood evaluated for this point
        :rtype: float
        """
        assert np.atleast_1d(inv_link_f).shape == np.atleast_1d(y).shape
        self._arguments_update(inv_link_f, y)
        l = self._likelihood_function(**self._arguments)
        return np.prod(l)
    def logpdf_link(self, inv_link_f, y, Y_metadata=None):
        """
        Log Likelihood Function given inverse link of latent variables.
        :param inv_inv_link_f: latent variables (inverse link of f)
        :type inv_inv_link_f: Nx1 array
        :param y: data
        :type y: Nx1 array
        :param Y_metadata: Y_metadata
        :returns: likelihood evaluated for this point
        :rtype: float
        """
        assert np.atleast_1d(inv_link_f).shape == np.atleast_1d(y).shape
        self._arguments_update(inv_link_f, y)
        ll = self._log_likelihood_function(**self._arguments)
        return np.sum(ll)
    def dlogpdf_dlink(self, inv_link_f, y, Y_metadata=None):
        """
        Gradient of log likelihood with respect to the inverse link function.
        :param inv_inv_link_f: latent variables (inverse link of f)
        :type inv_inv_link_f: Nx1 array
        :param y: data
        :type y: Nx1 array
        :param Y_metadata: Y_metadata
        :returns: gradient of likelihood with respect to each point.
        :rtype: Nx1 array
        """
        assert np.atleast_1d(inv_link_f).shape == np.atleast_1d(y).shape
        self._arguments_update(inv_link_f, y)
        # NOTE(review): _gen_code stores attributes named
        # '_log_likelihood_diff_<key>'; there is no dict attribute
        # `_log_likelihood_diff` to subscript -- AttributeError when reached.
        return self._log_likelihood_diff['f_0'](**self._arguments)
    def d2logpdf_dlink2(self, inv_link_f, y, Y_metadata=None):
        # NOTE(review): this stub is shadowed by the second d2logpdf_dlink2
        # definition later in the class body.
        raise NotImplementedError
    def d3logpdf_dlink3(self, inv_link_f, y, Y_metadata=None):
        raise NotImplementedError
    def dlogpdf_link_dtheta(self, inv_link_f, y, Y_metadata=None):
        raise NotImplementedError
    def dlogpdf_dlink_dtheta(self, inv_link_f, y, Y_metadata=None):
        raise NotImplementedError
    def d2logpdf_dlink2_dtheta(self, inv_link_f, y, Y_metadata=None):
        raise NotImplementedError
    def d2logpdf_dlink2(self, inv_link_f, y, Y_metadata=None):
        """
        Hessian at y, given link(f), w.r.t link(f)
        i.e. second derivative logpdf at y given link(f_i) and link(f_j) w.r.t link(f_i) and link(f_j)
        The hessian will be 0 unless i == j
        .. math::
            \\frac{d^{2} \\ln p(y_{i}|\lambda(f_{i}))}{d^{2}\\lambda(f)} = \\frac{(v+1)((y_{i}-\lambda(f_{i}))^{2} - \\sigma^{2}v)}{((y_{i}-\lambda(f_{i}))^{2} + \\sigma^{2}v)^{2}}
        :param inv_link_f: latent variables link(f)
        :type inv_link_f: Nx1 array
        :param y: data
        :type y: Nx1 array
        :param Y_metadata: Y_metadata which is not used in student t distribution
        :returns: Diagonal of hessian matrix (second derivative of likelihood evaluated at points f)
        :rtype: Nx1 array
        .. Note::
            Will return diagonal of hessian, since every where else it is 0, as the likelihood factorizes over cases
            (the distribution for y_i depends only on link(f_i) not on link(f_(j!=i))
        """
        # NOTE(review): Student-t leftover -- self.v and self.sigma2 are never
        # set on this class (AttributeError when reached).
        assert np.atleast_1d(inv_link_f).shape == np.atleast_1d(y).shape
        e = y - inv_link_f
        hess = ((self.v + 1)*(e**2 - self.v*self.sigma2)) / ((self.sigma2*self.v + e**2)**2)
        return hess
    def predictive_mean(self, mu, sigma, Y_metadata=None):
        return self.gp_link.transf(mu) # only true in link is monotoci, which it is.
    def predictive_variance(self, mu,variance, predictive_mean=None, Y_metadata=None):
        # NOTE(review): StudentT and self.deg_free are Student-t leftovers not
        # defined in this class/module.
        if self.deg_free <2.:
            return np.empty(mu.shape)*np.nan #not defined for small degress fo freedom
        else:
            return super(StudentT, self).predictive_variance(mu, variance, predictive_mean, Y_metadata)
    def conditional_mean(self, gp):
        return self.gp_link.transf(gp)
    def conditional_variance(self, gp):
        # NOTE(review): self.deg_free is never set on this class.
        return self.deg_free/(self.deg_free - 2.)
    def samples(self, gp, Y_metadata=None):
        """
        Returns a set of samples of observations based on a given value of the latent variable.
        :param gp: latent variable
        """
        # NOTE(review): unfinished -- flattens gp then returns None.
        orig_shape = gp.shape
        gp = gp.flatten()
        pass

View file

@ -0,0 +1,318 @@
# Copyright (c) 2012, 2013 Ricardo Andrade
# Licensed under the BSD 3-clause license (see LICENSE.txt)
import numpy as np
import sympy as sp
from scipy import stats, special
import scipy as sp
import link_functions
from scipy import stats, integrate
from scipy.special import gammaln, gamma
from likelihood import Likelihood
from ..core.parameterization import Param
from ..core.parameterization.transformations import Logexp
class Symbolic(Likelihood):
"""
Symbolic likelihood.
Likelihood where the form of the likelihood is provided by a sympy expression.
"""
def __init__(self, likelihood=None, log_likelihood=None, gp_link=None, name='symbolic', log_concave=False):
if gp_link is None:
gp_link = link_functions.Identity()
if likelihood is None and log_likelihood is None:
raise ValueError, "You must provide an argument for the likelihood or the log likelihood."
super(Symbolic, self).__init__(gp_link, name=name)
if likelihood is None:
self._sp_likelihood = sp.exp(log_likelihood).simplify()
self._sp_log_likelihood = log_likelihood
if log_likelihood is None:
self._sp_likelihood = likelihood
self._sp_log_likelihood = sp.log(likelihood).simplify()
# extract parameter names from the covariance
# pull the variable names out of the symbolic covariance function.
sp_vars = [e for e in self._sp_likelihood.atoms() if e.is_Symbol]
self._sp_f = sorted([e for e in sp_vars if e.name[0:2]=='f_'],key=lambda x:int(x.name[2:]))
self._sp_y = sorted([e for e in sp_vars if e.name[0:2]=='y_'],key=lambda x:int(x.name[2:]))
self._sp_theta = sorted([e for e in sp_vars if not (e.name[0:2]=='f_' or e.name[0:2]=='y_')],key=lambda e:e.name)
self.arg_list = self._sp_y + self._sp_f + self._sp_theta
first_derivative_arguments = self._sp_f + self._sp_theta
second_derivative_arguments = self._sp_f + self._sp_theta
third_derivative_arguments = self._sp_f + self._sp_theta
self._likelihood_derivatives = {theta.name : sp.diff(self._sp_likelihood,theta).simplify() for theta in derivative_arguments}
self._log_likelihood_derivatives = {theta.name : sp.diff(self._sp_log_likelihood,theta).simplify() for theta in derivative_arguments}
# Add parameters to the model.
for theta in self._sp_theta:
val = 1.0
# TODO: what if user has passed a parameter vector, how should that be stored and interpreted? This is the old way before params class.
if param is not None:
if param.has_key(theta):
val = param[theta]
setattr(self, theta.name, Param(theta.name, val, None))
self.add_parameters(getattr(self, theta.name))
# By default it won't be log concave. It would be nice to check for this somehow though!
self.log_concave = log_concave
# initialise code arguments
self._arguments = {}
# generate the code for the covariance functions
self._gen_code()
def _gen_code(self):
# Potentially run theano here as an option.
self._likelihood_function = lambdify(self.arg_list, self._sp_likelihood, 'numpy')
self._log_likelihood_function = lambdify(self.arg_list, self._sp_log_likelihood, 'numpy')
for key in self.derivatives.keys():
setattr(self, '_likelihood_diff_' + key, lambdify(self.arg_list, self.derivatives[key], 'numpy'))
pass
def parameters_changed(self):
pass
def update_gradients(self, grads):
"""
Pull out the gradients, be careful as the order must match the order
in which the parameters are added
"""
#self.sigma2.gradient = grads[0]
#self.v.gradient = grads[1]
pass
def _arguments_update(self, f, y):
"""Set up argument lists for the derivatives."""
# Could check if this needs doing or not, there could
# definitely be some computational savings by checking for
# parameter updates here.
for i, f in enumerate(self._sp_f):
self._arguments[f.name] = f[:, i][:, None]
for i, y in enumerate(self._sp_y):
self._arguments[f.name] = y[:, i][:, None]
for theta in self._sp_theta:
self._arguments[theta.name] = np.asarray(getattr(self, theta.name))
def pdf_link(self, link_f, y, Y_metadata=None):
"""
Likelihood function given link(f)
.. math::
p(y_{i}|\\lambda(f_{i})) = \\frac{\\Gamma\\left(\\frac{v+1}{2}\\right)}{\\Gamma\\left(\\frac{v}{2}\\right)\\sqrt{v\\pi\\sigma^{2}}}\\left(1 + \\frac{1}{v}\\left(\\frac{(y_{i} - \\lambda(f_{i}))^{2}}{\\sigma^{2}}\\right)\\right)^{\\frac{-v+1}{2}}
:param link_f: latent variables link(f)
:type link_f: Nx1 array
:param y: data
:type y: Nx1 array
:param Y_metadata: Y_metadata which is not used in student t distribution
:returns: likelihood evaluated for this point
:rtype: float
"""
assert np.atleast_1d(link_f).shape == np.atleast_1d(y).shape
self._arguments_update(link_f, y)
l = self._likelihood_function(**self._arguments)
return np.prod(l)
def logpdf_link(self, link_f, y, Y_metadata=None):
"""
Log Likelihood Function given link(f)
.. math::
\\ln p(y_{i}|\lambda(f_{i})) = \\ln \\Gamma\\left(\\frac{v+1}{2}\\right) - \\ln \\Gamma\\left(\\frac{v}{2}\\right) - \\ln \\sqrt{v \\pi\\sigma^{2}} - \\frac{v+1}{2}\\ln \\left(1 + \\frac{1}{v}\\left(\\frac{(y_{i} - \lambda(f_{i}))^{2}}{\\sigma^{2}}\\right)\\right)
:param link_f: latent variables (link(f))
:type link_f: Nx1 array
:param y: data
:type y: Nx1 array
:param Y_metadata: Y_metadata which is not used in student t distribution
:returns: likelihood evaluated for this point
:rtype: float
"""
assert np.atleast_1d(link_f).shape == np.atleast_1d(y).shape
self._arguments_update(link_f, y)
ll = self._log_likelihood_function(**self._arguments)
return np.sum(ll)
def dlogpdf_dlink(self, link_f, y, Y_metadata=None):
"""
Gradient of the log likelihood function at y, given link(f) w.r.t link(f)
.. math::
\\frac{d \\ln p(y_{i}|\lambda(f_{i}))}{d\\lambda(f)} = \\frac{(v+1)(y_{i}-\lambda(f_{i}))}{(y_{i}-\lambda(f_{i}))^{2} + \\sigma^{2}v}
:param link_f: latent variables (f)
:type link_f: Nx1 array
:param y: data
:type y: Nx1 array
:param Y_metadata: Y_metadata which is not used in student t distribution
:returns: gradient of likelihood evaluated at points
:rtype: Nx1 array
"""
assert np.atleast_1d(link_f).shape == np.atleast_1d(y).shape
e = y - link_f
grad = ((self.v + 1) * e) / (self.v * self.sigma2 + (e**2))
return grad
def d2logpdf_dlink2(self, link_f, y, Y_metadata=None):
"""
Hessian at y, given link(f), w.r.t link(f)
i.e. second derivative logpdf at y given link(f_i) and link(f_j) w.r.t link(f_i) and link(f_j)
The hessian will be 0 unless i == j
.. math::
\\frac{d^{2} \\ln p(y_{i}|\lambda(f_{i}))}{d^{2}\\lambda(f)} = \\frac{(v+1)((y_{i}-\lambda(f_{i}))^{2} - \\sigma^{2}v)}{((y_{i}-\lambda(f_{i}))^{2} + \\sigma^{2}v)^{2}}
:param link_f: latent variables link(f)
:type link_f: Nx1 array
:param y: data
:type y: Nx1 array
:param Y_metadata: Y_metadata which is not used in student t distribution
:returns: Diagonal of hessian matrix (second derivative of likelihood evaluated at points f)
:rtype: Nx1 array
.. Note::
Will return diagonal of hessian, since every where else it is 0, as the likelihood factorizes over cases
(the distribution for y_i depends only on link(f_i) not on link(f_(j!=i))
"""
assert np.atleast_1d(link_f).shape == np.atleast_1d(y).shape
e = y - link_f
hess = ((self.v + 1)*(e**2 - self.v*self.sigma2)) / ((self.sigma2*self.v + e**2)**2)
return hess
def d3logpdf_dlink3(self, link_f, y, Y_metadata=None):
"""
Third order derivative log-likelihood function at y given link(f) w.r.t link(f)
.. math::
\\frac{d^{3} \\ln p(y_{i}|\lambda(f_{i}))}{d^{3}\\lambda(f)} = \\frac{-2(v+1)((y_{i} - \lambda(f_{i}))^3 - 3(y_{i} - \lambda(f_{i})) \\sigma^{2} v))}{((y_{i} - \lambda(f_{i})) + \\sigma^{2} v)^3}
:param link_f: latent variables link(f)
:type link_f: Nx1 array
:param y: data
:type y: Nx1 array
:param Y_metadata: Y_metadata which is not used in student t distribution
:returns: third derivative of likelihood evaluated at points f
:rtype: Nx1 array
"""
assert np.atleast_1d(link_f).shape == np.atleast_1d(y).shape
e = y - link_f
d3lik_dlink3 = ( -(2*(self.v + 1)*(-e)*(e**2 - 3*self.v*self.sigma2)) /
((e**2 + self.sigma2*self.v)**3)
)
return d3lik_dlink3
def dlogpdf_link_dvar(self, link_f, y, Y_metadata=None):
"""
Gradient of the log-likelihood function at y given f, w.r.t variance parameter (t_noise)
.. math::
\\frac{d \\ln p(y_{i}|\lambda(f_{i}))}{d\\sigma^{2}} = \\frac{v((y_{i} - \lambda(f_{i}))^{2} - \\sigma^{2})}{2\\sigma^{2}(\\sigma^{2}v + (y_{i} - \lambda(f_{i}))^{2})}
:param link_f: latent variables link(f)
:type link_f: Nx1 array
:param y: data
:type y: Nx1 array
:param Y_metadata: Y_metadata which is not used in student t distribution
:returns: derivative of likelihood evaluated at points f w.r.t variance parameter
:rtype: float
"""
assert np.atleast_1d(link_f).shape == np.atleast_1d(y).shape
e = y - link_f
dlogpdf_dvar = self.v*(e**2 - self.sigma2)/(2*self.sigma2*(self.sigma2*self.v + e**2))
return np.sum(dlogpdf_dvar)
def dlogpdf_dlink_dvar(self, link_f, y, Y_metadata=None):
"""
Derivative of the dlogpdf_dlink w.r.t variance parameter (t_noise)
.. math::
\\frac{d}{d\\sigma^{2}}(\\frac{d \\ln p(y_{i}|\lambda(f_{i}))}{df}) = \\frac{-2\\sigma v(v + 1)(y_{i}-\lambda(f_{i}))}{(y_{i}-\lambda(f_{i}))^2 + \\sigma^2 v)^2}
:param link_f: latent variables link_f
:type link_f: Nx1 array
:param y: data
:type y: Nx1 array
:param Y_metadata: Y_metadata which is not used in student t distribution
:returns: derivative of likelihood evaluated at points f w.r.t variance parameter
:rtype: Nx1 array
"""
assert np.atleast_1d(link_f).shape == np.atleast_1d(y).shape
e = y - link_f
dlogpdf_dlink_dvar = (self.v*(self.v+1)*(-e))/((self.sigma2*self.v + e**2)**2)
return dlogpdf_dlink_dvar
def d2logpdf_dlink2_dvar(self, link_f, y, Y_metadata=None):
"""
Gradient of the hessian (d2logpdf_dlink2) w.r.t variance parameter (t_noise)
.. math::
\\frac{d}{d\\sigma^{2}}(\\frac{d^{2} \\ln p(y_{i}|\lambda(f_{i}))}{d^{2}f}) = \\frac{v(v+1)(\\sigma^{2}v - 3(y_{i} - \lambda(f_{i}))^{2})}{(\\sigma^{2}v + (y_{i} - \lambda(f_{i}))^{2})^{3}}
:param link_f: latent variables link(f)
:type link_f: Nx1 array
:param y: data
:type y: Nx1 array
:param Y_metadata: Y_metadata which is not used in student t distribution
:returns: derivative of hessian evaluated at points f and f_j w.r.t variance parameter
:rtype: Nx1 array
"""
assert np.atleast_1d(link_f).shape == np.atleast_1d(y).shape
e = y - link_f
d2logpdf_dlink2_dvar = ( (self.v*(self.v+1)*(self.sigma2*self.v - 3*(e**2)))
/ ((self.sigma2*self.v + (e**2))**3)
)
return d2logpdf_dlink2_dvar
def dlogpdf_link_dtheta(self, f, y, Y_metadata=None):
dlogpdf_dvar = self.dlogpdf_link_dvar(f, y, Y_metadata=Y_metadata)
dlogpdf_dv = np.zeros_like(dlogpdf_dvar) #FIXME: Not done yet
return np.hstack((dlogpdf_dvar, dlogpdf_dv))
def dlogpdf_dlink_dtheta(self, f, y, Y_metadata=None):
dlogpdf_dlink_dvar = self.dlogpdf_dlink_dvar(f, y, Y_metadata=Y_metadata)
dlogpdf_dlink_dv = np.zeros_like(dlogpdf_dlink_dvar) #FIXME: Not done yet
return np.hstack((dlogpdf_dlink_dvar, dlogpdf_dlink_dv))
def d2logpdf_dlink2_dtheta(self, f, y, Y_metadata=None):
d2logpdf_dlink2_dvar = self.d2logpdf_dlink2_dvar(f, y, Y_metadata=Y_metadata)
d2logpdf_dlink2_dv = np.zeros_like(d2logpdf_dlink2_dvar) #FIXME: Not done yet
return np.hstack((d2logpdf_dlink2_dvar, d2logpdf_dlink2_dv))
def predictive_mean(self, mu, sigma, Y_metadata=None):
    """Predictive mean: push the latent mean through the link function."""
    # Only exact because the link function is monotonic, which it is here.
    return self.gp_link.transf(mu)
def predictive_variance(self, mu, variance, predictive_mean=None, Y_metadata=None):
    """Predictive variance; undefined (NaN) for fewer than two degrees of freedom."""
    if self.deg_free < 2.:
        # The Student-t variance does not exist when deg_free < 2.
        return np.empty(mu.shape) * np.nan
    return super(StudentT, self).predictive_variance(mu, variance, predictive_mean, Y_metadata)
def conditional_mean(self, gp):
    """Expected value of the observation given the latent function value."""
    return self.gp_link.transf(gp)
def conditional_variance(self, gp):
    """Variance of a Student-t observation: v / (v - 2); independent of the latent value."""
    nu = self.deg_free
    return nu / (nu - 2.)
def samples(self, gp, Y_metadata=None):
    """
    Returns a set of samples of observations based on a given value of the latent variable.

    :param gp: latent variable
    """
    # NOTE(review): implementation appears truncated -- the flattened latent
    # values are never used and no samples are drawn, so the method currently
    # returns None. TODO: confirm against the complete source.
    orig_shape = gp.shape
    gp = gp.flatten()
    pass

1
GPy/log_like_grad.txt Normal file
View file

@ -0,0 +1 @@
core/model.py: def get(self,name):

149
GPy/mappings/symbolic.py~ Normal file
View file

@ -0,0 +1,149 @@
# Copyright (c) 2014 GPy Authors
# Licensed under the BSD 3-clause license (see LICENSE.txt)
try:
import sympy as sym
sympy_available=True
from sympy.utilities.lambdify import lambdify
from GPy.util.symbolic import stabilise
except ImportError:
sympy_available=False
from ..core.mapping import Mapping, Bijective_mapping
import numpy as np
from scipy.special import gammaln, gamma, erf, erfc, erfcx, polygamma
from GPy.util.functions import normcdf, normcdfln, logistic, logisticln
from ..core.parameterization import Param
if sympy_available:
class Symbolic(Mapping):
    """
    Symbolic mapping.

    Mapping where the form of the mapping is provided by a sympy
    expression ``f``.  Free symbols of ``f`` whose names start with
    ``x_`` are treated as input variables; every other free symbol
    becomes a model parameter.

    :param f: sympy expression defining the mapping (required).
    :param logZ: accepted but never read here.  # NOTE(review): confirm intent
    :param name: name of the mapping.
    :param param: optional dict mapping parameter names to initial values.
    :param func_modules: extra module dicts handed to code generation.
    """
    def __init__(self, f=None, logZ=None, name='symbolic', param=None, func_modules=None):
        if f is None:
            raise ValueError("You must provide an argument for the function.")
        # Copy to avoid mutating a caller-supplied list; the original used a
        # mutable default argument ([]) and extended it in place with +=.
        self.func_modules = list(func_modules) if func_modules is not None else []
        self.func_modules += [{'gamma': gamma,
                               'gammaln': gammaln,
                               'erf': erf, 'erfc': erfc,
                               'erfcx': erfcx,
                               'polygamma': polygamma,
                               'normcdf': normcdf,
                               'normcdfln': normcdfln,
                               'logistic': logistic,
                               'logisticln': logisticln},
                              'numpy']
        # NOTE(review): the original passed the undefined name `gp_link` to the
        # superclass constructor; Mapping's signature should be confirmed.
        super(Symbolic, self).__init__(name=name)
        # `self.symbolic` was never initialised in the original -- create it.
        self.symbolic = {'function': f}
        # Pull the variable names out of the symbolic expression.
        sym_vars = [e for e in f.atoms() if e.is_Symbol]
        self.symbolic['x'] = [e for e in sym_vars if e.name[:2] == 'x_']
        # The original tested the non-existent key 'f' here.
        if not self.symbolic['x']:
            raise ValueError('No variable x in f().')
        self.symbolic['theta'] = sorted([e for e in sym_vars if not e.name[:2] == 'x_'],
                                        key=lambda e: e.name)
        theta_names = [theta.name for theta in self.symbolic['theta']]  # closing bracket was missing
        # Aliases referenced throughout the class but never defined in the original.
        self._sym_x = self.symbolic['x']
        self._sym_theta = self.symbolic['theta']
        # These are all the arguments needed to compute the mapping.
        self.arg_list = self.symbolic['x'] + self.symbolic['theta']
        # These are arguments for computing derivatives.
        derivative_arguments = self.arg_list
        # Do symbolic work to compute derivatives (w.r.t. inputs and parameters).
        self.symbolic['derivatives'] = {theta.name: stabilise(sym.diff(f, theta))
                                        for theta in derivative_arguments}
        # Add parameters to the model.
        for theta in self._sym_theta:
            val = 1.0
            # TODO: need to decide how to handle user passing values for the se parameter vectors.
            if param is not None and theta.name in param:  # has_key() is Python-2 only
                val = param[theta.name]
            setattr(self, theta.name, Param(theta.name, val, None))
            self.add_parameters(getattr(self, theta.name))
        # Initialise code arguments.
        self._arguments = {}
        # Generate the code for the mapping and its derivatives.
        self._gen_code()

    def _gen_code(self):
        """Generate the code from the symbolic parts used for the mapping computation."""
        # NOTE(review): `GPy` is not imported in this module and
        # `GPy.util.function.gen_code` could not be verified -- confirm.
        self.code = GPy.util.function.gen_code(self.symbolic)

    def parameters_changed(self):
        """Re-run all precomputation code after a parameter update."""
        for variable, code in self.code['precompute'].items():
            # The original called the non-existent method self.setattr(...).
            setattr(self, variable, eval(code, self.namespace))

    def update_gradients(self, grads):
        """Set the gradient of each parameter from its generated derivative code."""
        for param, code in self.code['derivatives'].items():
            # The original used non-existent self.getattr(...).setattr(...).
            getattr(self, param).gradient = eval(code, self.namespace)

    def _arguments_update(self, x):
        """Set up argument lists for the function and its derivatives."""
        # If we do make use of Theano, then at this point we would
        # need to do a lot of precomputation to ensure that the
        # mapping and gradients are computed together, then check
        # for parameter changes before updating.
        for i, fvar in enumerate(self._sym_x):
            self._arguments[fvar.name] = x[:, i]
        for theta in self._sym_theta:
            self._arguments[theta.name] = np.asarray(getattr(self, theta.name))

    def f(self, x):
        """
        Evaluate the mapping at the input locations.

        :param x: input locations, one column per x_ variable.
        :type x: NxD array
        :returns: the mapping evaluated at x.
        """
        # The original passed the undefined names (inv_link_f, y) here.
        self._arguments_update(x)
        # NOTE(review): self._f_function is never defined in this file --
        # presumably produced by _gen_code; confirm.
        return self._f_function(x)

    def df_dX(self, X):
        """
        Gradient of the mapping with respect to the inputs X.

        :param X: input locations.
        :type X: NxD array
        :returns: gradient of the mapping with respect to each input.
        :rtype: NxD array
        """
        self._arguments_update(X)
        # NOTE(review): self._derivative_code is never defined in this file -- confirm.
        return self._derivative_code['X'](**self._arguments)

    def df_dtheta(self, X):
        """Gradient of the mapping w.r.t. each parameter, summed over data points."""
        self._arguments_update(X)
        g = np.zeros((np.atleast_1d(X).shape[0], len(self._sym_theta)))
        for i, theta in enumerate(self._sym_theta):
            g[:, i:i+1] = self._derivative_code[theta.name](**self._arguments)
        return g.sum(0)

File diff suppressed because one or more lines are too long

View file

@ -405,11 +405,11 @@ def lee_yeast_ChIP(data_set='lee_yeast_ChIP'):
import zipfile
dir_path = os.path.join(data_path, data_set)
filename = os.path.join(dir_path, 'binding_by_gene.tsv')
X = read_csv(filename, header=1, index_col=0, sep='\t')
transcription_factors = [col for col in X.columns if col[:7] != 'Unnamed']
annotations = X[['Unnamed: 1', 'Unnamed: 2', 'Unnamed: 3']]
X = X[transcription_factors]
return data_details_return({'annotations' : annotations, 'X' : X, 'transcription_factors': transcription_factors}, data_set)
S = read_csv(filename, header=1, index_col=0, sep='\t')
transcription_factors = [col for col in S.columns if col[:7] != 'Unnamed']
annotations = S[['Unnamed: 1', 'Unnamed: 2', 'Unnamed: 3']]
S = S[transcription_factors]
return data_details_return({'annotations' : annotations, 'Y' : S, 'transcription_factors': transcription_factors}, data_set)
def fruitfly_tomancak(data_set='fruitfly_tomancak', gene_number=None):

View file

View file

@ -0,0 +1,213 @@
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<base href="http://accad.osu.edu/" />
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
<title>ACCAD | Page Not Found</title>
<link rel="alternate" type="application/rss+xml" title="ACCAD RSS feed" href="125.html">
<!-- OSU Navigation Bar: Required CSS. -->
<link rel="stylesheet" type="text/css" href="/assets/osu-navbar-media/css/navbar.css" />
<!-- OSU Navigation Bar: Required Conditional CSS. -->
<!--[if lte IE 6]>
<link rel="stylesheet" type="text/css" href="/assets/osu-navbar-media/css/navbar-ie6.css" />
<link rel="stylesheet" type="text/css" href="/assets/osu-navbar-media/extras/ie6-transparency/navbar-ie6-png.css" />
<![endif]-->
<!--[if IE 7]>
<link rel="stylesheet" type="text/css" href="/assets/osu-navbar-media/css/navbar-ie7.css" />
<![endif]-->
<style type="text/css">
<!--#page-content {width: 100%;}-->
</style>
<!-- OSU: Optional Favicon -->
<link rel="icon" href="/assets/osu-navbar-media/img/favicon.ico" type="image/x-icon" />
<link rel="shortcut icon" href="/assets/osu-navbar-media/img/favicon.ico" type="image/x-icon" />
<!-- Blueprint: Base CSS -->
<link href="/assets/css/screen.css" rel="stylesheet" type="text/css" media="screen, projection" />
<link href="/assets/css/print.css" rel="stylesheet" type="text/css" media="print" />
<!-- site specific styles -->
<link href="/assets/css/accad_styles.css" rel="stylesheet" type="text/css" media="screen, projection" />
<!-- [if IE]>
<link href="/assets/css/ie.css" rel="stylesheet" type="text/css" media="screen, projection" />
<![endif]-->
<!-- Base Javascript -->
<script type="text/javascript" src="http://ajax.googleapis.com/ajax/libs/jquery/1.4/jquery.min.js"></script> <!-- base jquery library -->
<!-- Drop-down menu -->
<script type="text/javascript" src="/assets/js/hoverIntent.js"></script>
<script type="text/javascript" src="/assets/js/superfish.js"></script>
<script type="text/javascript">
// initialize plugins
jQuery(function(){
jQuery('ul#topnav').superfish({
delay: 1000, // one second delay on mouseout
animation: {height:'show'}, // slide-down animation
speed:'fast', // faster animation speed
dropShadows: false // disable drop shadows
});
});
</script>
<script type="text/javascript">
var _gaq = _gaq || [];
_gaq.push(['_setAccount', 'UA-22129230-1']);
_gaq.push(['_trackPageview']);
(function() {
var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true;
ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js';
var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s);
})();
</script>
</head>
<body>
<div id="osu_navbar">
<div id="osu-Navbar">
<div class="container">
<p>
<a href="#page-content" id="skip" class="osu-semantic">skip to main content</a>
</p>
<h2 class="osu-semantic">OSU Navigation Bar</h2>
<div id="osu-NavbarBreadcrumb">
<p id="osu">
<a title="The Ohio State University homepage" href="http://www.osu.edu/">The Ohio State University</a>
</p>
<p id="site-name">
Arts and Sciences
</p>
</div>
<div id="osu-NavbarLinks">
<ul>
<li><a href="http://www.osu.edu/help.php" title="OSU Help">Help</a></li>
<li><a href="http://buckeyelink.osu.edu/" title="Buckeye Link">Buckeye Link</a></li>
<li><a href="http://www.osu.edu/map/" title="Campus map">Map</a></li>
<li><a href="http://www.osu.edu/findpeople.php" title="Find people at OSU">Find People</a></li>
<li><a href="https://webmail.osu.edu" title="OSU Webmail">Webmail</a></li>
<li><a href="http://www.osu.edu/search.php" title="Ohio State's main search page">Search Ohio State</a>
</li>
</ul>
</div>
</div>
</div>
</div>
<div id="header" style="z-index:99999;">
<div class="container">
<h1 class="osu-semantic">ACCAD Advanced Computing Center for the Arts and Design</h1>
<a href="index.html" title="ACCAD Home"><img src="/assets/images/site/accad_name.png" class="accad_name" alt="Advanced Computing Center for the Arts and Design"/></a>
<a href="http://www.osu.edu"><img src="/assets/images/site/logo-ohiostate.gif" class="osu_logo" alt="The Ohio State University" /></a>
</div><!-- end container -->
</div><!-- end header -->
<div id="global_nav">
<div class="container">
<ul id="topnav">
<li ><a href="/about_us.html" >About Us</a></li>
<li ><a href="researchmain/gallery.html" >Research</a></li>
<li ><a href="/academics.html" >Academics</a></li>
<li ><a href="/people.html" >People</a></li>
<li ><a href="/outreach.html" >Outreach</a></li>
<li class="last"><a href="/news.html" >News</a></li>
</ul>
</div><!-- end container -->
</div><!-- end global_nav -->
<div id="content" class="container">
<div id="subnav" class="span-6 prepend-1 column first">
</div>
<!-- end subnav -->
<div id="page_body" class="span-15 prepend-1 column last">
<h2>Page Not Found</h2>
<h4>Sorry, we couldn't find the page you are looking for</h4>
<p>The page may have moved, or been deleted. Please use the upper navigation menu to find what you are looking for, and don't forget to update your bookmarks. Thanks for your understanding.</p>
</div>
<br class="clear" />
</div><!-- end content -->
<div id="footer">
<div class="container">
<table border="0" cellspacing="0" cellpadding="0">
<tbody>
<tr>
<td width="400px" style="color:white" ><img src="assets/images/Footer_underline1.png"><br><span style="font-size: 90%;"><strong>Contact Us</strong> | (614) 292.3416<br />
1224 Kinnear Rd., Columbus, OH 43212
<br>accad [at] accad [dot] osu [dot] edu
<br><img src="assets/images/Footer_underline2.png"></td>
<td style="text-align: left;" width="470px">
<div id="searchbox">
<form id="ajaxSearch_form" action="316.html" method="post">
<fieldset>
<input type="hidden" name="advsearch" value="oneword" />
<label>
<input id="ajaxSearch_input" class="cleardefault" type="text" name="search" value="Search here..." onfocus="this.value=(this.value=='Search here...')? '' : this.value ;" />
</label>
<label>
<input id="ajaxSearch_submit" type="submit" name="sub" value="Go!" />
</label>
</fieldset>
</form>
</div><!-- end searchbox -->
<td style="text-align: right;" width="250px">
<a href="https://www.giveto.osu.edu/igive/OnlineGiving/fund_results.aspx?Source_Code=WA&Fund=309285" target="_blank"><img style="vertical-align: middle;" src="assets/images/site/icon-giveto.png"></a>&nbsp;&nbsp;&nbsp;&nbsp;<a href="http://www.youtube.com/accadosu" target="_blank"><img style="vertical-align: middle;" src="assets/images/site/icon-youtube.png"></a>
</td>
</tr>
<tr>
<td width="430px" style="color:white"><span style="font-size: 90%;"><strong><a href="http://artsandsciences.osu.edu/privacy-policy
" target="_blank">Privacy Policy</a>
<br /><a href="academics/studying_at_accad/information_request.html">Questions, feedback, accessibility</a> &nbsp;&nbsp; </strong></span><img style="vertical-align: top;margin-top:-10px;"src="assets/images/site/icon-accessibility2.png" />
</td>
<td width="370px" style="color:white; padding-bottom:20px;" >
<span style="font-size: 90%;">© 2011, <a href="http://www.osu.edu">The Ohio State University</a>, <a href="http://artsandsciences.osu.edu/">College of Arts and Sciences</a><br />
</span></td>
<td style="text-align: right;" width="250px">&nbsp;</td></tr>
</tbody>
</table>
<a href="http://artsandsciences.osu.edu/"><img src="assets/images/site/CollegeofArtsLogo.png" class="asc_badge"></a>
<script type="text/javascript">
var _gaq = _gaq || [];
_gaq.push(['_setAccount', 'UA-22129230-1']);
_gaq.push(['_trackPageview']);
(function() {
var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true;
ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js';
var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s);
})();
</script>
</div><!-- end container -->
</div><!-- end footer -->
</body>
</html>

View file

@ -0,0 +1,22 @@
LFHD, RFHD
RFHD, RBHD
RBHD, LBHD
LBHD, LFHD
LELB, LWRB
LWRB, LFIN
LELB, LSHO
LSHO, RSHO
RSHO, STRN
LSHO, STRN
RSHO, RELB
RELB, RWRB
RWRB, RFIN
LSHO, LFWT
RSHO, RFWT
LFWT, RFWT
LFWT, LKNE
RFWT, RKNE
LKNE, LHEE
RKNE, RHEE
RMT5, RHEE
LMT5, LHEE

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View file

@ -0,0 +1,266 @@
Field Time EricAutoLabel:C7:X EricAutoLabel:C7:Y EricAutoLabel:C7:Z EricAutoLabel:CLAV:X EricAutoLabel:CLAV:Y EricAutoLabel:CLAV:Z EricAutoLabel:LANK:X EricAutoLabel:LANK:Y EricAutoLabel:LANK:Z EricAutoLabel:LBHD:X EricAutoLabel:LBHD:Y EricAutoLabel:LBHD:Z EricAutoLabel:LBWT:X EricAutoLabel:LBWT:Y EricAutoLabel:LBWT:Z EricAutoLabel:LELB:X EricAutoLabel:LELB:Y EricAutoLabel:LELB:Z EricAutoLabel:LFHD:X EricAutoLabel:LFHD:Y EricAutoLabel:LFHD:Z EricAutoLabel:LFIN:X EricAutoLabel:LFIN:Y EricAutoLabel:LFIN:Z EricAutoLabel:LFRM:X EricAutoLabel:LFRM:Y EricAutoLabel:LFRM:Z EricAutoLabel:LFWT:X EricAutoLabel:LFWT:Y EricAutoLabel:LFWT:Z EricAutoLabel:LHEE:X EricAutoLabel:LHEE:Y EricAutoLabel:LHEE:Z EricAutoLabel:LKNE:X EricAutoLabel:LKNE:Y EricAutoLabel:LKNE:Z EricAutoLabel:LMT5:X EricAutoLabel:LMT5:Y EricAutoLabel:LMT5:Z EricAutoLabel:LSHN:X EricAutoLabel:LSHN:Y EricAutoLabel:LSHN:Z EricAutoLabel:LSHO:X EricAutoLabel:LSHO:Y EricAutoLabel:LSHO:Z EricAutoLabel:LTHI:X EricAutoLabel:LTHI:Y EricAutoLabel:LTHI:Z EricAutoLabel:LTOE:X EricAutoLabel:LTOE:Y EricAutoLabel:LTOE:Z EricAutoLabel:LUPA:X EricAutoLabel:LUPA:Y EricAutoLabel:LUPA:Z EricAutoLabel:LWRA:X EricAutoLabel:LWRA:Y EricAutoLabel:LWRA:Z EricAutoLabel:LWRB:X EricAutoLabel:LWRB:Y EricAutoLabel:LWRB:Z EricAutoLabel:RANK:X EricAutoLabel:RANK:Y EricAutoLabel:RANK:Z EricAutoLabel:RBAC:X EricAutoLabel:RBAC:Y EricAutoLabel:RBAC:Z EricAutoLabel:RBHD:X EricAutoLabel:RBHD:Y EricAutoLabel:RBHD:Z EricAutoLabel:RBWT:X EricAutoLabel:RBWT:Y EricAutoLabel:RBWT:Z EricAutoLabel:RELB:X EricAutoLabel:RELB:Y EricAutoLabel:RELB:Z EricAutoLabel:RFHD:X EricAutoLabel:RFHD:Y EricAutoLabel:RFHD:Z EricAutoLabel:RFIN:X EricAutoLabel:RFIN:Y EricAutoLabel:RFIN:Z EricAutoLabel:RFRM:X EricAutoLabel:RFRM:Y EricAutoLabel:RFRM:Z EricAutoLabel:RFWT:X EricAutoLabel:RFWT:Y EricAutoLabel:RFWT:Z EricAutoLabel:RHEE:X EricAutoLabel:RHEE:Y EricAutoLabel:RHEE:Z EricAutoLabel:RKNE:X EricAutoLabel:RKNE:Y EricAutoLabel:RKNE:Z EricAutoLabel:RMT5:X EricAutoLabel:RMT5:Y 
EricAutoLabel:RMT5:Z EricAutoLabel:RSHN:X EricAutoLabel:RSHN:Y EricAutoLabel:RSHN:Z EricAutoLabel:RSHO:X EricAutoLabel:RSHO:Y EricAutoLabel:RSHO:Z EricAutoLabel:RTHI:X EricAutoLabel:RTHI:Y EricAutoLabel:RTHI:Z EricAutoLabel:RTOE:X EricAutoLabel:RTOE:Y EricAutoLabel:RTOE:Z EricAutoLabel:RUPA:X EricAutoLabel:RUPA:Y EricAutoLabel:RUPA:Z EricAutoLabel:RWRA:X EricAutoLabel:RWRA:Y EricAutoLabel:RWRA:Z EricAutoLabel:RWRB:X EricAutoLabel:RWRB:Y EricAutoLabel:RWRB:Z EricAutoLabel:STRN:X EricAutoLabel:STRN:Y EricAutoLabel:STRN:Z EricAutoLabel:T10:X EricAutoLabel:T10:Y EricAutoLabel:T10:Z
1 0.0000 -2114.7 -1520.2 1369.2 -9999.99 -9999.99 -9999.99 -2080.1 -1275.6 108.89 -2053.5 -1435.7 1485.6 -2279.6 -1465.8 934.99 -2175.1 -1228.9 999.83 -1937.4 -1376.6 1538.5 -2080.3 -1212.9 708.05 -2174.2 -1234.3 880.67 -2101.2 -1402.6 839.51 -2123.7 -1337.2 27.412 -2068.7 -1303.1 456.76 -1986.1 -1198.3 30.383 -2090.1 -1274.2 329.88 -2076.5 -1328.4 1341.4 -2081 -1315.8 688.45 -1907.8 -1219.2 57.456 -2144.7 -1246.1 1186.5 -2044.7 -1265.6 791.05 -2159.9 -1202 791.99 -2307.5 -1779.3 98.701 -2154.1 -1623.9 1289.1 -2002.2 -1571.7 1479.5 -2243.7 -1639.3 955.61 -1975 -1802.8 1012 -1856.9 -1482.8 1545.2 -1939.7 -1781.1 682.85 -1946.1 -1802.3 835.1 -2033.6 -1668.4 848.59 -2381.8 -1723.1 32.929 -2216.9 -1767.9 453.96 -2196.4 -1800.4 31.401 -2282.2 -1790.9 253.92 -1987.4 -1655.2 1329 -2129 -1749.6 592.38 -2107.2 -1753.5 46.253 -1970.5 -1731.1 1254 -1904.1 -1724.2 778.83 -1990.1 -1828.3 765.59 -9999.99 -9999.99 -9999.99 -2227.4 -1551.7 1212.3
2 0.0083 -2110.4 -1516.5 1368 -9999.99 -9999.99 -9999.99 -2079.8 -1275.4 109.23 -2049.1 -1432.1 1483.7 -2276.3 -1462.8 933.8 -2173.6 -1225.8 999.49 -1931.3 -1373.2 1536.2 -2082.4 -1211.2 706.7 -2174 -1232 879.83 -2092.2 -1399.4 838.66 -2124 -1337.3 28.006 -2066.4 -1302 458.03 -1985.9 -1198.7 30.128 -2087.8 -1273.1 330.31 -2072 -1324.7 1339.9 -2077.8 -1313.6 688.45 -1907.7 -1219.6 57.456 -2141.3 -1243 1185.5 -2045.4 -1264.3 788.85 -2159.3 -1199.8 790.29 -2307.6 -1779.3 98.532 -2149.6 -1620.4 1288 -1997.5 -1567.9 1477.5 -2241.1 -1636.3 955.27 -1970.3 -1800.1 1010.7 -1852.3 -1479 1543.4 -1936.3 -1775.8 681.66 -1942.1 -1798.8 834.17 -2030.9 -1666.1 847.15 -2381.8 -1723.2 32.929 -2217.3 -1767.6 454.13 -2196.1 -1800.7 31.062 -2282.4 -1790.9 254.01 -1982.4 -1651.3 1327.4 -2128.6 -1748.5 592.21 -2107.4 -1753.8 46.338 -1965.4 -1727.7 1252.7 -1900.9 -1719.2 777.9 -1984.7 -1824.1 764.15 -1938.4 -1463.5 1078.3 -2223.2 -1548.4 1211.4
3 0.0167 -2105.7 -1512.9 1366.8 -9999.99 -9999.99 -9999.99 -2079.7 -1275.5 109.23 -2044.3 -1428.2 1481.6 -2273.2 -1459.6 932.78 -2171.8 -1222.5 998.81 -1929.9 -1369 1535.3 -2083.5 -1210.2 705.25 -2173.3 -1229.4 878.81 -2087.5 -1397.4 837.14 -2124.2 -1337.3 28.176 -2062.5 -1300.3 458.12 -1985.7 -1199.2 29.619 -2085.4 -1271.8 330.82 -2067.4 -1321 1338.3 -2074.7 -1311.7 687.6 -1907.7 -1219.7 57.116 -2137.6 -1239.4 1184.8 -2046.4 -1263 786.98 -2159 -1197.4 788.85 -2307.6 -1779.5 98.701 -2145.2 -1616.6 1286.9 -1992.9 -1564 1475.7 -2237.9 -1633.3 954.76 -1965.4 -1797.3 1009.5 -1847.7 -1474.9 1541.5 -1932.2 -1769.8 680.64 -1936.9 -1794 832.55 -2028.6 -1664.1 845.96 -2382 -1723.3 33.268 -2217.6 -1767.4 454.3 -2196 -1800.8 30.977 -2282.4 -1790.7 254.09 -1977.6 -1647.5 1325.8 -2127.5 -1747.2 591.44 -2107.4 -1754 46.508 -1960.4 -1724 1251.3 -1897.1 -1713.5 777.05 -1979.5 -1819.4 762.54 -1935.8 -1459.3 1074.9 -2219.3 -1544.9 1210.6
4 0.0250 -2100.7 -1509 1365.2 -1942.7 -1447.4 1264.3 -2079.4 -1275.8 109.14 -2039.4 -1424.2 1479.6 -2270 -1456.3 931.85 -2169.5 -1218.7 997.79 -1922.7 -1365.4 1532.7 -2084.4 -1209.7 702.88 -2173.1 -1226.5 878.04 -2084.4 -1395.4 835.35 -2124 -1337.3 28.516 -2056.5 -1297.8 457.18 -1985.8 -1199.9 29.449 -2082.2 -1270.8 330.9 -2062.7 -1317 1336.8 -2071.3 -1309.8 686.92 -1907.9 -1220.6 56.777 -2133.8 -1235.8 1183.8 -2047.6 -1261.6 785.37 -2159.8 -1194.9 787.91 -2307.6 -1779.7 98.701 -2140.1 -1612.7 1285.5 -1988.1 -1560.2 1473.8 -2234.5 -1630.1 954.25 -1960.6 -1794.7 1008.6 -1843.2 -1471.1 1539.8 -1927.9 -1763.3 679.62 -1930.2 -1788.3 830.86 -2026.2 -1661.8 844.61 -2382.2 -1723.4 33.438 -2217.4 -1766.5 454.3 -2195.9 -1800.9 31.147 -2282.7 -1790.4 254.35 -1972.5 -1643.7 1323.9 -2125.9 -1745.3 590.51 -2107.4 -1754.1 46.338 -1955.1 -1720.3 1249.8 -1893 -1707.5 776.37 -1973.9 -1814.6 760.84 -1932.7 -1456.6 1073.7 -2215.1 -1541.1 1209.7
5 0.0333 -2095.3 -1504.3 1363.3 -1935.2 -1445.4 1263.3 -2079.3 -1276.2 109.14 -2034.3 -1420.2 1477.5 -2266.7 -1453 930.75 -2167.4 -1214.7 996.52 -1916.6 -1361.3 1530.2 -2086.5 -1208.6 700.5 -2173.3 -1223 877.62 -2081.5 -1392.6 833.4 -2123.9 -1338 29.11 -2051.3 -1295.4 456.67 -1986.2 -1201.4 29.449 -2079.3 -1269.5 331.24 -2057.5 -1313.1 1335 -2066.4 -1307.4 686.24 -1908.4 -1221.8 56.522 -2130.2 -1231.9 1182.5 -2049.1 -1260.7 782.99 -2160.5 -1192.6 786.81 -2307.6 -1779.7 98.701 -2135.3 -1608.9 1284.5 -1983.4 -1556.4 1471.9 -2230.9 -1626.6 953.49 -1955.4 -1792.2 1007.6 -1838.4 -1467.3 1537.7 -1922.7 -1756 678.18 -1923.5 -1782.8 829.07 -2023.7 -1659.3 843.33 -2382 -1723.3 33.098 -2217.2 -1765.5 454.38 -2196 -1800.9 31.571 -2282.7 -1790.1 254.69 -1967.3 -1639.6 1322.3 -2124.8 -1743.7 589.83 -2107.4 -1754.2 46.168 -1950.1 -1716.3 1248.3 -1887.7 -1701.4 775.95 -1967.6 -1808.8 759.48 -1929.3 -1453.4 1070.9 -2210.6 -1537.5 1208.9
6 0.0417 -2090.2 -1500.1 1361.9 -1929.6 -1441.4 1262.7 -2079.2 -1276.6 109.14 -2029.4 -1416.1 1475.3 -2262.9 -1449.5 929.56 -2165.1 -1210.6 995.25 -1913 -1357.5 1528 -2089.2 -1207.3 698.04 -2173.1 -1219.9 876.69 -2078.2 -1389.2 831.45 -2124 -1339.1 29.789 -2046.5 -1293.1 456.17 -1986.6 -1202.7 28.855 -2076.7 -1268.7 331.75 -2052.2 -1309.1 1333.1 -2060.8 -1305 684.8 -1908.6 -1223 55.843 -2126.1 -1227.6 1181.1 -2050.5 -1260.5 779.34 -2161.2 -1190.4 785.71 -2307.6 -1779.5 98.871 -2130.4 -1604.8 1283.2 -1978.6 -1552.5 1470.4 -2227.6 -1623 952.56 -1949.8 -1788.8 1006.2 -1833.1 -1463.4 1535.5 -1917 -1748 676.91 -1917.6 -1777.5 827.97 -2020.8 -1656.6 842.23 -2382 -1723.3 33.098 -2216.7 -1764.7 454.55 -2196.1 -1800.9 31.656 -2282.7 -1789.7 255.03 -1962 -1635.6 1320.6 -2123.8 -1742.1 589.58 -2107.5 -1754.2 45.914 -1944.9 -1712.1 1246.6 -1882.2 -1694.8 775.78 -1960.4 -1802.1 757.45 -1926.5 -1449.3 1067.4 -2206 -1533.8 1208.1
7 0.0500 -2085.3 -1496.1 1360.6 -1924.1 -1437.2 1261 -2078.8 -1277.3 108.89 -2024.3 -1412.1 1473.1 -2259.1 -1446 928.54 -2162.9 -1206.3 994.31 -1908.1 -1353 1525.6 -2091.1 -1206 695.66 -2172.7 -1217 875.75 -2074.9 -1386.1 829.16 -2124.7 -1340.5 30.892 -2041.8 -1290.7 455.83 -1986.8 -1203.9 28.006 -2074 -1267.8 331.66 -2046.9 -1304.9 1331.1 -2056 -1302.5 683.78 -1908.8 -1224.2 55.079 -2121.4 -1223.4 1179.7 -2051.7 -1259.7 776.03 -2162.2 -1187.9 784.52 -2307.5 -1779.3 98.956 -2124.9 -1600.4 1281.7 -1973.6 -1548.4 1468.6 -2224 -1619.6 951.79 -1944 -1784.9 1004.6 -1828.2 -1458.8 1533.6 -1910.8 -1739.6 676.14 -1911.6 -1771.6 826.87 -2017.4 -1653.6 840.36 -2381.8 -1723.1 33.438 -2216.2 -1763.7 454.38 -2196 -1800.9 31.401 -2282.7 -1789.4 255.37 -1956.7 -1631.1 1318.7 -2122.7 -1740.6 589.49 -2106.3 -1754.9 45.574 -1939.4 -1708.1 1245 -1876.3 -1688.2 775.44 -1952.4 -1794.6 754.9 -1921.7 -1445.2 1066.1 -2201.3 -1530 1206.9
8 0.0583 -2079.7 -1491.7 1359 -1916.7 -1433.4 1257.1 -2078.3 -1278.3 108.8 -2018.9 -1407.8 1471 -2257 -1443.4 928.37 -2160.5 -1202.2 993.29 -1902.3 -1348.1 1523 -2093.1 -1204.4 693.46 -2172.5 -1213.5 874.99 -2071.3 -1383.2 827.55 -2125.4 -1342 32.165 -2037.3 -1288.4 456 -1987.6 -1205.5 27.327 -2070.6 -1266.6 330.98 -2041.8 -1300.6 1328.9 -2052.1 -1299.7 682.93 -1909.3 -1225.4 54.231 -2116.4 -1219 1177.6 -2053.1 -1257.7 773.57 -2163.2 -1185 783.42 -2307.4 -1779.1 99.041 -2119.6 -1596.2 1280.3 -1967.7 -1543.7 1466.2 -2220.2 -1616.1 950.69 -1938 -1781.3 1002.9 -1823 -1454.5 1531.3 -1903.8 -1730.4 675.29 -1906 -1765.9 825.93 -2013.7 -1650.2 838.16 -2380.5 -1722.1 33.268 -2215.8 -1762.7 454.64 -2195.8 -1801.1 30.977 -2282.7 -1789.2 255.62 -1951.2 -1626.6 1316.7 -2121.4 -1738.9 589.15 -2105 -1755.6 44.98 -1934 -1703.6 1243.2 -1869.9 -1681.1 775.44 -1944.3 -1786.6 752.61 -1916.6 -1442.8 1063.7 -2196.1 -1525.4 1205.6
9 0.0667 -2073.7 -1487.1 1357 -1908.1 -1429.6 1253.2 -2078.2 -1280 109.14 -2013.3 -1402.9 1468.6 -2255.8 -1441.9 929.56 -2158.1 -1198.3 992.36 -1896.9 -1343.5 1520.3 -2095.7 -1202.6 690.91 -2172.2 -1210 874.05 -2067.2 -1380 826.19 -2125.9 -1344.3 33.523 -2032.6 -1286 456 -1988.5 -1207.5 27.243 -2068.1 -1265.6 331.32 -2036.2 -1296.1 1326.9 -2047.9 -1296.5 681.66 -1909.9 -1226.8 52.958 -2111.7 -1214 1175.4 -2055 -1255.5 771.03 -2164 -1182.1 782.23 -2307.4 -1778.9 99.211 -2114.2 -1591.7 1278.7 -1962 -1538.5 1463.8 -2216.1 -1612.5 949.76 -1931.8 -1777.6 1001.6 -1816.9 -1448.7 1528.6 -1896.2 -1720.7 674.78 -1899.5 -1759.1 825.17 -2010.4 -1646.7 836.2 -2380.4 -1721.8 33.098 -2215.1 -1761.4 454.64 -2195.8 -1801.4 30.722 -2282.4 -1788.4 255.54 -1945.8 -1621.8 1315 -2119.7 -1736.8 588.39 -2105.8 -1755.3 45.574 -1928.5 -1699.1 1241.5 -1862.9 -1673.3 775.52 -1936.5 -1778.7 750.83 -1912.7 -1438.1 1060.6 -2191.2 -1520.9 1204.4
10 0.0750 -2067.6 -1482 1355.1 -1901.5 -1425.3 1250.2 -2078.4 -1282.3 109.9 -2007.3 -1397.4 1466.1 -2251.5 -1438.7 928.79 -2155.5 -1194.1 991.26 -1891.5 -1338.6 1517.8 -2098.7 -1200.6 688.62 -2171.7 -1206.2 873.12 -2063.1 -1376.7 823.56 -2126.5 -1347.4 34.287 -2027.6 -1283.2 455.15 -1990 -1209.6 26.649 -2066 -1264.7 332.17 -2030.3 -1291.3 1324.9 -2043.2 -1293.6 680.73 -1910.8 -1228.4 51.345 -2107.1 -1209.5 1173.9 -2056.9 -1253.2 768.05 -2164.2 -1179.1 780.7 -2307.4 -1778.8 99.295 -2108.6 -1586.9 1277.3 -1956.5 -1533.7 1461.6 -2212 -1608.8 948.99 -1925.4 -1773.2 999.91 -1811.3 -1443.8 1526 -1888.1 -1710.4 674.78 -1890.6 -1749.1 823.98 -2006.6 -1643.1 833.83 -2381.8 -1723 33.693 -2214.1 -1759.9 454.3 -2195.9 -1801.7 30.552 -2282.3 -1787.8 255.62 -1940.2 -1617 1313 -2117.2 -1734.2 586.52 -2106.7 -1755.7 45.914 -1922.8 -1694.4 1239.9 -1856.6 -1663.7 776.54 -1928.5 -1770.4 749.21 -1907.7 -1434.1 1057.7 -2185.7 -1516.6 1203.1
11 0.0833 -2061.5 -1476.8 1353 -1896.2 -1420.3 1247.7 -2078.4 -1284.6 110.41 -2001.4 -1392.3 1463.8 -2244 -1433.2 924.98 -2152.7 -1189.6 990.24 -1885.8 -1333.4 1515.2 -2101.7 -1198.7 686.41 -2170.9 -1202.4 872.19 -2059.4 -1372.8 821.61 -2127 -1350.6 35.645 -2022.7 -1280.7 454.38 -1991.5 -1211.9 25.63 -2061.8 -1263.5 331.41 -2024.1 -1286.1 1322.6 -2038.5 -1290.8 679.96 -1911.9 -1230.3 50.412 -2101.8 -1204.7 1172 -2058.9 -1250.5 765.85 -2164.3 -1176.3 779.26 -2307.4 -1778.7 99.38 -2102.6 -1582 1275.8 -1950.7 -1528.8 1459.5 -2207.8 -1604.9 948.14 -1918.9 -1768.8 998.3 -1803.5 -1437.2 1522.4 -1878.9 -1698.9 674.78 -1881.9 -1738.8 823.3 -2002.8 -1639.4 831.79 -2382.4 -1723.6 34.202 -2212.9 -1758.2 453.87 -2195.9 -1802.1 30.383 -2282 -1787 256.05 -1934.1 -1611.7 1310.7 -2114.7 -1731.4 584.66 -2106.6 -1756.3 45.404 -1917 -1689.6 1238.1 -1850.5 -1653.3 778.66 -1919.5 -1761.3 747.43 -1903.4 -1428.2 1053.9 -2179.9 -1511.8 1201.5
12 0.0917 -2055.4 -1471.5 1351.1 -1890.8 -1415.4 1245.4 -2078.4 -1287.4 111.26 -1995.3 -1387.3 1461.5 -2238.6 -1428.8 922.68 -2149.4 -1184.9 988.97 -1879.6 -1328 1512.8 -2104.7 -1196.4 683.95 -2170.4 -1198.8 871.42 -2055.2 -1369 819.91 -2127.7 -1354.2 37.681 -2017.6 -1278.2 453.7 -1993 -1214.7 24.781 -2057.5 -1262.2 330.73 -2017.6 -1280.8 1320.2 -2034.1 -1287.5 678.69 -1913.6 -1232.5 49.393 -2096.2 -1199.7 1169.8 -2061.1 -1248.3 763.47 -2165.2 -1173 778.41 -2307.4 -1778.7 99.55 -2096.4 -1576.8 1274.3 -1943.9 -1522.9 1456.6 -2203.1 -1600.6 946.79 -1911.9 -1764.2 996.69 -1797 -1431.6 1519.6 -1868.7 -1687.2 674.95 -1873.7 -1730 822.71 -1999.1 -1635.8 830.01 -2382.3 -1723.5 34.287 -2211.6 -1756.5 453.79 -2195.6 -1802.7 29.958 -2282 -1786.5 256.47 -1927.9 -1606.6 1308.6 -2112.9 -1729.1 584.23 -2106.8 -1756 43.792 -1911 -1684.5 1236 -1843.6 -1643.2 780.78 -1909.5 -1751.3 745.73 -1898.3 -1423.9 1052.6 -2174.1 -1507.2 1200
13 0.1000 -2049 -1465.8 1349.2 -1885.1 -1410.6 1242.8 -2078.3 -1290.1 112.2 -1989 -1381.3 1459 -2234.8 -1425.4 922.51 -2145.6 -1180.5 987.52 -1873 -1321.7 1509.9 -2108.7 -1194.4 682.85 -2170.2 -1195.5 871.25 -2050.7 -1365.8 817.53 -2128.5 -1358.1 39.888 -2012.3 -1275.4 452.86 -1994.5 -1217.8 24.697 -2055.5 -1262.1 331.75 -2010.9 -1275.4 1317.7 -2029.6 -1283.7 677.08 -1915.2 -1235.3 47.611 -2090.7 -1194.8 1168.1 -2062.8 -1245.9 761.1 -2167.4 -1169.9 777.73 -2307.4 -1778.6 100.14 -2090.5 -1572 1273.1 -1937.5 -1516.9 1454.3 -2198.2 -1596.3 945.6 -1904.8 -1759.1 995.08 -1790.9 -1426.4 1517.4 -1858.4 -1675.1 676.31 -1864.6 -1721.6 821.61 -1995.3 -1632.3 827.97 -2382.3 -1723.5 34.372 -2210.1 -1754.7 453.79 -2195.4 -1803 29.619 -2282 -1786.4 256.73 -1922.1 -1601.4 1307.1 -2111 -1726.8 583.89 -2107.2 -1755.3 41.755 -1904.6 -1679.1 1234.4 -1835.1 -1632.8 782.99 -1899.3 -1740.8 744.55 -1893 -1419.9 1050.5 -2168.1 -1502.4 1198.5
14 0.1083 -2042.2 -1459.9 1347 -1878.9 -1405.2 1240.3 -2078.4 -1292.7 112.7 -1982.4 -1375.2 1456.3 -2230.2 -1421.5 921.5 -2141.5 -1176.3 986 -1865.7 -1315.8 1507.1 -2113 -1192.3 682.34 -2169.9 -1192 871.25 -2045.5 -1361.8 815.67 -2129.1 -1362.4 41.84 -2007 -1273 452.18 -1996.5 -1221.2 24.442 -2053.3 -1262.2 332.26 -2004 -1269.5 1314.9 -2024.8 -1279.9 675.38 -1916.8 -1238.6 46.168 -2084.8 -1189.3 1166.1 -2064.4 -1242.9 759.4 -2169.2 -1166.8 777.39 -2307.4 -1778.3 100.48 -2084.2 -1566.8 1271.8 -1931.2 -1511.2 1452.4 -2193.6 -1592.1 944.07 -1897.1 -1753.4 993.12 -1784.6 -1420.6 1515 -1847.7 -1662.1 678.69 -1855.9 -1712.5 821.18 -1991.4 -1628.7 825.68 -2382.2 -1723.6 34.796 -2208.3 -1752.3 453.19 -2195.2 -1803 29.449 -2281.3 -1785.7 256.81 -1915.7 -1595.9 1305.4 -2108 -1724 582.53 -2107.3 -1755.2 40.482 -1898.1 -1673.7 1233.1 -1826.3 -1622.3 786.05 -1889.1 -1729.4 743.95 -1886.8 -1415.5 1047.8 -2162.1 -1497.5 1196.7
15 0.1167 -2035.6 -1453.9 1345 -1872.4 -1399.8 1238.1 -2078.8 -1296 113.64 -1975.6 -1369.3 1453.8 -2225.5 -1417.4 920.31 -2137.7 -1171.5 984.81 -1858.7 -1309.5 1504.7 -2116.2 -1189.6 680.98 -2168.1 -1187 870.74 -2040.5 -1356.8 813.46 -2129.9 -1367 43.962 -2001.8 -1270.4 451.58 -1998.6 -1224.6 24.102 -2048.9 -1261.4 331.24 -1997 -1263.6 1312.4 -2019.9 -1275.8 673.6 -1918.9 -1241.7 45.574 -2078.3 -1183.9 1164 -2065.7 -1239.8 757.79 -2169.8 -1163.1 777.31 -2307 -1777.9 100.91 -2077.9 -1561.3 1270.3 -1924.7 -1505.3 1450.2 -2188.9 -1588.2 942.8 -1888.8 -1747.4 991.26 -1778.3 -1414.7 1512.4 -1836.8 -1648.4 680.98 -1847.2 -1702.9 821.78 -1987.2 -1624.6 823.3 -2381.5 -1722.9 34.796 -2205.5 -1749.5 452.35 -2195.4 -1803 29.279 -2279.8 -1784.1 256.56 -1909 -1589.9 1303.2 -2104.9 -1720.7 581.01 -2106.9 -1755.2 39.294 -1891.8 -1668.4 1231.9 -1817.2 -1611.9 790.21 -1878.9 -1717.8 742.85 -1880.1 -1411.5 1044.4 -2156 -1492.7 1195.4
16 0.1250 -2029 -1447.8 1342.9 -1866.2 -1393.7 1234.9 -2078.9 -1299.2 115 -1968.5 -1362.9 1451.2 -2219.7 -1412.2 918.7 -2133.9 -1166.3 984.3 -1851.8 -1303.1 1502.2 -2118.7 -1187.2 679.2 -2165.9 -1182 870.32 -2036 -1352 811.17 -2130.8 -1371.3 46.847 -1996.5 -1267.4 450.9 -2000.9 -1228.5 24.102 -2044.3 -1260 330.39 -1989.6 -1257.1 1309.9 -2015.1 -1271.4 671.73 -1920.9 -1244.5 44.895 -2072.5 -1178.6 1162.5 -2067 -1236.6 755.92 -2170.3 -1159.3 777.22 -2306.8 -1777.4 101.33 -2071.3 -1555.8 1268.8 -1918.4 -1499.3 1448.2 -2183.7 -1583.6 941.1 -1881.7 -1742 990.32 -1771.4 -1408.5 1510.1 -1825.4 -1634 683.36 -1837.9 -1692.6 822.54 -1982.6 -1620 820.93 -2380.6 -1721.8 34.796 -2202.9 -1746.6 451.75 -2195.6 -1803 29.279 -2278 -1782.1 256.39 -1901.9 -1583.6 1301.4 -2101.5 -1717.1 579.48 -2106.7 -1755.2 38.021 -1885.1 -1662.4 1230.5 -1807.5 -1600.5 794.79 -1867.6 -1706.2 741.83 -1875.2 -1404.6 1041.1 -2149.9 -1487.7 1193.9
17 0.1333 -2022.2 -1441.5 1340.9 -1859.9 -1386.6 1231.1 -2079.3 -1302.5 116.44 -1961.5 -1356 1448.8 -2212.7 -1405.9 915.98 -2130 -1161 983.62 -1844.7 -1296.7 1499.9 -2121.1 -1184.5 678.52 -2164.2 -1177.5 870.41 -2030.5 -1347.4 809.98 -2131.8 -1376.1 50.412 -1991.4 -1264.5 449.8 -2003.2 -1232.4 24.272 -2040.9 -1259.4 330.22 -1982 -1250.4 1307 -2010.3 -1266.9 669.69 -1922.5 -1247.6 43.368 -2066.5 -1172.9 1161.6 -2068.4 -1233.6 754.56 -2170.8 -1155.3 777.22 -2306.2 -1776.5 101.16 -2064.6 -1549.9 1267.5 -1911.5 -1492.7 1446.2 -2178.5 -1578.7 939.32 -1874.4 -1736.3 989.48 -1764.4 -1401.8 1507.6 -1813.6 -1619.4 686.33 -1828.2 -1681.7 823.39 -1978.1 -1615.3 818.72 -2381.3 -1722.1 35.645 -2199.7 -1743.4 450.99 -2195.8 -1803 29.279 -2276.2 -1780 256.3 -1895.2 -1577.4 1300.3 -2097.3 -1713 577.53 -2106.3 -1755.2 36.748 -1878.1 -1656.1 1229.3 -1797.3 -1589.3 799.29 -1856.2 -1693.7 742.34 -1869.2 -1398.5 1037.9 -2143.3 -1482.2 1192.3
18 0.1417 -2015.1 -1434.9 1339 -1853.3 -1379.9 1227.7 -2079.4 -1306.4 117.29 -1954.8 -1349.5 1446.7 -2206.8 -1400.9 914.45 -2125.8 -1155.5 982.69 -1837.6 -1289.3 1497.4 -2123.4 -1181.4 678.01 -2162.4 -1172.6 870.74 -2024.5 -1342.6 808.62 -2132.1 -1381.2 53.806 -1986.2 -1261.3 448.61 -2005.4 -1236.2 24.018 -2037.6 -1258.9 329.97 -1974.4 -1243.7 1304.2 -2004.9 -1261.8 667.57 -1924.2 -1251.2 41.925 -2059.4 -1165.9 1159.8 -2068.9 -1230.5 753.03 -2170.9 -1150.9 777.39 -2305.2 -1775.1 100.82 -2057.5 -1544.1 1266.5 -1904 -1485.3 1443.9 -2173.2 -1574.3 938.05 -1866.8 -1729.9 988.63 -1757.2 -1394.5 1505 -1802.7 -1604.3 689.72 -1818.4 -1670.7 824.83 -1973.3 -1611 816.26 -2381.5 -1722.3 36.154 -2196 -1739.7 450.14 -2195.4 -1803 29.11 -2274.4 -1778.1 256.39 -1888.2 -1571 1299.3 -2093.1 -1708.8 575.4 -2105.7 -1755.2 35.814 -1870.8 -1649.6 1228 -1789.8 -1575.8 803.53 -1845.9 -1680.8 744.29 -1862.6 -1393.4 1035.6 -2136.5 -1476.6 1190.8
19 0.1500 -2008 -1428.2 1337.1 -1846.1 -1373.8 1225.2 -2078.6 -1310.7 117.03 -1947.6 -1342.4 1444.6 -2202.5 -1397.5 914.79 -2121.2 -1149.9 981.75 -1830.3 -1281.8 1494.7 -2125.9 -1178.7 676.91 -2160.5 -1167.4 870.91 -2018.6 -1337.8 805.99 -2132.2 -1386.5 57.286 -1981 -1258.3 447.51 -2007.9 -1240.3 23.593 -2034.4 -1257.8 330.05 -1965.4 -1236.4 1301.3 -1999.8 -1256.6 665.45 -1926.6 -1254.2 41.5 -2052.3 -1158.7 1157.9 -2069.6 -1226.9 751.42 -2170.8 -1146.5 777.81 -2304.6 -1774.1 101.25 -2050.2 -1537.6 1264.7 -1896.5 -1478.1 1441.5 -2167.2 -1569.2 936.26 -1859.6 -1723.5 988.2 -1750 -1387.6 1502.5 -1792.1 -1589.1 693.88 -1808.8 -1659.8 826.78 -1968.5 -1606.6 813.54 -2381.1 -1722 36.578 -2192.2 -1736.1 449.04 -2194.9 -1803.4 28.516 -2272.5 -1775.9 256.56 -1880.8 -1563.7 1297.8 -2089.2 -1704.8 573.79 -2105.2 -1755.3 35.305 -1863 -1642.6 1226.7 -1780.8 -1564 808.62 -1834.9 -1667.6 746.07 -1856 -1388.3 1033.3 -2129.5 -1470.8 1189.2
20 0.1583 -2000.7 -1420.9 1335 -1838.8 -1367.1 1222.5 -2076.9 -1314.8 116.44 -1940.2 -1334.8 1442.4 -2197.2 -1393.3 914.54 -2116.8 -1144.2 981.24 -1823 -1274.6 1492 -2128.2 -1175.9 676.31 -2158.1 -1162.4 871.34 -2012.6 -1332.5 803.78 -2132.2 -1391.8 61.02 -1976 -1254.6 446.24 -2010.3 -1244.7 23.508 -2031.1 -1256.6 329.63 -1956.5 -1229.1 1298.3 -1994.4 -1251.9 663.58 -1928.8 -1257.3 40.906 -2045.2 -1151.8 1156.2 -2070.7 -1222.7 750.66 -2170.7 -1142.2 778.32 -2303.7 -1772.8 102.01 -2042.7 -1530.8 1263 -1889.8 -1471 1439.9 -2160.9 -1563.7 934.48 -1852.3 -1717.1 987.86 -1742.5 -1380.8 1500.5 -1780.7 -1573.6 698.12 -1799 -1649.1 829.07 -1962.8 -1600.9 810.15 -2381.1 -1721.5 37.512 -2188.3 -1731.9 447.59 -2194.9 -1803.7 28.091 -2270.5 -1773.6 256.9 -1873 -1556.3 1296.4 -2084.6 -1700.1 571.84 -2104.8 -1755.5 34.372 -1854.7 -1635 1225.2 -1770.8 -1552.7 813.97 -1823.4 -1653.8 747.6 -1849.6 -1382.8 1030.7 -2122 -1464.7 1187.4
21 0.1667 -1993.4 -1413.6 1333 -1832 -1359.6 1219.9 -2076 -1318.5 116.52 -1932.5 -1326.9 1440 -2189.9 -1386.9 912.58 -2112.3 -1138.3 980.9 -1815.4 -1267.2 1489.4 -2130.1 -1172.6 676.14 -2155.5 -1157.4 871.59 -2006.7 -1327.1 802.51 -2131.5 -1397.4 64.075 -1971.4 -1250.7 444.79 -2012.7 -1248.4 23.593 -2028 -1256 328.95 -1949.4 -1222.5 1296.1 -1989 -1246.5 661.72 -1930.8 -1260.7 40.482 -2038.1 -1145.1 1154.4 -2071.4 -1218 750.74 -2170.2 -1137.9 778.66 -2302.9 -1771.4 102.1 -2035.3 -1524.2 1261.8 -1882.8 -1463.5 1438.3 -2155.1 -1558.6 932.95 -1844.8 -1710.4 987.35 -1735.2 -1373.2 1498.3 -1768.9 -1558.9 702.54 -1789.2 -1638.5 831.62 -1956.8 -1595 807.26 -2380.1 -1720.4 37.342 -2184.2 -1727.3 446.24 -2194.2 -1803.5 28.176 -2268 -1770.7 256.98 -1865.7 -1549.3 1295.5 -2079.8 -1695 569.55 -2104.2 -1755.3 33.693 -1845.7 -1626.9 1223.4 -1760.6 -1541.1 819.57 -1811.8 -1640.3 749.64 -1842.7 -1377 1027.5 -2114.7 -1458.3 1186
22 0.1750 -1986.2 -1406.1 1331.3 -1824.6 -1353 1217.3 -2077 -1321.9 119.32 -1924.9 -1318.8 1437.9 -2181.7 -1379.6 909.87 -2107.7 -1132.3 980.39 -1807.5 -1259.1 1486.6 -2131.6 -1168.6 675.38 -2152.5 -1151.8 871.68 -2000.6 -1321.2 801.41 -2130.5 -1402.6 66.621 -1967 -1247 442.93 -2015.3 -1251.5 24.102 -2024.9 -1255.3 328.01 -1942.2 -1215.6 1294 -1983.7 -1240.4 659.59 -1933.3 -1263.1 40.821 -2030.7 -1138.3 1152.5 -2069.9 -1211.8 749.89 -2169.1 -1133.6 779.09 -2302.1 -1770.3 102.27 -2027.7 -1517.1 1260.4 -1875.2 -1455.4 1436.6 -2148.9 -1553.2 931.17 -1837.2 -1703.4 987.18 -1727.5 -1365.1 1495.8 -1756.9 -1544.9 707.71 -1779.3 -1628.1 834.76 -1951.2 -1589.6 804.29 -2376.7 -1716.6 36.578 -2179.8 -1722.9 444.88 -2193.8 -1803.1 28.516 -2265 -1767.4 256.73 -1857.6 -1541.5 1294.5 -2074.3 -1689.8 566.75 -2103.8 -1755.1 33.098 -1837.7 -1619.2 1222.4 -1750.4 -1530.7 826.1 -1801.1 -1627.3 751.84 -1835.9 -1370 1026.8 -2107.5 -1452.3 1184.5
23 0.1833 -1979 -1398.5 1329.3 -1816.4 -1346.6 1214.8 -2078.4 -1325 123.91 -1917.5 -1310.8 1436.2 -2174 -1372.9 907.83 -2103.5 -1126.4 980.22 -1799 -1250.9 1483.7 -2133 -1164.6 674.78 -2149.1 -1146.1 871.76 -1993.8 -1316 799.12 -2129.3 -1407.1 68.998 -1962.8 -1243.6 440.55 -2016.8 -1253.8 25.206 -2021.9 -1254.3 326.66 -1934.1 -1208.2 1291.3 -1978.5 -1234.4 657.39 -1935 -1263.9 40.652 -2023.1 -1130.9 1150.4 -2066.4 -1204 748.37 -2168 -1129 779.43 -2301.4 -1769.3 102.78 -2020 -1510.1 1258.8 -1867.8 -1447.3 1435.1 -2142.4 -1547.5 929.13 -1829.8 -1696.1 987.18 -1719.6 -1357 1493.3 -1744 -1530.9 713.91 -1769.8 -1617.6 837.9 -1945.9 -1584.1 801.49 -2370.6 -1709.7 36.748 -2175 -1718.2 443.43 -2193.8 -1802.9 28.855 -2262.3 -1764.6 256.73 -1848.6 -1533.1 1293 -2067.9 -1684.1 563.35 -2103.5 -1755.1 32.844 -1830.2 -1611.7 1221.8 -1740.3 -1520.8 832.89 -1790.3 -1613.9 755.07 -1829.3 -1362 1025.9 -2099.8 -1445.8 1182.9
24 0.1917 -1971.6 -1390.7 1327.2 -1808.8 -1338.8 1212.3 -2078.2 -1327.3 126.11 -1910.3 -1302.6 1434.4 -2166.8 -1366.5 906.47 -2098.7 -1119.9 979.46 -1790.8 -1242.6 1480.5 -2133.7 -1159.7 674.53 -2145.5 -1140.7 871.76 -1986.4 -1310.7 797.16 -2127.2 -1411.1 71.204 -1958.6 -1239.8 438 -2017.9 -1255.2 25.715 -2018.7 -1253.3 325.13 -1926.2 -1200.4 1288.6 -1973.3 -1228.4 655.1 -1935.8 -1264.2 40.058 -2015.6 -1123.3 1148.4 -2063.2 -1196.7 747.09 -2166.4 -1124.2 780.02 -2300.8 -1768.4 104.05 -2012.3 -1503.3 1257.4 -1860.8 -1439 1433.8 -2135.9 -1541.6 927.1 -1822.5 -1689.2 987.44 -1712 -1348.6 1490.9 -1731.6 -1516.9 720.87 -1760.5 -1607.1 840.45 -1940.3 -1578.6 798.69 -2373 -1711 38.106 -2169.9 -1713.1 441.48 -2193.8 -1802.7 29.025 -2259.7 -1761.7 257.23 -1840.3 -1525.1 1292.5 -2061.7 -1678.3 560.13 -2103.5 -1755.2 32.42 -1822.1 -1603.7 1221.4 -1729.9 -1511 839 -1780.2 -1601 758.8 -1821.1 -1357 1022.3 -2091.9 -1439.4 1181
25 0.2000 -1964.1 -1383.3 1325.3 -1800.6 -1332.2 1210.5 -2076.1 -1328.7 126.11 -1902.7 -1294.1 1432.2 -2159.5 -1360 905.37 -2092.2 -1113.3 977.76 -1783.7 -1234.1 1477.8 -2133.4 -1154.2 674.28 -2141.8 -1135.5 871.68 -1979.2 -1303.7 796.66 -2125.1 -1414 72.732 -1954.8 -1236.1 435.37 -2018.6 -1256 26.139 -2015.5 -1251.9 323.6 -1918.1 -1192.5 1285.8 -1967.7 -1222 652.55 -1936 -1264.2 39.633 -2008.4 -1115.5 1146.3 -2060.2 -1189.7 745.9 -2164.5 -1119.5 780.28 -2300.3 -1767.4 105.49 -2003.9 -1495.8 1255.6 -1852.8 -1429.9 1431.5 -2129.1 -1535.6 924.98 -1815.1 -1682.2 988.12 -1704.6 -1340.1 1488.6 -1720.7 -1502.9 727.23 -1751.2 -1597 843.25 -1934.2 -1572.9 795.81 -2371 -1709.3 40.567 -2164.3 -1708.4 439.7 -2193.8 -1802.7 28.516 -2256.8 -1758.8 257.74 -1832.6 -1517.2 1292.5 -2056.4 -1673 557.67 -2103.5 -1755.3 31.825 -1814 -1595.7 1221.3 -1719 -1501.5 845.2 -1770.4 -1588.3 761.1 -1812.6 -1350.4 1019.9 -2084 -1433 1179.3
26 0.2083 -1956.5 -1376.7 1323.9 -1790.3 -1327.4 1209.6 -2073.9 -1329.5 125.6 -1895 -1285.3 1430 -2153.1 -1354.4 905.12 -2086.2 -1106.6 976.49 -1775.9 -1224.9 1475.1 -2132.9 -1148.8 672.92 -2137.7 -1130 871.25 -1972.4 -1296.4 795.81 -2123.2 -1415.5 73.92 -1950.8 -1232.5 432.91 -2018.8 -1256.6 26.224 -2012.3 -1249.8 321.73 -1910 -1184.8 1283 -1962.2 -1215.1 649.58 -1935.8 -1264.1 39.209 -2000.7 -1107.5 1143.7 -2055.9 -1182.1 743.95 -2161.5 -1114.7 779.77 -2299.4 -1766.5 107.27 -1995.6 -1488 1254 -1844.3 -1420.6 1428.9 -2122.1 -1529.5 923.02 -1807.7 -1674.6 988.54 -1696.7 -1331.4 1486.2 -1709.8 -1489.4 733.51 -1742.3 -1587.4 846.73 -1927.9 -1566.5 792.67 -2378 -1716.4 43.537 -2158.6 -1703.6 438.26 -2193.6 -1802.9 28.091 -2253.3 -1755.7 258 -1824.7 -1509.4 1292.5 -2050.7 -1667.5 555.38 -2103.5 -1755.7 31.401 -1805.8 -1587.2 1221.3 -1707.9 -1492.3 852.16 -1760.5 -1576 763.05 -1804.5 -1343.4 1017.9 -2076 -1426.4 1177.4
27 0.2167 -1948.2 -1368.5 1322.2 -1780.7 -1321 1207.8 -2072.5 -1329.3 125.6 -1887 -1276.5 1427.5 -2146.9 -1349.1 904.95 -2081.3 -1100.1 975.56 -1767.1 -1215.7 1472.1 -2131.3 -1142.8 671.47 -2132.7 -1123.7 870.41 -1965.4 -1290.3 793.52 -2122 -1416.1 74.684 -1946.5 -1228.4 430.11 -2018.9 -1256.6 26.224 -2009.3 -1247 319.95 -1901.9 -1176.9 1280 -1956.5 -1207.8 646.44 -1935.6 -1264.1 38.954 -1993.8 -1099.6 1141.8 -2051.1 -1173.6 742.17 -2158.4 -1109.6 779.17 -2298.5 -1765.8 109.65 -1987.4 -1480.4 1252.7 -1836.9 -1411.8 1427.3 -2115 -1523.6 921.24 -1799.9 -1667.4 988.8 -1688.9 -1322.6 1483.9 -1698.8 -1476.9 740.05 -1733.5 -1578.2 850.12 -1921.4 -1560.3 790.21 -2380 -1718.2 45.574 -2152.7 -1699.1 436.73 -2193.5 -1802.7 28.261 -2249.5 -1752.9 258.34 -1816.4 -1501.3 1292.1 -2044 -1661.7 552.74 -2103.4 -1756 31.062 -1797.3 -1578.6 1221.3 -1698.4 -1483 858.1 -1751.3 -1564 766.44 -1796.4 -1336.9 1015.4 -2067.6 -1419.4 1175.4
28 0.2250 -1939.7 -1358.5 1318.8 -1772.6 -1312.6 1205.5 -2071.8 -1328.7 125.94 -1879.1 -1267.1 1425 -2139.5 -1342.8 904.44 -2076.5 -1093.4 974.37 -1758.9 -1206.2 1469.5 -2128 -1136.8 670.97 -2127.1 -1117.4 869.05 -1957.9 -1284.2 790.97 -2121.6 -1416.1 76.127 -1942.7 -1223.6 427.31 -2019.6 -1256.5 25.8 -2006.3 -1243.9 318.42 -1893.7 -1168.8 1276.8 -1950.9 -1201.2 643.38 -1935.5 -1264.2 38.53 -1987.7 -1091.8 1140.4 -2045.1 -1164.6 740.64 -2154.6 -1104.6 778.15 -2297.4 -1764.7 112.2 -1979 -1472.8 1251.5 -1830 -1403.2 1426 -2107.6 -1517.2 919.2 -1791.9 -1660.7 988.8 -1681.3 -1313.4 1481.5 -1687.9 -1465.2 746.58 -1724.4 -1568.4 852.67 -1914.6 -1553.9 787.66 -2379.5 -1717.7 48.205 -2146.1 -1694 435.29 -2193.4 -1802.2 29.025 -2245.6 -1750 259.19 -1807.9 -1492.7 1291.9 -2037.1 -1655.9 550.11 -2103.3 -1756.1 30.552 -1788.4 -1569.5 1221.1 -1689.7 -1474.5 862.68 -1742.8 -1552.5 770.09 -1788.3 -1330.4 1013.1 -2059.2 -1412.5 1173.8
29 0.2333 -1931.1 -1349.5 1317.2 -1764.7 -1304.1 1203.3 -2071.3 -1327.3 125.86 -1871.4 -1257.5 1422.5 -2132.1 -1336.4 904.18 -2070.8 -1086.8 972.5 -1750.8 -1196.7 1466.9 -2123.3 -1130.3 669.61 -2121.1 -1111.4 867.18 -1950.9 -1277.3 788.59 -2122.1 -1415.1 77.909 -1938.6 -1218.5 424.51 -2020.4 -1256 25.036 -2003 -1240.3 316.47 -1885.5 -1160.2 1273.5 -1945 -1194.3 640.24 -1935.4 -1264.3 38.106 -1980.6 -1083.7 1138.2 -2038.3 -1154.5 738.27 -2149.8 -1099 776.88 -2295.7 -1762.8 114.57 -1970.2 -1464.5 1250.3 -1822.7 -1394 1424.8 -2099.5 -1510.1 916.83 -1784 -1653.2 989.14 -1673.7 -1303.6 1479.2 -1677.5 -1453.5 753.03 -1715.8 -1558.9 855.21 -1907.7 -1546.6 784.86 -2378.3 -1716.6 51.939 -2138.7 -1688.8 434.18 -2193.2 -1801.6 29.704 -2241.1 -1746.6 260.63 -1799.3 -1483.7 1291.8 -2029.9 -1649.9 548.08 -2103.1 -1756.3 30.128 -1779.8 -1560.2 1220.8 -1680.4 -1466.9 867.52 -1734.2 -1540.9 773.15 -1780.7 -1322.8 1010.9 -2050.5 -1405.4 1172.3
30 0.2417 -1918 -1330.1 1307.5 -1757.4 -1294.2 1200.5 -2071.3 -1325.3 125.94 -1863.4 -1247.9 1419.8 -2123.7 -1329.3 903.42 -2064.8 -1080.2 970.46 -1742.5 -1187.3 1464.2 -2117.4 -1122.4 666.21 -2114.8 -1105.6 864.97 -1943.9 -1269.8 786.47 -2123.6 -1413.1 79.436 -1934.5 -1213.4 421.45 -2020.3 -1255.3 24.018 -1999.7 -1235.7 314.27 -1877.2 -1151.7 1270.3 -1938.9 -1187.4 637.27 -1935 -1264.6 37.851 -1973 -1075.6 1135.5 -2030.2 -1143.9 734.79 -2144.2 -1093.3 774.67 -2293.4 -1760.9 117.29 -1961.2 -1456.2 1249.1 -1815.3 -1384.6 1423.4 -2091.2 -1503.3 914.88 -1776.5 -1645.8 990.83 -1665.9 -1293.7 1477.6 -1667 -1442.8 759.31 -1708.1 -1550.9 858.61 -1900.4 -1539.5 782.4 -2376.3 -1713.9 55.758 -2130.2 -1682.9 432.57 -2192.8 -1800.8 30.892 -2236.2 -1743 262.41 -1790.3 -1474.7 1291.6 -2022.1 -1644.1 546.46 -2103 -1756.7 30.128 -1771.2 -1551.2 1220.7 -1671.1 -1459.2 872.53 -1725.8 -1530.3 775.86 -1773.1 -1315.3 1008.6 -2041.9 -1398.1 1170.8
31 0.2500 -1912.2 -1328.2 1310.2 -1750 -1284.1 1197.7 -2071.7 -1323.1 126.37 -1855.2 -1238 1417.4 -2115.8 -1322.4 902.99 -2058.4 -1073.4 968 -1734.6 -1177.5 1461.9 -2110.2 -1114.1 662.99 -2107.5 -1099 861.83 -1936.5 -1262.1 785.03 -2124.9 -1410.6 80.964 -1930.6 -1208.4 418.48 -2020 -1254.6 23.084 -1996.8 -1230.6 312.06 -1869.4 -1143 1267.4 -1933 -1180.7 634.47 -1934.6 -1264.7 37.766 -1964.9 -1067.3 1131.5 -2021.7 -1132.8 731.39 -2137.9 -1088 771.79 -2291 -1759.1 120.68 -1952.4 -1447.9 1248.2 -1807.8 -1374.9 1422 -2082.7 -1496.3 913.35 -1769.3 -1638.9 992.78 -1657.8 -1284.1 1475.9 -1656.9 -1433.1 765.08 -1700.3 -1543.4 861.83 -1892.6 -1531.8 779.6 -2375.3 -1712.4 60.511 -2120.7 -1676.6 430.62 -2192.3 -1800 32.165 -2230.8 -1739.5 264.45 -1781.2 -1465.4 1291.1 -2013.5 -1638.1 544.94 -2102.5 -1756.7 29.704 -1762.7 -1542.1 1220.8 -1662.6 -1452 876.94 -1718.2 -1520.7 778.92 -1764.8 -1308.2 1006.1 -2033.5 -1390.9 1169.7
32 0.2583 -1904.8 -1321.6 1310.4 -1741.6 -1274.9 1195.5 -2072.1 -1321.1 126.71 -1847.2 -1227.6 1415.4 -2109.1 -1316.2 903.5 -2051.3 -1067 964.78 -1726.7 -1167.3 1459.9 -2101.5 -1105.7 659.51 -2099.5 -1092.8 858.18 -1929.5 -1254.3 783.84 -2126.2 -1408.2 82.661 -1926.7 -1203.3 415.77 -2020.5 -1253.8 22.151 -1994.4 -1225.6 309.85 -1861.5 -1134.4 1264.7 -1927.4 -1173.8 631.33 -1934.7 -1264.8 37.681 -1956.4 -1059.2 1127 -2012.5 -1120.9 727.91 -2130.6 -1082.8 768.82 -2288 -1756.3 125.01 -1943.6 -1439.7 1247.6 -1800.2 -1364.8 1420.7 -2074.1 -1489.4 911.99 -1763.4 -1631.9 994.99 -1649.9 -1273.8 1474.3 -1647.6 -1424.1 770.18 -1693.2 -1536.2 864.55 -1884.4 -1523.4 777.31 -2374.9 -1711.4 66.961 -2111.1 -1670.7 429.6 -2191.6 -1798.5 33.947 -2224.4 -1735.5 266.74 -1772.3 -1455.9 1290.4 -2004.2 -1631.8 543.15 -2102 -1756.6 29.195 -1753.9 -1532.7 1220.9 -1654.2 -1446.7 880.25 -1710.8 -1511.7 781.55 -1756.1 -1301.2 1003.8 -2024.7 -1383.8 1168.7
33 0.2667 -1896.5 -1312.8 1309.3 -1734.8 -1265 1192.1 -2072.6 -1319.4 126.96 -1839 -1217.2 1413.6 -2100.7 -1308.9 902.66 -2043.6 -1060.8 960.96 -1718.8 -1156.9 1457.9 -2091.5 -1096.5 654.76 -2090.6 -1086.6 854.03 -1922.1 -1245.9 782.74 -2127 -1406.5 84.274 -1923.5 -1198.1 413.14 -2021 -1253.1 21.556 -1992.4 -1221.1 307.73 -1853.6 -1125.5 1261.9 -1921.8 -1167.4 628.28 -1935.2 -1264.7 37.512 -1949.6 -1051.4 1124.2 -2002.2 -1108.1 723.67 -2122 -1077.4 765.08 -2284.6 -1753.4 130.1 -1934.2 -1431 1246.7 -1792.2 -1354.2 1419.3 -2065.2 -1482.4 910.97 -1758.6 -1624.7 997.54 -1642.3 -1263.7 1473.2 -1639.5 -1415.9 774.5 -1686.4 -1529.9 867.1 -1876.2 -1515 775.52 -2372.8 -1708.6 73.92 -2100.2 -1664.4 428.75 -2190.8 -1796.9 36.069 -2217 -1730.9 269.29 -1763.2 -1446.1 1289.9 -1993.9 -1624.8 541.2 -2102.4 -1756.8 29.449 -1745.2 -1523.3 1220.9 -1645.6 -1441.7 882.54 -1704.1 -1502.9 783.42 -1747.2 -1293.5 1002.2 -2015.4 -1376.1 1167.4
34 0.2750 -1888.1 -1303.9 1308.2 -1728 -1255.2 1189.2 -2072.7 -1317.7 127.39 -1830.5 -1206.3 1411.9 -2091.1 -1300.6 901.72 -2035.1 -1054.1 956.72 -1710.4 -1146.1 1456.4 -2080.4 -1086.6 650.17 -2080.8 -1080.4 849.27 -1914.3 -1236.6 781.21 -2127.2 -1405.5 85.717 -1920.9 -1193 410.68 -2021.4 -1252.7 21.217 -1990.6 -1217.3 306.12 -1846.1 -1116.3 1259.5 -1916.4 -1160.7 625.39 -1935.2 -1264.5 37.342 -1943.2 -1043.7 1121.4 -1990.6 -1095.7 719.34 -2112.1 -1071.8 760.08 -2280.6 -1749.9 135.36 -1924.1 -1421.5 1245.5 -1784.2 -1343.8 1418.5 -2056.3 -1475.2 910.12 -1753.4 -1617.7 999.74 -1634.3 -1253.8 1472.7 -1631.9 -1408.9 777.64 -1679.9 -1523.7 868.96 -1867.5 -1506.2 773.74 -2367.6 -1702.7 80.2 -2087.5 -1657 427.31 -2190.2 -1795.4 39.039 -2208.7 -1725.6 271.75 -1754.1 -1436.5 1289.7 -1982.6 -1617.3 539.08 -2102.4 -1756.5 29.11 -1736.3 -1513.8 1221 -1636.8 -1436.9 886.02 -1698.3 -1495 785.11 -1738.7 -1285.4 1001.3 -2005.7 -1368.1 1166.6
35 0.2833 -1879.3 -1295 1307.4 -1719.3 -1245.5 1187.6 -2072.5 -1315.9 128.15 -1822.2 -1195.4 1410.7 -2082.4 -1293.4 901.64 -2026.1 -1047.7 952.13 -1701.3 -1135.5 1455.7 -2068.5 -1075.8 645.51 -2070.2 -1074.3 844.18 -1907.1 -1228.5 779.34 -2127.1 -1405 87.414 -1918.3 -1188.2 408.13 -2021.4 -1252.7 20.962 -1989.1 -1214 304.68 -1838.7 -1107 1257.3 -1911.4 -1153.6 622.76 -1935 -1264.3 37.172 -1935.9 -1036 1117.7 -1978 -1082.6 715.1 -2101 -1066.4 754.48 -2275.2 -1745.6 141.14 -1913.6 -1411.7 1244.1 -1776.5 -1333.3 1418.1 -2047.3 -1468 909.78 -1747.8 -1611.3 1001.9 -1626.2 -1243.7 1472.3 -1624.8 -1402.7 779.26 -1673.9 -1518.4 870.24 -1858.7 -1497.7 772.55 -2360.8 -1695 87.838 -2072.7 -1649.3 425.61 -2189.3 -1793.5 42.519 -2198.7 -1720.1 274.38 -1745.1 -1426.8 1289.7 -1970.5 -1609.7 536.62 -2102.1 -1756 28.855 -1727.1 -1504.2 1221.3 -1628.5 -1434.4 888.82 -1693.4 -1488.4 786.56 -1730.5 -1276.2 998.47 -1996.2 -1360 1165.8
36 0.2917 -1870.5 -1285.5 1306.5 -1710.4 -1235.8 1186.3 -2072 -1314.8 128.91 -1813.7 -1184.4 1409.5 -2074.1 -1286.3 902.15 -2016.5 -1041.2 947.04 -1692.4 -1125.3 1454.5 -2055 -1064.6 641.01 -2058 -1067.7 838.16 -1899.9 -1219.5 778.32 -2126.8 -1404.6 89.281 -1915.4 -1183.9 405.67 -2021.4 -1252.7 20.962 -1987.6 -1210.9 303.23 -1831.4 -1097.6 1254.9 -1906.6 -1146.5 619.79 -1935 -1264.2 37.257 -1928.3 -1028.2 1113.6 -1964.4 -1069.2 710.34 -2088.3 -1061.4 748.54 -2268.6 -1740.6 147.67 -1903.7 -1402.4 1243.4 -1768.6 -1322.7 1417.7 -2036.7 -1460 908.43 -1742.3 -1604.7 1004.5 -1618.3 -1233.1 1471.5 -1619 -1397.9 779.26 -1668.8 -1514.5 871.34 -1849.4 -1488.9 771.36 -2355.8 -1689.9 98.362 -2056.5 -1641 423.75 -2188 -1791.2 46.677 -2187.6 -1713.7 277.86 -1735.4 -1416.6 1289.2 -1957 -1601.1 533.56 -2102.2 -1756.2 29.449 -1718.2 -1494.6 1221.8 -1620.3 -1432.9 888.31 -1689.4 -1482.8 787.49 -1721.6 -1267.1 996.86 -1986.7 -1351.8 1165.3
37 0.3000 -1861.5 -1275.8 1306 -1701.6 -1226.6 1185 -2071.5 -1314.3 129.59 -1805.5 -1173.4 1408.5 -2065.5 -1278.9 902.74 -2006.4 -1035 941.61 -1684.1 -1114.7 1453.1 -2039.4 -1052.9 636.59 -2045 -1061.2 831.11 -1893 -1209.5 777.64 -2126.7 -1404.1 91.403 -1913 -1180 403.38 -2021.4 -1252.7 20.878 -1985.7 -1207.8 301.45 -1824.1 -1088.2 1252.6 -1902.1 -1139.4 616.99 -1935 -1264 37.257 -1920.7 -1020.1 1109.1 -1949.4 -1055.9 705.34 -2073.7 -1055.8 742.17 -2260.5 -1735 154.97 -1894.3 -1393.4 1243.4 -1760.5 -1311.5 1416.8 -2025.6 -1451.8 906.73 -1736.1 -1597.7 1006.9 -1610.2 -1222.9 1471.1 -1614.7 -1394.3 777.47 -1664.3 -1511.3 871.93 -1839.9 -1480 770.26 -2351.5 -1687 109.73 -2038.8 -1631.8 421.2 -2186 -1789.3 51.006 -2175 -1706.3 281.25 -1725.6 -1406.2 1289.1 -1942.1 -1591.5 530.17 -2101.9 -1755.8 29.449 -1709.1 -1484.8 1222.3 -1611.9 -1431.1 885.94 -1686.6 -1478 788.25 -1712.5 -1258.4 996.43 -1976.5 -1343 1164.2
38 0.3083 -1852.4 -1266.1 1305.4 -1693.2 -1217.1 1184 -2070.7 -1314 129.76 -1797.3 -1162.3 1407.6 -2057.3 -1272.3 903.84 -1995.7 -1028.8 935.67 -1676.3 -1103.8 1452.3 -2021.7 -1041.1 631.76 -2030.4 -1054.4 823.81 -1886.4 -1201.8 776.46 -2126.8 -1403.5 93.609 -1910.8 -1175.8 401.51 -2021.7 -1252.7 20.962 -1983.9 -1205.1 299.92 -1816.9 -1078.5 1250.4 -1898.3 -1132.6 614.19 -1934.7 -1263.9 37.087 -1913 -1012.2 1104.6 -1932.7 -1042.7 700.67 -2057.5 -1049.8 734.7 -2250.2 -1728.6 162.61 -1884.9 -1384.5 1243.8 -1752.8 -1300.3 1416.1 -2015.7 -1445 906.22 -1730.7 -1591.4 1009.9 -1602.4 -1212.4 1470.7 -1611.3 -1391.9 774.5 -1659.8 -1508.8 871.34 -1830.3 -1470.7 769.33 -2343.9 -1681.8 121.53 -2019.6 -1621.1 417.89 -2183 -1787 55.843 -2159.8 -1697.4 283.97 -1716.2 -1396.1 1289.1 -1926.2 -1581.2 526.52 -2101.2 -1755.2 29.279 -1699.8 -1474.9 1222.9 -1604 -1431 882.63 -1684.8 -1473.8 788.25 -1704.4 -1248.3 994.82 -1966 -1334.3 1162.9
39 0.3167 -1843.4 -1256.5 1304.4 -1685 -1207.2 1182.9 -2069.9 -1313.4 130.02 -1788.9 -1151.3 1406.9 -2049.2 -1266.1 905.12 -1984.3 -1022.3 929.64 -1668.3 -1092.9 1452.1 -2001.4 -1029.4 625.9 -2015 -1047.8 816.43 -1879.7 -1194.8 775.78 -2126.3 -1402.7 95.052 -1909.3 -1171.8 400.07 -2022.3 -1252.8 21.217 -1982.5 -1202.3 298.74 -1809.7 -1069 1248.1 -1894.3 -1125.7 611.3 -1934.6 -1263.9 36.918 -1905.1 -1004.2 1100.1 -1914.2 -1029.9 696.51 -2039.5 -1043.4 726.72 -2238.2 -1721.5 170.58 -1875.8 -1375.6 1244.3 -1745.1 -1289.2 1415.9 -2006.3 -1438.7 905.97 -1727.2 -1584.9 1014.1 -1594.3 -1201.8 1469.9 -1609.2 -1391.3 770.26 -1655.7 -1507 870.24 -1820.5 -1460.9 767.97 -2333.1 -1674.4 134.09 -1999.1 -1608.9 414.41 -2178.8 -1783.8 61.784 -2143 -1687.7 287.36 -1706.8 -1385.9 1289.2 -1909 -1570.2 522.53 -2100.2 -1754 28.77 -1690.8 -1465.4 1224 -1596.5 -1432.5 877.96 -1683.7 -1471.4 787.66 -1695.8 -1238.4 994.14 -1955.9 -1325.8 1162.1
40 0.3250 -1834.6 -1246.9 1303.9 -1676.1 -1196.9 1182.5 -2068.7 -1311.5 129.42 -1780.5 -1139.8 1406 -2041 -1259.7 906.22 -1972.3 -1015.4 923.19 -1659.7 -1082.3 1451.4 -1978.6 -1017.5 619.54 -1998.3 -1040.7 808.54 -1872.8 -1187.1 775.86 -2126.1 -1402.4 96.58 -1907.8 -1167.7 398.45 -2022.8 -1253.1 21.387 -1981.5 -1199.3 297.63 -1803.2 -1059.7 1245.7 -1889.6 -1118.6 608.84 -1934.7 -1263.9 37.087 -1897.1 -996.43 1095.7 -1893.8 -1017.7 692.35 -2019.9 -1035.9 718.07 -2224.6 -1713.5 179.5 -1866.3 -1366.7 1244.7 -1737.1 -1278.2 1415.2 -1996.3 -1432.1 905.46 -1722.5 -1578 1018.1 -1586.2 -1190.7 1469.1 -1608.7 -1391.9 764.91 -1652.7 -1506.5 869.22 -1810.7 -1451.2 766.78 -2320.3 -1665.7 147.25 -1976.9 -1595.6 410.59 -2172.4 -1778.5 68.658 -2124.8 -1676.9 291.18 -1697 -1375.2 1289.2 -1890.9 -1558 518.37 -2098.4 -1751.8 27.752 -1682.1 -1455.8 1225.7 -1589.9 -1434.6 872.61 -1683.3 -1470.6 787.15 -1686.8 -1228.7 993.8 -1946.9 -1318.1 1161.8
41 0.3333 -1826 -1237.5 1303.1 -1667 -1186.8 1182.7 -2067.3 -1308.7 128.49 -1772.2 -1127.6 1404.6 -2032.2 -1253.6 906.73 -1959.7 -1008.3 916.23 -1651.3 -1071.1 1450.2 -1953.8 -1005.3 613.6 -1980.5 -1033.1 800.22 -1866.1 -1179.2 776.54 -2126.8 -1401.8 99.211 -1905.5 -1164 396.67 -2022.7 -1252.8 21.132 -1979.8 -1195.8 296.36 -1796.5 -1050.3 1243.4 -1886.3 -1112.4 607.91 -1934.7 -1263.8 37.087 -1888.9 -988.8 1090.8 -1872.1 -1005.6 688.7 -1998.3 -1027.2 708.73 -2208.3 -1703.8 187.98 -1856.9 -1357.5 1245 -1729.3 -1267 1414.1 -1986.2 -1425.8 904.86 -1715.9 -1570.7 1021.7 -1578 -1180 1468.5 -1609 -1394.3 758.21 -1650 -1506.7 867.52 -1800.9 -1441.4 765.17 -2305.3 -1656.5 160.91 -1954.1 -1581.9 406.77 -2163.2 -1770.9 77.23 -2104.9 -1665.5 294.41 -1687.3 -1364.3 1289.1 -1872 -1544.9 514.13 -2095.3 -1747.9 27.243 -1672.7 -1445.8 1227.2 -1584.2 -1437.7 865.99 -1683.6 -1470.8 786.22 -1678.5 -1218.9 993.21 -1937.5 -1310.6 1161.8
42 0.3417 -1817.4 -1228 1301.6 -1658.8 -1176.5 1182.5 -2067.5 -1307 129.34 -1764.2 -1116.4 1403.5 -2023.3 -1248 907.41 -1946.3 -1000.7 908.94 -1643.3 -1059.2 1449 -1927.1 -992.87 607.48 -1960.7 -1024.6 792.24 -1859.5 -1170.8 778.49 -2127.6 -1400.5 101.67 -1902.9 -1160.3 394.55 -2022.1 -1252.6 20.283 -1977.8 -1192 294.92 -1789.4 -1041.1 1240.9 -1884.1 -1107.6 607.57 -1934.9 -1263.8 36.833 -1880.8 -980.9 1085.9 -1849 -993.12 685.73 -1975.2 -1018.1 699.23 -2189.9 -1692.9 195.71 -1847.6 -1348.3 1245.1 -1721.6 -1255.5 1413.1 -1975.8 -1419.9 904.18 -1709.2 -1563.7 1025.6 -1570.1 -1169.4 1467.5 -1610.5 -1398.5 750.74 -1647.9 -1507.5 865.57 -1791.1 -1431 763.56 -2288.5 -1646.6 174.74 -1930.7 -1567.3 402.95 -2152.4 -1761.7 86.056 -2083.6 -1653.5 297.12 -1677.8 -1353.5 1289.2 -1852.9 -1531.7 510.4 -2089.9 -1740.6 26.309 -1662.8 -1435.5 1228.5 -1579.8 -1441.6 858.86 -1684.6 -1472 784.86 -1670.4 -1208.6 992.45 -1928.2 -1303 1161.8
43 0.3500 -1809 -1218.5 1300.6 -1651.1 -1166 1181.1 -2068 -1305.5 130.7 -1756.2 -1105.3 1402.3 -2015.3 -1242.5 908.17 -1932.2 -992.45 901.55 -1635.1 -1047.4 1447.3 -1898.5 -980.82 601.63 -1939.7 -1015.4 784.52 -1853.6 -1163 780.11 -2127.6 -1398.4 103.79 -1900.3 -1156.4 392.6 -2022 -1252.1 19.604 -1975.9 -1188.4 293.81 -1782.7 -1031.7 1238.6 -1879.1 -1102.3 605.96 -1935 -1263.8 36.748 -1872.1 -972.93 1080.9 -1824.5 -980.31 682.76 -1950.6 -1008.6 690.15 -2170.8 -1682 203.77 -1837.8 -1339.2 1245.3 -1714 -1243.9 1412.2 -1965.4 -1414.1 903.25 -1702 -1556.5 1029.9 -1562.2 -1158.6 1466.2 -1612.8 -1404.1 742.76 -1646.2 -1509.3 864.04 -1780.4 -1421 762.45 -2271 -1637.1 188.92 -1906.7 -1551.8 399.22 -2139.7 -1750.3 93.015 -2061.2 -1640.8 300.01 -1668.1 -1342.9 1289.2 -1833.6 -1517.9 507.34 -2081.7 -1727.3 24.527 -1653.2 -1425.3 1229.3 -1576.7 -1445.7 851.82 -1686.6 -1474.8 783.84 -1661.4 -1199.5 991.68 -1918.8 -1295.4 1161.5
44 0.3583 -1800.3 -1209 1299.8 -1642.6 -1156 1180 -2067.8 -1303.5 131.21 -1748.2 -1093.9 1401.3 -2007.5 -1237.3 908.43 -1917.4 -983.7 893.91 -1627 -1036.3 1445.7 -1867.2 -968.85 596.71 -1919 -1006.1 775.78 -1848 -1155.1 780.02 -2127.6 -1396.3 105.49 -1897.8 -1152.3 390.73 -2022 -1251.8 19.18 -1974.3 -1185.3 292.54 -1776.1 -1022.5 1236.3 -1873 -1096.2 603.24 -1934.9 -1263.7 36.748 -1863.3 -965.2 1075.7 -1798.8 -968.94 679.96 -1924.1 -998.73 681.15 -2150.8 -1670.6 211.75 -1828.2 -1330.5 1245.4 -1706.2 -1232.5 1411.4 -1954.8 -1408 901.64 -1694.4 -1549.6 1033.9 -1554.1 -1147.3 1464.7 -1615.3 -1411.4 735.64 -1644.3 -1510.8 862.94 -1769.3 -1411.2 761.69 -2253 -1628.6 203.43 -1882.2 -1535.9 395.74 -2124.5 -1735.9 97.089 -2037.7 -1627.7 302.55 -1658.2 -1332.1 1289 -1813.9 -1503.6 504.37 -2071.4 -1708.6 23.763 -1643.6 -1415.1 1230.3 -1574.1 -1449.6 845.11 -1688.4 -1479.1 783.42 -1652.5 -1191 990.41 -1909.4 -1288 1161.2
45 0.3667 -1791.6 -1199.3 1298.7 -1634 -1146.3 1178.7 -2067.4 -1301.7 131.72 -1740.4 -1082.7 1400.2 -1999.4 -1232.4 908.34 -1901.9 -974.79 885.6 -1618.9 -1025.1 1443.9 -1833.5 -956.72 593.23 -1897.8 -997.11 766.87 -1842.1 -1147.3 779.09 -2127.5 -1394.3 106.93 -1895.6 -1148.5 389.12 -2022.2 -1251.4 19.01 -1972.3 -1182.1 291.1 -1769.5 -1013.2 1233.2 -1867.2 -1090 601.03 -1934.7 -1263.6 36.833 -1854.4 -957.56 1070.5 -1770.9 -958.41 679.11 -1895.6 -988.12 673.26 -2129.5 -1658.3 218.45 -1818.7 -1321.1 1244.8 -1698.1 -1221 1410.3 -1943.7 -1402.3 899.85 -1685.5 -1542.6 1037 -1546 -1135.5 1463.1 -1617.7 -1421.5 729.86 -1642.2 -1512.2 862.26 -1758 -1401.3 761.18 -2232.6 -1620.1 217.52 -1857.2 -1519 392.51 -2107.5 -1718.6 98.956 -2013.2 -1613.3 304.42 -1648.4 -1321.2 1288.4 -1793.8 -1488.5 501.57 -2059.7 -1685.9 23.933 -1633.8 -1405 1231.1 -1572.6 -1453 838.83 -1689.7 -1485.1 783.84 -1644.7 -1182.5 989.05 -1899.6 -1280.4 1160.2
46 0.3750 -1782.4 -1189.8 1297.7 -1625.4 -1136.6 1177.5 -2066.9 -1300.5 132.39 -1732 -1071 1398.5 -1990.5 -1226.8 908 -1885.8 -965.63 877.62 -1610 -1013.6 1442.1 -1798.8 -945 590 -1874.4 -987.44 758.89 -1835.9 -1139.7 778.32 -2127.2 -1392.6 108.29 -1893.3 -1145.5 387.34 -2022.3 -1251.2 18.841 -1970.5 -1178.8 289.99 -1762.9 -1004 1229.8 -1861.3 -1084.6 599 -1934.9 -1263.5 36.663 -1845.4 -949.67 1065.2 -1741.7 -947.13 680.05 -1865.6 -977.42 665.96 -2106.7 -1644.5 223.37 -1809.3 -1312.1 1244.6 -1690.1 -1209.5 1409.3 -1932.6 -1396.5 898.24 -1675.4 -1535.2 1040.2 -1537.8 -1124.2 1461.8 -1620.8 -1434.1 724.86 -1640.1 -1514.6 862.85 -1746.2 -1391.8 760.08 -2210.3 -1610.1 229.31 -1832.2 -1501.4 389.71 -2089.3 -1699.2 99.465 -1987.5 -1598.1 305.78 -1638.5 -1310.6 1287.7 -1773.3 -1472.9 499.36 -2047.4 -1660.3 23.593 -1623.4 -1394.5 1231.4 -1572.1 -1455.8 832.72 -1690.1 -1492.3 784.77 -1635.8 -1173 988.37 -1889.8 -1272.8 1159.3
47 0.3833 -1773.1 -1180.3 1296.8 -1616.7 -1126.6 1176.4 -2066.4 -1299.3 132.9 -1723.6 -1059.6 1396.8 -1981.6 -1221.2 907.92 -1869.4 -956.12 870.15 -1601.5 -1002.1 1440.1 -1762.9 -933.63 587.46 -1849.7 -977 751.84 -1829.3 -1131.7 777.81 -2127.2 -1391.7 109.82 -1891 -1142.6 385.3 -2022.8 -1251.2 19.18 -1968.9 -1176.3 288.98 -1756.2 -994.82 1226.8 -1855.9 -1079.5 596.54 -1935.3 -1263.6 36.239 -1835.9 -941.69 1059.7 -1711.9 -935.41 682.51 -1833.9 -967.49 658.91 -2082.6 -1628.9 226.85 -1799.6 -1303.7 1244.7 -1682.1 -1197.9 1408.2 -1921.2 -1390.3 896.37 -1665.4 -1528.3 1043.5 -1529 -1112.4 1460 -1624.3 -1447.3 720.95 -1637.5 -1516.6 864.21 -1733.5 -1383.5 758.21 -2187.3 -1599.1 239.84 -1807.1 -1483.2 387.76 -2070.4 -1678.3 99.55 -1961.6 -1582 307.22 -1628.5 -1299.8 1286.9 -1752.9 -1456.8 497.92 -2034.1 -1632.9 22.66 -1613 -1384.1 1231.5 -1571.8 -1457.7 827.29 -1689.7 -1500.5 785.54 -1627.2 -1163.5 987.44 -1880 -1265.5 1158.9
48 0.3917 -1763.9 -1171.2 1295.8 -1607.7 -1116 1175 -2066.3 -1298.2 132.99 -1715.2 -1047.9 1395.1 -1972.8 -1215.7 907.83 -1851.8 -946.28 862.6 -1592.9 -990.41 1438.3 -1725.4 -922.09 587.2 -1823.6 -966.22 745.73 -1822.1 -1123.7 777.31 -2127.6 -1391.2 111.26 -1889.2 -1139.4 383.18 -2024.1 -1251.4 19.52 -1967.4 -1174 287.62 -1749.6 -986.25 1224.4 -1851.6 -1074.5 594.16 -1935.4 -1263.6 36.069 -1826.2 -933.63 1054.4 -1681.7 -924.38 687.09 -1800.9 -957.48 653.14 -2058.5 -1612 229.74 -1789.4 -1294.8 1244.5 -1673.7 -1186.6 1407.1 -1909.5 -1384.2 894.68 -1655.3 -1521.6 1047 -1520.3 -1100.5 1458.1 -1627.3 -1460.9 719.09 -1633.8 -1518 865.99 -1724.2 -1372.8 760.42 -2164.1 -1585.8 248.92 -1781.6 -1463.9 385.89 -2051.3 -1655.7 100.74 -1935.6 -1565 307.99 -1618.6 -1289.6 1286 -1732 -1440.1 496.99 -2018.3 -1605.9 23.508 -1602.3 -1373.6 1231.9 -1571.5 -1459.5 823.39 -1688.8 -1509.8 788.42 -1618.6 -1154.1 986.25 -1870.3 -1257.8 1158
49 0.4000 -1754.6 -1161.7 1294.6 -1598.7 -1105.3 1173.1 -2064.8 -1297.3 131.97 -1705.8 -1036.2 1392.9 -1963.5 -1210 907.66 -1833.1 -936.35 855.04 -1584 -978.87 1436.2 -1686.7 -909.36 591.02 -1796.6 -955.7 740.98 -1814.4 -1115.3 776.03 -2127.6 -1390.9 112.7 -1887.5 -1137.1 381.57 -2024.6 -1251.4 19.604 -1966.3 -1172.5 286.17 -1742.8 -977.42 1222.3 -1847.8 -1069.5 592.12 -1935.4 -1263.6 36.069 -1816.3 -925.91 1049.3 -1650.7 -913.86 694.13 -1767.5 -944.83 650.77 -2034.5 -1593.6 232.62 -1779.1 -1285.9 1244.2 -1665.2 -1175.3 1406 -1898.1 -1378.4 893.4 -1644.7 -1514.8 1051 -1511.7 -1089.1 1456.8 -1629.6 -1475.2 719.51 -1628.6 -1519.9 868.11 -1712.4 -1362 761.69 -2140.3 -1569.6 255.37 -1755.6 -1444.1 384.62 -2030.5 -1633.7 101.25 -1909.3 -1546.3 307.65 -1608.1 -1279 1284.6 -1710.1 -1422.5 496.99 -1999.1 -1580 25.885 -1591.6 -1363 1231.9 -1571.2 -1460.7 821.35 -1687.2 -1519.3 794.87 -1609.6 -1144.7 984.98 -1860.2 -1249.9 1157
50 0.4083 -1745.1 -1152.2 1293.6 -1589.7 -1094.4 1171 -2062.2 -1296.4 130.1 -1696.8 -1025.1 1391.3 -1953.9 -1204.4 907.24 -1814.6 -926.42 849.02 -1574.9 -967.66 1434 -1647 -896.8 598.83 -1768.6 -945.17 738.1 -1806.9 -1107.3 775.01 -2127.6 -1390.7 113.55 -1886.1 -1135.4 379.78 -2024.6 -1251.5 19.52 -1965.3 -1171.2 284.56 -1735.4 -968.26 1220.1 -1844 -1064.8 590 -1935.5 -1263.5 36.069 -1805.9 -918.36 1045 -1619.4 -903.59 703.81 -1733 -931.93 651.28 -2010 -1573.8 234.74 -1768.6 -1276.8 1244.2 -1656.6 -1164.2 1405.2 -1886 -1372.2 892.56 -1633.6 -1507.8 1054.9 -1502.7 -1077.7 1455.2 -1631.3 -1489.7 722.23 -1622.7 -1522.2 871.34 -1699.8 -1351.4 762.28 -2115.4 -1550.9 259.19 -1729.3 -1423.7 384.03 -2007.3 -1611.6 101.59 -1883.1 -1526.1 307.05 -1597.3 -1268.4 1283.7 -1688.1 -1403.8 497.84 -1976.5 -1554.4 28.6 -1580.6 -1352 1232 -1572.3 -1460.2 820.59 -1683.9 -1529.5 803.11 -1600.2 -1135.2 983.7 -1849.7 -1242 1156.5
51 0.4167 -1735.1 -1142.7 1292.8 -1580.1 -1083.5 1169.2 -2060.8 -1296.1 129.08 -1687.8 -1014.4 1390 -1944.2 -1198.8 906.81 -1795.6 -916.74 845.28 -1565.7 -956.72 1432.2 -1606.9 -885.6 609.78 -1740 -934.99 737.59 -1798.8 -1099.5 773.91 -2127.6 -1391 113.98 -1885.1 -1134 377.58 -2024.4 -1251.5 19.265 -1964.4 -1170.3 282.78 -1727.7 -959.52 1218.6 -1840 -1060.5 587.46 -1935.4 -1263.5 36.154 -1795.1 -910.97 1041.7 -1588.5 -893.49 716.54 -1697.6 -921.07 653.48 -1985.2 -1552.6 236.1 -1757.9 -1267.5 1244.2 -1647.4 -1153.5 1404.4 -1873.3 -1365.6 892.05 -1622.4 -1501 1059.8 -1493.4 -1066.4 1453.9 -1632.6 -1503.9 727.57 -1617.6 -1523 876.6 -1687.4 -1340.6 762.28 -2090.2 -1530.6 260.97 -1703.5 -1403 384.96 -1982.3 -1587.5 103.96 -1857.5 -1505 307.56 -1586.4 -1257.5 1283.1 -1666.5 -1385.2 499.7 -1950.3 -1528.8 31.147 -1569.4 -1341.2 1232.5 -1575.1 -1459.6 819.99 -1679.2 -1540.3 812.61 -1590.6 -1125.4 982.77 -1839.3 -1233.8 1156.1
52 0.4250 -1724.9 -1133.1 1292.3 -1569.8 -1073.5 1168.5 -2061.7 -1296.5 129.68 -1678 -1004 1388.6 -1934.4 -1193.1 906.81 -1775.2 -906.81 842.74 -1556.2 -945.77 1431 -1567.3 -873.21 625.39 -1711.6 -924.47 739.2 -1790.4 -1091.8 772.55 -2127.6 -1391.6 114.15 -1884.3 -1132.2 375.63 -2023.7 -1251.6 18.756 -1963.7 -1169.3 281.08 -1719.6 -950.44 1218 -1836.2 -1056.2 585.08 -1935.3 -1263.3 36.239 -1784 -903.25 1039.7 -1558.2 -883.81 731.9 -1661.9 -909.78 658.58 -1959.7 -1530.3 237.12 -1746.7 -1257.8 1243.7 -1637.4 -1142.7 1403.2 -1860.6 -1358.7 891.37 -1611.8 -1494.6 1066.3 -1483.8 -1056 1453.3 -1633.1 -1517.4 736.06 -1613 -1522.4 884.49 -1674.6 -1329 761.18 -2065.2 -1509 261.48 -1678.3 -1382.4 387.51 -1955.9 -1562.5 105.83 -1831.9 -1483.2 308.92 -1575.4 -1246.6 1282.9 -1644.8 -1367.5 502.93 -1922.8 -1502.2 33.693 -1558.3 -1330.6 1233.9 -1577.9 -1459.6 820.93 -1673.3 -1550.1 824.41 -1580.7 -1115.6 982.26 -1828.4 -1225.2 1155.9
53 0.4333 -1714.2 -1123.7 1292 -1559.5 -1063.1 1168 -2062.6 -1297.2 130.27 -1667.6 -993.72 1387.8 -1924 -1187 906.56 -1754.4 -897.05 841.21 -1546.1 -935.24 1430.1 -1528.3 -860.56 645.08 -1682.9 -913.6 743.1 -1782.6 -1083.9 771.19 -2126.7 -1391.7 113.64 -1883.7 -1130.9 374.1 -2022.9 -1251.8 18.416 -1963.4 -1168.2 279.64 -1710.9 -941.19 1217.9 -1832.6 -1052.4 582.96 -1935.5 -1263.5 36.154 -1772 -895.78 1039 -1529.3 -874.65 750.32 -1626.3 -896.88 668.42 -1933.7 -1507.2 237.55 -1735.6 -1248 1243.4 -1627.1 -1132.2 1402.6 -1847.6 -1351 891.11 -1601.1 -1488.4 1073.7 -1473.7 -1045.9 1452.6 -1632.3 -1528.7 746.58 -1607.7 -1522.1 893.49 -1660.7 -1320.5 764.41 -2039.9 -1486.6 260.04 -1653.7 -1361.9 391.33 -1927.5 -1537 106.08 -1806.3 -1460.7 310.79 -1564.1 -1236 1282.8 -1623.3 -1349.8 507.09 -1893.4 -1475.6 35.39 -1547.2 -1320 1235.8 -1579.2 -1459.9 823.9 -1666.4 -1558.3 837.9 -1570.5 -1105.6 981.84 -1817.4 -1217 1155.8
54 0.4417 -1703.3 -1114.7 1292 -1549.1 -1052.2 1168.1 -2061.8 -1297 129.42 -1656.9 -983.45 1387.7 -1913.1 -1180.5 906.3 -1734 -887.89 841.38 -1535.5 -925.31 1429.8 -1490.8 -849.78 667.32 -1654.3 -903.59 748.96 -1774.7 -1075.7 769.75 -2125.6 -1391.7 112.62 -1883.8 -1130.4 372.66 -2022.8 -1251.9 18.247 -1963.4 -1167.5 278.37 -1701.2 -932.1 1218.9 -1829.2 -1048.8 580.5 -1935.8 -1263.3 36.323 -1758.5 -887.89 1038.7 -1502.2 -865.57 771.62 -1591.2 -883.73 681.83 -1907.1 -1483.7 237.29 -1724.3 -1238.6 1243.8 -1616.6 -1122.1 1403 -1834.3 -1343 891.37 -1590.6 -1482.3 1082 -1462.8 -1035.5 1452 -1630.3 -1537.8 759.4 -1602.1 -1522.3 903.5 -1647.2 -1310.1 766.36 -2014.2 -1463.5 256.81 -1629 -1341.2 396.25 -1897.6 -1511.2 104.98 -1780.9 -1437.7 312.74 -1552.9 -1225.4 1282.8 -1601.8 -1332.1 512.52 -1861.4 -1449.5 35.899 -1535.6 -1309.2 1238.1 -1580.7 -1460.7 828.99 -1659.2 -1564.5 852.33 -1559.6 -1094.9 981.24 -1806 -1208.2 1155.6
55 0.4500 -1692 -1105.2 1292.2 -1537.7 -1041.8 1168.8 -2062.5 -1297.2 129.34 -1645.6 -973.77 1387.8 -1901.9 -1173.6 906.3 -1713.9 -878.38 842.82 -1524.5 -915.81 1430 -1455.4 -839.17 691.84 -1626.7 -893.91 756.6 -1766.3 -1067.6 767.8 -2124.9 -1392.1 112.03 -1884.3 -1130.4 371.72 -2022.9 -1252.1 17.907 -1963.8 -1167.7 277.43 -1690.7 -923.02 1220.8 -1825.8 -1044.8 577.95 -1936.3 -1263.1 36.833 -1744.4 -879.06 1039.1 -1476.4 -857.08 794.79 -1556.9 -872.1 696.51 -1879.8 -1459.7 235.85 -1712.6 -1229.5 1244.5 -1605.4 -1112.4 1403.3 -1820.8 -1335.4 891.88 -1580.5 -1476.8 1091.6 -1451.2 -1025.6 1451.9 -1627.1 -1545.5 774.33 -1596.1 -1522.6 914.79 -1632.4 -1299.6 769.58 -1987.5 -1440.2 252.65 -1604.7 -1320.5 401.93 -1866.2 -1484.9 102.69 -1755.2 -1414.8 314.86 -1541.1 -1215 1282.9 -1580.5 -1314.8 518.71 -1827.9 -1421.5 37.087 -1524 -1298.4 1240.9 -1581.9 -1462 836.2 -1652.5 -1568.8 868.03 -1548.8 -1083.9 980.9 -1794.1 -1199 1155.4
56 0.4583 -1680.4 -1095.9 1292.6 -1525.6 -1031.6 1169.9 -2064.5 -1298.2 130.36 -1633.8 -964.61 1388.5 -1890.3 -1166.3 906.56 -1694.3 -868.79 845.71 -1512.4 -906.73 1430.6 -1422.2 -828.65 719.51 -1599.6 -882.54 767.88 -1756.7 -1060.2 765.42 -2124.5 -1392.8 111.52 -1884.9 -1130.3 371.3 -2023.2 -1252.6 17.653 -1964.6 -1168 277.18 -1679.3 -913.69 1223.3 -1822.1 -1040.8 576.25 -1936.6 -1263.2 37.087 -1729.6 -869.39 1040.8 -1452.6 -848.93 819.06 -1524.2 -860.99 713.4 -1852 -1435.4 233.47 -1700.6 -1219.9 1244.8 -1593.6 -1102.8 1403.7 -1806.8 -1327.7 892.64 -1570.3 -1471.4 1102.8 -1439.7 -1016.5 1452.6 -1622.9 -1551.6 790.71 -1590 -1522.9 927.78 -1617.1 -1288 772.98 -1960.5 -1416.6 247.39 -1580.4 -1299.8 408.04 -1833.1 -1457.6 100.48 -1729.2 -1392.1 317.32 -1528.9 -1204.5 1283.1 -1558.9 -1297.7 525.76 -1793.1 -1392.8 38.615 -1512.1 -1288 1244.7 -1580.1 -1464 846.47 -1647.1 -1571.2 884.75 -1537.1 -1072.8 981.33 -1782.1 -1190 1155.6
57 0.4667 -1668.2 -1086.8 1293.6 -1513.1 -1021.1 1171.4 -2065.5 -1299.2 130.7 -1621.5 -955.44 1390.3 -1878.5 -1158.4 907.24 -1675 -858.52 850.29 -1499.9 -897.82 1432.1 -1391 -818.98 749.64 -1573.5 -870.24 781.12 -1746.6 -1052.9 763.22 -2124.3 -1393.8 111.09 -1886 -1130.3 371.38 -2023.4 -1252.8 17.737 -1965.5 -1168.6 277.35 -1667.1 -904.35 1226.2 -1818.2 -1037 575.32 -1936.9 -1263.3 37.087 -1713.9 -859.12 1043.9 -1431 -840.96 844.69 -1493.3 -849.78 732.83 -1824.1 -1411.2 230.33 -1688.2 -1210 1245.7 -1581.4 -1093.8 1404.9 -1792.5 -1319.2 893.66 -1560.6 -1466.2 1115.9 -1427.5 -1007.6 1453.9 -1617.7 -1555.3 807.86 -1584.3 -1523.3 943.39 -1602.4 -1275.2 775.78 -1933.5 -1392.7 241.02 -1556.5 -1279.4 414.83 -1799.4 -1428.8 98.277 -1703.8 -1369.3 319.87 -1516.5 -1194.7 1284.2 -1537.6 -1280.7 533.31 -1756.5 -1363.8 39.548 -1500 -1278 1249.1 -1574.5 -1466.4 860.05 -1642.4 -1571.7 902.23 -1524.7 -1062.2 982.43 -1769.8 -1180.5 1156.2
58 0.4750 -1655.4 -1077.6 1295.3 -1500 -1010.7 1173.1 -2064.9 -1299.4 129.76 -1608.9 -946.62 1392.6 -1866.2 -1150.1 908 -1655.4 -847.58 855.3 -1487.7 -888.82 1434.9 -1362 -810.66 780.53 -1548.5 -858.86 794.87 -1736.8 -1044.6 762.62 -2124.2 -1394.6 111.18 -1887.5 -1130.4 372.57 -2023.7 -1252.8 18.077 -1966.8 -1169.2 278.2 -1654.2 -894.85 1230.3 -1813.6 -1033.4 574.73 -1936.9 -1263.3 37.172 -1698.2 -848.51 1048.3 -1411.7 -833.32 871.59 -1464.2 -838.24 754.22 -1795.2 -1386.6 226.6 -1675.5 -1199.9 1246.6 -1568.6 -1085.4 1406.5 -1778 -1309.9 894.93 -1551.6 -1461.1 1131.3 -1414.7 -998.56 1455.5 -1611.7 -1556.7 825.76 -1578 -1522.9 960.45 -1588.8 -1262.3 779.26 -1905.6 -1368.9 233.81 -1533.1 -1259.4 422.3 -1765 -1399.8 95.392 -1678.3 -1346.3 321.73 -1503.9 -1184.9 1286.3 -1516.6 -1263.6 541.37 -1718.8 -1334 40.482 -1488 -1268 1254.5 -1565.3 -1468.3 877.03 -1637.9 -1570.2 920.05 -1511.8 -1051.8 984.3 -1757.1 -1170.1 1156.7
59 0.4833 -1642.2 -1068.2 1297.7 -1486.4 -1000.8 1175.5 -2063.3 -1298.7 128.91 -1595.9 -938.38 1395.5 -1853.3 -1140.9 909.36 -1635.8 -835.35 861.32 -1474.9 -880.17 1438.2 -1336.4 -802.94 812.36 -1524.9 -847.58 809.72 -1726.8 -1035.3 763.05 -2123.8 -1395 111.86 -1889.3 -1130.7 374.95 -2023.8 -1252.7 18.416 -1968.6 -1169.6 280.06 -1640.7 -885.43 1235.7 -1808.9 -1029.6 574.73 -1936.9 -1263.1 37.342 -1682.5 -837.48 1054.1 -1394.5 -826.53 898.75 -1437.7 -825.6 777.31 -1765.6 -1361.3 221.84 -1662.6 -1189.6 1248.2 -1554.4 -1077.2 1408.8 -1763.3 -1300.3 896.54 -1542.7 -1455.7 1148.1 -1401.2 -989.73 1458 -1604.8 -1556.4 844.18 -1570.6 -1520.7 977.93 -1574.8 -1249.6 783.16 -1877 -1344.8 225.92 -1510.6 -1240.1 429.43 -1730.1 -1370.4 92.251 -1652.2 -1323.5 322.5 -1490.8 -1175 1288.8 -1495.7 -1246.5 550.11 -1681.7 -1303 41.076 -1475.4 -1258 1261 -1553.6 -1468.6 895.87 -1632 -1566.7 938.05 -1498.7 -1041 986.67 -1743.8 -1159.3 1157.9
60 0.4917 -1628.6 -1059 1300.3 -1472.3 -991.43 1178.7 -2064 -1298.2 130.36 -1582.4 -930.41 1398.7 -1839.9 -1131.2 911.14 -1617.1 -822.88 868.88 -1461 -871.34 1441.5 -1313.6 -795.21 844.61 -1502.4 -835.78 825.34 -1715.5 -1025.5 763.05 -2123.4 -1394.5 112.79 -1891 -1130.9 378.34 -2023.9 -1252.6 19.18 -1970 -1169.3 282.95 -1626.3 -875.92 1241.8 -1803.6 -1025.5 575.66 -1936.9 -1262.8 37.257 -1666.3 -825.93 1060.6 -1379.4 -819.91 925.06 -1413.1 -812.78 800.98 -1735.9 -1335.4 216.5 -1649.3 -1178.8 1250.2 -1539.9 -1069.2 1411.6 -1749 -1290.4 898.92 -1534 -1449.5 1165.5 -1387 -980.73 1461 -1596.2 -1553.4 861.92 -1562.4 -1517 996.26 -1559.9 -1236.7 787.66 -1847.3 -1319.8 216.58 -1488.4 -1220.9 436.65 -1695.4 -1340 87.838 -1626.1 -1301.2 322.67 -1477.2 -1165.3 1292.2 -1475.1 -1229.5 558.94 -1645.4 -1270.5 41.331 -1462.9 -1247.6 1268.1 -1542.1 -1466.3 914.11 -1624.8 -1561.2 955.7 -1485.1 -1030.2 989.73 -1730.4 -1148.8 1160.1
61 0.5000 -1614.7 -1049.3 1303.7 -1457.4 -982.77 1182.7 -2065.1 -1297.8 133.92 -1568.2 -922.34 1402.9 -1826.2 -1120.9 913.18 -1598.5 -809.89 877.36 -1446.7 -863.11 1444.9 -1293.4 -787.49 876.69 -1481 -823.9 841.38 -1703.6 -1015.4 763.56 -2122.7 -1393 114.83 -1892.4 -1131.2 383.18 -2024.6 -1252.4 19.689 -1970.9 -1169.1 286.94 -1611.1 -866.42 1248.4 -1797.8 -1021.3 577.61 -1936.8 -1262.3 37.257 -1649.4 -814.39 1067.7 -1365.6 -813.8 950.18 -1390.4 -800.9 824.92 -1705.8 -1309.7 210.56 -1635.7 -1168.1 1252.4 -1525.1 -1060.9 1415.2 -1734.3 -1279.7 901.98 -1524.9 -1441.7 1183.1 -1372.4 -971.57 1464.5 -1585.8 -1547.8 879.23 -1553 -1511.4 1014.7 -1544.7 -1223.4 792.84 -1816.6 -1293.7 205.8 -1466.1 -1201.6 444.2 -1661.4 -1308.4 82.746 -1600 -1279 322.24 -1463.5 -1155.6 1297.2 -1455 -1212.8 567.94 -1609.2 -1236.7 42.349 -1450.1 -1237 1276.6 -1530.2 -1461.9 932.1 -1616.3 -1553.7 973.01 -1470.8 -1019.3 993.29 -1716.3 -1137.5 1162.5
62 0.5083 -1600.4 -1039.2 1307.4 -1442.4 -973.69 1187.6 -2064.2 -1296.3 137.66 -1553.3 -914.45 1407.8 -1811.8 -1109.6 915.81 -1580.6 -796.23 886.11 -1432.1 -854.54 1449.6 -1275.8 -780.28 908.68 -1461.4 -811.85 857.84 -1691.2 -1003.7 765.08 -2121.8 -1391.3 118.22 -1893.3 -1131.2 389.12 -2025.1 -1252.6 20.114 -1971.1 -1168.7 292.12 -1594.8 -856.91 1255.6 -1791.6 -1016.9 580.41 -1936.5 -1262.1 37.766 -1632.3 -802.6 1075.8 -1353.4 -808.28 974.62 -1369.1 -789.02 848.85 -1675.4 -1283.5 204.28 -1621.4 -1157 1255.6 -1509.5 -1052.4 1419.2 -1718.7 -1268.7 905.54 -1515.3 -1432.5 1200.6 -1357.2 -963.25 1468.6 -1573.8 -1540.3 896.37 -1542.3 -1503.7 1032.4 -1529 -1209.5 798.69 -1785.5 -1267.3 194.86 -1444.5 -1183.1 450.9 -1626.7 -1276.3 78.927 -1574 -1256.9 321.39 -1449.3 -1145.5 1303 -1435.4 -1196.6 576.59 -1572.7 -1202.1 44.216 -1437.5 -1226.2 1286.1 -1517.4 -1455.7 950.01 -1606.4 -1544.4 989.73 -1455.7 -1009 998.13 -1701.6 -1125.6 1165
63 0.5167 -1585.8 -1029.1 1311.6 -1426.9 -964.44 1192.8 -2062.1 -1293 140.8 -1537.6 -906.3 1413.1 -1796.7 -1098 918.86 -1563.7 -781.72 895.78 -1416.8 -845.71 1454.8 -1260.7 -773.57 939.91 -1443.4 -799.2 874.48 -1677.4 -991.94 766.95 -2119.6 -1388.9 123.14 -1893.2 -1130.9 396.25 -2024.4 -1252.7 19.52 -1970.9 -1167.7 298.65 -1577.9 -847.07 1263.2 -1784 -1012 584.06 -1936.5 -1262.2 38.021 -1615.5 -790.21 1085 -1342.3 -803.78 997.79 -1350 -777.31 872.53 -1644.7 -1257.3 197.4 -1607.1 -1145.5 1259.4 -1493.7 -1044.1 1423.9 -1703 -1257.4 909.36 -1505.2 -1422 1217.3 -1341.8 -955.19 1473.7 -1560.3 -1530.6 912.92 -1531.3 -1494.4 1049.4 -1512.9 -1195.5 804.72 -1753.9 -1240.8 184.25 -1423.7 -1165.7 457.01 -1590.8 -1244.2 75.448 -1547.8 -1234.8 319.1 -1435.1 -1134.9 1309.7 -1416.8 -1181.3 585.16 -1535.7 -1167.4 45.659 -1424.2 -1214.8 1296.3 -1504.2 -1446.9 967.07 -1595.9 -1533.7 1005.5 -1439.9 -999.32 1003.6 -1686.8 -1113.9 1167.8
64 0.5250 -1570.8 -1018.8 1316.5 -1410.3 -955.78 1198.6 -2059.7 -1288.8 144.95 -1521.6 -898.16 1418.7 -1781 -1086.2 922.51 -1547.3 -766.78 905.97 -1401.2 -837.22 1460.2 -1247.9 -767.04 969.53 -1426.5 -786.47 890.43 -1663 -980.48 769.92 -2116.2 -1384.1 129.59 -1891.5 -1129.5 404.57 -2023.1 -1253.4 18.077 -1969.8 -1165.6 306.37 -1560.3 -837.31 1270.8 -1775.2 -1006.4 588.47 -1936.5 -1262.2 37.851 -1598.1 -777.31 1094.5 -1332.4 -799.03 1019.2 -1332.9 -766.36 895.1 -1613.7 -1230.8 190.02 -1592.3 -1133.6 1263.6 -1477.6 -1036.2 1429.3 -1686.5 -1245.5 913.18 -1494.9 -1410.8 1233.3 -1325.6 -946.28 1478.7 -1545.8 -1519.3 928.2 -1519.6 -1483.7 1064.8 -1497.2 -1181.6 810.06 -1722.3 -1213.4 173.05 -1403.7 -1148.9 462.95 -1554.7 -1211.8 71.374 -1521.7 -1212.8 316.13 -1420.9 -1124.2 1317.2 -1398.8 -1166.3 593.4 -1498.7 -1133.2 47.271 -1410.7 -1203.3 1306.5 -1490.6 -1436.4 982.86 -1584.5 -1521.6 1019.8 -1423.6 -989.48 1009.2 -1671.6 -1101.8 1170.7
65 0.5333 -1555.5 -1008 1321.6 -1393.4 -947.21 1204.7 -2056.9 -1284.9 151.4 -1505.6 -889.92 1424.9 -1765 -1074 926.76 -1531.1 -751.59 916.23 -1385.2 -829.16 1465.8 -1236.7 -761.18 996.86 -1410.8 -773.74 905.97 -1648.4 -968.43 774.84 -2111.9 -1378 137.23 -1888.2 -1126.5 414.07 -2022.3 -1254.1 17.737 -1967.2 -1163 315.28 -1542.3 -827.8 1279 -1765.1 -1000.3 593.23 -1935.9 -1261.9 38.106 -1580.2 -764.41 1104.3 -1323.6 -793.94 1038.7 -1316.6 -755.92 916.06 -1582.4 -1203.6 182.04 -1577.1 -1121.7 1268.4 -1461.2 -1028.3 1435.5 -1669.6 -1232.8 917 -1484.2 -1399 1247.9 -1309.3 -937.28 1484 -1530.3 -1506.9 942.46 -1507.3 -1471.6 1078.9 -1481 -1167.4 814.9 -1690.1 -1185.8 160.99 -1384.3 -1132.7 468.3 -1518.5 -1178.8 67.215 -1495.6 -1190.7 312.82 -1405.9 -1113.1 1324.5 -1381.2 -1151.1 600.36 -1462.1 -1098.9 49.648 -1396.7 -1191.6 1316.7 -1476.6 -1424.5 997.11 -1572.4 -1508.2 1032.4 -1406.9 -980.22 1015.4 -1655.6 -1089.4 1174.4
66 0.5417 -1539.8 -997.11 1326.7 -1375.8 -938.3 1211.3 -2051.9 -1279.5 158.19 -1489.2 -881.61 1431.2 -1748.5 -1061.4 931 -1515.7 -736.48 926.76 -1368.8 -820.59 1471.8 -1226.9 -755.49 1021.2 -1396.2 -761.18 921.92 -1632.9 -955.78 779.77 -2106.3 -1373.2 146.74 -1883.6 -1122.3 423.92 -2022 -1253.4 20.114 -1963.2 -1159.3 324.79 -1524.1 -817.79 1287 -1753.7 -993.04 598.57 -1935.7 -1262.1 38.021 -1562.6 -751.59 1114.3 -1315.2 -789.02 1056.4 -1301.2 -745.48 935.92 -1550.9 -1176.2 173.98 -1561.5 -1109.6 1273.1 -1444.6 -1019.9 1441.7 -1652.1 -1219.9 921.41 -1472.4 -1386.6 1260.5 -1292.5 -928.71 1489.5 -1514.3 -1493.3 955.27 -1494.4 -1459 1091.3 -1464.8 -1153.6 819.99 -1657.6 -1158 148.77 -1365.6 -1116.8 472.37 -1482 -1145.9 63.396 -1469.7 -1169.2 309 -1390.5 -1101.6 1331.9 -1363.9 -1136.1 606.13 -1425.8 -1064.2 52.194 -1381.7 -1179.3 1326.3 -1461.5 -1411.3 1010.2 -1559.6 -1494.1 1043.1 -1389.8 -969.45 1021.6 -1639.7 -1077.1 1178.8
67 0.5500 -1523.6 -986.17 1331.8 -1358.7 -928.54 1217.8 -2045.1 -1271.7 165.15 -1472.5 -873.12 1437.5 -1731.6 -1048.4 935.33 -1501.4 -721.46 937.37 -1351.9 -811.85 1478.4 -1218.1 -749.3 1043.2 -1381.7 -747.86 936.94 -1616.6 -943.22 784.77 -2098.3 -1367.7 158.19 -1878 -1117.1 434.69 -2023.2 -1251.8 24.102 -1957.9 -1154.4 334.55 -1506 -807.77 1295.1 -1741 -984.38 604.77 -1936.6 -1261.5 37.681 -1545.1 -738.44 1124.6 -1306.5 -783.84 1072.1 -1287 -735.89 954.34 -1519.3 -1148.7 165.41 -1545.5 -1097 1278.3 -1428 -1011.3 1447.3 -1634.6 -1206.7 925.57 -1460.2 -1374.1 1271.3 -1275.2 -920.56 1494.9 -1497.6 -1478.9 965.8 -1481.1 -1445.4 1101.8 -1448.5 -1139.7 824.24 -1624.7 -1130.5 136.04 -1348 -1101.2 475.77 -1445.5 -1112.7 59.917 -1444.3 -1147.9 304.51 -1375.3 -1090 1339.2 -1347.4 -1122.3 611.47 -1389.8 -1030.4 54.4 -1366.1 -1166.8 1334.9 -1446 -1397.8 1021.8 -1546 -1479.2 1052.1 -1371.7 -960.2 1028.4 -1623.3 -1064.6 1183.8
68 0.5583 -1507.2 -975.05 1337.1 -1341.5 -918.19 1224 -2037.7 -1263.3 172.37 -1455.5 -864.21 1443.6 -1713.8 -1035.4 940 -1487.6 -706.61 948.14 -1334.8 -803.36 1484.9 -1209.9 -743.53 1062.8 -1368 -735.13 949.93 -1599.9 -930.58 789.53 -2087.8 -1359.7 169.4 -1871.4 -1111 445.64 -2023.3 -1251 25.715 -1951.5 -1148.7 344.56 -1488.1 -797.93 1303.7 -1727.7 -975.22 611.3 -1936.6 -1260.5 38.021 -1527.9 -725.62 1134.7 -1298.1 -777.81 1086.1 -1273.4 -728.17 970.04 -1487.6 -1121.9 156.84 -1528.9 -1084.4 1283.9 -1410.8 -1002.3 1452.6 -1617.2 -1193.9 929.64 -1447.9 -1361.3 1280.7 -1258.3 -912.5 1500.3 -1480.4 -1464 974.71 -1467.4 -1431.6 1110.4 -1431.7 -1125.9 827.97 -1591.4 -1102.8 123.65 -1331.8 -1086.6 478.57 -1409.1 -1080 56.522 -1419.3 -1126.7 299.5 -1359.6 -1078.5 1346.2 -1331.5 -1109.1 615.97 -1353.9 -998.13 57.71 -1350.4 -1154.5 1342.9 -1430.2 -1383.8 1032 -1531.4 -1464.1 1058.7 -1354.6 -947.97 1034.6 -1606.6 -1052.1 1189.1
69 0.5667 -1490.5 -964.18 1342.8 -1324.1 -908 1230 -2030 -1253.8 179.16 -1438.2 -855.21 1449.5 -1695.7 -1022.6 944.75 -1474.7 -692.27 958.5 -1317.6 -794.87 1491 -1201.2 -737.67 1078.5 -1355.8 -725.71 960.62 -1582.4 -917.68 794.11 -2075.9 -1349.4 179.92 -1863.9 -1104.2 456.5 -2021 -1250.4 25.715 -1943.9 -1142.7 355.68 -1469.9 -788.17 1311.9 -1713 -965.29 617.41 -1935.6 -1259.9 38.445 -1511.2 -713.4 1144.9 -1289.2 -771.7 1098.2 -1260.7 -722.14 982.77 -1456.2 -1095.1 149.03 -1511.9 -1071.9 1288.9 -1394 -993.63 1458 -1599.9 -1181.5 934.31 -1435 -1348.5 1287.8 -1241.5 -904.18 1505.8 -1462.3 -1448.4 981.41 -1452.5 -1417.3 1116.9 -1414.5 -1113.6 832.72 -1558 -1073.9 111.86 -1316.6 -1072.8 479.93 -1373.1 -1048.3 53.297 -1395.4 -1106.2 294.24 -1342.9 -1067.2 1352 -1316.5 -1096.3 619.28 -1318.1 -967.49 62.208 -1334.5 -1142.7 1350.2 -1413.1 -1369 1039.8 -1516.1 -1448.3 1063.4 -1337.6 -936.43 1040.6 -1589.6 -1039.7 1194.4
70 0.5750 -1473.5 -953.66 1348.1 -1307 -897.48 1235.8 -2021.7 -1242.9 186.12 -1420.9 -846.05 1455.2 -1677.3 -1009.8 949.5 -1462.4 -678.6 968 -1300 -786.47 1496.5 -1192.3 -731.31 1090.4 -1344.4 -715.1 970.55 -1564.1 -904.61 799.03 -2065 -1338 191.97 -1855.2 -1097.1 467.2 -2018.7 -1248.8 28.855 -1935.5 -1135.9 366.88 -1452.1 -778.49 1319.4 -1697.6 -954.34 623.19 -1934.1 -1258.5 38.106 -1495.7 -701.43 1155 -1279.5 -765.17 1108.4 -1249.4 -713.48 994.14 -1425.3 -1068.1 141.22 -1494.9 -1059.3 1293.6 -1377.2 -984.64 1463 -1581.9 -1168.8 938.55 -1422.3 -1336.1 1293 -1224.3 -895.87 1511.2 -1443.5 -1432.2 985.66 -1436.8 -1402.2 1120.9 -1397.4 -1101.5 837.31 -1525.2 -1044.8 99.55 -1302 -1059.6 479.84 -1338 -1017.9 50.836 -1372.3 -1086.1 288.81 -1326 -1056.2 1357 -1302.1 -1084.3 621.74 -1283 -938.05 66.791 -1318.1 -1131.2 1355.8 -1394.8 -1354 1045.7 -1500 -1432.1 1065.8 -1320 -924.98 1046.1 -1572.3 -1027.1 1199
71 0.5833 -1456.2 -942.88 1353 -1290.2 -886.19 1241 -2012.8 -1231.9 194.18 -1403.7 -836.63 1460.7 -1658.9 -996.69 953.92 -1450.3 -665.28 977.08 -1282.1 -777.73 1501.3 -1182.9 -724.6 1098.7 -1333.1 -702.37 979.29 -1545.5 -891.88 803.78 -2055.6 -1325.8 205.72 -1844.7 -1089.2 477.21 -2017.1 -1246.5 34.541 -1925.9 -1128.1 377.58 -1434.9 -769.16 1326.9 -1682.1 -943.22 628.7 -1931.8 -1255.7 38.021 -1481.1 -689.98 1164.8 -1269 -757.53 1116.3 -1238.2 -703.05 1003.1 -1395.2 -1042.1 133.67 -1477.6 -1046.8 1297.8 -1360 -975.3 1467.2 -1564 -1156.1 942.54 -1409.5 -1323.6 1296.3 -1206.9 -887.12 1515.2 -1423.7 -1415.7 987.44 -1420.8 -1387.5 1122.9 -1380.7 -1089.6 841.04 -1493 -1016.9 87.669 -1288.7 -1047.7 478.99 -1304.1 -989.48 49.139 -1349.9 -1066.3 283.29 -1309.3 -1045.3 1360.8 -1288.2 -1072.6 623.1 -1248.9 -909.78 71.374 -1301.6 -1119.8 1360 -1376.1 -1339 1049.8 -1482.9 -1415.6 1065.8 -1302.2 -913.01 1051.4 -1554.9 -1014.8 1203.1
72 0.5917 -1438.9 -932.19 1357.5 -1274 -874.99 1245.4 -2003.3 -1220.7 202.75 -1386.5 -827.04 1465.3 -1640.3 -983.7 957.65 -1438.9 -652.13 985.49 -1263.7 -769.33 1505.5 -1172.5 -717.56 1103.5 -1322.6 -691.93 985.32 -1527.1 -878.89 808.2 -2045.9 -1312.1 218.79 -1832.1 -1080 485.95 -2014.8 -1242.9 40.991 -1914.7 -1119.3 387.76 -1417.7 -759.4 1334 -1665.7 -931.43 633.45 -1929.2 -1252 38.785 -1467.3 -678.77 1173.9 -1258.1 -749.47 1121.6 -1227.2 -694.47 1008.4 -1366.1 -1017.1 126.88 -1460.2 -1034.6 1301.3 -1342.9 -966.05 1471.2 -1547.1 -1143.9 946.62 -1396.4 -1311.2 1296.8 -1189.7 -878.47 1518.8 -1403.5 -1398.9 986.76 -1404.4 -1373.1 1122.9 -1363.5 -1077.7 844.1 -1461.7 -989.39 76.551 -1276.6 -1036.1 477.04 -1271.1 -963.17 48.205 -1328.9 -1047.4 277.26 -1292.6 -1034.9 1363.7 -1274.5 -1061.2 622.51 -1215.7 -884.15 75.702 -1285.3 -1109 1363.3 -1357 -1323.5 1051.7 -1465.2 -1399.3 1063.6 -1284.6 -900.53 1056.2 -1537.7 -1002.8 1206.9
73 0.6000 -1422 -921.92 1361.4 -1257.7 -863.53 1248.9 -1992.6 -1208.9 211.66 -1369.1 -817.36 1469.5 -1622.3 -970.97 960.62 -1428.2 -639.39 993.12 -1245.6 -760.59 1509.7 -1161.5 -710.51 1104.6 -1313.1 -683.02 989.05 -1509.4 -865.74 812.36 -2035.2 -1297.5 231.69 -1817.2 -1068.7 493.17 -2010.6 -1237.2 48.544 -1901.3 -1108.6 396.25 -1400.5 -749.89 1339.6 -1648.5 -919.54 637.36 -1927.3 -1247.6 39.633 -1453.6 -667.66 1182.5 -1246.5 -741.58 1124.1 -1217.5 -686.33 1010.9 -1339 -993.21 120.68 -1442.9 -1023.2 1303.8 -1326 -957.06 1474.8 -1530 -1131.2 950.1 -1383.1 -1298.9 1295.5 -1172.5 -869.73 1522.2 -1382.4 -1381.8 984.13 -1387.1 -1358.1 1120.8 -1345.8 -1066.1 846.64 -1431.8 -961.55 66.961 -1265 -1024.4 474.16 -1239.4 -938.98 47.696 -1309.5 -1029.2 271.15 -1276.3 -1024.7 1365.8 -1261.4 -1050.2 621.06 -1184.2 -861.66 80.03 -1269.2 -1098.7 1365 -1336.9 -1307.6 1051.4 -1446.7 -1382.7 1059.2 -1267.5 -887.97 1060.2 -1520.2 -990.83 1210
74 0.6083 -1405.2 -911.91 1364.7 -1241.2 -852.16 1252.1 -1980.7 -1196.9 220.49 -1351.6 -807.77 1473.5 -1604.3 -957.65 963.25 -1418.8 -627.09 999.83 -1228.4 -751.42 1513.9 -1150.6 -703.05 1102.5 -1304.8 -674.11 991.34 -1491 -852.75 815.24 -2023.3 -1283.2 245.01 -1800.3 -1055.7 497.92 -2007.2 -1230.9 58.474 -1885.8 -1095.9 402.1 -1383.3 -740.05 1344.2 -1630.2 -907.24 640.16 -1926.5 -1243.1 40.058 -1440 -656.45 1190.2 -1234.6 -733.77 1124.2 -1209 -678.09 1010.7 -1313.8 -970.55 115.08 -1426.3 -1012.3 1305.8 -1308.7 -947.64 1477.5 -1512.7 -1118 952.47 -1369.8 -1287.4 1292.2 -1155.3 -860.48 1525.2 -1360.3 -1364.2 979.04 -1369.1 -1343.4 1116.1 -1328.2 -1054.5 848.93 -1404.1 -934.48 58.983 -1254.2 -1013.6 470.85 -1210.6 -917.25 47.696 -1291.3 -1012 265.47 -1260 -1015 1367.1 -1248.9 -1039.3 619.2 -1154.5 -842.31 84.019 -1253.2 -1089.4 1365.3 -1316 -1291.9 1049.1 -1426.9 -1365.9 1052 -1250.8 -875.24 1063.3 -1502.5 -978.87 1212.3
75 0.6167 -1388.1 -901.81 1367.4 -1224.4 -841.3 1254.9 -1967.7 -1185.4 229.06 -1334.4 -798.27 1477 -1586.4 -943.99 965.12 -1410.8 -615.04 1005.5 -1211.3 -741.92 1517.4 -1139.9 -695.41 1097.1 -1297.9 -666.3 992.36 -1472.5 -840.02 816.85 -2009.7 -1270.1 258.17 -1781.9 -1041.6 500.38 -2003.9 -1224 70.356 -1868.9 -1081.6 406.43 -1366.1 -729.95 1348.2 -1611 -894.17 641.52 -1927.3 -1239.1 41.161 -1426.5 -645.17 1196.7 -1222.9 -725.11 1121.8 -1201.2 -670.2 1007.3 -1291.3 -949.67 110.41 -1409.7 -1001.9 1307.2 -1291.5 -938.13 1479.6 -1495.4 -1105.2 954 -1355.8 -1275.6 1285.9 -1137.7 -851.23 1527.6 -1336.9 -1346.2 971.74 -1350.5 -1328.6 1109 -1311 -1042.7 849.87 -1378.9 -909.78 52.448 -1243.9 -1003.3 466.6 -1184.4 -898.24 47.611 -1275.6 -995.76 259.87 -1243.9 -1005.6 1367.6 -1236.5 -1028.8 616.48 -1126.7 -826.53 86.65 -1237.3 -1080.9 1364.7 -1294.8 -1276.2 1045 -1406.2 -1348.9 1042.6 -1234.1 -862.17 1065.8 -1485.6 -967.32 1215
76 0.6250 -1371 -891.88 1369.4 -1207.8 -830.86 1257.1 -1953.9 -1174.8 237.72 -1317.3 -789.19 1479.8 -1568.4 -930.58 966.48 -1403.5 -603.58 1010.3 -1194.4 -732.33 1519.8 -1128.9 -687.51 1088.4 -1292.1 -658.24 991.34 -1453.6 -826.87 818.21 -1995.7 -1258.4 271.15 -1761.4 -1026.1 500.72 -1998.2 -1215.3 82.746 -1850.4 -1066.7 409.15 -1349.9 -720.02 1351.9 -1590.6 -880.93 641.94 -1928.2 -1234.1 45.659 -1413.5 -634.22 1202.5 -1211.4 -715.69 1116.4 -1194.3 -661.12 1000.9 -1271.7 -930.66 106.51 -1392.9 -992.28 1307.7 -1274.5 -928.79 1481.8 -1478.6 -1092.3 955.27 -1341.5 -1263.8 1277.3 -1119.8 -842.06 1529.5 -1312.6 -1327.3 962.49 -1331.3 -1313.6 1099.4 -1293.6 -1030.9 849.87 -1357 -887.29 47.187 -1233.6 -993.21 462.36 -1161.6 -881.69 47.187 -1261.8 -980.56 254.52 -1228 -996.94 1367.1 -1224 -1018.8 612.66 -1102.2 -813.29 88.517 -1221.2 -1073 1362.4 -1273.1 -1259.7 1039.1 -1384.7 -1331.6 1031.2 -1217.3 -849.1 1067.6 -1469.1 -956.29 1217.1
77 0.6333 -1353.9 -882.63 1370.4 -1191.4 -819.91 1258.5 -1940.3 -1164.2 247.22 -1300 -780.28 1481.6 -1550.7 -917.17 967.15 -1397.1 -592.72 1014.8 -1177.7 -723.24 1521.9 -1118.5 -678.94 1076.4 -1287 -648.31 989.05 -1434.9 -813.46 819.23 -1981.9 -1247.2 284.99 -1739.5 -1009.8 498.85 -1991.5 -1205.3 95.731 -1831 -1051.4 410.51 -1334.5 -710.09 1354.8 -1569.7 -867.43 641.26 -1925.8 -1225.3 54.231 -1400.7 -624.12 1207.5 -1199.7 -706.36 1108.1 -1188.8 -650.68 993.21 -1255.2 -913.69 102.78 -1376.3 -983.28 1307.3 -1257.3 -919.29 1483.2 -1461.6 -1079.4 955.87 -1326.8 -1252.7 1266.6 -1102.3 -832.55 1530.8 -1286.9 -1308.4 951.96 -1311 -1298.1 1088.4 -1276.7 -1019.6 849.53 -1338.5 -867.35 43.198 -1223.3 -983.19 458.46 -1142.7 -867.35 46.253 -1248.8 -967.07 250.36 -1211.7 -988.46 1365.7 -1211.4 -1009 608.42 -1081.6 -800.98 89.111 -1204.8 -1065.5 1358.7 -1250.4 -1242.9 1031.7 -1362.2 -1314.6 1018 -1201 -835.78 1068.7 -1452.3 -945.6 1217.9
78 0.6417 -1336.6 -873.55 1370.9 -1174.8 -808.96 1259.3 -1927.4 -1153.5 257.66 -1282.5 -771.11 1483 -1533.1 -903.59 967.15 -1391.3 -582.36 1018.3 -1160.4 -714.33 1523.1 -1108.6 -670.03 1061.1 -1283.1 -636.76 986 -1416.4 -800.3 819.57 -1968.7 -1235.1 299.24 -1716.4 -992.78 495.97 -1983.4 -1194.5 109.14 -1811.2 -1035.9 411.69 -1318.2 -699.91 1356.4 -1548.5 -853.35 639.31 -1919.5 -1214.9 64.839 -1388.1 -614.27 1211.4 -1188.2 -697.19 1097.8 -1184.8 -640.24 983.62 -1241.5 -899.77 99.38 -1359.5 -974.71 1305.9 -1239.8 -910.04 1484.4 -1444.6 -1066.3 955.78 -1311.5 -1241.9 1253.8 -1084.9 -823.3 1531.8 -1260.9 -1289.7 939.32 -1290.2 -1282.5 1075.6 -1260.7 -1008.1 846.81 -1323.6 -850.89 40.821 -1212.8 -973.35 454.64 -1128.1 -855.04 44.216 -1236.7 -954.76 246.12 -1195.2 -980.05 1363 -1198.1 -999.49 604 -1065.4 -790.12 87.329 -1188.7 -1058.4 1353.6 -1227.2 -1226.5 1022.1 -1338.8 -1297.7 1002.9 -1184.5 -822.37 1069.4 -1435.5 -934.9 1218
79 0.6500 -1319.4 -864.21 1370.8 -1158.4 -798.27 1259.4 -1913.6 -1141.6 268.18 -1265.2 -762.2 1483.9 -1515.5 -890.26 966.22 -1385.7 -572.43 1021.3 -1142.8 -705.59 1523.6 -1100 -660.78 1042.4 -1280.6 -625.39 981.84 -1397.6 -787.49 819.23 -1954.6 -1222.5 312.57 -1692.3 -975.13 492.15 -1973.6 -1183.4 122.21 -1790.4 -1019.9 412.97 -1301.2 -689.04 1357.3 -1526.4 -839.09 636.42 -1911 -1205.5 76.89 -1375.8 -604.51 1215 -1176.9 -688.53 1085.9 -1181.7 -629.3 971.74 -1231 -888.31 96.58 -1342.8 -966.31 1303.7 -1221.8 -901.13 1484.8 -1427.9 -1053 955.19 -1294.9 -1230.5 1239 -1067.7 -814.22 1532.3 -1234.7 -1270.6 925.23 -1268.8 -1266.8 1061 -1244.7 -996.52 842.31 -1313.5 -838.16 39.633 -1201.6 -963.25 450.56 -1117.7 -844.44 40.058 -1226.2 -943.9 242.3 -1178.9 -972.16 1359.5 -1184.1 -990.32 599.34 -1053.4 -779.77 82.237 -1172.5 -1051.7 1347.3 -1204 -1209.5 1010.6 -1314.6 -1280.2 986.17 -1168.1 -809.72 1070.1 -1418.9 -924.47 1217.5
80 0.6583 -1302.5 -855.21 1369.9 -1142.3 -787.66 1259.4 -1898.5 -1128.4 278.28 -1247.7 -753.71 1484.3 -1498.2 -877.03 964.44 -1380.7 -563.35 1024.1 -1125.7 -696.77 1524.1 -1092.7 -650.51 1021.1 -1278.5 -614.78 976.66 -1379.3 -774.67 818.47 -1938.9 -1209 325.21 -1667.3 -957.06 487.48 -1962.2 -1172.3 135.36 -1768.8 -1002.5 413.82 -1284.5 -678.18 1357.6 -1503.4 -825.34 633.03 -1901 -1196.2 89.366 -1363.8 -594.58 1217.7 -1166.3 -679.03 1071.5 -1180.6 -617.84 958.16 -1223.6 -879.32 93.609 -1326.2 -958.07 1300.8 -1203.9 -892.73 1484.3 -1411.2 -1040.1 953.75 -1277.9 -1219.2 1222.5 -1050.3 -805.74 1532 -1208.4 -1251.4 910.21 -1246.3 -1250.7 1045 -1226.3 -985.15 840.28 -1307.9 -830.35 38.615 -1190.3 -952.9 447 -1111.3 -836.2 35.22 -1217.1 -934.06 239.67 -1162.7 -964.44 1355.2 -1169.8 -981.33 594.84 -1045.8 -770.09 73.75 -1156.2 -1045.2 1339.6 -1180.7 -1192.6 997.62 -1289.6 -1262.8 968.26 -1151.7 -797.25 1070.1 -1402.5 -913.69 1216.4
81 0.6667 -1285.4 -846.47 1368.4 -1126.1 -777.22 1259.1 -1881.9 -1114.2 288.04 -1230.6 -745.39 1483.8 -1481.3 -863.79 961.89 -1375.5 -554.7 1026.8 -1108.9 -688.28 1523.5 -1086.8 -638.63 997.54 -1276.8 -604.68 970.55 -1360.9 -762.2 817.02 -1921.3 -1194 337.52 -1640.9 -938.55 482.05 -1949.6 -1161.2 148.18 -1745.9 -984.38 414.24 -1267.8 -667.91 1357.2 -1480.6 -811.51 629.21 -1889.8 -1185.5 100.91 -1351.5 -584.91 1219.5 -1156.4 -668.17 1054.5 -1180.9 -606.04 944.24 -1219.7 -873.8 91.572 -1309.7 -949.5 1297.5 -1186.4 -884.75 1483 -1394.8 -1027.6 952.05 -1260.2 -1207.8 1205 -1032.7 -797.25 1530.7 -1181.4 -1231.5 893.91 -1222.9 -1234.2 1027.8 -1206.3 -973.6 839.94 -1306 -827.21 37.851 -1178.3 -942.46 443.94 -1108 -832.38 31.995 -1209.1 -925.57 237.72 -1146.6 -956.8 1349.9 -1155.6 -971.57 590.6 -1043 -763.56 64.839 -1140.1 -1038.6 1330.6 -1156.9 -1175.6 983.7 -1263.8 -1245.1 949.93 -1135.7 -784.18 1069.2 -1386.1 -903.25 1214.4
82 0.6750 -1268.4 -837.56 1366.2 -1109.9 -766.53 1257.6 -1862.5 -1099.5 297.12 -1213.5 -737.67 1482.6 -1464.7 -850.8 958.67 -1369.6 -546.46 1029.1 -1091.6 -680.05 1522.3 -1083 -626.16 971.91 -1275.8 -594.75 963.84 -1342.1 -749.89 814.05 -1901.9 -1177.5 350 -1613.8 -919.63 476.02 -1935.6 -1149.8 161.08 -1721.5 -966.31 413.99 -1250.9 -657.9 1355.8 -1457.9 -796.4 624.71 -1876.9 -1175 112.28 -1339.1 -575.83 1220.8 -1147.5 -656.88 1036 -1182.9 -594.25 929.3 -1218.9 -871.68 90.894 -1293 -940.68 1293.6 -1169.1 -877.11 1481.3 -1378.9 -1015.4 950.01 -1241.7 -1195.9 1185.9 -1015.2 -788.68 1528.9 -1153.8 -1211.5 875.92 -1199.3 -1217.6 1009.9 -1186.9 -962.49 838.92 -1306.2 -827.21 37.681 -1164.2 -931.51 441.74 -1107.4 -832.38 30.722 -1202 -918.7 236.87 -1130.5 -949.42 1344.1 -1140.9 -959.86 586.69 -1044 -761.27 60.256 -1123.6 -1031.7 1320.5 -1132.2 -1157.9 969.19 -1238 -1227 930.58 -1120 -770.86 1067.6 -1369.6 -892.73 1212
83 0.6833 -1251.4 -828.74 1363.5 -1094.2 -756.09 1255.6 -1841.5 -1082.8 305.69 -1196.6 -730.54 1480.9 -1448.2 -838.07 954.51 -1363.7 -538.32 1030.8 -1074.3 -672.07 1520.7 -1082.3 -614.53 944.83 -1275.5 -584.82 956.8 -1323.4 -737.33 811 -1880.3 -1158.6 362.73 -1586.4 -900.36 469.74 -1920.2 -1137.5 174.74 -1696.1 -947.72 413.31 -1234.3 -647.88 1353.7 -1435.7 -780.7 619.71 -1862.9 -1164.9 125.1 -1326.8 -567 1221.6 -1139.9 -645.34 1016 -1186.3 -582.36 913.52 -1217.3 -870.74 91.318 -1276.6 -931.85 1288.7 -1151.6 -869.39 1479.2 -1363.1 -1002.9 947.04 -1221.6 -1183.1 1165.3 -997.88 -780.53 1527.1 -1126.4 -1191.4 857.25 -1175.4 -1200.6 991.26 -1166.3 -952.9 839.34 -1306.3 -828.99 37.766 -1148 -919.37 440.97 -1106.8 -832.98 29.619 -1194.5 -913.43 238.31 -1114.7 -941.86 1337.9 -1125.3 -946.28 584.23 -1044.5 -760.25 57.116 -1106.7 -1024.9 1308.9 -1106.6 -1139.4 952.9 -1211.9 -1208.4 910.29 -1104.6 -757.96 1065.5 -1353.4 -882.29 1208.9
84 0.6917 -1234.4 -820.08 1360.3 -1078.7 -745.48 1253.2 -1820.7 -1063.6 314.18 -1179.6 -723.16 1478.7 -1432.2 -825.17 949.59 -1357.5 -530.09 1032.2 -1057.5 -664.35 1519.4 -1083.9 -603.41 916.4 -1276.2 -574.81 949.42 -1305.1 -724.77 808.03 -1856.7 -1137.4 375.12 -1558.3 -880.76 462.7 -1903.4 -1123.3 188.92 -1669.4 -927.95 412.03 -1218.6 -637.87 1351.6 -1413.5 -764.58 614.53 -1847.7 -1153.8 137.91 -1314.5 -558.09 1221.8 -1134 -633.71 995.08 -1191.4 -570.31 897.82 -1213.7 -869.56 91.912 -1260.4 -923.11 1283.5 -1133.2 -861.75 1476.5 -1347.7 -990.41 943.9 -1200.9 -1170.2 1144.2 -980.82 -772.3 1524.7 -1099.4 -1170.7 837.99 -1150.7 -1183.4 971.23 -1156.3 -937.79 832.89 -1305.1 -830.35 39.294 -1130.7 -907.41 440.97 -1105.2 -832.81 28.346 -1184.6 -908.68 240.6 -1099 -933.97 1331.2 -1109 -931.85 583.3 -1042.4 -758.21 51.769 -1089.9 -1017.5 1296.5 -1081.1 -1122.3 934.82 -1185.5 -1189.2 889.33 -1089.6 -745.39 1063.1 -1337.6 -871.93 1205.3
85 0.7000 -1217.8 -812.02 1356.5 -1063.3 -735.3 1250.4 -1797.6 -1043.1 322.16 -1162.3 -715.78 1476.3 -1416.9 -812.53 944.33 -1350.4 -522.28 1032.5 -1041.2 -656.79 1517.8 -1088.2 -591.27 887.55 -1277.1 -564.63 941.44 -1287 -712.64 804.38 -1830.3 -1115.1 386.83 -1529.2 -859.54 455.23 -1885.3 -1107.6 202.83 -1641.3 -907.41 409.83 -1203.6 -628.53 1349.9 -1391.2 -748.28 609.44 -1830.9 -1140.9 150.13 -1302.1 -549.43 1221.2 -1129.3 -622.08 973.35 -1198.1 -558.35 882.97 -1209.3 -867.52 92.761 -1244.1 -914.54 1278 -1115.2 -854.2 1473 -1332.9 -978.44 940.59 -1179.2 -1156.7 1122.5 -963.68 -764.41 1521.4 -1072.6 -1149.7 817.7 -1125.5 -1166 949.67 -1139.9 -925.57 832.55 -1303.4 -830.35 41.585 -1114.4 -896.54 440.46 -1103.4 -832.55 27.582 -1172.1 -902.91 241.62 -1083.1 -926.16 1323.6 -1092.3 -918.61 583.13 -1039.3 -755.49 45.319 -1073 -1009.8 1283.5 -1055.5 -1105.2 916.23 -1159.2 -1169.2 867.94 -1074.7 -732.58 1060.5 -1322.2 -862.09 1201.6
86 0.7083 -1201.5 -804.04 1352.5 -1047.9 -725.11 1247.2 -1771.4 -1022.3 329.46 -1145 -708.65 1473.5 -1401.3 -799.96 938.21 -1342.5 -514.81 1032.6 -1025 -649.66 1515.5 -1095.7 -577.61 858.61 -1278 -554.53 934.23 -1269.4 -700.92 800.98 -1802.1 -1091.6 397.78 -1498.9 -837.22 448.02 -1864.5 -1090.6 215.9 -1612 -885.43 406.86 -1189 -619.28 1348 -1369 -731.31 603.84 -1811.3 -1125.8 162.27 -1289.6 -541.03 1220.4 -1126.5 -610.03 950.61 -1205.1 -547.06 869.22 -1205.6 -864.97 93.609 -1228 -905.97 1272.3 -1097.9 -846.56 1468.8 -1318.1 -965.88 937.11 -1156.6 -1142.5 1100.1 -946.45 -756.6 1518.4 -1045.8 -1128.3 796.99 -1100.1 -1148 927.78 -1120.5 -915.05 834.76 -1302 -829.67 43.792 -1099.6 -886.36 438.26 -1101.9 -832.38 27.158 -1160.3 -896.04 241.11 -1066.9 -918.1 1315.5 -1075.9 -907.32 582.7 -1036.8 -752.86 39.718 -1056.5 -1001.7 1270.3 -1029.3 -1086.4 897.73 -1132.4 -1148.5 846.47 -1059.5 -719.34 1057.4 -1306.9 -851.9 1197.6
87 0.7167 -1185.1 -795.55 1348.6 -1032.8 -714.16 1243.7 -1743.4 -1000.5 335.99 -1127.9 -701.09 1470.4 -1386 -786.98 931.68 -1334 -506.83 1032.6 -1007.9 -642.28 1512.9 -1106.5 -563.44 830.77 -1278.9 -544.09 927.44 -1251.9 -689.21 798.52 -1772.2 -1067.4 407.62 -1467.3 -814.65 440.55 -1839.5 -1070.9 226.43 -1582 -862.34 403.55 -1174.3 -609.78 1345.2 -1346.6 -714.16 598.06 -1789.2 -1108.4 173.72 -1276.3 -532.63 1219.2 -1125.9 -597.64 927.18 -1212.9 -535.01 856.06 -1203 -862.34 94.034 -1211.7 -896.63 1265.9 -1080.5 -838.92 1464.1 -1303.6 -952.9 933.46 -1133.2 -1127.6 1077.3 -929.47 -749.72 1515.2 -1018.1 -1105.7 776.54 -1073.9 -1128.5 906.9 -1101.3 -904.52 836.12 -1301.2 -829.24 44.98 -1085.4 -876.52 434.35 -1101 -832.3 27.327 -1151.7 -889.92 239.67 -1050.7 -909.87 1306.9 -1060.6 -897.73 581.09 -1035.6 -751.17 35.899 -1040.1 -992.87 1256.2 -1002.5 -1066.8 878.98 -1104.8 -1127.2 824.83 -1044.3 -706.19 1053.5 -1291.7 -841.38 1193.1
88 0.7250 -1169.1 -787.23 1344.8 -1017.7 -703.47 1239.8 -1713.1 -978.02 342.61 -1111.2 -693.54 1467.7 -1370.9 -774.33 925.57 -1324.8 -498.68 1032 -990.66 -634.73 1510.2 -1121.2 -548.59 805.74 -1280 -532.97 921.75 -1233.4 -677.33 794.53 -1741 -1043.1 416.7 -1434.5 -791.22 433.25 -1811 -1048.6 234.57 -1550.9 -838.24 399.56 -1159.2 -599.93 1341.8 -1323.9 -696.6 592.8 -1763.8 -1088.3 184.16 -1262.7 -523.89 1217.5 -1127.9 -584.74 903.59 -1222.3 -521.94 842.99 -1201.5 -861.24 94.288 -1195.7 -887.12 1259.7 -1063 -831.11 1459.7 -1288.9 -940 929.81 -1108.6 -1112.7 1054.8 -912.41 -742.59 1511.8 -990.07 -1082.1 756.34 -1046.8 -1108.1 886.36 -1083.4 -893.23 835.35 -1300.9 -829.16 45.319 -1072.8 -867.69 429.43 -1100.7 -832.47 27.837 -1145.6 -885 237.72 -1034.5 -901.72 1297.7 -1046.5 -888.06 577.95 -1035.4 -750.23 33.777 -1023.4 -983.45 1242.1 -975.3 -1045.7 860.22 -1076.4 -1105.6 803.36 -1028.9 -693.88 1049.6 -1276.4 -831.11 1188.8
89 0.7333 -1153.2 -779.17 1341.1 -1002.7 -693.29 1236.2 -1681.8 -956.55 348.13 -1094.4 -686.33 1465.5 -1355.4 -762.45 919.71 -1315.4 -489.77 1031.1 -973.44 -627.26 1507.3 -1138.6 -533.39 783.92 -1281.2 -521.51 917 -1214.7 -665.7 789.19 -1709 -1019.3 424.76 -1401.3 -767.38 426.63 -1781.7 -1025.9 242.47 -1519 -813.29 395.48 -1144.2 -590.51 1339 -1301.3 -678.6 588.47 -1735.7 -1065.5 193.33 -1248.9 -514.81 1215.3 -1132.1 -571.92 880.93 -1233.1 -508.78 832.21 -1200.6 -860.39 94.203 -1180.3 -878.81 1254.3 -1046 -823.56 1456.2 -1274 -927.44 926.67 -1083.1 -1097.2 1033 -895.53 -734.79 1507.7 -961.81 -1057.1 737.08 -1018.5 -1086.6 865.99 -1067.6 -881.86 833.4 -1301.2 -828.99 45.319 -1063.1 -860.22 424.42 -1100.7 -832.55 27.837 -1140.8 -880.59 235 -1018.2 -893.15 1288.7 -1033.7 -878.81 573.2 -1035.3 -749.47 32.42 -1006.7 -973.44 1228 -947.55 -1022 841.89 -1046.5 -1083 782.74 -1013.7 -681.49 1046 -1261.5 -820.59 1184.8
90 0.7417 -1137.1 -771.03 1337.8 -987.44 -682.85 1232.9 -1650 -934.99 352.12 -1077.8 -679.03 1463.3 -1340 -750.66 914.37 -1305.3 -480.01 1030.9 -956.55 -619.79 1504.1 -1157.7 -517.35 766.27 -1282.6 -509.72 913.43 -1198.3 -653.99 785.03 -1676.3 -995.93 431.47 -1367.9 -743.27 420.61 -1750.4 -1000.5 249.43 -1486.6 -788.42 392.09 -1129.4 -581.68 1336.7 -1278.3 -660.19 584.74 -1704.7 -1039.7 200.12 -1234.6 -505.81 1213.3 -1138.6 -559.62 860.39 -1245.1 -495.88 825.17 -1200.4 -859.03 93.27 -1165.2 -870.41 1249.8 -1029.4 -816.17 1453 -1258.8 -915.56 923.79 -1056.4 -1080 1012.1 -878.47 -726.64 1503.4 -931.68 -1029.6 720.02 -988.88 -1062.5 847.15 -1051.8 -870.91 830.43 -1301.6 -828.23 45.15 -1055.9 -853.69 419.59 -1100.9 -832.47 27.667 -1136.6 -876.43 231.43 -1002.1 -884.58 1280.7 -1022.6 -870.83 567 -1035.4 -749.04 31.91 -989.73 -962.4 1214.2 -919.2 -995.16 825.43 -1015.4 -1059.2 764.15 -998.47 -669.35 1042.3 -1246.5 -809.64 1181.1
91 0.7500 -1121 -762.54 1334.5 -971.48 -672.07 1230 -1617 -910.63 356.95 -1061.7 -671.56 1461.2 -1324.4 -739.03 909.78 -1294.2 -469.4 1031.3 -940.08 -611.9 1501.7 -1177.3 -501.06 753.8 -1284.1 -497.92 911.91 -1183.1 -642.03 781.04 -1643.3 -973.18 436.82 -1334.4 -719.34 415.94 -1716.6 -972.5 254.86 -1453.1 -764.24 389.29 -1114.2 -572.43 1334 -1255.1 -641.18 581.85 -1671.2 -1011.3 204.62 -1219.7 -496.48 1211.6 -1147.5 -548.16 843.33 -1255.8 -481.63 821.18 -1200 -857.42 91.742 -1149.9 -861.32 1245.5 -1012.4 -808.79 1450.2 -1243.1 -903.67 921.41 -1028.3 -1061 993.63 -861.32 -718.07 1500 -900.19 -999.83 706.27 -957.99 -1036.3 830.86 -1035.9 -859.88 825.93 -1302.1 -827.29 45.235 -1049.1 -847.92 413.99 -1101 -832.47 27.412 -1132.8 -872.61 228.29 -986 -875.16 1273.4 -1012.2 -863.7 560.13 -1035.6 -748.87 31.571 -971.48 -950.44 1202.2 -890.69 -965.2 812.53 -983.11 -1033.3 748.54 -982.69 -657.47 1039.3 -1231 -798.86 1177.7
92 0.7583 -1105.1 -753.71 1331.3 -955.27 -661.12 1227.7 -1582.2 -885.94 362.05 -1045.8 -663.92 1459.6 -1308.2 -727.66 906.47 -1283.1 -457.69 1033.5 -923.79 -603.67 1499.8 -1194.8 -485.87 746.84 -1285.2 -485.02 913.43 -1168.9 -629.8 775.95 -1609.6 -950.86 440.38 -1300.1 -694.98 412.46 -1680.9 -944.24 257.83 -1418.5 -740.39 387.51 -1098.9 -563.35 1331.5 -1232.2 -622.34 580.16 -1636.1 -982.6 206.74 -1205.4 -486.97 1210.5 -1157.8 -537.38 830.35 -1264.5 -465.76 821.1 -1199.6 -855.98 91.148 -1134.2 -851.9 1241.8 -995.16 -801.07 1447.9 -1227.5 -891.88 919.37 -999.49 -1040.7 978.36 -844.78 -709.67 1497.2 -867.52 -966.65 697.02 -926.42 -1007.8 817.79 -1020.1 -849.44 820.67 -1302.6 -827.63 45.829 -1041.3 -842.74 408.64 -1101.2 -832.72 27.243 -1129.3 -869.3 225.66 -969.45 -865.31 1267.6 -1002.6 -857.08 553.51 -1035.6 -748.87 31.316 -953.32 -938.05 1192.8 -862.43 -932.78 803.36 -950.52 -1006.6 735.13 -966.14 -645.84 1037.3 -1215.4 -788.59 1174.9
93 0.7667 -1088.8 -744.55 1329 -938.64 -650.34 1226.3 -1546.6 -862.26 365.36 -1030.3 -655.94 1458.1 -1292 -716.12 904.18 -1273.5 -445.22 1038.3 -908.43 -595.18 1498.4 -1208.4 -471.7 745.06 -1285.8 -470.51 918.36 -1152.1 -617.75 775.52 -1575.8 -928.62 441.99 -1265.4 -670.54 409.83 -1643.9 -915.72 258.68 -1383.1 -716.29 387.17 -1083.1 -554.44 1330 -1209 -604.6 579.82 -1599.6 -953.83 207.42 -1191.4 -477.64 1211 -1168.7 -527.2 819.74 -1270.8 -449.04 825.6 -1198.8 -855.55 91.233 -1118 -842.48 1238.8 -978.53 -792.41 1445.9 -1212 -880.67 917.85 -971.74 -1019.6 966.22 -828.99 -701.09 1495 -834.25 -928.79 692.52 -894.59 -977.42 809.64 -1005.2 -839.34 814.9 -1302.7 -828.06 46.847 -1033.7 -838.07 404.31 -1101.2 -832.81 27.073 -1125.1 -866.84 223.97 -952.56 -855.47 1263.7 -993.12 -851.06 547.74 -1035.6 -748.79 31.147 -934.99 -925.06 1186.3 -834.59 -898.24 799.03 -915.13 -972.42 727.06 -949.16 -634.73 1035.6 -1199.5 -778.32 1172.5
94 0.7750 -1072.6 -735.04 1327.3 -922.09 -639.9 1225.3 -1511.2 -838.07 367.31 -1014.7 -647.03 1456.8 -1275.5 -704.32 902.32 -1265.9 -431.81 1045.5 -893.49 -586.52 1497.2 -1219.5 -458.12 747.6 -1285.3 -455.57 926.67 -1134.7 -605.79 775.78 -1541.6 -906.56 442.25 -1231 -646.78 408.89 -1605.8 -887.12 256.81 -1347.4 -692.35 387.85 -1066.9 -544.94 1328.8 -1185.4 -586.78 580.58 -1562.1 -924.98 206.65 -1177.6 -468.47 1213.5 -1177.4 -516.51 822.03 -1274.9 -431.89 833.32 -1197.9 -855.64 91.318 -1101.8 -832.55 1236.5 -962.57 -782.99 1443.9 -1197.4 -869.05 917 -944.83 -998.3 957.65 -813.12 -691.93 1493.4 -800.9 -887.97 693.2 -863.11 -945.34 805.99 -991.34 -830.43 810.23 -1302.7 -827.04 48.29 -1026.7 -834 400.58 -1101 -832.81 27.243 -1121.2 -864.55 222.27 -935.67 -845.03 1261.3 -984.55 -846.22 543.92 -1035.6 -748.7 31.062 -916.83 -912.16 1182.4 -807.18 -862.26 800.64 -881.35 -942.63 722.4 -931.93 -624.2 1034.1 -1183.1 -767.72 1170.6
95 0.7833 -1055.9 -724.86 1326.5 -905.54 -629.3 1224.7 -1475.3 -813.71 367.99 -999.32 -637.36 1455.5 -1259.4 -692.1 901.3 -1259.4 -417.72 1055.7 -878.38 -576.85 1496.6 -1227.5 -445.05 753.8 -1284.1 -439.87 937.54 -1117 -594.33 776.8 -1507.2 -885.17 440.8 -1197.3 -623.19 409.83 -1567.6 -859.8 253.92 -1311.6 -668.33 389.37 -1050.2 -534.92 1328.3 -1161.6 -568.53 582.96 -1524.1 -896.54 203.43 -1164 -459.05 1218.2 -1184.8 -504.12 827.46 -1277.4 -415.26 843.84 -1197.4 -855.47 91.403 -1084.7 -822.37 1235.1 -947.21 -773.49 1442.7 -1183.4 -857.08 916.74 -918.61 -977 951.88 -797.59 -682 1493 -767.38 -845.37 700.08 -832.3 -913.09 806.33 -978.36 -822.03 807.77 -1302.7 -826.19 49.393 -1020.3 -830.6 397.44 -1101 -833.15 27.243 -1117.9 -862.43 220.74 -918.78 -833.66 1260.7 -976.74 -841.97 541.12 -1035.6 -748.79 30.892 -898.41 -899.68 1180.5 -780.7 -827.21 808.28 -844.78 -906.05 723.92 -914.45 -613.68 1033.2 -1166.5 -756.94 1169.6
96 0.7917 -1039.2 -714.08 1326.5 -888.65 -618.35 1224.6 -1439.6 -790.54 367.14 -983.7 -626.92 1455 -1243.6 -679.2 901.72 -1253.2 -403.89 1068.2 -862.85 -566.41 1497.2 -1232.5 -431.38 763.73 -1282.6 -424.17 950.95 -1099.5 -583.38 778.41 -1473.1 -864.29 437.49 -1163.7 -599.76 411.95 -1529.5 -833.66 252.65 -1276.4 -644.83 391.58 -1033.1 -524.06 1328.4 -1137.3 -550.54 586.95 -1484.8 -867.69 198.59 -1150.7 -449.12 1224.8 -1188.7 -489.77 836.54 -1278.9 -399.13 856.57 -1197.1 -855.04 91.742 -1067.2 -811.93 1234.7 -932.1 -763.13 1442.5 -1169.1 -845.2 916.74 -892.73 -956.04 948.14 -782.57 -671.98 1493.9 -735.55 -801.49 712.47 -802.51 -881.18 811.17 -965.2 -813.97 806.16 -1303 -826.36 50.072 -1015.2 -827.8 394.98 -1101.2 -833.49 26.903 -1115.2 -861.15 219.89 -901.81 -821.95 1261.8 -969.11 -837.9 538.66 -1035.9 -749.04 30.977 -879.74 -886.87 1180.3 -754.9 -793.01 821.1 -811.08 -868.28 729.02 -897.05 -602.9 1033 -1149 -746.41 1169.1
97 0.8000 -1022.6 -702.71 1327.1 -871.25 -606.38 1225.2 -1403.7 -768.05 364.42 -968.17 -615.55 1455.2 -1226.7 -666.21 902.91 -1246.7 -389.88 1082.9 -847.24 -555.72 1498.7 -1234.9 -417.21 777.31 -1280.3 -409.57 966.31 -1081 -573.03 780.36 -1439.7 -843.93 432.57 -1130.7 -576.25 415.85 -1490.5 -807.43 249.34 -1241.7 -621.83 393.96 -1016 -512.77 1329.2 -1112.3 -532.63 592.29 -1443.6 -838.07 193.16 -1137.5 -438.77 1233 -1189.3 -474.33 848.85 -1279.4 -383.94 871.34 -1196.6 -855.04 92.421 -1049.6 -800.9 1234.8 -917.17 -751.84 1442.8 -1154.4 -833.32 917.17 -867.1 -935.24 946.7 -767.88 -661.63 1496 -706.78 -756.77 729.1 -774.08 -848.51 819.99 -951.88 -806.58 804.8 -1303 -827.04 51.345 -1011.4 -825.68 393.11 -1101.3 -833.74 26.394 -1113.4 -860.39 219.72 -884.58 -810.57 1264.7 -961.98 -833.66 536.2 -1036.2 -749.3 31.147 -860.99 -873.8 1182.1 -730.29 -758.89 837.65 -779.26 -829.24 737.67 -880.17 -592.21 1033.4 -1131.4 -735.64 1169.2
98 0.8083 -1005.9 -690.91 1328.5 -853.86 -593.4 1226.8 -1368.4 -745.99 360.43 -952.81 -603.41 1456.2 -1209.2 -653.82 904.52 -1239.8 -375.8 1099 -832.21 -544.26 1501.4 -1235.3 -402.53 793.18 -1276.6 -395.57 983.19 -1061.9 -562.33 782.57 -1406.5 -823.9 425.27 -1097.8 -553.59 421.54 -1450.2 -780.11 242.13 -1207.6 -599.59 396.42 -998.98 -501.31 1331.1 -1086.6 -515.49 598.91 -1401.3 -809.56 186.12 -1123.7 -427.73 1242.7 -1187.3 -458.71 863.62 -1279 -369.94 888.23 -1196.3 -855.47 93.524 -1032 -789.27 1236 -902.06 -740.3 1444.4 -1139.4 -820.84 918.19 -841.47 -914.11 947.38 -752.86 -650.6 1498.9 -680.13 -712.72 749.81 -747.43 -815.5 831.11 -938.13 -799.03 803.61 -1303 -827.72 53.467 -1008.3 -823.73 392.17 -1101.3 -834 26.139 -1111.8 -859.46 219.72 -866.16 -798.69 1268.9 -955.87 -829.5 533.99 -1036.5 -749.47 31.316 -841.89 -860.31 1185.4 -707.29 -725.71 857.59 -748.96 -790.12 750.32 -862.94 -581.26 1035 -1113.7 -724.86 1170.2
99 0.8167 -988.63 -678.94 1330.9 -837.05 -581.09 1229.6 -1333.4 -723.16 355.68 -937.28 -590.51 1458.2 -1191.1 -641.01 906.9 -1231.9 -361.71 1116.3 -817.02 -531.61 1504.7 -1233.6 -387.42 810.57 -1271.6 -382.16 1001.6 -1041.9 -551.05 785.45 -1374.9 -804.21 415.77 -1065.9 -531.61 428.92 -1409.7 -753.03 232.11 -1174.1 -578.04 399.3 -981.84 -489.43 1334.2 -1060.4 -498.09 607.06 -1358.2 -781.55 176.86 -1109 -416.36 1253.8 -1183.3 -443.01 880.17 -1276.9 -357.04 906.39 -1196 -855.98 94.967 -1013.7 -777.81 1238.1 -886.95 -727.57 1446.7 -1123.6 -808.37 920.56 -815.75 -893.15 950.35 -738.1 -638.72 1502.8 -656.2 -670.97 774.84 -722.31 -782.65 844.78 -923.36 -790.71 802.85 -1302.2 -828.57 55.758 -1005.9 -821.44 392.09 -1101.3 -834.25 25.885 -1110.3 -858.18 220.49 -847.75 -786.22 1274.1 -950.1 -824.83 532.97 -1036.7 -749.72 31.316 -822.37 -846.3 1190.2 -685.39 -694.73 880.67 -720.78 -752.1 767.04 -845.03 -570.4 1037.2 -1095.9 -713.48 1172.3
100 0.8250 -971.48 -666.38 1334 -820.42 -568.53 1233.3 -1298.7 -700.92 349.32 -921.67 -576.76 1461.2 -1172.9 -627.77 910.29 -1222.9 -348.38 1134.6 -801.92 -518.29 1509 -1229.7 -372.32 829.24 -1265.2 -369.01 1020.5 -1021.3 -539.59 789.19 -1343.5 -784.6 404.31 -1035.5 -510.74 437.24 -1367.9 -726.64 220.23 -1141.3 -557.33 402.61 -964.95 -477.21 1338 -1034.9 -480.01 617.41 -1313.4 -752.52 166.6 -1093.7 -404.82 1265.7 -1177 -427.73 897.99 -1272.6 -344.48 925.31 -1195.4 -856.4 96.58 -995.25 -766.27 1241.4 -872.19 -713.74 1449.8 -1106.8 -796.15 923.45 -789.61 -873.12 955.19 -723.58 -625.9 1507.9 -634.56 -631.59 803.44 -697.61 -750.74 861.83 -907.83 -782.23 802.43 -1301.5 -829.58 58.05 -1004.1 -818.98 393.02 -1101.4 -834 25.63 -1109.1 -857.25 222.18 -829.5 -773.06 1280.7 -944.5 -820.08 533.06 -1037 -749.89 31.401 -802.51 -831.88 1196.2 -664.94 -666.04 905.97 -693.96 -715.18 786.81 -826.78 -559.02 1040.5 -1077.6 -701.86 1175.1
101 0.8333 -954.34 -653.99 1338.5 -803.53 -554.78 1237.7 -1264.1 -681.15 340.49 -906.05 -562.33 1465.2 -1154 -614.78 914.88 -1212.4 -335.14 1153.4 -786.47 -504.45 1514.1 -1223.6 -357.8 848.51 -1257.1 -356.02 1039.4 -1000.3 -527.71 794.36 -1310.4 -766.02 390.31 -1005.9 -490.96 446.24 -1325.3 -699.91 207.93 -1109.4 -537.21 405.84 -948.14 -464.82 1343 -1010.2 -462.87 628.79 -1267.4 -723.24 156.58 -1077.7 -393.19 1278.3 -1169.3 -412.97 915.89 -1266.1 -331.75 944.66 -1194.4 -856.23 98.786 -976.49 -753.8 1245.6 -857.17 -699.23 1454.5 -1089.4 -783.59 926.76 -763.47 -852.84 961.81 -708.9 -612.15 1513.9 -614.53 -594.58 834.42 -673.26 -721.8 880.84 -892.05 -773.15 802.85 -1300.8 -830.86 60.935 -1002.4 -816.26 395.4 -1101.8 -833.49 25.291 -1108 -856.49 225.07 -810.74 -759.4 1289.2 -939.15 -814.99 534.07 -1037.4 -749.98 31.571 -781.72 -816.6 1203.9 -646.44 -638.72 932.36 -669.18 -679.79 808.79 -808.96 -547.06 1045.1 -1058.7 -689.98 1178.4
102 0.8417 -937.11 -641.26 1343.5 -786.47 -540.61 1243 -1229 -661.46 330.31 -890.69 -547.4 1470.3 -1134.7 -602.14 919.54 -1200.6 -322.33 1172.3 -770.69 -489.69 1520.2 -1215.3 -343.72 867.6 -1246.7 -343.21 1058.7 -979.63 -515.4 801.58 -1277.3 -747.6 374.69 -976.66 -471.61 455.49 -1282.2 -672.83 195.37 -1078.5 -517.69 408.72 -931.17 -452.43 1349.1 -985.32 -446.74 640.41 -1220.4 -694.47 146.74 -1060.8 -381.48 1291.2 -1160.1 -399.13 933.55 -1257.6 -319.53 964.18 -1193.2 -855.98 102.01 -956.63 -740.98 1250.4 -841.97 -684.54 1460.6 -1071.1 -771.03 930.58 -737.16 -832.38 970.97 -693.8 -597.22 1520.5 -596.54 -560.21 866.67 -650.77 -694.22 901.13 -875.75 -763.3 804.55 -1299.7 -830.94 66.027 -1000.5 -813.54 399.13 -1101.5 -833.23 25.291 -1107.2 -855.47 228.97 -791.48 -744.8 1298.8 -933.04 -808.37 536.37 -1037.8 -749.98 31.825 -760.59 -800.98 1213.3 -629.97 -612.66 959.09 -646.52 -646.69 832.47 -791.14 -534.92 1050.9 -1039.9 -678.01 1182.5
103 0.8500 -919.54 -628.28 1349.1 -769.58 -526.18 1249.4 -1193.6 -640.58 319.44 -875.41 -532.46 1475.8 -1115.1 -588.81 924.81 -1187.9 -309.68 1191.1 -755.83 -474.5 1526.6 -1205.1 -330.05 886.36 -1234.9 -330.73 1077.7 -959.01 -502.59 809.64 -1244.5 -729.02 358.4 -948.48 -452.86 464.4 -1238.1 -646.35 182.04 -1048.3 -499.19 411.1 -914.03 -439.87 1356.2 -960.54 -431.3 652.38 -1172.5 -665.87 136.72 -1043.4 -369.35 1304.6 -1148.8 -385.39 951.28 -1247.2 -307.22 983.36 -1191.4 -855.81 106.34 -936.6 -728.59 1256 -826.78 -669.18 1467.1 -1052.7 -758.29 934.9 -711.28 -812.02 981.5 -678.69 -581.77 1528 -580.16 -528.64 898.92 -629.47 -667.4 922.68 -858.86 -752.18 806.67 -1297.9 -830.94 72.223 -998.56 -810.57 404.06 -1101.2 -832.72 25.8 -1105.6 -854.11 233.81 -771.79 -729.78 1309.1 -926.33 -801.49 539.51 -1037.9 -749.89 31.571 -739.03 -785.28 1224.2 -615.12 -589.32 986.17 -625.82 -615.12 856.74 -773.57 -522.28 1057.2 -1020.9 -666.47 1187.2
104 0.8583 -901.72 -615.38 1355.3 -752.27 -511.24 1256.2 -1158.2 -619.71 307.31 -859.97 -517.35 1482.2 -1094.6 -574.81 930.15 -1173.8 -296.87 1209.6 -741.24 -459.05 1533.1 -1193.1 -317.07 904.35 -1221.8 -318.25 1095.9 -937.54 -490.11 817.87 -1210.6 -710.51 339.81 -921.92 -435.2 473.14 -1193.4 -620.13 168.04 -1018.7 -481.46 412.88 -896.8 -427.31 1364.2 -936.43 -416.7 663.92 -1124.5 -637.36 126.71 -1025.8 -357.63 1317.7 -1135.7 -371.81 969.02 -1234.9 -295 1001.6 -1188.6 -855.47 111.77 -916.32 -715.86 1262.3 -811.42 -653.91 1474.3 -1034 -745.23 939.57 -685.39 -792.5 992.87 -663.84 -566.32 1535.9 -565.9 -499.79 930.66 -609.52 -642.2 944.83 -841.47 -739.71 809.13 -1295.4 -830.77 79.861 -996.09 -807.35 410.51 -1100 -831.88 27.667 -1103.2 -852.67 240.26 -752.27 -714.16 1320 -919.29 -794.53 543.92 -1037.9 -749.89 31.571 -717.22 -768.9 1235.7 -600.78 -567.94 1012.7 -607.4 -584.91 882.12 -755.83 -508.78 1064.2 -1001.4 -655.01 1192.6
105 0.8667 -883.81 -602.56 1362 -735.04 -496.14 1263.7 -1122.7 -598.91 294.83 -844.61 -501.74 1489.1 -1073.7 -560.98 935.33 -1158.7 -284.48 1226.9 -725.96 -442.93 1540.2 -1179.2 -304.34 921.5 -1207.7 -306.2 1113.3 -915.81 -477.81 826.1 -1176.2 -691.59 318.68 -896.88 -418.48 480.95 -1147.8 -593.91 153.78 -989.39 -464.14 413.73 -879.4 -414.83 1372.3 -912.92 -402.44 674.95 -1076.6 -609.44 116.61 -1007.6 -346.01 1330.4 -1121.4 -358.91 986.42 -1221.4 -283.29 1018.8 -1185.1 -854.28 118.82 -896.04 -702.71 1269.4 -795.98 -638.38 1482.2 -1014.4 -731.99 944.66 -659.85 -774 1005.9 -649.07 -550.88 1543.9 -553.51 -474.07 961.55 -590.85 -619.03 967.07 -823.22 -726.72 812.61 -1292 -829.58 89.366 -992.28 -803.87 418.57 -1099 -831.11 29.449 -1100.1 -850.55 248.15 -732.75 -697.95 1331.5 -911.23 -787.32 549.6 -1037.5 -749.98 31.656 -694.73 -751.68 1247.6 -587.03 -548.08 1038.2 -590.43 -557.58 907.49 -737.42 -494.7 1072.5 -981.24 -643.47 1198.6
106 0.8750 -866.25 -589.15 1369.3 -717.98 -481.46 1271.7 -1086.7 -578.21 281.68 -829.16 -485.7 1496.1 -1053.8 -548.5 942.03 -1143.1 -272 1243.4 -711.02 -426.72 1547.4 -1164 -291.61 938.05 -1192.4 -293.98 1130 -894.42 -464.4 834.68 -1140.9 -671.73 297.38 -873.21 -402.02 488.08 -1101 -567.68 139.35 -961.38 -447.08 413.48 -861.83 -402.1 1380.5 -889.16 -388.19 685.39 -1027.6 -581.09 107.1 -989.14 -333.62 1342.3 -1105.2 -346.43 1002.8 -1206.9 -272.09 1034.9 -1180.6 -852.33 127.39 -875.67 -689.72 1276.9 -780.53 -622.68 1490.5 -994.48 -719.09 950.1 -634.98 -756.6 1018.9 -633.88 -534.58 1552.1 -542.39 -450.82 991.26 -572.35 -598.06 988.2 -803.95 -713.65 817.7 -1287.3 -827.46 100.82 -987.01 -799.8 427.56 -1099.9 -830.43 29.704 -1095.8 -847.75 257.23 -713.74 -681.49 1342.9 -901.55 -779.34 555.88 -1036.9 -749.3 31.147 -672.41 -734.19 1260.5 -573.62 -529.83 1062.9 -573.37 -533.56 931.51 -719.17 -480.69 1080.7 -960.87 -631.76 1204.8
107 0.8833 -848.42 -575.23 1376.5 -700.92 -467.2 1280.1 -1050.1 -556.82 268.18 -813.71 -469.91 1503.2 -1033.9 -536.62 950.18 -1126.6 -259.61 1259 -696.68 -410.68 1554.5 -1147.4 -279.3 953.41 -1175.9 -282.02 1145.1 -873.29 -449.46 843.76 -1105.6 -650.68 276.5 -850.55 -386.57 494.1 -1053.9 -540.95 125.6 -934.4 -430.2 412.2 -843.93 -389.2 1389 -865.48 -373.93 695.75 -977.68 -552.74 98.701 -970.97 -321.39 1353.7 -1087.6 -333.45 1018.2 -1191.4 -261.65 1049.7 -1174.6 -849.87 137.15 -855.55 -677.08 1285 -765.08 -606.64 1499 -973.94 -706.02 956.04 -611.39 -740.39 1031.7 -618.6 -517.69 1560.7 -531.27 -429.86 1019.3 -555.12 -578.04 1008.3 -784.43 -700.33 823.56 -1280.8 -823.98 113.72 -979.8 -795.21 436.82 -1100 -828.9 30.552 -1090.4 -844.35 267.76 -695.24 -665.45 1355.1 -889.76 -770.77 563.01 -1036.2 -748.7 30.468 -651.19 -717.05 1273.7 -560.47 -513.45 1086.2 -557.75 -511.16 954.34 -701.01 -466.43 1088.9 -940.08 -619.45 1211.4
108 0.8917 -830.6 -561.32 1383.8 -683.7 -452.43 1288.8 -1012.7 -534.75 254.01 -798.61 -454.21 1510.3 -1013.6 -523.89 958.5 -1109.6 -246.97 1273 -682 -394.47 1561.3 -1129 -267.67 966.82 -1159 -270.47 1158.4 -852.67 -435.8 851.23 -1069.5 -628.7 255.54 -828.9 -372.23 498.09 -1006.4 -514.13 112.62 -907.83 -413.48 409.74 -826.36 -376.13 1397.6 -842.74 -360.18 705.08 -927.69 -525.5 90.894 -952.56 -309.17 1364.8 -1068.7 -319.95 1032.2 -1174.7 -251.04 1063 -1167.4 -846.13 147.59 -835.61 -663.92 1293.1 -749.64 -590.94 1507.2 -952.98 -692.27 962.06 -588.64 -725.2 1044.6 -603.58 -501.06 1569 -520.24 -410.93 1045.3 -539.51 -559.45 1028.3 -764.24 -686.16 829.58 -1272.7 -819.57 127.64 -971.23 -790.29 447.17 -1098.6 -827.29 32.929 -1083.9 -840.53 279.05 -676.82 -649.07 1366.7 -876.35 -761.77 570.82 -1035.5 -748.37 30.043 -630.74 -700.08 1286.5 -546.72 -498.43 1107.6 -544 -489.43 976.24 -682.59 -451.92 1097.4 -919.37 -606.98 1218.1
109 0.9000 -813.03 -547.57 1391.2 -666.04 -437.58 1297.3 -974.88 -512.09 239.33 -783.42 -438 1517.4 -992.87 -510.4 965.63 -1092.5 -234.66 1285.3 -666.38 -377.41 1568.4 -1109.1 -256.47 978.44 -1141.2 -259.19 1170.3 -832.38 -422.9 858.18 -1032.1 -606.21 233.98 -808.2 -358.31 501.06 -958.07 -487.65 99.465 -881.61 -396.08 405.92 -808.71 -363.06 1406.1 -821.1 -346.86 713.74 -877.96 -498.68 83.68 -934.4 -296.78 1375.3 -1048.1 -305.1 1044.4 -1156.5 -240.43 1074.5 -1158.4 -841.55 158.36 -815.84 -650.6 1301.2 -733.68 -574.9 1514.8 -931.68 -678.18 968.43 -566.58 -711.02 1058 -587.88 -484.43 1577.3 -510.31 -393.45 1068.7 -523.64 -542.48 1046.8 -743.78 -671.64 836.03 -1263 -814.22 141.73 -961.04 -785.11 458.2 -1097.2 -825.93 35.39 -1076.3 -836.12 290.67 -658.74 -632.61 1377.5 -861.32 -752.27 578.97 -1034.8 -747.94 29.534 -610.79 -683.7 1299.2 -532.38 -484.77 1127 -530.59 -470.42 996.43 -663.58 -437.49 1105.7 -899.43 -594.75 1224.9
110 0.9083 -795.21 -533.31 1398.4 -648.65 -422.98 1305.1 -937.11 -489.35 224.48 -767.8 -421.88 1524.3 -971.91 -496.65 971.48 -1075.4 -223.03 1296.1 -650.85 -360.35 1575.6 -1088.2 -245.18 988.63 -1122.7 -247.9 1180.3 -811.93 -409.32 865.91 -993.38 -583.13 211.24 -788.08 -344.82 502.76 -908.94 -460.92 86.735 -855.81 -378.77 400.92 -790.97 -349.57 1414.2 -800.3 -333.45 721.21 -827.8 -471.02 77.824 -916.06 -283.97 1384.7 -1026.1 -290.25 1055.3 -1137 -229.99 1084.4 -1148 -836.97 169.23 -795.89 -636.93 1308.9 -717.64 -559.02 1522.4 -910.12 -664.01 974.79 -545.02 -697.7 1070.9 -572.26 -467.45 1585.2 -500.81 -377.58 1089.4 -507.09 -526.94 1062.8 -722.82 -657.64 842.48 -1251.5 -807.43 156.33 -949.67 -779.34 469.06 -1095.2 -824.83 38.021 -1066.8 -830.86 302.04 -641.26 -615.89 1387.6 -845.62 -742.42 587.2 -1033.6 -746.84 29.025 -591.7 -667.74 1311.2 -517.69 -472.37 1144 -517.02 -453.96 1014.2 -644.57 -422.9 1114.1 -879.49 -582.53 1231.7
111 0.9167 -777.47 -518.71 1405.7 -631.59 -408.3 1312.7 -899.09 -465.76 209.79 -751.84 -406.18 1530.4 -950.69 -483.15 977.59 -1058.1 -211.15 1305 -636.09 -344.14 1582.2 -1065.3 -233.22 996.52 -1103.5 -236.44 1188.4 -791.05 -394.47 873.29 -954.17 -559.19 188.58 -768.22 -331.83 503.69 -859.54 -433.42 75.193 -830.52 -361.71 395.31 -773.23 -335.74 1421.8 -779.85 -319.87 726.64 -778.32 -442.84 72.817 -898.16 -270.98 1393.1 -1003.6 -276.84 1065.4 -1116.8 -219.81 1092.4 -1136.6 -832.3 179.58 -776.54 -623.02 1316.5 -702.28 -543.32 1529.9 -888.23 -649.83 980.9 -524.4 -685.31 1082.8 -557.07 -450.73 1592.5 -490.45 -363.74 1107.2 -490.96 -512.69 1076.5 -701.26 -643.38 848.68 -1238.5 -800.47 171.18 -937.62 -772.72 479.76 -1092.3 -823.81 41.84 -1055.8 -825.09 313.25 -623.78 -599.08 1397.1 -829.24 -732.41 595.26 -1032.4 -745.9 28.091 -573.62 -652.21 1322.2 -503.52 -460.41 1158.7 -503.18 -439.36 1029.3 -626.33 -407.71 1122.7 -859.46 -569.72 1238.4
112 0.9250 -760.42 -504.54 1412.7 -614.61 -393.19 1319.6 -861.15 -441.82 195.37 -736.14 -390.56 1536.4 -929.81 -469.57 983.79 -1041.1 -199.35 1311.9 -621.23 -328.1 1588.1 -1041.1 -221 1002.6 -1083.7 -225.07 1194.4 -769.75 -379.19 880.67 -913.86 -534.84 166.6 -748.96 -319.61 503.61 -811.51 -405.07 64.754 -805.65 -344.99 389.54 -755.58 -321.48 1428.9 -759.74 -306.37 730.8 -729.86 -414.07 69.337 -880.33 -257.32 1400.7 -980.31 -264.36 1074.7 -1095.7 -209.79 1097.8 -1124.5 -827.46 189.85 -757.62 -608.59 1323.3 -686.75 -527.37 1536.8 -866.08 -635.41 986.59 -504.71 -673.77 1093.8 -541.97 -433.93 1599.1 -478.99 -351.18 1121.4 -475.43 -499.96 1088.4 -679.54 -628.45 854.7 -1224.8 -794.45 186.54 -924.55 -765.42 490.62 -1087.6 -822.71 48.205 -1044 -818.72 324.53 -606.64 -582.96 1406.1 -812.1 -721.89 602.73 -1030.6 -744.97 26.733 -555.8 -636.85 1332.2 -487.82 -448.53 1171.8 -489.43 -426.21 1042.1 -607.99 -392.51 1130.5 -839.6 -556.31 1244.6
113 0.9333 -743.36 -490.2 1419 -597.64 -378.51 1326.3 -822.88 -417.8 181.79 -720.78 -374.86 1542.2 -908.94 -456 989.39 -1024.4 -187.98 1317.3 -605.96 -312.31 1593.7 -1015.7 -209.45 1007.5 -1063.6 -214.12 1198.6 -749.04 -364.25 886.95 -872.61 -510.06 145.46 -730.8 -307.99 502.25 -765.17 -375.63 55.419 -780.78 -328.69 383.26 -737.67 -306.8 1435.1 -740.22 -292.96 734.19 -682.76 -384.62 66.791 -862.68 -243.66 1407.5 -956.21 -251.12 1081.8 -1073.4 -198.85 1101.3 -1111.5 -822.37 200.97 -739.28 -594.58 1330 -670.54 -511.5 1543 -843.67 -620.81 991.68 -486.04 -663.33 1103.6 -526.35 -417.72 1605.1 -467.2 -339.13 1131.5 -460.07 -488.5 1098.4 -657.73 -613.17 860.39 -1210.4 -789.19 201.65 -910.21 -757.11 500.3 -1081.4 -821.01 56.607 -1031.1 -811.93 335.74 -589.75 -567.17 1414.2 -794.11 -710.77 609.78 -1028.8 -744.29 24.527 -538.57 -622.17 1341.8 -471.36 -437.15 1181.6 -475.68 -414.58 1052.2 -588.98 -375.2 1138 -820.08 -542.14 1250.2
114 0.9417 -726.21 -475.43 1424.6 -580.5 -364.08 1332.5 -784.52 -393.53 168.97 -705.59 -359.08 1547.5 -887.55 -442.5 994.14 -1007.5 -176.86 1321 -590.6 -296.44 1599.4 -989.05 -197.66 1010.2 -1043 -203.17 1200.7 -728.93 -349.49 892.13 -831.03 -484.51 125.01 -713.4 -296.7 499.96 -719.09 -345.24 48.205 -756.6 -312.91 376.98 -720.27 -291.95 1440.6 -721.38 -279.47 736.23 -637.44 -354.24 65.348 -845.28 -229.74 1413.3 -931.43 -237.38 1087.2 -1050 -187.64 1103.1 -1098.1 -817.11 212.76 -721.21 -580.07 1336.1 -654.76 -495.63 1548.7 -821.01 -606.21 995.93 -468.13 -653.48 1112 -511.16 -401.93 1610.5 -455.57 -328.1 1137.8 -444.28 -478.06 1106 -635.83 -598.15 865.48 -1195.8 -783.67 217.01 -894 -746.58 508.19 -1075.6 -819.4 65.688 -1016.8 -804.29 346.69 -572.69 -551.9 1421.1 -775.52 -698.97 616.06 -1028.3 -744.46 21.726 -521.26 -608.08 1350.1 -456.33 -426.29 1187.8 -461.51 -404.48 1059.5 -569.72 -357.46 1144.4 -800.73 -527.62 1255.1
115 0.9500 -709.5 -460.75 1429.3 -563.35 -349.49 1338 -746.67 -369.35 157.09 -690.23 -343.8 1552.3 -866.42 -429.18 998.13 -990.32 -165.83 1322.4 -575.57 -280.57 1604.3 -961.55 -185.52 1011.7 -1022.1 -192.4 1200.7 -708.48 -335.14 896.54 -789.1 -458.2 105.92 -696.77 -286 496.56 -674.11 -314.1 43.198 -733.51 -298.06 370.7 -703.39 -277.09 1445 -703.39 -266.06 736.91 -594.5 -323.43 65.688 -828.23 -215.56 1417.4 -905.88 -223.8 1090.9 -1026 -177.03 1102.5 -1084.4 -811.17 224.9 -702.96 -565.22 1341 -639.39 -480.27 1553.9 -798.78 -591.95 999.91 -450.82 -643.98 1119.8 -496.05 -385.89 1615.1 -442.08 -317.92 1140.4 -427.23 -468.05 1111 -613.6 -583.81 869.73 -1181.3 -777.73 232.79 -874.56 -734.53 514.47 -1070 -818.47 76.296 -1000.5 -795.72 356.53 -555.8 -536.96 1426.9 -755.66 -686.41 621.32 -1028.7 -746.58 20.114 -503.78 -594.08 1357 -440.8 -415.51 1192.1 -446.66 -395.91 1063.6 -550.71 -342.44 1149.8 -781.97 -513.54 1259.4
116 0.9583 -692.61 -445.9 1433.3 -546.21 -334.8 1342.6 -709.84 -345.16 146.48 -674.78 -328.95 1556.8 -845.37 -415.68 1001.1 -973.01 -154.97 1321.4 -560.47 -265.13 1608.1 -933.55 -172.45 1012.2 -1000.3 -181.36 1198.7 -688.02 -321.23 899.85 -747.01 -431.89 88.772 -680.47 -276.5 492.06 -632.01 -283.03 40.906 -710.26 -283.46 364 -686.41 -261.99 1448.3 -685.31 -253.16 736.23 -553.85 -292.45 67.47 -811.42 -201.31 1419.3 -879.83 -210.05 1092.7 -1001.6 -166.17 1100.1 -1070.7 -804.89 237.04 -685.22 -550.45 1345.4 -624.2 -465.25 1558.9 -776.71 -577.78 1003.6 -433.84 -635.15 1127 -480.78 -369.85 1619.1 -426.29 -308.24 1139.4 -410.17 -458.97 1113.8 -591.44 -569.21 873.72 -1166.9 -770.77 249.17 -853.43 -721.72 519.82 -1064.2 -817.7 89.366 -982.6 -786.73 365.61 -539 -522.28 1431.2 -734.7 -673 625.05 -1029.4 -750.74 21.726 -486.63 -580.58 1363.2 -422.9 -404.9 1194.3 -431.3 -388.7 1065 -531.53 -329.37 1154.2 -763.3 -499.79 1263.3
117 0.9667 -675.72 -431.04 1436.5 -529.07 -319.95 1346.3 -673.85 -320.63 137.49 -659.59 -314.1 1560.2 -824.66 -401.93 1003.2 -955.7 -144.28 1318.3 -545.45 -249.68 1611.3 -904.86 -158.79 1011.2 -978.19 -170.33 1194.7 -667.15 -306.71 901.98 -706.02 -405.92 73.835 -663.84 -267.42 486.8 -592.21 -252.65 40.143 -686.92 -269.29 356.95 -669.69 -246.88 1450.3 -667.4 -240.6 734.36 -515.32 -261.48 69.931 -794.87 -187.13 1419.6 -853.35 -195.71 1092.9 -976.32 -154.88 1096.2 -1056.8 -798.61 250.19 -667.66 -536.2 1348.9 -608.08 -450.05 1563 -754.48 -563.86 1006.6 -417.55 -626.92 1133.2 -465.33 -354.24 1623.3 -409.66 -298.99 1135.4 -394.13 -451.41 1114.1 -569.38 -554.02 877.03 -1152.6 -762.96 265.72 -831.37 -708.39 523.89 -1057.3 -816.43 103.45 -963.76 -777.22 374.18 -522.02 -507.85 1435 -712.47 -658.58 627.43 -1027.9 -754.73 28.77 -469.83 -567.51 1368.8 -404.9 -394.47 1193.4 -415.77 -382.5 1063.7 -512.09 -317.92 1158 -744.55 -486.46 1266.6
118 0.9750 -658.74 -416.7 1438.7 -511.75 -305.69 1349.1 -638.8 -297.21 130.02 -644.83 -299.5 1563.1 -803.61 -388.36 1004.6 -938.05 -133.33 1313.1 -530.68 -234.24 1613.8 -875.16 -145.04 1008.3 -955.44 -159.3 1188.7 -646.52 -291.52 903.25 -666.3 -380.72 61.529 -647.03 -258.17 481.46 -555.21 -222.69 40.821 -664.77 -255.54 350.17 -653.57 -231.86 1451.4 -650.09 -227.96 731.39 -479.42 -230.93 73.326 -778.32 -173.05 1418.3 -826.36 -181.36 1091.6 -950.1 -143.17 1090.3 -1043 -792.16 264.02 -650.17 -521.77 1351.8 -592.29 -434.86 1565.8 -732.24 -549.69 1008.7 -401.68 -618.6 1139 -449.97 -339.13 1626.9 -391.16 -290.59 1128.3 -378.34 -444.88 1112.2 -547.23 -538.74 878.98 -1138.2 -754.14 282.36 -808.03 -694.39 526.35 -1048.8 -814.82 118.31 -944.83 -767.38 383.09 -504.71 -493.25 1438.3 -689.38 -643.55 629.13 -1022.8 -757.28 39.718 -452.86 -554.02 1373.1 -387.17 -384.54 1189.8 -399.81 -377.49 1059.6 -493.17 -307.22 1162 -725.62 -472.63 1268.5
119 0.9833 -642.03 -402.44 1440.2 -494.27 -291.44 1351 -605.79 -275.48 123.74 -630.31 -285.67 1565.5 -782.57 -374.95 1004.8 -920.05 -122.72 1305.4 -515.83 -219.38 1616.1 -844.78 -130.78 1004 -932.1 -148.09 1180.8 -626.83 -276.41 903.25 -628.53 -356.95 51.26 -630.82 -248.83 476.11 -521.17 -194.77 42.349 -643.47 -242.55 344.05 -637.87 -217.01 1451.4 -633.11 -215.56 727.4 -446.83 -201.82 77.06 -762.03 -158.96 1415.2 -798.52 -166.26 1088.5 -922.85 -131.29 1082.3 -1028.8 -785.28 277.86 -632.69 -507.43 1353.4 -577.19 -420.27 1568.4 -710 -535.43 1009.8 -386.91 -611.13 1144.3 -435.12 -324.2 1629.5 -371.38 -283.63 1118 -362.22 -438.94 1108.5 -525.42 -523.64 880.76 -1123.1 -745.48 298.65 -783.59 -679.79 527.88 -1039.2 -812.36 133.5 -925.48 -757.28 392.26 -487.31 -478.91 1440.6 -665.87 -627.94 629.89 -1015.2 -757.96 52.024 -436.22 -541.03 1376.6 -369.35 -374.95 1183.9 -382.67 -373.76 1052.9 -473.73 -291.01 1163.2 -706.95 -458.12 1269.4
120 0.9917 -625.05 -387.93 1441 -476.96 -277.26 1352.2 -575.66 -255.28 119.15 -615.55 -271.49 1566.8 -761.77 -361.11 1004 -902.15 -112.11 1295.5 -501.15 -204.62 1617.9 -813.88 -115.59 997.79 -908.17 -136.55 1170.6 -606.72 -261.65 902.06 -593.14 -335.14 43.028 -614.7 -239.16 471.27 -490.2 -169.91 43.368 -622.76 -230.25 338.54 -622.17 -202.33 1450.3 -616.57 -203.26 722.65 -417.38 -174.74 81.219 -746.41 -144.45 1410.3 -769.75 -150.22 1084.4 -894.85 -119.32 1072.4 -1013.1 -777.73 291.61 -615.38 -493 1354.2 -561.91 -406.01 1570.2 -688.28 -521.6 1010.2 -372.57 -603.58 1148.4 -420.27 -309 1631.1 -351.35 -278.28 1105.3 -345.07 -433.34 1104.1 -503.78 -508.44 882.12 -1107 -738.44 313.84 -758.55 -664.77 528.81 -1027.8 -808.62 148.6 -905.29 -746.33 401.26 -470.93 -464.82 1442 -641.6 -611.22 629.47 -1006.6 -756.94 64.754 -419.42 -528.56 1378.9 -352.2 -365.19 1175.8 -364.68 -371.04 1044.8 -453.96 -277.09 1164.1 -688.79 -443.94 1269.3
121 1.0000 -607.99 -373.59 1440.7 -459.47 -262.75 1352.5 -547.4 -236.87 115 -600.02 -256.47 1566.7 -741.41 -347.62 1002.7 -883.73 -101.25 1283.5 -486.04 -190.02 1618.5 -781.72 -99.465 989.9 -883.48 -124.67 1158.2 -586.01 -246.03 900.28 -560.81 -315.54 36.833 -598.49 -228.72 466.6 -462.62 -147.59 43.707 -602.73 -218.53 333.79 -606.38 -187.56 1448.1 -599.85 -191.38 717.64 -390.82 -150.56 84.359 -731.31 -130.1 1403.7 -740.81 -133.67 1078.9 -866.16 -106.85 1060.6 -995.5 -769.67 305.1 -598.4 -478.99 1354.8 -546.29 -391.58 1570.3 -666.98 -507.85 1009.7 -358.06 -595.18 1151.7 -405.5 -294.24 1632.9 -330.56 -274.38 1090.1 -327.42 -428.24 1099 -481.46 -492.49 882.54 -1089.1 -731.48 329.03 -733.17 -648.73 528.73 -1015 -803.44 162.95 -883.81 -734.36 409.66 -454.38 -451.07 1442.5 -616.99 -594.08 627.77 -997.79 -753.54 77.484 -402.87 -515.57 1380.2 -335.14 -355.68 1165.7 -346.01 -369.35 1035.5 -434.44 -263.94 1164.9 -671.05 -430.03 1268.4
122 1.0083 -591.19 -359.25 1439.5 -441.99 -248.49 1352.5 -522.02 -220.23 111.69 -584.4 -241.62 1565.3 -721.29 -334.21 1000.7 -864.46 -90.724 1269.2 -471.02 -175.76 1618.5 -749.81 -83.171 980.82 -858.01 -112.45 1143.8 -565.56 -229.99 897.9 -532.04 -298.06 31.995 -581.94 -217.94 462.19 -438.17 -127.9 43.283 -583.3 -207.42 329.46 -590.85 -172.79 1444.8 -582.79 -179.41 712.47 -366.88 -129.59 85.886 -716.54 -116.27 1395.7 -711.87 -116.95 1071.5 -836.71 -94.288 1047.4 -976.49 -760.84 317.58 -581.68 -464.74 1354.2 -530.51 -376.98 1569.5 -645.76 -494.19 1008.3 -344.05 -586.61 1153.8 -390.56 -279.55 1634.2 -309.09 -271.49 1073.2 -310.36 -423.75 1092.1 -459.05 -476.19 881.69 -1069.5 -723.75 344.65 -707.03 -632.1 527.71 -1000.8 -797.16 176.86 -861.15 -721.89 417.13 -437.07 -437.49 1442.1 -592.29 -576.76 625.14 -988.03 -748.28 89.875 -387 -502.93 1380.7 -317.58 -346.86 1153.7 -326.49 -368.84 1024.9 -415.34 -250.53 1164.6 -653.65 -416.11 1266.7
123 1.0167 -574.39 -344.65 1437.3 -424.76 -234.91 1351.6 -499.7 -205.47 109.65 -568.78 -227.11 1563.6 -700.92 -319.95 997.79 -844.35 -80.03 1252.7 -456.33 -161.42 1618.2 -717.64 -66.536 970.55 -831.79 -100.23 1127.8 -545.45 -214.21 894.68 -507.34 -283.12 28.77 -565.39 -207.42 458.03 -415.85 -112.03 41.84 -564.63 -196.64 325.55 -575.83 -158.11 1440.3 -565.81 -166.77 707.46 -345.16 -111.86 85.462 -701.77 -102.61 1385.7 -682.51 -99.295 1063 -806.41 -81.303 1033 -955.78 -751.68 329.37 -564.97 -450.31 1352.8 -514.38 -362.64 1568.3 -624.54 -480.78 1006.4 -330.82 -578.46 1155.1 -375.46 -264.53 1634.2 -287.79 -269.71 1054.9 -293.56 -420.01 1083.5 -437.75 -460.83 880.25 -1048.2 -715.35 360.35 -680.05 -614.61 525.25 -985.32 -790.29 190.53 -837.31 -708.65 423.58 -419.93 -423.58 1441 -567.26 -559.02 621.74 -977 -741.92 102.52 -371.38 -490.37 1380.7 -299.75 -338.96 1140.5 -307.99 -369.77 1013.2 -396.25 -236.61 1163.9 -635.92 -401.76 1264.3
124 1.0250 -558.09 -330.05 1434.5 -407.45 -221.67 1350 -479.33 -193.33 107.1 -553.34 -213.36 1561.9 -680.39 -305.19 994.23 -823.47 -69.761 1234.4 -441.48 -146.74 1617.1 -684.88 -49.478 959.43 -804.55 -87.923 1110.2 -525.5 -198.85 891.37 -486.8 -271.58 27.073 -548.67 -196.64 454.38 -395.91 -100.14 39.294 -546.38 -185.78 322.16 -561.15 -143.34 1434.9 -548.5 -153.87 702.71 -325.13 -97.853 82.746 -686.84 -89.111 1374.1 -652.63 -81.134 1053.1 -775.61 -68.488 1017.1 -933.46 -741.92 341.17 -548.59 -436.05 1351.1 -498.09 -348.04 1565.7 -603.75 -467.54 1003.3 -317.92 -570.06 1155.9 -360.01 -249.68 1633.1 -266.82 -269.71 1035.3 -276.92 -416.96 1074.7 -416.62 -445.3 878.04 -1025.2 -706.27 375.8 -652.21 -596.45 520.83 -968.6 -782.57 204.36 -812.36 -694.22 428.75 -403.12 -409.74 1439.5 -541.37 -540.69 617.41 -964.27 -734.79 115.34 -355.6 -477.81 1380 -281.85 -332.17 1126.1 -289.74 -371.98 1000.8 -376.73 -223.12 1162.8 -617.84 -387.51 1261.4
125 1.0333 -541.97 -315.88 1431 -389.8 -207.59 1347.6 -461.26 -183.74 104.22 -537.81 -199.19 1559.4 -659.76 -290.67 990.32 -801.66 -59.153 1214.6 -426.38 -132.65 1615.2 -651.7 -31.825 946.79 -776.03 -75.617 1091.1 -505.9 -184.16 888.31 -471.78 -263.51 26.649 -531.19 -185.1 450.99 -379.19 -91.912 35.814 -528.3 -174.74 318.68 -546.38 -128.83 1428.3 -530.34 -140.54 698.63 -306.37 -87.838 76.296 -671.9 -75.532 1360.8 -621.91 -62.293 1042.3 -743.78 -56.013 999.49 -909.53 -730.88 352.12 -532.72 -422.3 1348.9 -481.88 -333.45 1562.6 -583.13 -453.96 998.98 -304.76 -561.15 1155.6 -344.82 -235.08 1631.4 -246.63 -271.24 1014.8 -260.37 -414.33 1064.3 -394.89 -429.6 875.5 -1000.3 -696.09 390.31 -623.44 -577.61 515.15 -950.18 -773.74 218.28 -785.28 -678.94 432.74 -386.91 -396.33 1437.2 -515.06 -522.28 611.9 -949.93 -726.89 128.15 -339.9 -465.5 1378.3 -263.68 -326.15 1110 -271.66 -375.03 988.12 -356.7 -210.47 1161.1 -600.19 -373.67 1257.5
126 1.0417 -525.5 -301.45 1426.8 -372.49 -193.33 1344.5 -446.74 -177.2 102.27 -522.11 -184.67 1556 -640.84 -277.43 986.67 -779.09 -48.629 1193.2 -411.61 -119.15 1613.5 -617.58 -13.918 932.78 -746.58 -62.972 1071.1 -486.46 -169.65 885.17 -464.06 -259.27 27.667 -512.09 -172.54 448.53 -368.84 -87.414 32.844 -510.82 -163.88 316.56 -531.7 -114.15 1421.2 -511.33 -125.94 695.24 -288.81 -81.388 66.452 -656.79 -61.36 1345.9 -590.6 -43.028 1030.7 -710.43 -42.094 981.33 -884.07 -718.15 362.3 -516.93 -408.21 1345.7 -466.09 -319.19 1559.2 -562.33 -440.38 994.31 -291.35 -551.98 1154.4 -329.8 -221 1629.5 -225.92 -274.12 993.89 -243.91 -412.63 1053.3 -373.93 -414.24 872.19 -973.69 -684.29 404.48 -593.14 -558.52 508.36 -929.81 -763.73 231.69 -756.51 -663.16 435.8 -370.7 -382.92 1433.7 -488.58 -504.2 605.53 -933.29 -717.9 141.14 -324.79 -453.19 1375.8 -245.61 -320.8 1092.5 -253.84 -378.94 974.88 -337.52 -197.49 1158.6 -582.79 -359.84 1253.2
127 1.0500 -509.12 -286.77 1421.5 -355.26 -179.58 1341.2 -439.36 -175.68 101.59 -506.58 -170.16 1552.1 -623.44 -264.87 983.62 -755.07 -38.106 1170.5 -396.42 -105.24 1611.6 -582.45 4.0737 917.76 -716.12 -49.648 1050.2 -467.54 -155.39 881.78 -462.62 -258.42 29.025 -491.22 -158.79 448.1 -365.44 -87.159 31.571 -494.02 -152.59 316.73 -517.27 -99.295 1413.3 -491.81 -110.07 692.78 -285.5 -90.809 58.983 -641.35 -47.696 1329.8 -559.02 -23.763 1018.7 -675.97 -26.903 963.25 -857.42 -704.66 372.82 -500.64 -393.45 1341.4 -450.82 -304.68 1555 -542.14 -427.31 989.05 -277.77 -542.56 1152.3 -315.2 -207.16 1627.1 -205.04 -278.54 972.5 -227.53 -411.61 1042.1 -353.64 -399.13 867.94 -945.68 -670.54 418.14 -561.57 -539.08 500.72 -907.41 -752.61 244.93 -726.21 -646.78 438 -354.49 -369.77 1429.3 -462.19 -486.46 598.57 -913.6 -708.14 154.46 -309.85 -440.97 1372.7 -227.62 -316.47 1074.3 -236.19 -383.43 961.13 -318.85 -184.25 1155.5 -564.8 -345.58 1248.1
128 1.0583 -492.74 -272.09 1415.5 -338.37 -165.75 1337.3 -438.94 -177.12 101.84 -491.13 -155.9 1547.7 -605.96 -252.31 980.56 -729.86 -27.327 1146.3 -381.14 -90.809 1608.7 -546.29 22.405 902.57 -684.29 -36.069 1028.7 -449.38 -142.41 877.96 -463.38 -258.42 30.128 -469.4 -144.11 449.12 -365.53 -87.669 31.571 -477.98 -141.05 318.51 -502.93 -84.359 1404.4 -472.21 -93.185 691.42 -285.75 -90.978 55.758 -625.22 -34.202 1312 -527.2 -4.5829 1005.9 -640.75 -10.778 944.83 -828.31 -690.06 382.25 -484.09 -378.77 1336.2 -435.37 -290.16 1550.3 -522.7 -414.58 982.86 -264.36 -532.97 1149.6 -300.35 -193.33 1624.6 -185.1 -284.39 950.69 -210.81 -410.42 1030.6 -333.19 -384.11 863.7 -915.98 -655.43 430.62 -529.58 -519.31 493 -882.29 -740.47 257.66 -694.47 -630.14 439.53 -338.03 -356.45 1424.5 -435.29 -468.56 591.44 -890.77 -697.36 167.19 -294.41 -428.75 1368.6 -209.03 -313.25 1055.4 -219.21 -388.19 947.55 -300.18 -171.77 1151.7 -547.06 -331.83 1242.1
129 1.0667 -476.45 -257.32 1409.4 -320.8 -151.83 1332.9 -438.17 -176.27 101.67 -475.26 -141.56 1542.8 -588.39 -240.6 977.51 -703.47 -16.04 1121.4 -365.87 -76.127 1605.1 -508.95 40.397 887.46 -651.28 -22.066 1006.6 -431.81 -129.59 874.9 -463.55 -257.32 31.91 -448.44 -130.1 449.88 -366.12 -85.717 30.977 -463.38 -130.19 319.78 -488.75 -69.846 1394.5 -451.58 -77.994 691.08 -283.8 -89.96 50.666 -608.76 -20.453 1292.5 -494.53 14.003 992.7 -604.34 5.2618 926.42 -796.99 -673.85 390.73 -467.62 -364.25 1330.5 -419.76 -275.91 1545.6 -503.01 -401.51 975.73 -250.61 -523.04 1145.9 -284.82 -179.07 1621.3 -165.66 -291.35 929.13 -193.84 -410 1018.7 -312.82 -369.26 860.05 -884.92 -639.31 441.82 -497.24 -499.7 485.27 -854.28 -727.49 269.63 -662.22 -612.66 440.63 -321.31 -343.12 1419.2 -407.79 -450.48 583.89 -865.31 -685.82 178.9 -278.88 -416.28 1363.5 -190.36 -310.62 1035.6 -202.24 -393.36 934.06 -281.42 -159.13 1147.4 -529.49 -318.68 1235.7
130 1.0750 -459.98 -242.47 1403.1 -303.06 -137.74 1328.3 -432.23 -172.37 101.16 -459.31 -126.88 1537.7 -570.31 -229.31 974.11 -675.72 -4.498 1095.9 -350.17 -61.699 1601 -470.51 58.898 873.97 -617.33 -8.1473 984.98 -413.9 -115.51 873.63 -462.45 -255.96 33.862 -429.69 -117.97 448.87 -365.44 -84.274 30.128 -448.44 -119.41 319.19 -474.33 -55.419 1383.3 -430.11 -66.027 690.91 -281.17 -87.923 46.847 -591.7 -6.9592 1272.3 -461 32.08 979.38 -567.26 21.896 907.75 -765 -656.37 398.62 -450.9 -350.33 1324.4 -404.14 -261.65 1540.4 -483.07 -388.95 968.09 -236.44 -512.69 1141.1 -269.2 -164.81 1617.3 -146.65 -299.5 908 -177.63 -410.08 1006 -295 -351.86 853.09 -851.99 -622.34 451.84 -464.4 -479.25 477.04 -823.9 -713.31 281.17 -629.38 -594.16 440.97 -304.34 -330.22 1413.2 -379.87 -431.81 576 -837.05 -673.77 189.51 -263.01 -403.97 1357.7 -171.77 -309.51 1015.7 -185.78 -399.98 920.73 -262.58 -146.14 1142.7 -511.75 -306.03 1229.3
131 1.0833 -442.93 -227.45 1396.7 -285.41 -123.57 1323.3 -424.93 -168.72 101.25 -442.84 -112.2 1532.1 -551.56 -217.69 969.96 -646.69 7.2986 1070.4 -334.38 -47.187 1596.8 -431.38 78.078 862.34 -581.51 6.2802 963.34 -396.84 -99.89 873.72 -461 -254.86 35.135 -412.88 -107.1 445.9 -363.91 -84.189 30.383 -433.51 -108.72 315.88 -459.98 -41.076 1371.7 -409.57 -55.164 689.72 -279.72 -86.65 45.319 -573.79 5.8559 1251.4 -426.21 49.733 966.31 -528.81 38.445 889.16 -731.82 -637.44 405.5 -434.01 -336.67 1318.3 -387.93 -246.97 1534.7 -463.89 -377.49 960.96 -221.84 -502.59 1136 -253.5 -150.56 1613.3 -127.73 -310.11 887.46 -161.42 -410.59 993.38 -275.82 -335.74 847.49 -817.53 -604.6 460.49 -430.96 -458.12 468.56 -791.39 -697.02 291.44 -595.09 -574.64 440.63 -287.19 -317.32 1406.9 -351.86 -412.63 568.28 -806.16 -660.78 198.68 -246.54 -391.16 1351.5 -153.36 -309.85 995.25 -170.08 -407.2 907.92 -243.91 -132.05 1137.7 -494.02 -293.22 1223.3
132 1.0917 -425.36 -212.42 1391 -267.67 -109.23 1318.1 -421.03 -167.61 101.59 -426.12 -97.598 1526 -532.97 -206.48 965.63 -615.46 20.283 1045 -318.51 -32.844 1592.5 -391.5 96.41 851.48 -544.34 21.217 942.54 -380.63 -84.953 873.38 -460.83 -254.18 35.305 -397.95 -97.513 441.4 -362.98 -84.359 30.892 -421.03 -100.65 311.72 -445.05 -26.394 1360 -391.67 -43.537 686.67 -278.11 -87.074 44.471 -554.53 18.841 1229.5 -390.14 66.961 955.19 -488.41 54.74 871.76 -697.11 -617.58 411.61 -416.62 -323.18 1312.6 -371.3 -232.54 1528.9 -445.13 -366.29 953.75 -207.08 -492.23 1130.9 -237.88 -136.04 1609.1 -109.73 -321.99 867.6 -145.21 -411.69 981.24 -257.32 -320.38 840.96 -781.8 -585.5 467.96 -396.76 -436.31 460.92 -756 -679.2 300.35 -559.62 -553.34 439.7 -269.2 -303.83 1400.2 -323.26 -393.7 560.98 -773.23 -646.27 206.48 -229.4 -377.83 1344.8 -134.6 -311.47 975.22 -155.39 -414.49 896.37 -225.41 -117.8 1132.3 -476.02 -280.91 1217.3
133 1.1000 -407.37 -198.34 1385.9 -249.6 -95.052 1312.9 -420.44 -167.7 100.74 -409.32 -83.171 1520.2 -514.64 -195.28 961.38 -581.51 33.693 1020.3 -302.04 -18.756 1587.7 -350.25 113.13 841.8 -504.62 36.154 923.87 -362.39 -71.798 872.87 -461.68 -254.43 35.39 -384.96 -89.281 436.73 -362.56 -85.462 30.722 -411.52 -95.137 308.07 -429.18 -12.56 1349.1 -377.24 -31.656 680.56 -277.26 -87.584 43.707 -533.14 31.91 1208 -352.63 83.171 945.85 -445.64 71.459 856.91 -660.7 -596.62 416.28 -398.37 -310.11 1307.5 -354.49 -217.86 1522.9 -426.29 -354.92 946.36 -191.55 -481.37 1126.1 -222.18 -122.21 1604.9 -93.355 -334.21 849.78 -130.1 -413.31 969.87 -237.72 -304.76 835.19 -745.65 -565.22 473.56 -362.3 -413.82 453.79 -718.58 -660.53 306.88 -522.96 -530.93 438.43 -250.28 -289.99 1393.7 -293.81 -374.44 554.19 -738.01 -629.47 212.68 -211.41 -364.17 1337.8 -116.52 -313.93 955.87 -142.15 -422.05 885.68 -206.57 -103.45 1126.9 -458.03 -268.61 1212.1
134 1.1083 -389.37 -184.93 1380.5 -231.43 -80.794 1307.4 -420.01 -167.78 99.38 -392.43 -68.488 1514.7 -496.39 -183.65 957.82 -545.19 47.781 997.96 -285.24 -5.0072 1583 -307.65 131.12 836.29 -463.21 51.515 908 -343.72 -58.983 871.25 -461.94 -255.88 35.814 -374.1 -83.171 432.23 -362.64 -85.632 30.468 -404.4 -91.318 304.76 -413.31 0.16974 1339.6 -363.57 -20.623 672.92 -277.43 -88.008 43.283 -509.72 44.895 1187.8 -313.76 99.295 939.15 -400.75 88.008 845.28 -623.27 -574.64 419.08 -379.87 -298.06 1303 -337.6 -203.09 1517.1 -408.04 -343.63 939.91 -175.59 -470.51 1122 -205.97 -108.55 1600.4 -77.484 -348.38 835.78 -115.67 -415.26 960.11 -218.37 -289.99 829.41 -709.33 -543.75 477.47 -327.25 -390.82 447.08 -680.47 -640.24 311.13 -485.44 -507.85 436.39 -231.27 -275.99 1387.8 -263.94 -354.32 548.25 -699.99 -610.88 217.35 -192.74 -350.17 1331.4 -99.974 -317.32 937.88 -129.08 -430.2 876.43 -187.56 -88.942 1121.4 -440.04 -256.05 1207.3
135 1.1167 -371.98 -172.11 1375.8 -213.7 -66.027 1302.2 -417.89 -168.04 98.362 -375.12 -53.891 1509.8 -477.89 -171.35 955.27 -507.34 62.802 979.38 -268.18 8.7414 1578.7 -264.45 150.3 836.12 -420.18 67.47 895.78 -327.17 -45.914 866.93 -461.94 -256.81 36.323 -365.53 -77.994 427.56 -363.74 -84.953 30.383 -398.88 -88.263 300.86 -397.35 12.9 1331.8 -350.17 -10.608 665.28 -277.43 -88.432 43.028 -484.77 57.71 1170.8 -273.87 115.84 936.35 -354.32 104.22 837.65 -585.5 -551.64 420.27 -361.54 -285.75 1298.7 -320.55 -188.58 1512.2 -390.31 -331.83 934.48 -159.64 -460.24 1119.7 -189 -94.458 1595.8 -62.463 -362.22 825.6 -101.59 -417.38 952.73 -200.12 -276.33 824.41 -673 -522.11 479.42 -291.44 -367.31 441.82 -640.75 -618.09 312.99 -448.19 -484.51 434.78 -212.68 -261.73 1382.2 -233.56 -334.55 543.41 -660.53 -590.09 219.81 -173.72 -335.65 1326.3 -83.85 -321.39 923.11 -116.44 -437.75 870.41 -168.8 -73.92 1116.2 -421.54 -243.4 1203.1
136 1.1250 -354.32 -159.04 1372.2 -195.79 -50.921 1298.2 -416.28 -167.7 97.174 -357.8 -39.548 1505.6 -459.81 -158.62 953.24 -469.06 77.569 965.54 -250.87 23.084 1574.9 -220.23 168.12 840.11 -377.07 81.813 887.89 -311.38 -33.098 859.54 -462.02 -256.98 36.748 -358.57 -72.223 423.07 -364.76 -84.189 30.213 -394.55 -84.613 297.12 -381.23 25.8 1326.1 -338.88 -0.50921 658.74 -277.6 -88.432 43.028 -459.05 69.507 1158.2 -233.73 131.8 938.38 -307.82 119.58 834.59 -547.82 -527.71 420.44 -343.12 -272.43 1295.8 -303.66 -174.15 1507.6 -372.32 -319.36 930.41 -144.53 -450.48 1119.9 -171.86 -79.946 1591.6 -50.496 -373.16 816.09 -88.008 -419.25 948.57 -182.3 -262.75 820.84 -636.85 -500.3 479.25 -255.28 -343.12 438.43 -600.02 -594.25 312.74 -411.02 -460.49 433.25 -194.18 -247.22 1378 -202.92 -315.2 540.52 -619.88 -566.15 219.81 -154.88 -321.23 1322.3 -68.573 -325.38 913.01 -105.24 -443.94 868.88 -150.64 -58.898 1111.4 -403.04 -230.84 1199.4
137 1.1333 -336.5 -145.46 1369.3 -177.71 -35.899 1295.1 -416.11 -167.19 96.155 -340.24 -25.376 1502.5 -441.99 -146.65 951.79 -430.87 91.233 955.7 -233.05 36.833 1571.7 -174.91 184.16 849.1 -333.7 95.561 884.92 -294.83 -19.774 851.48 -462.19 -257.06 37.087 -352.71 -65.688 419.16 -365.36 -83.85 30.383 -390.73 -79.776 294.32 -364.08 39.124 1322.7 -330.22 9.1657 653.91 -277.86 -88.263 42.943 -433.51 80.709 1149.5 -194.6 147.33 945.85 -260.97 134.09 836.37 -509.89 -503.01 418.74 -325.13 -258.34 1293.6 -286.77 -160.06 1503.8 -354.24 -306.54 927.35 -130.27 -441.99 1123.1 -154.21 -66.027 1588.2 -37.681 -383.94 814.31 -75.108 -420.18 948.48 -164.22 -249.6 818.81 -600.7 -477.55 476.45 -219.38 -318.68 436.31 -558.69 -569.04 310.02 -373.59 -435.37 431.13 -175.34 -232.54 1375.2 -172.11 -295.59 539.59 -577.44 -539.34 216.92 -136.3 -306.71 1320.2 -54.231 -328.69 907.92 -94.628 -448.87 870.91 -132.56 -43.452 1107.6 -384.79 -218.37 1196.5
138 1.1417 -317.75 -131.8 1367.1 -159.21 -20.962 1293.1 -416.45 -166.43 95.731 -321.73 -11.287 1500.1 -424.25 -135.36 950.61 -392.51 104.47 949.42 -214.55 50.327 1569.5 -128.83 199.1 864.21 -289.31 110.67 886.45 -279.13 -7.1289 844.61 -462.36 -257.23 37.512 -347.87 -59.068 415.51 -365.87 -83.255 30.468 -387.34 -74.599 291.86 -345.92 52.873 1321.1 -322.16 18.756 650 -278.2 -88.263 42.858 -408.13 92.251 1143.7 -156.58 161.5 958.5 -213.7 148.86 843.67 -472.63 -478.06 414.83 -306.71 -244.08 1292.1 -269.29 -146.4 1501.2 -335.65 -293.73 925.91 -116.61 -434.35 1128.6 -135.79 -52.788 1585.4 -25.715 -389.12 817.79 -62.208 -419.93 952.13 -145.29 -236.19 818.72 -564.97 -454.38 471.44 -183.74 -294.32 435.88 -517.19 -542.65 304.51 -336.08 -409.83 429.26 -156.58 -217.43 1373.8 -141.14 -275.74 540.52 -533.14 -510.74 211.41 -118.05 -292.29 1320.3 -39.718 -330.31 908.17 -82.407 -451.41 876.94 -114.49 -28.091 1105.2 -366.04 -205.04 1194.7
139 1.1500 -298.48 -118.05 1365.8 -140.54 -6.0256 1292 -417.13 -165.66 95.561 -302.81 1.952 1498.4 -406.43 -124.33 949.93 -354.66 117.54 946.62 -195.79 63.651 1568.3 -83.086 213.1 885.51 -245.78 125.86 892.81 -264.28 4.8375 839.85 -462.87 -257.49 38.53 -344.22 -53.212 412.8 -366.29 -82.916 30.468 -384.88 -70.525 289.91 -326.91 66.282 1321 -313.67 27.752 646.52 -278.71 -88.263 42.858 -382.75 105.07 1140.7 -119.92 174.15 976.07 -167.36 163.71 856.49 -435.8 -452.77 408.72 -287.53 -229.91 1291.6 -250.53 -132.9 1499.5 -316.22 -281.17 925.31 -103.45 -427.31 1136.9 -117.29 -40.058 1584 -12.476 -389.88 825.93 -49.393 -417.72 959.6 -125.86 -221.84 819.65 -530.51 -431.38 463.89 -148.18 -270.05 437.66 -475.6 -514.64 296.44 -299.16 -383.94 427.65 -137.32 -202.33 1373.7 -109.9 -255.37 542.65 -488.33 -480.01 203.43 -100.4 -278.11 1322.4 -24.781 -330.05 911.65 -70.016 -450.82 885.6 -96.325 -13.07 1104 -346.94 -191.29 1193.6
140 1.1583 -279.05 -104.39 1365.8 -121.62 7.4684 1291.3 -417.13 -164.64 95.222 -283.63 14.597 1497.9 -388.27 -113.04 949.59 -317.58 131.29 947.21 -177.03 76.296 1568.3 -38.785 226.17 912.58 -204.02 140.03 903.42 -248.41 17.907 837.9 -463.63 -257.23 39.548 -342.44 -49.054 411.95 -366.8 -82.746 30.128 -384.03 -68.488 289.14 -307.22 79.606 1322.6 -304.93 36.154 643.89 -279.13 -88.093 42.773 -357.12 119.75 1140.6 -85.207 185.78 998.05 -122.89 178.56 874.22 -399.39 -426.8 400.66 -267.93 -215.99 1291.9 -230.67 -119.83 1498.6 -295.68 -268.44 925.06 -90.894 -420.94 1147.4 -98.107 -27.327 1584.1 0 -387.93 835.78 -36.663 -413.82 970.29 -106.51 -206.48 821.35 -496.39 -407.71 453.28 -112.7 -246.29 441.14 -434.78 -484.77 285.33 -262.58 -357.8 425.27 -117.8 -188.07 1374.6 -78.842 -234.91 546.8 -443.01 -447 193.33 -82.831 -264.53 1326.4 -10.608 -327.42 923.53 -58.219 -447.51 896.12 -77.4 0.84868 1103.5 -327.51 -177.2 1193.1
141 1.1667 -259.7 -91.148 1366.8 -101.84 20.623 1292.2 -417.3 -163.88 95.476 -263.68 27.073 1498.6 -369.68 -100.74 950.01 -281.08 146.31 951.37 -157.68 88.772 1569.6 2.4612 238.48 944.5 -163.88 154.04 916.74 -231.6 31.91 838.24 -464.14 -256.22 40.991 -341 -47.017 412.63 -367.39 -82.407 29.619 -383.86 -67.47 289.48 -287.19 93.015 1325.8 -295.76 44.641 641.69 -279.47 -88.008 42.773 -331.32 136.55 1142.8 -53.212 196.04 1023.4 -80.794 194.69 896.54 -363.57 -400.66 391.07 -248.49 -201.9 1292.6 -210.22 -107.36 1498.9 -274.89 -255.37 925.57 -77.994 -414.24 1159.5 -78.333 -14.512 1585.2 12.391 -382.16 847.92 -23.933 -408.89 983.11 -86.141 -191.46 823.73 -462.28 -382.75 439.62 -77.569 -222.35 446.24 -394.04 -453.28 271.41 -227.02 -332 422.9 -98.107 -174.4 1376.3 -47.611 -214.29 552.91 -397.27 -412.2 180.94 -65.094 -251.04 1332.1 3.3947 -322.92 936.77 -46.168 -441.82 908.34 -57.795 14.852 1104.2 -307.82 -162.86 1193.6
142 1.1750 -239.84 -77.994 1368.7 -80.709 33.693 1294.8 -418.14 -163.46 96.665 -243.4 38.954 1500.7 -350.67 -87.584 950.86 -245.86 162.44 957.99 -137.74 100.74 1572.3 39.888 249 980.39 -126.03 168.46 933.29 -214.29 45.914 839.51 -464.74 -254.69 43.198 -338.62 -46.338 413.9 -367.56 -82.322 29.534 -382.92 -67.215 291.1 -266.4 106.59 1330.9 -286 52.194 640.16 -279.55 -88.008 42.773 -304.51 154.97 1147.8 -24.102 204.87 1051 -40.991 210.9 923.11 -327.42 -373.76 379.44 -228.8 -187.56 1293.9 -189.17 -95.137 1500.3 -253.92 -241.53 926.76 -64.839 -407.11 1173.2 -58.05 -2.3763 1587.4 25.63 -374.1 861.75 -11.033 -402.87 997.11 -65.179 -176.27 827.21 -428.24 -357.04 423.49 -42.943 -199.02 452.69 -352.12 -420.94 255.62 -192.31 -307.05 420.44 -78.927 -160.99 1379 -16.38 -193.67 560.64 -350.42 -375.96 167.27 -47.696 -237.8 1339 17.907 -316.13 952.13 -33.438 -433.68 921.67 -37.681 28.855 1106.3 -287.87 -148.35 1194.9
143 1.1833 -219.98 -64.754 1371.2 -58.983 46.762 1298.4 -418.65 -162.61 99.295 -223.03 50.157 1504 -330.9 -73.835 952.05 -212.34 180.94 966.82 -117.46 112.2 1575.8 73.411 258 1018.1 -90.724 183.91 952.56 -196.64 60.256 840.36 -465.25 -253.5 46.592 -335.57 -45.829 416.11 -367.48 -82.322 29.704 -381.48 -67.47 294.07 -245.01 120.09 1337.3 -275.74 58.559 638.89 -279.47 -88.093 42.943 -277.43 173.55 1155.4 1.6125 212.85 1079.7 -3.9888 225.75 951.79 -291.18 -346.09 366.29 -209.11 -172.96 1295.6 -167.53 -83.51 1502.9 -232.28 -227.28 929.13 -51.769 -399.56 1188.2 -37.087 9.2506 1590.6 39.294 -364.17 877.03 2.0368 -394.81 1012.6 -44.386 -160.06 831.54 -394.64 -330.56 404.82 -9.6749 -176.53 460.58 -309 -387.17 238.9 -158.11 -281.93 416.62 -60.002 -147.76 1383.1 14.428 -173.05 569.89 -302.04 -337.86 153.44 -30.892 -224.65 1346.6 32.335 -307.65 969.36 -20.708 -423.58 936.18 -16.38 42.773 1109.8 -267.76 -133.5 1196.5
144 1.1917 -199.86 -51.26 1374.5 -37.512 59.408 1302.6 -418.48 -161.42 103.45 -201.9 60.935 1508.4 -310.62 -58.898 953.92 -180.77 201.65 978.02 -96.665 123.57 1580.2 103.11 265.72 1055.9 -57.625 199.95 973.44 -178.22 74.769 840.11 -465.08 -252.57 51.6 -332.26 -45.235 420.1 -367.48 -82.916 29.449 -379.27 -67.385 298.48 -223.29 133.33 1344.9 -263.68 64.415 638.8 -279.55 -88.517 42.858 -250.7 192.31 1164.6 25.206 220.15 1108.2 29.619 239.16 981.24 -255.54 -317.75 351.1 -189.93 -158.19 1298.3 -145.21 -72.223 1506.7 -210.81 -212.42 932.61 -38.106 -391.24 1203.6 -15.785 21.217 1594.4 53.297 -352.12 892.98 15.785 -385.13 1028.7 -24.018 -143.09 836.12 -360.77 -302.81 383.52 22.32 -154.71 469.06 -265.55 -352.54 221 -124.33 -257.06 411.69 -41.076 -135.02 1388.1 44.98 -152.42 579.99 -252.06 -298.31 139.1 -13.918 -211.75 1355.2 46.847 -297.38 987.52 -7.2986 -412.46 951.28 5.0921 56.522 1114.3 -247.56 -117.71 1198.8
145 1.2000 -179.58 -37.766 1378.4 -15.701 71.289 1307.7 -416.79 -159.3 108.97 -180.34 71.544 1513.7 -289.91 -43.198 956.04 -150.56 223.8 991.26 -75.532 134.86 1585.7 128.83 271.41 1092.8 -27.243 216.5 995.16 -159.21 89.705 839.94 -464.14 -251.29 58.729 -327.51 -44.641 425.7 -367.39 -84.019 28.516 -376.13 -66.621 304.34 -201.05 146.31 1353.9 -249.6 70.865 640.16 -279.64 -88.602 42.604 -224.05 210.9 1175.3 45.659 226.43 1136.2 60.256 251.72 1011.1 -219.3 -288.72 334.55 -170.5 -143.34 1301.7 -123.06 -61.275 1511.2 -189.34 -197.57 936.69 -24.272 -382.16 1219.1 5.771 32.759 1598.6 68.064 -338.2 909.19 29.958 -374.52 1044.8 -3.0552 -125.6 841.8 -326.49 -273.87 360.26 53.891 -133.33 476.87 -221.59 -317.07 201.99 -91.233 -232.11 406.18 -22.151 -122.46 1393.6 74.684 -132.22 590.43 -200.29 -257.91 124.59 2.7158 -198.68 1364.4 61.784 -286 1005.9 6.7046 -400.32 966.65 26.733 69.931 1119.8 -227.53 -101.33 1201.8
146 1.2083 -159.64 -24.272 1382.7 6.1954 82.746 1313.6 -414.07 -157.18 116.27 -158.79 82.237 1519.6 -268.86 -27.243 958.67 -121.96 246.88 1005.6 -53.976 145.38 1591.4 151.15 275.48 1128.4 0.084868 233.73 1016.9 -139.1 104.56 841.04 -462.45 -249.94 68.404 -321.48 -42.858 432.74 -367.14 -84.953 28.855 -372.32 -65.179 311.8 -178.22 159.04 1363.7 -233.56 78.333 642.62 -279.3 -88.602 42.519 -198.17 229.06 1187 64.075 232.2 1162.9 87.499 264.36 1040.6 -182.38 -258.76 316.73 -151.15 -128.41 1305.5 -101.16 -50.581 1515.9 -167.53 -182.3 941.02 -10.269 -372.32 1234.3 28.006 43.707 1603.3 83.68 -323.26 925.23 44.471 -363.32 1060.6 18.756 -108.46 848.68 -291.52 -243.74 335.48 84.274 -112.45 484.26 -175.76 -279.81 182.98 -59.068 -206.74 399.39 -3.5645 -109.9 1399.5 103.54 -113.04 601.46 -146.91 -216.84 111.52 19.604 -185.35 1373.8 77.145 -273.95 1023.3 21.217 -387 982.01 48.714 82.916 1125.7 -207.25 -84.783 1205.4
147 1.2167 -139.35 -10.099 1387.3 28.346 94.458 1319.8 -410.68 -155.05 124.93 -136.89 92.761 1526 -247.56 -11.033 961.98 -95.052 270.47 1021.2 -31.995 155.48 1597 171.09 279.05 1162.4 25.376 251.29 1038 -118.05 119.24 844.1 -459.56 -248.15 81.303 -314.69 -39.464 441.4 -366.46 -85.462 30.468 -367.31 -63.142 320.8 -154.97 172.03 1374.1 -216.07 86.65 645.93 -279.05 -89.026 42.519 -173.3 246.8 1199.6 80.37 237.97 1187.7 112.2 277.26 1068.7 -145.04 -227.96 298.57 -131.72 -113.21 1309.8 -79.012 -39.888 1521 -145.38 -166.34 945.85 3.4796 -361.71 1248.7 50.496 54.315 1608.1 100.14 -307.82 941.02 59.408 -350.84 1076 40.567 -91.657 856.23 -255.11 -212.34 309.6 113.21 -92.421 491.22 -128.32 -241.62 164.9 -27.582 -181.96 392.6 14.852 -97.004 1405.9 131.46 -94.458 612.83 -92.676 -175.17 99.55 36.323 -171.86 1383.3 92.761 -261.48 1040.1 36.154 -372.66 996.6 70.78 95.901 1131.9 -187.13 -67.979 1209.7
148 1.2250 -118.73 4.0737 1392.3 50.666 106.42 1326.2 -406.43 -152 135.45 -114.66 103.03 1532.6 -225.24 4.7526 965.71 -69.931 293.73 1037.3 -9.5901 166.17 1603.2 188.41 282.1 1193.9 48.629 269.37 1059.1 -96.665 133.58 848.51 -454.55 -245.18 96.495 -306.8 -34.881 451.41 -365.36 -85.632 32.929 -361.37 -60.256 331.41 -131.63 184.84 1384.6 -197.66 95.561 650.17 -278.28 -89.366 43.028 -149.11 264.28 1212.8 95.137 244.42 1210 135.02 289.74 1095.2 -106.51 -196.64 279.72 -112.2 -97.428 1314.5 -56.607 -29.279 1526.2 -123.23 -150.56 950.61 17.822 -350.5 1262.2 73.156 64.754 1612.9 116.78 -291.95 955.78 74.599 -337.6 1090.3 62.463 -74.769 864.8 -217.01 -179.84 283.12 140.8 -73.75 497.41 -80.03 -203.09 147.25 4.0737 -157.6 384.88 33.777 -83.765 1412.8 158.19 -75.957 623.52 -37.681 -132.9 89.366 53.382 -158.19 1393.1 108.72 -248.41 1055.8 51.6 -357.89 1010 93.015 108.97 1138.3 -166.85 -51.006 1214.7
149 1.2333 -98.277 18.077 1397.9 73.156 118.39 1332.3 -400.75 -147.59 146.91 -92.421 113.55 1539.2 -202.41 20.453 969.79 -46.338 315.88 1053.1 12.9 176.53 1609.5 203.77 284.9 1222.3 70.356 286.6 1078.9 -74.344 148.86 853.86 -447.59 -241.19 112.03 -297.63 -29.025 462.28 -363.49 -85.717 35.39 -354.49 -56.013 343.38 -108.21 197.15 1395.2 -178.48 105.15 655.77 -277.86 -88.772 43.452 -126.28 281.08 1226.3 108.89 251.29 1229.6 156.16 301.79 1120.1 -66.621 -164.81 260.71 -92.421 -81.388 1320 -34.287 -18.671 1531.2 -100.82 -134.77 956.04 32.335 -338.96 1274.9 95.731 75.108 1617.9 134.01 -275.57 969.19 90.384 -324.11 1103.1 85.886 -58.135 874.05 -177.29 -146.74 256.9 166.68 -56.013 502.59 -30.892 -163.96 130.19 36.239 -132.9 375.54 53.042 -70.101 1419.8 183.82 -58.135 633.45 16.889 -90.724 80.794 71.204 -144.45 1402.6 125.52 -234.74 1070.9 67.725 -342.44 1021.8 114.83 122.12 1144.9 -146.4 -34.541 1219.7
150 1.2417 -77.569 31.571 1403.8 95.476 131.04 1338.5 -393.45 -142.24 158.96 -70.016 124.16 1545.5 -179.5 36.154 974.71 -23.763 337.52 1067.8 35.814 186.37 1615.6 217.86 287.7 1247 90.894 302.55 1097.2 -51.006 164.3 859.03 -438.77 -236.53 127.98 -286.85 -21.472 473.56 -361.71 -85.292 37.851 -346.01 -50.666 356.11 -84.613 208.94 1405.6 -158.79 116.1 661.88 -278.71 -87.923 43.198 -104.22 297.29 1239.8 122.21 258.85 1247.2 176.1 312.57 1143.1 -25.8 -133.07 241.79 -72.138 -65.518 1326.1 -11.966 -7.4684 1536.1 -78.418 -119.15 962.06 47.696 -326.23 1285.9 118.39 85.207 1623.6 151.15 -258.85 981.07 106.42 -310.02 1114.1 108.21 -42.349 881.61 -135.53 -113.64 231.18 191.21 -39.294 506.75 19.689 -124.16 114.66 68.573 -108.8 365.7 73.071 -56.352 1426.9 208.44 -41.416 642.53 71.204 -48.46 73.071 89.451 -130.19 1411.5 143.34 -220.66 1084.4 83.765 -326.15 1032.2 136.47 135.7 1150.8 -125.52 -18.331 1225.4
151 1.2500 -56.352 44.895 1410.3 117.88 143.51 1344.6 -384.88 -135.45 171.43 -47.611 135.36 1551.7 -155.99 52.533 980.22 -2.0368 358.23 1081.4 58.644 196.3 1621.8 231.69 291.69 1267.6 110.41 318.08 1113.9 -27.412 179.5 863.7 -427.56 -231.27 145.38 -274.04 -12.136 485.02 -360.01 -84.698 40.821 -335.57 -44.216 368.75 -61.105 220.32 1415.5 -138.76 128.74 668.25 -279.64 -87.499 43.028 -82.916 312.74 1252.7 135.28 267.84 1263 195.03 323.26 1163 15.955 -101.16 223.03 -51.43 -50.072 1332.3 10.099 3.3947 1541 -56.098 -103.54 968.17 63.651 -312.91 1295.5 141.22 95.052 1628.9 168.8 -241.87 991.77 123.4 -295 1123.7 129.08 -27.412 887.55 -91.657 -80.2 205.47 214.29 -23.848 510.06 71.459 -84.274 100.65 101.16 -85.547 355.6 93.1 -42.604 1433.4 231.6 -25.8 650.77 125.77 -6.7046 67.215 108.97 -115.93 1419.4 162.01 -205.72 1095.9 100.48 -309.17 1040.8 158.36 149.62 1156.9 -104.47 -2.9704 1231.7
152 1.2583 -34.966 57.965 1416.8 140.12 155.99 1350.7 -375.46 -127.39 183.91 -25.376 146.82 1557.8 -132.05 68.828 986 17.737 377.32 1094.5 81.643 206.57 1628.1 246.12 297.12 1283.5 129.68 332.43 1128.4 -4.1585 194.43 868.28 -415.17 -224.31 163.54 -259.61 -2.1217 496.14 -358.4 -83.68 44.641 -323.43 -36.154 381.57 -37.851 231.6 1425 -118.05 142.66 675.04 -279.72 -86.735 43.707 -62.463 327.51 1265.1 148.6 277.77 1276.5 212.76 333.19 1178.7 59.408 -69.337 204.36 -30.552 -34.796 1338.5 32.165 14.343 1545.9 -33.523 -87.329 974.62 79.606 -299.07 1303.2 164.22 104.9 1633.9 188.07 -224.98 1001.1 140.97 -279.22 1131.9 150.98 -12.645 893.91 -45.914 -46.762 180.17 236.02 -8.9111 511.67 124.08 -44.81 87.414 134.52 -62.293 344.65 113.72 -28.77 1439.7 253.08 -11.203 657.39 181.28 33.777 63.906 129.17 -101.67 1426.8 180.85 -190.02 1106.1 118.31 -291.95 1048.5 180.43 163.37 1163.1 -82.831 12.221 1238.1
153 1.2667 -13.324 71.034 1422.9 161.93 168.72 1356.1 -364.59 -118.73 196.04 -2.9704 158.53 1563.4 -108.04 84.698 991.43 36.323 394.98 1106.6 104.47 217.35 1634.4 261.48 303.66 1294.9 147.16 345.58 1139.7 18.841 209.11 873.29 -402.36 -215.73 181.79 -242.72 8.3171 506.07 -356.36 -81.898 50.327 -309.17 -27.243 393.7 -14.937 243.15 1433.9 -97.344 156.92 681.57 -278.88 -85.038 44.81 -42.604 341.51 1276.7 163.71 288.3 1287.8 229.74 342.7 1190.3 104.64 -37.596 187.05 -9.5901 -19.52 1344.6 54.57 25.291 1550.2 -10.693 -71.289 981.07 96.325 -284.56 1309.1 186.79 114.83 1638.6 208.52 -206.99 1009.3 159.3 -262.75 1138.8 174.15 1.6974 900.28 1.0184 -12.985 155.48 256.81 4.8375 511.67 178.22 -5.4315 76.466 168.21 -38.954 332.94 135.02 -14.937 1445.5 273.19 2.0368 661.63 237.12 74.09 64.075 149.37 -87.838 1433.2 200.03 -174.15 1115 137.23 -273.95 1055.3 202.66 176.36 1168.9 -60.85 28.091 1244.2
154 1.2750 8.2322 84.189 1428.7 183.4 181.45 1360.9 -352.63 -109.65 208.01 19.35 170.08 1568.4 -84.104 101.16 996.86 54.146 411.61 1116.7 127.05 227.96 1640.2 277.86 311.47 1302 162.86 359.42 1147.4 42.943 224.22 879.23 -389.12 -206.48 200.37 -223.71 20.708 514.47 -353.31 -79.861 58.135 -292.96 -16.719 404.9 6.9592 254.52 1441.1 -75.957 171.6 687.51 -276.75 -82.407 46.253 -22.66 355.6 1287.5 180.17 300.18 1296.4 246.54 352.37 1198.1 150.98 -6.7046 170.58 11.712 -4.3283 1350.7 76.211 36.663 1554 11.627 -55.419 986.76 113.13 -269.71 1313.3 209.28 125.27 1643.2 229.91 -188.66 1016.5 178.48 -246.29 1144.4 197.06 16.634 905.97 50.581 20.538 132.14 276.25 18.331 510.57 233.47 32.844 68.658 201.99 -15.87 321.39 156.41 -1.6125 1450.6 291.69 15.022 664.01 292.62 113.21 66.961 170.33 -74.344 1438.7 220.4 -157.68 1122.1 156.92 -255.37 1060.2 224.56 189.68 1174.1 -39.124 43.368 1249.8
155 1.2833 29.958 97.344 1433.9 204.79 194.26 1365.9 -339.73 -99.72 219.98 41.925 181.36 1573.2 -60.256 118.22 1002 70.695 428.07 1124.8 150.05 238.39 1644.6 295.68 320.63 1304.9 178.22 373.25 1152.7 67.385 239.92 885.17 -374.86 -196.98 219.04 -202.83 35.22 521.51 -349.83 -77.145 68.404 -275.06 -4.498 414.41 29.279 265.55 1447.3 -54.146 187.22 692.95 -274.46 -79.606 47.781 -2.546 369.35 1297.3 198.08 313.42 1302.2 263.09 362.39 1201.9 198.17 23.933 156.16 33.523 10.184 1356.1 98.107 48.12 1557.8 33.947 -39.464 991.26 130.36 -254.69 1315.8 231.6 135.79 1647.2 252.14 -170.41 1022.3 197.57 -229.74 1148.1 220.15 31.486 910.29 101.67 53.976 111.26 293.47 31.571 508.44 289.06 69.507 64.669 235.42 6.7046 310.36 177.97 11.033 1454.8 309.17 27.582 664.6 347.19 150.13 72.647 191.55 -61.36 1442.9 241.11 -140.71 1127.9 177.63 -236.61 1063.4 246.54 203.43 1179.2 -17.313 58.135 1255
156 1.2917 51.769 109.9 1438.3 226.51 206.99 1370.4 -326.23 -89.536 232.11 64.33 192.65 1577.4 -36.323 134.94 1006.6 86.141 445.05 1131.7 172.79 249 1648.4 314.69 331.07 1304 193.33 386.74 1155.9 91.318 255.79 890.35 -359.25 -187.47 237.8 -180.43 50.921 526.78 -346.26 -73.835 80.709 -255.37 8.8263 422.47 51.769 276.84 1452.5 -31.825 203.43 697.95 -273.95 -76.89 49.393 16.804 382.75 1305.4 216.58 326.91 1304.9 279.64 373.25 1202.2 245.86 54.231 144.61 55.334 24.951 1360.8 120.17 59.577 1561.3 56.267 -23.424 995.67 148.26 -239.67 1316 253.92 146.48 1650.9 274.97 -152.08 1026.3 217.35 -212.42 1150 243.74 46.168 913.69 152.93 86.141 93.27 309.85 44.471 505.47 344.48 103.45 64.584 268.69 29.11 300.35 199.86 23.763 1458.2 325.72 39.464 663.41 401.09 183.99 81.643 212.76 -48.969 1446.1 262.16 -123.4 1132.3 198.85 -217.52 1065 268.78 217.01 1183.9 4.0737 72.307 1259.7
157 1.3000 73.75 122.04 1442.2 248.66 219.47 1374.2 -312.57 -79.861 244.84 86.735 204.11 1581 -12.9 150.98 1010.6 100.4 460.75 1136.9 195.37 260.29 1651.9 335.06 342.61 1299.3 207.59 400.66 1157 115 271.41 894.25 -342.61 -177.8 256.56 -156.75 67.555 530.51 -342.78 -69.846 95.222 -234.57 23.339 429.26 74.175 288.64 1457.1 -8.5717 219.89 701.52 -274.89 -75.108 50.581 35.645 395.57 1311.9 235.68 340.32 1304.1 295.93 385.3 1199.2 293.22 83.34 136.47 77.23 39.039 1364.4 141.98 71.204 1564.5 78.333 -7.4684 999.49 166.6 -224.65 1314.1 276.16 157.35 1653.7 298.82 -133.07 1028.9 238.05 -194.43 1150.1 267.84 61.105 916.23 204.36 117.29 78.163 325.81 57.625 501.31 398.12 135.53 67.555 302.04 51.006 291.69 221.67 35.899 1461 341.34 50.921 660.44 453.28 215.23 93.524 233.98 -36.918 1448.2 284.39 -105.92 1135 221.17 -198.17 1065.3 291.01 230.84 1187.9 25.8 86.056 1263.5
158 1.3083 95.646 134.35 1445.6 270.56 231.77 1376.9 -298.57 -70.101 258.34 108.72 215.73 1583.8 10.099 167.36 1013.6 112.7 474.67 1140.7 217.86 272 1654.7 356.36 355.09 1290.8 221.34 415.17 1156 138.67 286.77 897.31 -325.3 -168.21 275.31 -131.97 85.123 532.89 -337.86 -64.33 111.69 -213.1 38.785 435.71 96.495 300.86 1460.4 15.446 236.19 703.72 -272.6 -71.289 63.227 54.146 407.71 1317 256.3 354.49 1300.9 311.8 399.22 1192.9 338.88 111.77 131.21 99.295 52.024 1366.8 163.88 83.086 1566.9 100.48 7.723 1002.1 185.1 -209.79 1311 298.48 168.46 1655.9 323.6 -114.06 1030.8 260.04 -176.27 1148.9 291.69 76.127 917.51 255.11 147.59 66.791 342.1 71.034 496.39 448.7 165.49 73.241 334.55 72.647 285.16 242.81 47.781 1462.6 356.45 61.954 655.94 502.5 243.49 106.93 254.94 -25.206 1449.2 307.56 -88.432 1136.4 244.67 -178.73 1064.3 312.99 244.5 1191.2 47.696 99.55 1266.2
159 1.3167 117.54 147.16 1448.2 292.29 244.42 1379 -283.37 -60.086 272.94 130.7 227.79 1585.8 33.014 183.48 1015.2 123.74 487.82 1143.6 240.26 283.8 1656.6 377.83 368.5 1279.1 234.57 429.18 1152.3 162.1 301.96 899.77 -308.07 -158.28 293.3 -106 103.28 534.41 -331.24 -57.286 128.91 -190.95 54.74 442.25 118.48 313.25 1462.5 40.482 252.91 704.91 -268.78 -64.669 76.89 72.392 419.76 1321.1 277.69 369.43 1295.3 327.17 414.41 1184.1 381.65 138.67 128.91 120.85 64.33 1367.7 185.35 94.713 1568.7 122.89 23.254 1003.5 203.85 -194.69 1306.3 320.55 179.5 1657 348.89 -95.137 1031.1 282.1 -158.36 1146.4 316.81 89.96 917.93 303.32 177.2 59.153 358.74 84.274 491.22 496.05 192.74 81.643 365.61 93.355 280.23 264.02 59.662 1463 371.72 72.817 650.6 548.08 268.69 122.21 275.91 -13.664 1448.8 331.32 -71.289 1136.6 269.12 -158.79 1061.9 335.23 257.83 1194 69.761 112.7 1268.1
160 1.3250 139.1 159.72 1449.5 313.93 256.81 1380.5 -267.33 -49.733 288.3 152.93 239.75 1586.9 55.334 199.19 1016 135.02 500.38 1145 262.41 295.17 1657.8 399.56 383.26 1263.7 247.31 443.77 1146.8 185.69 317.58 901.55 -291.1 -147.67 310.96 -78.927 121.96 535.43 -322.16 -49.054 146.4 -167.7 71.034 448.19 140.54 325.98 1463.5 66.282 269.54 705.34 -262.16 -55.843 91.403 90.469 432.49 1323.9 299.58 384.54 1286.6 341.68 430.37 1173.5 420.94 163.88 128.91 142.49 76.551 1367.6 207.08 106.42 1569.5 145.38 39.124 1003.6 223.2 -179.41 1299.8 342.7 190.87 1657.4 374.86 -75.787 1029.9 304.93 -140.8 1142.6 344.14 102.52 918.27 347.53 205.3 54.825 376.13 97.174 486.63 539 217.43 91.318 395.23 113.47 277.18 285.33 71.544 1462.5 387.85 83.765 645.17 589.15 290.84 137.91 297.12 -2.2066 1447.3 355.26 -53.891 1135.9 293.39 -138.67 1057.2 357.55 271.24 1196 91.403 125.77 1268.9
161 1.3333 160.65 172.37 1449.5 335.57 269.29 1381.2 -249.94 -38.191 303.32 175.17 251.97 1587.4 77.654 214.55 1015.6 146.31 512.01 1144.7 284.73 306.71 1658 421.03 399.3 1245.4 259.78 458.63 1139.9 209.71 333.45 902.49 -273.53 -135.45 328.61 -51.006 140.71 535.69 -309.85 -39.209 163.8 -143.17 87.923 453.96 162.35 339.47 1464 92.761 286.17 704.15 -252.65 -46.592 106.42 108.04 444.71 1325 321.82 399.81 1275.4 354.58 447.08 1160.3 455.74 187.64 129.85 164.3 88.857 1366.6 228.97 118.73 1569.3 167.61 54.655 1002.5 243.15 -164.47 1291.8 364.68 202.41 1657.5 401 -56.183 1027 328.44 -123.4 1137.3 368.5 117.03 914.11 386.83 232.03 52.958 394.38 109.9 482.64 576.34 240.6 100.48 423.32 132.39 275.48 306.71 83.171 1461.2 405.5 94.713 640.24 625.82 311.04 151.4 318.42 8.5717 1444.3 379.87 -36.323 1133.5 318 -118.39 1050.9 379.78 284.9 1197.3 113.47 138.33 1268.8
162 1.3417 182.38 184.84 1448.6 357.04 281.51 1381 -230.67 -24.781 317.66 197.23 264.53 1586.8 99.89 229.4 1013.9 157.18 522.7 1143.3 307.05 318.51 1657.3 441.74 416.62 1224.5 271.49 472.37 1131.2 233.56 349.49 902.66 -254.18 -120.85 345.92 -21.896 160.23 534.84 -294.75 -28.516 180.6 -117.46 105.83 459.56 184.33 353.22 1463.5 119.66 303.06 702.03 -240.77 -38.021 121.28 125.6 456.33 1325 344.22 415.34 1261.8 366.88 463.8 1144.7 486.46 210.05 131.04 185.78 100.65 1364.2 250.78 131.04 1568.4 189.85 69.507 1000.7 263.85 -149.71 1282.2 387.17 214.04 1656.7 427.56 -36.323 1023.1 352.63 -105.92 1130.4 391.24 132.05 910.55 420.27 257.32 52.958 413.82 123.14 479.25 608.33 262.41 107.36 449.21 150.9 274.12 327.93 94.034 1458.3 425.44 106.17 636.09 657.13 330.56 161.08 339.81 19.18 1439.7 405.24 -19.35 1130.2 343.8 -98.277 1043.7 402.44 298.06 1197.6 135.02 150.64 1267.4
163 1.3500 204.11 197.4 1446.7 378.51 293.56 1379.5 -209.88 -9.7598 331.24 219.55 277.18 1585.2 122.04 243.74 1010.7 167.87 532.97 1141 329.54 330.39 1655.7 461.68 434.95 1201 281.42 485.02 1120.1 257.23 365.61 902.06 -233.13 -104.56 363.23 8.2322 180.6 532.72 -277.86 -17.737 196.47 -90.639 124.42 464.23 206.14 367.05 1462 146.82 320.55 699.4 -226.51 -29.789 135.7 143.17 467.71 1324.3 366.54 431.13 1246.1 379.61 480.27 1127.6 513.11 230.42 131.29 206.99 111.6 1360.6 272.94 142.58 1567 211.75 84.274 997.88 285.33 -134.77 1270.9 409.32 226 1655.2 455.06 -16.295 1018.3 377.49 -88.093 1122.3 412.37 147.16 907.32 448.19 280.57 53.382 434.01 136.81 476.45 635.66 282.78 110.84 472.37 168.63 272 348.89 104.73 1454.4 447.42 119.15 632.78 683.53 349.32 167.19 361.37 29.619 1433.9 431.3 -2.4612 1125.6 370.87 -77.909 1035.6 424 311.04 1196.1 156.16 163.37 1265
164 1.3583 225.75 209.88 1443.6 399.56 305.69 1377.5 -187.73 6.5348 344.14 241.7 289.48 1583.1 144.28 258.59 1006.2 179.07 542.48 1137.5 352.2 342.36 1653.7 479.76 453.96 1175 289.65 497.92 1107.1 280.91 381.14 900.28 -210.81 -86.65 380.72 39.209 201.82 529.32 -259.61 -6.5348 211.24 -62.293 144.28 467.11 227.53 380.63 1459.6 174.32 338.45 695.83 -210.3 -21.556 149.54 160.57 478.82 1322.4 388.44 447.51 1227.8 391.07 497.07 1109.1 535.69 248.66 130.19 228.12 122.46 1356.3 294.92 154.71 1564.8 233.73 99.041 994.06 307.31 -119.66 1258.4 431.13 238.05 1653.1 484.26 3.6493 1012.6 403.29 -70.101 1113.3 433.42 162.44 903.25 471.27 300.09 52.788 454.72 150.9 473.82 658.91 301.28 111.18 493.25 185.52 269.54 369.94 115.51 1449.6 469.91 134.94 629.89 705.85 367.31 169.48 383.01 39.888 1427 457.86 14.258 1119.9 398.79 -58.219 1026.9 444.88 325.04 1194.5 177.46 175.85 1261.7
165 1.3667 247.56 221.93 1439.4 420.61 317.58 1375 -164.3 24.272 355.85 263.94 301.54 1580.2 165.92 273.61 1001.4 191.21 551.81 1133.6 374.52 354.15 1650.7 496.14 473.73 1147.4 302.89 511.84 1096.7 304.51 396.33 897.82 -187.13 -67.046 397.35 70.78 223.8 524.4 -239.67 4.8375 225.58 -32.504 164.98 468.47 248.83 394.3 1456.3 202.33 356.19 690.91 -192.48 -12.985 163.37 177.97 489.52 1319.7 409.32 464.23 1208 401.09 513.88 1089.4 554.78 264.62 127.73 249.6 133.41 1351.4 316.64 167.53 1561.6 255.96 113.98 989.39 330.05 -104.47 1244.8 453.19 249.94 1650.2 513.71 24.442 1005.5 429.94 -52.364 1103.6 454.55 177.03 899.18 490.11 315.62 50.327 475.51 166.26 471.27 678.01 317.75 108.55 512.26 200.71 266.74 391.16 126.03 1444.1 492.06 152.85 628.02 724.6 384.11 167.53 404.74 49.733 1418.9 484.68 31.147 1113.2 428.16 -38.7 1017.1 466.01 338.62 1192.6 198.59 188.41 1257.5
166 1.3750 269.37 234.07 1434.2 441.57 329.63 1371.7 -139.35 43.283 365.95 285.92 313.25 1576.5 187.3 288.72 996.26 203.43 560.89 1129.3 396.93 366.04 1647.2 510.74 494.19 1118.6 313.76 525.33 1084.2 328.18 411.61 895.02 -161.08 -45.404 413.14 103.03 246.63 518.46 -218.79 17.313 239.41 -2.0368 187.22 468.56 269.96 407.37 1452.1 230.16 374.01 685.65 -172.96 -3.9888 177.12 195.2 500.38 1316.5 429.09 481.2 1186.4 410.08 530.17 1068.7 570.91 278.28 123.74 270.98 144.36 1345.8 338.62 179.67 1558.2 276.33 128.41 984.04 353.64 -89.026 1230 475.43 261.73 1646.7 542.9 45.404 996.94 457.01 -34.711 1092.9 474.33 189.68 896.29 504.88 327.59 46.168 496.48 182.55 468.98 692.95 331.75 103.03 530.17 215.06 263.68 412.12 136.81 1437.7 513.96 170.16 627.09 739.88 399.39 160.65 426.8 59.662 1409.7 512.26 48.12 1105.5 457.86 -18.247 1006.9 486.89 351.86 1189.3 219.38 200.8 1252.1
167 1.3833 290.93 246.37 1428.7 462.45 342.1 1367.6 -113.04 64.075 375.37 308.24 325.04 1572.2 208.78 304 990.24 214.97 569.04 1123.8 419.16 378 1642.9 523.38 514.81 1088.8 323.6 538.15 1070.7 351.78 426.29 891.2 -133.07 -21.981 427.99 135.96 270.3 511.75 -196.55 30.468 252.65 29.279 210.81 467.03 290.59 420.27 1447.3 258.25 391.75 680.05 -151.49 6.2802 190.87 212.76 511.24 1312.4 448.02 498.26 1163.4 417.89 546.13 1047.4 584.4 289.74 118.65 291.61 155.39 1339 360.69 191.55 1554 295.68 142.41 977.68 377.83 -73.75 1214 497.5 273.78 1642.4 572.35 66.282 987.35 484.68 -16.719 1081 495.63 204.53 895.53 516.08 336.67 41.076 518.46 199.52 467.11 704.91 343.72 94.967 549.18 229.23 261.05 433.25 147.33 1430.2 536.62 186.37 625.99 752.35 412.88 150.3 448.95 69.761 1399.2 540.78 65.094 1096.6 486.55 2.1217 995.67 507.09 365.44 1185.5 239.92 213.1 1246
168 1.3917 312.31 258 1422.3 483.15 354.58 1362.9 -85.207 85.886 383.26 330.39 337.35 1567.6 229.91 319.95 983.28 227.45 576.85 1117.9 441.14 390.22 1638 533.99 535.77 1058.4 333.11 550.71 1057.1 375.03 440.8 886.62 -103.54 2.3763 441.06 169.57 294.49 504.37 -172.45 44.98 265.47 61.699 235 463.89 311.04 433.68 1441.5 286.43 409.49 673.34 -128.49 17.822 204.02 230.67 522.02 1307.4 466.26 515.32 1139.7 424.93 561.49 1026.4 595.94 299.07 112.79 312.48 166.77 1331.2 382.67 203.68 1549.1 315.37 156.58 971.06 403.04 -58.05 1197.3 520.07 285.84 1637.6 602.31 87.414 976.91 513.11 1.0184 1067.9 516.59 217.69 894.93 524.31 342.19 36.069 542.05 217.18 465.25 714.67 354.49 84.953 567.94 242.64 257.83 454.13 157.77 1421.6 559.62 202.49 624.2 762.54 425.7 136.89 471.19 79.946 1387.5 569.72 81.813 1086.3 515.57 22.575 982.94 527.45 378.77 1181.2 260.8 226 1239.9
169 1.4000 333.7 269.71 1415.5 503.69 366.88 1357.6 -55.758 108.46 390.22 352.37 349.32 1562.5 251.04 335.31 975.64 241.28 585.5 1111.9 463.55 402.1 1632.2 542.65 556.65 1027.3 342.36 563.27 1042.5 398.45 455.15 881.61 -73.156 27.582 452.69 203.43 319.02 496.05 -146.65 61.02 277.6 94.713 259.7 459.81 331.66 446.66 1434.5 314.35 427.39 666.21 -104.22 31.147 215.73 248.83 532.63 1301.4 482.9 532.38 1114.3 431.13 576.85 1005.3 605.96 307.22 106.76 333.62 178.73 1323 405.07 215.31 1543.7 335.14 171.35 964.35 429.52 -41.925 1179.7 542.39 297.72 1632 632.52 108.97 965.37 542.31 19.265 1053.6 538.32 229.82 894 529.66 344.99 33.098 566.24 234.57 462.7 721.72 364.42 71.968 585.5 254.94 254.01 475.26 168.21 1413 582.79 219.81 621.91 770.86 439.19 118.65 493.17 90.469 1374.7 599.08 98.532 1075.2 545.87 42.943 969.02 547.74 392.09 1176.2 281.59 239.33 1233.3
170 1.4083 355.09 281.42 1408.4 524.14 379.1 1351.8 -25.036 132.14 397.01 374.69 360.86 1556.4 271.75 350.42 967.66 255.11 594.33 1105.2 485.61 413.9 1626.1 549.18 577.27 996.01 351.44 575.15 1027.9 422.3 469.32 875.75 -41.076 53.467 462.87 237.8 344.22 487.4 -118.65 79.521 288.55 128.24 284.99 455.06 352.12 459.47 1427 341.85 446.15 659.51 -77.569 47.017 226.6 267.5 543.07 1294.2 498.09 549.18 1088.1 436.39 592.21 984.04 615.97 314.35 101.25 354.58 190.78 1314.9 427.65 226.94 1538.3 354.58 186.37 957.82 456.67 -25.8 1161.2 564.63 309.6 1625.6 662.22 131.04 952.39 571.59 37.766 1038.4 560.98 242.81 892.47 533.14 346.52 34.032 590.85 252.06 459.56 726.13 373.76 56.352 603.07 267.33 250.7 496.82 178.9 1403.5 605.53 238.48 619.2 777.9 452.43 93.694 515.74 101.16 1361.3 629.38 115.84 1063 576.59 63.736 954.17 568.02 405.67 1170.7 301.88 252.57 1225.8
171 1.4167 376.39 292.62 1401.2 545.19 391.24 1345.4 6.3651 156.58 403.29 396.76 372.66 1550.1 291.86 365.87 959.52 269.29 603.24 1097.7 507.51 425.87 1620.1 553.76 597.22 965.29 360.43 586.95 1013.7 444.79 482.56 869.05 -8.2322 80.2 471.87 272.68 369.85 478.99 -88.857 100.65 298.06 161.93 310.62 450.31 372.32 472.21 1419.3 369.18 464.74 652.55 -48.544 66.112 236.19 286.68 553.93 1286.3 512.26 565.9 1061.2 440.97 607.57 963.25 625.99 320.46 98.701 375.63 202.33 1305.8 449.88 238.82 1532.2 373.76 201.05 950.69 485.02 -8.8263 1141.8 586.78 321.82 1619.2 692.35 152.68 938.72 601.46 56.777 1022.7 583.89 256.47 889.16 536.62 346.86 37.766 616.23 269.96 457.01 729.52 382.25 39.973 620.81 279.72 248.75 518.46 189.59 1393.5 628.53 257.83 615.46 784.09 463.72 64.16 538.74 112.28 1347.5 660.02 133.58 1049.7 607.4 85.038 938.98 588.56 419.16 1164.6 321.9 265.81 1217.7
172 1.4250 398.12 303.83 1393.8 566.24 403.97 1338.6 39.633 181.87 408.38 418.57 384.37 1543.7 311.38 381.14 951.28 284.39 612.32 1090 529.58 437.75 1614.4 556.39 616.99 935.41 369.26 599.34 999.49 466.43 495.46 862.09 25.291 107.61 479.33 308.16 395.48 471.02 -56.946 124.16 305.69 196.04 336.67 445.13 392.51 484.94 1411.4 395.99 483.32 645.42 -17.398 88.093 244.08 306.37 565.14 1278.4 525.08 581.94 1034.1 444.62 623.69 943.73 633.62 326.06 98.362 396.76 213.61 1296.4 472.37 250.61 1525.5 392.85 215.23 943.31 514.55 8.7414 1122.5 608.93 334.04 1613.2 723.07 174.49 924.72 631.84 76.296 1006.5 605.7 268.86 884.15 540.01 346.6 41.331 643.38 288.55 456.67 734.96 383.18 36.578 639.39 291.18 249.17 540.69 200.71 1382.9 653.14 275.91 611.81 788.08 471.36 53.042 562 123.65 1333.1 690.57 152 1035.6 638.38 106.51 923.79 609.27 432.49 1158 342.27 278.54 1210
173 1.4333 420.1 315.62 1386.7 587.46 416.79 1331.8 74.344 208.35 412.37 440.63 395.91 1537.7 330.39 396.25 942.63 300.35 622.17 1082.3 551.13 449.97 1608.7 557.58 636.34 907.32 377.92 612.32 986.17 488.25 508.36 854.87 59.577 135.36 485.11 344.56 421.88 462.95 -23.084 149.96 311.04 230.93 363.32 440.04 412.54 497.75 1403.1 422.98 502.08 638.72 15.361 112.53 249.68 325.98 575.83 1270.5 536.37 597.13 1007.4 448.02 640.84 926.16 637.78 330.31 98.871 417.8 225.24 1287.5 495.2 262.41 1519.1 411.52 229.06 936.43 544.94 27.158 1103.3 631.16 346.26 1607.5 754.05 196.98 910.55 662.56 96.919 990.07 625.56 280.91 879.06 542.14 347.02 43.283 671.05 307.14 457.86 738.44 383.18 34.372 657.98 302.13 251.04 563.69 211.75 1372.3 678.09 291.86 609.27 790.97 472.29 41.925 585.59 135.36 1317.7 721.21 172.03 1021.7 670.29 128.49 908.43 629.89 446.32 1151.1 362.05 291.61 1203.1
174 1.4417 442.42 327.76 1380.4 608.59 429.6 1325 110.24 235.42 414.58 462.62 407.71 1532.1 349.91 410.42 934.31 315.88 633.03 1075.2 572.86 461.85 1603.5 557.58 655.35 882.54 387 625.65 974.62 509.72 521.17 847.75 94.034 163.12 488.84 381.65 447.42 455.57 11.966 177.12 314.18 266.91 389.8 435.29 433 510.82 1395.2 449.63 520.75 632.69 50.327 138.42 253.92 345.58 586.44 1262.2 546.21 610.79 981.84 451.58 658.32 911.23 640.75 333.7 99.89 438.94 237.46 1280.2 517.52 274.46 1513.3 430.2 242.81 930.66 575.66 45.914 1084 652.97 358.31 1602.1 785.37 220.23 897.31 693.54 118.73 973.77 644.49 293.39 874.14 543.07 348.13 44.725 696.17 324.37 457.52 740.05 383.43 33.777 672.83 312.74 251.55 586.69 222.69 1361.5 701.26 307.31 608.08 792.41 474.67 35.135 609.86 147.92 1302 751.76 194.26 1008.4 702.96 151.15 893.74 650.85 460.32 1143.8 381.74 305.19 1196.7
175 1.4500 464.57 339.9 1375.1 629.97 442.84 1318.8 146.99 262.75 415.43 484.43 419.67 1527.1 369.18 423.58 926.5 331.66 644.83 1069.2 594.84 473.65 1598.7 556.73 672.92 861.41 395.57 639.14 964.27 530.09 533.99 840.87 129.17 191.29 490.62 418.48 471.27 450.05 48.629 205.13 315.28 303.83 416.36 431.38 453.96 524.14 1388.1 476.45 539.76 627.85 86.565 165.32 255.96 365.19 597.13 1254.2 554.36 622.85 958.24 455.06 675.8 898.75 643.21 336.59 100.48 460.49 249.94 1274.5 539.51 286.68 1507.8 448.7 256.47 925.4 607.65 65.603 1065.4 674.78 370.36 1597 817.19 244.42 885.68 725.79 141.9 959.01 662.99 305.69 869.3 543.41 349.06 45.914 715.86 338.62 454.13 740.64 383.52 33.438 682.59 320.55 250.78 609.95 234.49 1351 722.23 321.73 606.47 792.58 476.7 32.844 634.47 161.42 1286.8 782.23 218.53 996.09 736.23 174.4 880.42 672.15 473.82 1136.4 401.93 318.51 1191.6
176 1.4583 486.46 351.95 1370.7 651.7 456.25 1312.8 184.25 289.74 414.92 506.07 431.55 1522.1 388.19 436.22 919.8 347.02 657.39 1064.8 616.23 485.7 1593.6 556.22 688.96 844.27 403.29 653.06 956.04 549.6 547.23 834.93 164.47 218.87 490.62 455.06 495.2 446.32 86.056 233.73 314.61 341.51 443.27 428.67 475.51 537.3 1381.5 503.69 559.62 624.88 123.65 193.67 255.54 384.37 608.16 1247.5 560.89 633.45 936.69 458.54 692.95 888.74 644.49 337.94 99.89 482.56 261.73 1269.3 561.15 298.82 1501.9 466.69 269.63 920.14 641.35 86.141 1048.4 696.34 382.58 1591.4 849.27 269.54 875.67 759.48 167.27 947.04 682.34 319.1 864.21 543.15 349.57 46.847 729.78 348.81 448.61 740.9 383.09 33.098 689.13 325.64 248.83 633.28 246.97 1342 740.39 332.77 601.97 792.58 477.38 31.995 660.02 175.85 1273 811.85 245.44 985.06 770.35 198.85 869.3 693.54 487.57 1129.1 421.96 331.49 1187
177 1.4667 508.02 364.17 1366.7 673.26 470 1307.2 221.42 316.73 413.73 527.37 443.94 1517.1 407.2 448.02 914.54 360.86 670.2 1063 637.36 498.09 1589.1 556.65 703.64 831.45 410.17 667.4 950.86 569.12 560.81 830.77 200.29 245.18 488.84 491.3 520.58 443.77 123.74 262.75 312.23 380.04 469.74 427.31 496.99 550.37 1375.7 530.59 579.99 623.1 161.59 223.29 252.74 403.04 619.45 1242.6 565.64 643.89 919.03 461.94 709.41 881.18 645 338.88 98.277 504.45 273.53 1264.1 582.87 311.55 1496.1 485.19 282.61 915.56 676.65 108.46 1034.2 717.73 395.23 1585.9 881.27 297.72 871 793.18 194.69 938.05 701.94 332.17 860.05 542.65 350.25 47.356 739.2 355.85 442.76 741.24 382.67 33.098 694.39 329.54 246.29 656.28 260.37 1334.8 755.49 341.42 594.84 792.5 477.04 31.316 685.9 191.46 1261.3 840.87 274.55 976.58 805.4 224.98 860.82 715.1 501.65 1123.1 442.5 343.63 1183.1
178 1.4750 529.32 376.73 1363.4 694.13 484.09 1303 258.17 343.38 411.44 548.59 456.67 1512.8 426.8 459.05 911.14 372.66 683.36 1063.6 658.58 510.99 1585.4 557.16 717.05 822.71 416.28 681.66 949.08 588.56 574.22 827.97 236.1 270.47 485.27 526.86 546.63 442.93 162.35 291.44 307.65 418.23 495.88 427.14 518.12 563.35 1371.3 556.9 600.86 622.68 200.29 253.5 248.24 421.2 630.91 1240.6 569.21 655.6 907.41 464.65 724.26 876.43 645.93 339.73 96.919 526.27 285.67 1260.3 604.34 324.37 1491.3 504.03 295.59 912.58 711.79 131.72 1023.7 738.86 408.21 1581.6 912.92 328.18 869.39 826.61 223.12 932.44 721.72 344.65 855.98 542.14 350.67 47.696 747.43 362.13 436.82 741.24 382.58 33.098 698.97 332.94 242.98 678.77 274.12 1329.3 768.39 349.32 586.95 792.41 476.62 30.722 711.02 208.01 1252.4 869.9 304.85 972.67 840.36 253.16 855.64 736.23 515.32 1118.5 463.55 355.17 1180.1
179 1.4833 550.54 389.12 1360.9 714.67 498 1300.1 295 369.85 408.55 569.38 469.49 1509 446.74 470.68 908.94 383.52 696.85 1067 679.03 523.8 1582.4 557.67 729.86 817.53 421.88 695.75 950.35 607.82 587.46 826.36 271.92 295.26 480.01 562.42 572.26 443.77 201.31 320.38 301.54 455.66 521.77 427.73 538.74 576 1368.8 583.38 621.66 623.95 239.75 283.8 241.7 438.6 642.45 1241.2 573.28 669.27 902.32 466.77 737.59 874.99 646.78 340.32 95.901 547.82 298.23 1258.1 625.82 337.01 1487.6 522.96 308.24 911.06 744.63 155.31 1016 759.91 421.11 1578.2 943.39 360.35 870.74 859.37 252.14 930.92 740.64 355.68 850.72 541.97 350.5 48.205 757.45 368.33 430.96 741.24 382.25 32.929 703.81 336.08 239.24 701.26 288.04 1325.7 780.7 356.53 579.65 792.16 476.36 30.383 735.47 224.48 1246.5 898.5 335.06 973.44 874.48 283.97 854.7 757.11 528.22 1115.1 484.6 367.39 1177.6
180 1.4917 571.76 401.51 1359.2 735.47 512.01 1298.6 332.43 395.4 404.14 589.49 482.22 1506.9 466.6 483.49 907.92 394.13 710.51 1073 698.63 536.28 1580.5 557.84 742.59 816.09 427.82 708.65 954.68 627.26 600.78 826.1 307.65 319.1 473.05 597.39 597.89 446.74 240.35 349.57 294.66 492.32 547.23 428.92 558.94 588.47 1367.9 609.69 641.94 626.92 280.23 314.61 233.64 455.57 653.65 1244.3 578.21 684.46 902.23 469.06 748.96 876.6 647.71 340.41 95.222 569.04 310.87 1257.1 647.03 349.83 1485.4 541.97 321.31 910.12 775.78 178.9 1010.2 780.62 433.93 1576.4 972.67 395.57 880.84 890.86 281.68 932.78 758.55 366.54 845.79 541.71 350.67 48.884 768.82 374.44 425.61 741.24 381.99 32.335 708.9 339.39 235.68 723.58 301.96 1324.3 792.92 363.49 573.71 792.07 475.94 30.128 759.48 240.35 1242.9 925.91 364.25 979.12 907.07 317.15 858.18 777.64 541.46 1113.1 505.3 380.29 1176.3
181 1.5000 592.63 414.33 1359.1 755.83 526.35 1298.5 369.85 420.01 397.78 610.03 495.37 1506.2 486.55 497.07 908 404.06 723.67 1081.4 718.15 548.84 1580.1 558.52 755.07 818.3 434.69 720.87 961.89 647.29 614.44 826.61 342.7 341.76 464.06 630.91 623.1 451.58 280.4 378.94 285.16 528.56 572.26 430.62 579.05 600.95 1368.5 635.49 661.55 631.67 322.33 346.52 223.8 471.95 664.77 1249.8 584.32 700.5 906.22 471.7 759.23 881.27 648.39 340.66 95.137 590.51 323.69 1257.1 667.91 363.32 1484.6 561.32 334.21 910.29 806.67 201.99 1006.4 800.73 447.17 1576.3 1000.3 431.98 892.73 921.07 311.13 936.6 776.37 377.83 842.4 541.63 350.84 49.563 779.26 379.78 421.11 741.15 381.65 31.995 714.16 342.61 232.79 745.65 316.3 1324.4 805.23 369.94 569.21 792.16 475.51 30.128 782.74 255.54 1242.1 952.05 392.68 989.39 938.05 352.12 865.14 797.59 555.12 1112.4 526.01 393.36 1176.3
182 1.5083 613.51 427.73 1359.8 775.78 541.03 1299.3 406.94 444.28 390.31 630.4 508.87 1506.9 506.83 511.33 909.11 413.73 736.65 1092.1 737.84 562.25 1580.7 560.89 767.29 824.49 441.74 732.66 971.4 667.91 627.6 827.55 377.07 363.57 453.11 664.09 647.2 458.2 321.9 408.3 272.94 563.44 596.2 432.74 598.83 614.02 1370.5 660.95 680.98 637.87 366.12 378.68 212.42 488.5 675.89 1257.1 590.77 716.96 913.52 475.09 768.73 888.91 649.15 340.83 95.392 612.32 337.1 1258.1 688.19 377.41 1484.8 580.58 347.53 911.65 836.97 225.75 1005.2 820.93 461 1577.4 1025.5 467.88 911.57 949.16 342.87 946.19 794.79 388.7 840.28 541.63 351.1 50.836 788.08 384.11 417.8 741.24 381.31 31.995 718.83 345.24 230.93 767.21 331.07 1326.1 816.77 376.39 565.9 792.16 475.18 30.213 805.91 270.56 1243.8 977 420.52 1003.5 967.15 387.42 876.01 817.28 569.38 1113.2 547.31 406.26 1176.9
183 1.5167 634.47 441.48 1361.4 795.72 555.88 1301.1 443.27 468.3 382.25 650.34 523.04 1509 527.62 526.27 911.06 423.49 749.47 1104.4 757.45 576.08 1582.6 565.73 779.17 834.17 449.04 744.55 983.19 689.55 640.67 829.5 411.1 384.45 439.87 696.85 670.2 466.01 365.1 437.66 259.44 596.96 619.2 435.2 618.52 627.26 1373.7 685.9 700.08 645.51 411.52 411.1 199.95 505.13 687.51 1266.2 597.98 731.99 923.62 479.93 778.41 899.6 649.83 340.91 95.901 634.39 350.84 1259.8 707.97 391.58 1486.6 599.85 361.71 913.6 866.5 250.28 1006.6 840.87 475.09 1580.1 1049.7 504.03 932.1 976.58 373.5 957.31 813.46 399.98 838.92 541.71 350.93 52.533 794.87 387.85 415.51 741.24 381.14 31.91 722.31 347.11 230.33 788.68 345.92 1329.3 827.12 382.92 563.44 791.9 474.84 30.552 828.9 285.67 1247.4 1000.3 448.1 1020.1 995.08 422.56 890.77 836.97 583.81 1115.2 569.04 419.67 1178.7
184 1.5250 655.01 455.57 1364.1 815.41 570.99 1304.5 479.33 491.72 372.99 670.03 537.3 1512.3 548.76 541.37 914.28 433.51 761.77 1117.9 776.97 590.6 1586.3 572.94 791.14 846.22 457.18 756.34 996.52 712.04 653.82 832.98 445.22 404.4 424.34 728.59 692.52 474.75 409.49 467.88 245.1 629.97 641.26 437.66 638.38 640.41 1377.7 710.77 718.58 654.59 458.71 444.45 186.79 522.02 699.4 1276.8 606.04 745.14 935.67 486.89 789.1 912.08 650.6 341.25 97.004 656.2 364.59 1262.4 727.4 406.35 1489.5 619.2 375.54 916.83 895.87 274.55 1010.9 860.39 488.92 1583.8 1071.2 538.32 957.73 1003.3 403.55 970.8 832.21 411.61 838.5 542.14 350.33 54.74 800.22 391.33 413.99 741.24 381.06 31.656 724.52 348.3 230.76 809.72 360.69 1334.2 836.54 389.37 561.66 791.56 474.5 30.722 851.48 301.37 1252.2 1021.7 475.26 1039 1022.5 456.93 907.92 856.91 598.4 1118.6 590.6 433.42 1181.6
185 1.5333 675.63 469.83 1368.3 835.27 586.27 1309.2 516.76 514.38 361.88 689.21 552.15 1516.5 570.4 556.56 918.86 444.2 774 1132.6 796.4 605.19 1591.4 581.43 802.09 859.03 466.6 767.88 1010.9 735.21 667.15 837.65 479.93 424 407.11 758.89 714.08 484.26 454.55 498.26 229.82 662.39 662.99 440.04 657.98 654.08 1382.6 735.55 736.65 665.19 508.02 478.06 173.47 539.17 711.36 1287.9 615.12 757.28 948.99 495.46 800.64 925.74 651.53 341.51 98.956 677.84 378.26 1266.1 746.5 421.45 1493.3 638.89 389.63 920.99 924.72 298.82 1017.5 879.32 503.52 1588.6 1091.8 571.42 984.64 1028.8 432.83 986.67 851.23 423.75 839.43 542.9 349.57 58.219 804.46 395.06 413.73 741.15 381.06 31.656 725.62 349.4 232.54 830.35 375.63 1340.8 845.2 396.5 560.89 791.48 474.24 30.892 874.05 317.66 1258.9 1041.7 501.99 1059.5 1046.1 492.15 928.37 877.36 612.83 1123.2 612.24 447.25 1185.4
186 1.5417 696.17 484.43 1373.3 855.21 602.14 1314.6 554.87 536.45 349.57 708.05 567.68 1521.7 592.12 571.76 924.72 455.57 786.22 1148.5 815.41 619.71 1597.4 591.36 813.46 872.78 477.13 779.77 1026 758.38 680.64 843.67 515.49 444.11 388.36 788.59 734.11 494.1 500.3 527.45 213.61 693.96 683.36 442.08 677.59 667.91 1388.4 760.42 754.22 676.23 558.35 511.67 160.23 556.56 723.16 1299.4 625.48 768.99 963.25 505.3 812.95 940 652.55 341.76 102.1 699.57 392.51 1270.9 765.59 436.82 1498.2 659.25 404.31 925.57 952.81 323.18 1026 897.99 518.12 1594.5 1110.5 602.31 1012.4 1052.6 461.34 1005.1 871.17 436.31 841.97 544.09 349.23 63.142 807.86 399.13 415.34 740.73 380.8 32.08 727.15 350.84 235.51 850.72 390.82 1349.1 854.03 404.06 561.49 790.8 474.33 30.977 896.88 334.8 1267 1060.1 527.62 1081.3 1070.2 524.06 950.95 898.16 627.85 1128.9 633.88 461.51 1190.1
187 1.5500 716.2 499.19 1379 875.16 618.09 1321.1 592.97 559.02 336.25 727.15 583.13 1527.5 613.6 587.2 931.17 467.96 797.93 1164.6 833.74 634.64 1603.8 603.07 827.04 888.48 488.41 792.07 1041.6 781.55 694.13 851.06 551.9 464.57 368.07 817.28 752.86 503.78 547.4 556.14 196.98 724.86 703.05 443.43 697.11 681.49 1395.1 785.96 771.36 687.68 609.69 544.85 147.25 574.13 735.13 1311.5 636.76 780.95 978.53 516.51 825.6 955.36 654.33 342.19 106.76 721.29 407.11 1276.6 784.77 452.35 1504.1 680.13 419.42 931 980.56 346.94 1035.4 916.74 532.97 1601.5 1127.6 631.25 1040.8 1075 488.92 1024.9 891.71 449.46 845.2 545.87 348.98 69.846 811.42 403.04 419.16 740.39 381.06 32.165 729.44 352.8 239.84 871.34 406.52 1357.9 862.94 412.12 563.78 789.53 475.26 31.401 919.63 352.54 1275.8 1077.6 551.9 1104.4 1092.8 554.7 974.37 919.29 643.38 1135.3 655.52 475.51 1195.2
188 1.5583 736.65 513.96 1385.2 895.1 633.71 1328.2 631.67 582.02 322.5 746.41 598.49 1533.2 635.58 602.65 938.21 481.29 809.64 1180.6 852.24 650.26 1611 615.72 840.36 904.1 500.81 804.38 1057.5 804.8 707.63 860.31 589.41 485.95 347.19 844.35 771.53 512.6 596.03 584.66 180.94 755.24 722.31 443.69 716.2 694.9 1402.2 811.42 788.17 699.57 661.97 576.68 135.36 592.21 747.09 1324.1 648.82 793.01 994.31 528.9 838.24 971.31 657.13 343.12 113.21 743.36 421.54 1283.1 803.53 467.96 1510.5 701.86 434.35 937.11 1008 370.36 1046 934.99 548.5 1609 1143.7 658.66 1069 1096.2 515.4 1045 912.5 462.95 849.61 548.16 349.49 78.503 816.09 407.71 424.93 740.22 382.25 32.42 732.41 355.6 245.78 892.3 422.56 1367.5 872.61 421.2 567.6 788.85 475.94 31.91 942.37 370.11 1286 1094.8 574.3 1127.2 1113.2 584.4 997.11 940.42 658.91 1142.7 677.33 489.18 1201.2
189 1.5667 757.28 528.22 1392.2 915.05 649.66 1335.1 670.71 605.53 308.58 765 614.78 1540.1 657.9 617.58 945.68 495.2 821.86 1196.9 870.91 665.79 1618.5 628.96 852.67 918.7 514.13 817.02 1073.2 827.55 721.46 869.81 628.19 508.02 325.55 869.39 789.53 520.24 645.93 613.85 164.39 784.94 741.15 443.35 735.3 708.99 1409.7 836.71 804.38 711.36 714.84 607.91 124.08 610.79 759.4 1336.6 661.63 805.65 1009.5 542.14 850.55 986.93 660.44 345.07 120.68 766.27 435.8 1290.4 821.95 483.83 1517.5 723.84 448.95 944.24 1034.6 392.94 1057.9 952.9 564.03 1617 1158.4 684.37 1096.2 1116.8 540.78 1064.8 933.55 476.62 855.21 551.22 350.93 89.281 821.86 413.14 431.89 739.88 383.86 33.438 736.4 358.91 253.42 913.35 438.94 1378.3 883.14 430.87 572.86 789.7 475.43 32.165 965.37 387.85 1297.3 1111.7 596.03 1148.4 1132.2 612.66 1019.8 960.62 674.28 1150.9 699.4 502.67 1208.1
190 1.5750 777.64 542.56 1399.7 934.9 665.87 1343.4 710.26 628.7 293.73 783.33 631.08 1547.4 680.13 632.35 953.49 510.06 833.83 1212.9 889.16 681.32 1626.2 642.79 865.14 932.53 527.96 829.67 1088.7 849.95 735.64 879.32 667.4 530.34 303.32 893.07 806.75 526.86 696.6 643.47 147.42 814.05 759.65 442.5 754.73 723.16 1417.9 861.83 820.16 722.74 768.31 639.39 113.55 629.8 771.7 1348.6 675.38 819.15 1023.9 555.88 862.85 1001.9 664.6 347.96 129.42 789.1 450.39 1298.4 840.36 499.96 1525.2 746.33 463.89 952.05 1060.6 414.33 1070.6 970.63 579.82 1625.4 1172.5 708.39 1122.3 1136.5 565.31 1084.3 954.68 490.88 861.24 555.46 353.73 101.67 828.9 418.65 439.7 740.05 385.13 35.39 741.41 362.56 262.58 934.06 455.49 1389.7 894.68 440.8 579.65 790.63 474.84 32.335 988.8 405.84 1309.3 1128.1 616.65 1169.1 1149.9 639.39 1041.9 981.07 689.55 1159.6 721.97 515.83 1215.4
191 1.5833 798.52 556.82 1407.3 954.76 682.42 1352.4 750.4 652.46 278.37 801.92 646.78 1554.7 702.45 646.95 961.89 525.59 846.13 1227.8 907.07 697.61 1634.6 657.73 877.53 945.85 542.65 842.31 1103.8 872.53 750.32 889.16 706.95 554.02 281.17 915.72 823.22 532.04 747.6 671.64 132.05 842.31 777.98 440.55 774.59 736.91 1426.5 886.45 835.69 733.6 822.79 670.46 104.73 648.65 783.67 1360.3 690.4 833.49 1038.2 570.14 875.24 1016 670.12 351.86 140.03 811.85 464.82 1306.5 858.52 516.08 1533.2 768.82 479.42 960.11 1086.1 434.69 1084.3 988.54 595.35 1634.3 1185.9 731.14 1146.7 1156.1 588.47 1103.1 976.24 505.81 867.69 560.89 358.31 115.34 837.05 424.25 448.95 741.15 386.91 37.851 747.09 366.46 273.19 955.19 471.95 1401.1 907.49 451.24 587.46 791.05 475.09 32.504 1011.9 423.41 1321.7 1144.4 636.09 1189.2 1166.5 664.52 1063.3 1002 705.42 1168.2 744.97 529.15 1223
192 1.5917 819.4 571.42 1414.7 974.54 698.63 1361 790.71 677.25 263.09 820.42 663.5 1562.2 724.52 661.72 970.46 542.14 858.61 1242.1 925.06 714.16 1642.9 673.34 890.35 958.41 558.26 854.37 1117.6 894.59 765.34 898.92 747.01 579.05 258.85 937.28 838.83 535.77 799.71 700.33 118.05 869.73 796.4 437.07 794.11 750.4 1435.1 909.87 850.46 743.78 877.79 701.09 97.513 667.32 796.49 1371.8 706.53 848 1051.7 585.16 887.46 1029.4 676.82 356.7 151.23 834.85 479.33 1314.9 876.6 532.46 1541.4 791.56 495.88 968.26 1111.3 454.13 1098 1006.4 611.3 1643.2 1198.3 752.78 1169.1 1175.8 610.2 1120.9 997.96 521.43 874.9 568.11 364.51 129.42 846.22 430.03 459.56 742.68 389.12 40.567 753.71 371.13 284.73 976.24 488.75 1413.1 921.5 461.85 595.6 791.22 475.43 32.335 1034.5 440.97 1334.3 1160.6 654.33 1208 1182.1 688.53 1083.3 1022.5 721.55 1177.2 767.55 543.07 1230.8
193 1.6000 840.11 586.27 1422.6 994.06 715.01 1369.4 830.6 702.88 248.41 838.5 680.73 1569.6 746.58 676.91 979.04 559.45 871 1255.2 942.97 730.37 1650.9 689.55 903.59 970.55 574.9 866.5 1130 916.49 780.53 908.34 788.25 605.02 236.53 958.16 853.26 537.47 852.67 730.37 104.9 896.54 814.65 432.83 813.8 764.07 1444.1 932.95 865.14 752.61 933.04 731.9 92.082 686.75 810.15 1383.1 723.24 861.92 1064.4 600.78 899.09 1041.8 685.14 362.56 161.93 857.84 493.59 1323 894.85 548.76 1549.7 815.16 512.43 976.49 1136.4 472.21 1111.3 1024 627.51 1651.7 1210 773.49 1189.3 1194.3 631.08 1137.9 1019.8 537.89 882.2 576.25 372.32 144.11 856.15 435.8 471.19 744.21 391.58 43.877 761.27 376.3 296.27 996.52 505.64 1424.9 936.6 471.95 604.6 791.56 475.94 31.571 1056.5 458.2 1347 1176.5 671.64 1225.8 1196.4 710.85 1102 1042.1 737.59 1186.4 790.29 556.65 1238.9
194 1.6083 860.73 601.29 1430.4 1013.6 731.31 1378 870.66 729.52 234.15 856.57 697.11 1576.7 769.58 692.44 987.01 577.02 883.14 1267.3 960.96 746.58 1658.3 706.7 916.91 982.26 592.38 879.23 1141.6 939.15 796.66 917.51 829.84 632.1 214.46 978.44 866.42 537.72 905.8 760.84 93.1 923.02 832.72 427.82 833.4 778.41 1452.9 956.38 879.83 760.59 987.86 762.71 88.432 706.61 823.73 1393.7 740.3 876.01 1076.3 617.75 910.72 1053 693.54 368.75 172.71 879.83 508.44 1331.3 913.35 565.14 1557.7 838.58 528.73 984.72 1161 489.69 1124.2 1041.8 644.06 1660.2 1221.3 793.09 1208 1211.8 651.02 1154.2 1042 554.36 889.5 585.08 380.63 158.7 866.67 441.74 482.73 746.58 394.3 48.714 769.41 381.99 308.07 1016.4 522.7 1436.1 952.56 482.22 613.76 792.24 477.13 30.468 1078.7 475.43 1359.9 1192.4 688.11 1242.2 1210.8 731.99 1119.5 1061.7 753.03 1195 812.95 570.06 1247
195 1.6167 880.84 616.14 1437.5 1033 747.09 1386.1 911.48 756.85 220.23 874.73 712.98 1583 792.41 707.97 994.57 594.84 895.87 1278.2 978.61 763.3 1664.9 725.03 929.98 993.29 610.71 892.05 1152.3 961.89 813.12 926.16 871.68 660.19 193.24 997.88 879.23 536.96 958.75 791.73 82.746 949.42 850.97 422.05 853.6 792.67 1461.2 979.12 894.42 767.38 1041.2 793.43 85.717 726.47 837.05 1403.7 758.29 890.35 1087.5 635.83 922.6 1063.4 701.94 374.95 184.67 901.64 523.55 1339.6 931.26 581.43 1564.9 862.09 544.68 993.04 1185.3 506.58 1136.9 1059.3 660.61 1667.9 1232.8 811.68 1225.2 1229.7 669.95 1169.6 1064.4 570.57 897.05 593.91 388.36 173.81 877.79 449.12 493.68 750.23 396.08 55.249 777.73 388.44 320.29 1036 539.76 1446.4 969.45 492.74 622.68 793.77 478.06 29.279 1100 492.32 1372.2 1208.8 704.91 1256.6 1225.2 752.1 1135.9 1082 769.92 1203.6 834.93 583.81 1254.7
196 1.6250 900.79 630.74 1444.5 1052.4 762.62 1393.8 952.39 783.84 207.08 892.81 729.27 1589.2 814.9 723.67 1002 612.83 908.43 1287.4 996.09 779.85 1670.6 744.97 943.39 1004.2 629.72 904.52 1162.2 983.79 829.16 934.06 914.45 688.7 173.05 1016.9 891.88 535.35 1011.1 822.88 74.09 975.64 868.96 415.43 873.72 806.58 1469.1 1000 908.43 772.21 1092.9 823.81 84.359 746.16 850.55 1413.2 777.9 904.95 1098.5 654.76 934.9 1073 709.84 380.8 197.49 923.36 538.15 1347.5 949.42 597.39 1571.4 885.68 560.47 1001.1 1209.2 522.53 1149 1076.8 677.59 1674.4 1244.8 829.07 1240.9 1247.9 688.02 1183.8 1087.1 586.95 904.44 602.65 395.99 189.51 890.18 458.2 503.69 754.14 397.01 64.245 786.56 395.99 332.68 1054.9 556.56 1456 986.59 503.52 630.4 796.4 476.11 27.667 1120.3 508.7 1383.7 1225.6 722.31 1271.7 1239.4 770.52 1150.7 1102.3 786.73 1211.6 857.17 598.06 1261.8
197 1.6333 920.73 645.34 1451 1072.1 778.41 1400.7 993.12 810.74 195.2 911.23 745.06 1594.2 837.65 738.94 1009.2 631.5 921.07 1295.6 1013.7 796.66 1675.5 766.44 957.31 1013.9 649.58 917.17 1170.7 1005.6 845.28 941.19 958.16 717.9 154.21 1035.6 904.61 533.31 1062 854.7 67.215 1001.6 886.36 408.38 894 820.08 1476.9 1020.1 922.51 775.52 1142.7 854.87 85.038 765.93 864.21 1422 799.03 918.86 1109.1 675.04 948.23 1082 717.05 386.91 211.15 944.41 552.58 1354.6 967.32 612.83 1577.4 909.61 576.34 1008.2 1232.7 537.81 1161.1 1094.6 694.47 1680.2 1257.5 845.71 1254.9 1265.6 705.25 1196.6 1109.6 603.07 911.06 610.79 404.74 205.72 904.44 468.56 512.43 757.79 396.25 76.466 796.66 404.14 345.07 1073.4 573.03 1464.9 1004.2 515.32 637.02 800.05 470.34 24.951 1140.1 525.25 1394.2 1243 738.94 1285.3 1253.8 787.91 1164.1 1122.8 803.61 1219.2 879.49 612.83 1268
198 1.6417 940.51 659.85 1456.6 1091.9 794.02 1406.4 1033.7 838.66 184.25 929.47 760.16 1598.2 860.73 753.46 1015.9 650.85 934.4 1303.1 1031.7 813.29 1679.5 789.36 971.23 1022.7 670.12 930.24 1177.8 1027.2 861.49 947.55 1001.6 748.28 136.55 1054.3 916.83 530.17 1111.3 887.12 62.293 1026.7 903.42 401.43 914.11 833.83 1484.3 1039.8 936.43 777.05 1190.8 887.72 87.074 785.79 877.87 1429.9 821.01 931.6 1118.6 697.11 962.4 1090 723.41 393.36 226.17 964.95 567.68 1360.3 985.83 628.7 1582.9 933.38 592.63 1014.1 1256 552.83 1172.5 1112.7 711.28 1685.5 1269.9 861.75 1267 1283.3 721.89 1207.8 1132.1 619.03 916.74 617.92 415.34 222.35 920.73 479.93 520.33 761.6 393.02 91.657 807.86 412.37 357.8 1091.6 589.49 1472.7 1022.7 527.62 643.04 805.99 460.07 33.947 1159.2 541.37 1403.6 1260.5 754.65 1296.9 1268.9 804.38 1175.6 1143.3 821.01 1226.9 901.13 628.28 1274
199 1.6500 959.52 674.11 1461.6 1111.4 809.22 1411.3 1073.5 867.1 174.32 947.64 775.52 1601.9 883.64 767.88 1021.6 671.05 948.14 1309.6 1049.3 829.67 1682.7 813.97 985.23 1031.6 691.5 944.24 1184.3 1049.1 878.04 952.56 1044.8 778.83 120.6 1072.6 928.62 526.35 1158.6 919.8 59.068 1051.8 920.31 394.55 934.31 847.75 1490.9 1059 950.01 777.47 1236.7 921.24 89.96 805.48 891.71 1436.9 843.16 944.83 1126.5 720.44 976.74 1096.7 729.95 399.98 243.06 985.49 582.87 1365 1004.2 644.49 1587.7 957.14 608.59 1019.3 1278.5 567.09 1182.6 1130.7 727.91 1689.8 1282.5 877.45 1276.9 1301.5 737.67 1217.5 1154.2 635.07 921.67 624.71 426.89 239.33 938.38 491.64 527.62 766.27 389.71 108.72 819.65 421.37 369.94 1109.6 605.79 1479.2 1042.4 540.01 648.31 812.36 450.56 45.574 1177.9 557.16 1411.9 1276.8 770.18 1306.5 1284.7 820.08 1185 1163.9 835.35 1231.3 922.09 643.55 1278.8
200 1.6583 978.02 688.79 1465.7 1130.7 824.66 1415 1112.2 894.93 166.09 965.37 791.73 1604.8 906.73 783.42 1026.7 691.5 961.72 1314.3 1066.8 846.13 1685.4 838.92 999.57 1039 713.4 958.16 1189.2 1071.2 895.36 956.38 1088.4 808.37 107.61 1090.8 940.25 521.68 1204.4 953.32 58.474 1077.4 937.54 387.76 954.17 861.15 1496.1 1077.7 963.93 776.97 1280 954.85 94.203 825.09 905.54 1443.2 865.65 958.58 1133.6 744.29 991.34 1102.3 737.42 406.86 260.54 1005.7 597.39 1369 1022.5 660.19 1591.6 981.67 623.44 1024.1 1300.3 580.84 1191 1148.8 744.8 1693.2 1295.4 892.64 1285.1 1319.6 752.61 1225.2 1176.5 651.53 925.99 632.61 438.43 256.39 957.06 503.52 534.33 771.03 388.44 126.28 831.45 431.13 382.5 1127.5 622.08 1484.8 1062.5 552.83 652.46 818.21 442.76 58.644 1196.6 573.03 1419.3 1293.4 785.03 1314 1300.9 834.68 1192.1 1183.9 852.75 1237.6 943.31 658.66 1282.2
201 1.6667 996.94 703.64 1468.7 1150.3 840.11 1417.4 1150.6 922.51 159.89 983.36 807.6 1606.7 929.22 799.54 1027 712.21 975.05 1317.7 1084.4 862.34 1687.5 864.13 1014.4 1044.9 735.47 971.74 1192.6 1093.9 913.18 958.58 1131.6 838.16 96.495 1109.1 951.62 516.08 1248.6 987.52 61.105 1102.5 954.42 381.99 974.11 874.9 1500.5 1096.3 978.19 774.93 1320.5 988.37 100.4 845.11 919.29 1448.2 888.91 972.93 1140.3 768.39 1006.6 1107 745.73 414.41 277.6 1025.5 611.73 1371.6 1040.8 675.8 1594.5 1006.4 637.87 1028.2 1321.5 594.16 1198.1 1166.8 761.35 1696.1 1308.3 905.37 1288.3 1337.8 766.44 1230.4 1198.6 667.57 929.56 641.69 449.46 273.78 975.73 515.4 540.69 775.95 389.29 143.51 843.42 441.06 395.74 1145 638.46 1489 1083.5 566.24 656.37 823.73 437.83 72.392 1215 588.64 1425.4 1311 799.71 1319 1317.3 848.17 1196.8 1204.4 868.96 1241.8 964.78 673.43 1284.7
202 1.6750 1015.6 718.07 1471.1 1169.8 855.47 1419.6 1187.6 949.93 155.82 1001.4 822.71 1608.1 951.79 814.99 1028.5 732.75 988.12 1320 1101.9 878.55 1689 890.35 1029.9 1050.5 758.46 985.83 1195.4 1117.1 931.09 959.52 1172.5 868.28 86.65 1127.4 962.49 510.31 1289.1 1021.4 66.027 1127 970.8 376.73 994.14 888.99 1504.2 1114.4 992.11 771.19 1358.1 1021.4 109.31 865.31 933.29 1452.2 912.67 987.18 1146.3 793.09 1021.7 1110.4 754.98 422.81 294.49 1044.8 626.83 1373.2 1059.8 691.5 1596.8 1030.2 653.48 1030.6 1341.8 606.64 1203.6 1185.1 778.15 1698.8 1323.9 920.05 1290.2 1356.4 780.28 1233.8 1220.7 683.27 931.93 651.96 460.24 291.18 994.74 527.71 547.14 781.97 392.26 160.4 856.4 450.99 409.57 1162.9 654.33 1492 1105.2 579.82 660.19 829.07 436.82 86.226 1233.1 604.09 1429.9 1328.7 813.97 1321.5 1334.5 860.99 1198.8 1225.2 884.75 1243.2 985.66 688.62 1286.4
203 1.6833 1034.1 733.17 1472.4 1189.2 870.66 1421.7 1221.8 976.24 153.36 1019.4 837.65 1609.2 973.77 829.92 1029.7 753.63 1001.4 1321.2 1119.6 894.68 1690.1 917.25 1045.3 1055.2 782.23 1000.3 1197.2 1139.4 948.23 957.82 1211.4 897.48 79.267 1145.9 973.27 504.62 1326 1054.1 72.647 1151.4 986.17 371.21 1014.1 903.42 1507.5 1132.4 1005.7 766.78 1392.3 1053.6 119.75 885.34 947.64 1455.1 937.2 1001.7 1151.2 818.3 1036.6 1113 765.59 431.64 311.04 1064.3 643.13 1373.3 1079.2 706.53 1599 1053.1 669.35 1032.2 1361.5 618.52 1207.6 1203.9 795.21 1700.8 1341.6 934.74 1289.7 1375.6 793.18 1235.3 1243.4 699.06 933.63 663.33 470.68 308.41 1014.5 540.69 553.51 789.1 397.18 176.61 870.32 461.43 423.32 1180.8 669.61 1494.1 1127 593.31 663.5 835.1 439.11 99.974 1251.3 619.37 1432.7 1348.2 827.55 1321.9 1352.3 872.7 1198.2 1245.8 901.81 1244.5 1006.4 704.83 1287
204 1.6917 1053 748.03 1472.7 1208.5 885.68 1422.5 1254.3 1000.9 152.08 1037.8 852.5 1609.9 996.09 845.79 1030.3 774.84 1015.1 1321.1 1137.5 910.97 1690.2 944.5 1061.1 1058.7 806.41 1014.8 1197.8 1161.1 964.61 954.51 1249.4 925.48 74.09 1164.6 984.3 499.7 1359.9 1085.3 80.37 1174.6 1000.8 366.63 1034.1 918.19 1509.5 1150.6 1018.8 761.69 1423.6 1084 130.27 905.46 961.98 1456.6 962.57 1017.1 1155.3 844.18 1051.7 1114.7 777.73 441.06 326.74 1084 659.34 1372.4 1098.4 721.46 1600.4 1074.7 686.41 1032.5 1380.3 630.14 1209.2 1222 811.42 1701.5 1360.9 948.57 1286.1 1394.7 804.97 1234.7 1266 715.35 935.16 675.72 481.29 325.38 1035.3 554.27 558.94 797.25 403.97 191.55 885.68 472.63 436.31 1198.8 685.14 1495 1148.9 607.31 665.53 842.31 444.11 113.21 1269.5 634.39 1434.8 1369.8 840.79 1320.5 1371 883.22 1195.7 1266.1 919.2 1246.1 1026.9 720.87 1286.6
205 1.7000 1072.4 763.05 1472.1 1228 900.87 1421.7 1285.1 1024.4 150.9 1055.8 867.43 1610 1018 862.85 1029.1 796.15 1028.9 1319.2 1155.6 926.76 1690.1 971.99 1077.1 1061.4 830.86 1029.1 1197.1 1182.6 980.9 951.54 1284.9 952.47 69.507 1182.9 995.59 494.61 1390.8 1114.1 87.499 1196.8 1015.1 362.98 1054 932.87 1510.1 1169.4 1031.6 756.43 1452.4 1112.4 140.29 925.57 976.74 1456.8 988.46 1033.1 1158 870.66 1066.9 1115 791.22 450.82 341.42 1103.8 674.78 1371.4 1117.5 737.08 1601.1 1095.2 704.32 1031.7 1398 641.26 1209.5 1240 827.29 1701.6 1381.1 961.89 1280 1414.2 816.09 1231.5 1288.6 731.48 936.18 689.55 491.72 342.1 1057.2 567.68 562.59 806.67 412.12 205.47 902.66 484.68 447.42 1217.7 700.58 1494.8 1171.1 621.23 666.89 850.46 450.48 126.28 1288 648.48 1434.9 1391.3 854.03 1316.9 1391.4 894.17 1191.6 1286.1 930.24 1243.9 1046.4 736.65 1285.3
206 1.7083 1091.9 778.15 1470.4 1247.7 916.06 1420.7 1313.8 1046.1 150.22 1074.3 882.29 1609.1 1039.2 880.33 1025.6 817.62 1042.7 1316 1173.8 942.12 1689 999.83 1093 1062.5 855.3 1043.9 1194.7 1204.4 997.03 948.65 1317.7 977.51 65.518 1201 1006.5 489.26 1419.3 1140.3 94.034 1218.6 1028.7 358.82 1073.5 947.72 1509.5 1188.8 1045.7 751.25 1479.4 1137.9 149.28 945.77 991.85 1455.5 1014.1 1049.7 1160.1 897.05 1081.8 1113.8 806.33 460.58 355.26 1123.3 690.57 1369.9 1136.1 752.52 1600.9 1116.8 721.38 1030.7 1414.8 651.96 1208.1 1258.5 843.76 1701.2 1402.2 974.71 1271.3 1433.7 825.85 1226.2 1311.1 747.86 936.26 705.08 502.67 357.97 1080.5 580.92 565.14 817.36 421.11 219.21 921.5 497.24 456.93 1236.7 715.61 1493.3 1193.8 635.66 667.66 859.71 458.37 138.59 1306.4 662.56 1433.4 1412.5 867.18 1311.5 1412.8 904.95 1185.8 1305.6 948.14 1243.4 1065.9 753.54 1283.2
207 1.7167 1111.5 793.35 1468 1267.5 931.09 1418.9 1341.1 1065.9 149.96 1093.2 897.22 1607.3 1060.8 896.37 1022.3 839.34 1056.5 1311.5 1192 957.31 1687.3 1027.9 1109.4 1062.4 880.67 1059 1190.6 1226.3 1013.3 946.11 1348.3 999.49 62.293 1219.7 1018.5 484.43 1446 1163.6 99.55 1240 1042.4 354.49 1093 963.17 1508.2 1209.3 1060.3 746.33 1505 1159.9 156.41 965.71 1006.7 1452.9 1040.3 1067 1161.5 923.96 1096.7 1111.4 822.71 471.44 368.07 1142.2 706.78 1367.5 1155.1 767.29 1600.2 1138.8 737.42 1028.9 1430.7 662.05 1205.2 1277.3 860.05 1699.7 1424.5 987.35 1260.3 1453.5 835.27 1219 1333.9 764.07 935.41 721.72 513.96 373.25 1104.6 594.75 566.41 829.58 430.7 231.86 941.86 510.57 465.16 1256.1 730.03 1491.3 1217.3 650.34 667.66 869.98 467.45 149.71 1325 676.65 1430.8 1434.6 880.17 1304.1 1434 914.79 1177.4 1326 964.86 1242.9 1085.3 770.52 1280.6
208 1.7250 1131.1 808.96 1465 1287.7 946.11 1416.6 1366.6 1084 149.03 1111.9 912.16 1605.1 1082.5 911.48 1018.5 860.99 1070.9 1304.5 1210.4 972.76 1684.9 1057.1 1126.4 1061.2 906.73 1074 1185.1 1248 1029.2 942.88 1376.2 1017.9 59.662 1239 1031.7 480.27 1471.1 1183.9 102.78 1261.4 1057.1 350.76 1112.9 978.87 1505.7 1229.8 1074 741.15 1529.2 1178.6 160.99 985.57 1022.1 1448.3 1067 1084.7 1161.5 951.88 1112.3 1107.4 840.45 483.15 379.61 1161.1 722.82 1364.2 1174.5 781.8 1598.7 1160.1 753.46 1026.9 1446 671.64 1200.5 1296 875.67 1697.9 1447.6 999.32 1247 1474.2 844.86 1210.6 1356.4 780.53 933.8 739.37 525.42 387.51 1129.1 609.27 566.66 843.33 440.97 243.4 963.25 523.89 471.78 1276.4 744.72 1488.1 1241.2 665.02 666.38 881.35 477.13 160.15 1343.8 690.15 1427.4 1457.1 892.3 1294.4 1455 923.11 1166.3 1347.6 980.22 1241.8 1104.4 786.56 1277.5
209 1.7333 1150.6 824.75 1461.5 1307.7 961.38 1413.7 1389.7 1100.1 146.65 1130.9 926.84 1601.8 1103.9 926.93 1013.9 882.97 1085.2 1295.8 1229 988.12 1682.1 1087 1143.1 1058.6 933.12 1089.4 1178.2 1269.3 1044.6 938.55 1400 1034.2 56.946 1258.3 1045.9 476.36 1494.6 1200.9 103.2 1282.7 1071.8 347.19 1132.6 995.16 1502.3 1250 1087.8 736.23 1552 1193.8 162.61 1004.9 1038.3 1442 1093.8 1102.8 1160.4 980.9 1128.5 1102.2 859.54 495.2 390.14 1179.7 738.86 1360.4 1193.8 795.89 1595.9 1181.7 769.41 1024.4 1460.7 680.98 1194.9 1314.8 891.2 1695.5 1471.2 1010 1231.5 1494.7 854.2 1200.2 1378.8 796.49 931.68 758.46 537.13 400.58 1154 623.69 565.56 858.78 451.84 254.09 986 536.37 477.64 1296.9 759.23 1483.8 1265.5 679.96 664.35 894.08 487.48 169.91 1362.6 703.22 1422.7 1479.4 903.93 1282.4 1476.4 931 1153.3 1368.7 995.59 1239.1 1123.8 803.11 1273.8
210 1.7417 1169.9 840.53 1457.3 1327.6 976.66 1410.3 1410.7 1114.3 142.66 1150.2 941.35 1598 1125.2 942.12 1009.2 905.29 1099.8 1285.2 1247.5 1003.6 1678.7 1117.7 1160.2 1054.9 960.03 1105.2 1169.7 1290.6 1060.3 933.8 1420.2 1048.8 53.297 1278.4 1060.2 472.37 1515.7 1214.9 100.91 1303.5 1085.3 342.87 1152.3 1011.9 1498 1270.3 1101.8 731.48 1572.6 1206.2 161.08 1024.4 1055.4 1434.5 1121 1121.7 1158.6 1010.7 1145 1095.8 879.91 507.34 400.07 1198.8 755.07 1356.6 1213.3 809.64 1592.4 1203.4 785.54 1021.2 1474.8 690.57 1188.3 1333.4 906.73 1692 1494.9 1019.4 1214.3 1514.8 862.68 1188.2 1400.7 812.1 929.56 778.75 549.6 412.03 1179.2 639.56 562.5 875.24 462.45 264.45 1009.3 549.6 482.39 1317 773.15 1478.9 1289.8 695.32 661.46 908.77 497.84 179.16 1381.3 716.54 1416.9 1502 915.39 1269 1498.5 938.55 1139.2 1389.3 1011.3 1235.3 1143.5 820.42 1269.4
211 1.7500 1189.6 856.06 1452.3 1347.8 992.11 1406.3 1429.3 1126.5 137.66 1169.4 956.04 1593.8 1146.3 957.23 1004.5 928.54 1114.7 1272.8 1266.1 1018.7 1674.5 1149 1177.9 1050.6 987.52 1121.7 1159.9 1311.6 1075.8 929.22 1437.3 1060.8 48.29 1299.7 1074.8 468.47 1534.1 1225.8 95.561 1323.6 1098.9 338.54 1171.9 1028.7 1492.3 1290.8 1116.6 727.83 1590.9 1216.4 155.99 1043.7 1072.8 1425.8 1148.7 1140.8 1155.9 1041.1 1161.7 1088.1 901.3 519.73 408.72 1217.9 771.11 1352.2 1232.9 824.92 1587.7 1225.1 801.49 1017.1 1488.5 700.16 1180.6 1353 922.51 1687.9 1521.7 1029.1 1194.7 1534.9 870.41 1174.8 1422.6 827.8 926.76 800.22 563.01 422.47 1204.8 656.03 558.6 893.15 473.05 274.12 1033.1 563.78 485.95 1336.8 786.56 1472.9 1314.1 711.02 657.13 925.48 507 187.9 1399.7 729.95 1410.1 1525.1 926.84 1254.3 1520.4 944.83 1123.4 1410.3 1026.6 1231 1163.4 836.54 1264.2
212 1.7583 1209.3 871 1446.8 1368 1006.9 1401.3 1445.4 1136.6 131.55 1188.6 970.8 1589.2 1167.3 972.42 999.91 952.81 1129.8 1258.5 1285 1033.5 1669.9 1181.2 1195.9 1045.2 1015.9 1138.3 1148.4 1332.9 1091 925.06 1451.7 1069.9 42.434 1321.6 1090.3 464.74 1549.7 1234.4 87.074 1343.5 1113.7 334.63 1191.2 1045.2 1485.7 1312.1 1133.5 725.45 1607.5 1225.2 146.99 1063.1 1090.4 1415.4 1176.8 1160.1 1152.2 1072.1 1178.7 1079.5 923.45 533.82 415.51 1237 786.9 1346.8 1252.4 840.79 1582 1246.5 816.6 1011.9 1501.8 709.67 1171.9 1372.3 937.62 1683.6 1548.3 1037.8 1174.1 1554.5 877.79 1159.7 1444.1 843.84 922.85 823.22 577.27 431.47 1231.4 671.39 554.27 912.67 483.32 283.29 1057.4 578.97 488.41 1357.1 800.05 1465.9 1338.7 726.72 652.04 943.9 515.4 195.79 1417.9 743.19 1402.9 1548.5 937.79 1238.1 1542.1 950.1 1106.2 1431.2 1041.9 1226.6 1183.9 850.97 1258.8
213 1.7667 1229.1 886.62 1440.8 1387.9 1021.7 1395.9 1459.5 1145 124.08 1207.8 985.57 1584.1 1188.1 987.35 995.59 977.17 1145.5 1242.5 1304.2 1048.2 1665.1 1214 1214.2 1039.1 1045 1155.6 1136.1 1353.9 1106.3 921.67 1463 1077.3 36.154 1343.4 1106 460.83 1562.8 1241 76.127 1362.9 1128.4 330.39 1210.1 1062 1478 1334 1151.1 722.65 1622.9 1232.3 134.94 1082.6 1108.5 1403.5 1205.2 1180 1147.5 1103.7 1196.4 1069.8 946.79 548.33 421.45 1256 803.11 1340.7 1272.1 855.47 1575.9 1268.1 831.62 1005.9 1514.6 719.09 1162.7 1391.3 952.47 1678.4 1575.3 1045.5 1152.3 1573.8 884.92 1143.7 1465.3 859.54 917.76 847.07 592.12 438.6 1258.8 686.67 548.67 933.63 493.68 291.86 1082.1 595.01 489.69 1377.9 813.97 1458.8 1363.3 742.34 646.35 963.76 523.21 202.92 1436.3 756.09 1394.8 1571.7 947.72 1220.4 1563.4 954.76 1088.5 1451.5 1057.1 1221.4 1204.4 865.74 1253.2
214 1.7750 1248.9 902.32 1434.1 1407.3 1036.8 1390.3 1471.9 1152.6 116.01 1227 1000.3 1578.3 1208.5 1002.3 990.07 1002.5 1161.8 1225.2 1323.6 1063 1659.8 1247.6 1232.5 1032.1 1075.2 1173.4 1123.1 1374.6 1121.4 917.68 1471.1 1083.9 29.449 1365 1121.4 456.5 1574.6 1246.3 63.651 1381.7 1142.3 325.72 1229 1079.2 1469.4 1355.6 1168.2 718.58 1637.5 1237.2 120.34 1102.3 1126.6 1389.8 1233.9 1200.4 1141.8 1136.4 1214.2 1059.2 971.57 563.18 426.38 1275.1 819.48 1334 1291.8 869.73 1568.9 1289.3 846.56 999.49 1527.3 728.51 1153.3 1410.6 966.99 1672.7 1602.1 1051.6 1128.7 1592.9 891.03 1127 1486.5 875.16 912.16 871.76 607.57 443.94 1286 702.79 542.14 956.12 504.62 300.01 1108.1 611.13 490.37 1398.9 828.9 1450.7 1388.4 757.79 640.16 985.23 530.76 209.54 1454.5 768.9 1386.1 1594.5 957.23 1201.1 1584.3 958.16 1070.1 1471.4 1071.8 1215.8 1224.3 880.93 1246.7
215 1.7833 1268.7 918.19 1427.1 1426.1 1051.5 1384 1483.7 1160.4 107.78 1246.5 1015.1 1571.7 1229 1017.1 983.62 1029.1 1177.4 1206.3 1343 1077.4 1654 1282 1251.2 1024.6 1106.3 1191.2 1108.2 1395.1 1136.5 913.18 1476.8 1089 24.527 1386.4 1136.8 451.92 1585.2 1250.3 49.393 1400.2 1156 320.12 1247.7 1096.7 1460.1 1377.4 1185 714.25 1651.8 1241.4 103.03 1122.3 1144.7 1375.2 1263.5 1221.5 1135.7 1170.1 1232.3 1047.8 997.45 578.63 430.11 1294.6 834.76 1326.7 1311.6 884.66 1561.7 1310.2 861.15 992.87 1539.8 738.27 1143.6 1430.1 981.07 1666.1 1629 1056.3 1103.9 1611.9 896.46 1109.8 1507.5 891.03 906.22 897.22 623.69 448.19 1313.7 719.34 534.58 980.56 516.08 307.14 1135.5 626.66 489.86 1419.6 843.67 1441.9 1413.6 773.15 633.03 1008.7 538.57 215.9 1472.8 781.97 1376.6 1617.3 965.8 1181 1606.2 961.47 1050.1 1491.7 1086.3 1209.9 1243.8 896.12 1239.9
216 1.7917 1288.3 934.74 1419.9 1445.8 1066.7 1377.1 1494.9 1168.2 100.4 1266.6 1030.1 1564.9 1249.8 1031.7 976.58 1056.4 1193.8 1186.5 1362.4 1091.8 1647.5 1317.4 1270.4 1017.3 1138.8 1209.7 1092.5 1415.3 1151.4 908.6 1480.9 1091.9 24.697 1407.5 1152.1 447.08 1593 1254.1 34.966 1418.8 1169.3 314.35 1266.2 1114.1 1450.2 1399.5 1200.7 709.16 1665.6 1245 80.2 1142.7 1163.4 1359.5 1293.7 1243 1129 1204.5 1250.5 1035.9 1023.9 594.33 433.25 1313.7 848.93 1319.6 1331.4 899.6 1554.5 1330.7 875.24 985.57 1552.1 748.45 1133.3 1449.8 995.5 1659.2 1655.6 1059.7 1078.2 1631.3 902.23 1092.3 1528 906.47 899.43 923.45 640.16 451.07 1341.8 735.89 526.69 1006.5 527.88 313.33 1163.2 642.7 488.5 1439.5 857.84 1432.1 1439 789.02 624.97 1033.7 547.14 221.34 1491.3 795.55 1366.8 1640.2 973.69 1159.6 1629 964.86 1028.9 1512.3 1101.5 1203.4 1262.4 911.91 1232.7
217 1.8000 1307.7 950.86 1412.2 1466.3 1082.3 1369.9 1506.1 1172.5 97.344 1286.6 1045.2 1558 1269.7 1045.7 968.85 1084.6 1211.3 1165.7 1381.8 1105.8 1640.8 1353.2 1289.8 1009.8 1172.6 1228.1 1075.7 1435.1 1166 902.91 1484.2 1093.2 28.516 1429.8 1169.1 443.18 1596.8 1257 26.394 1438.3 1183.3 310.79 1285 1131.6 1439.6 1421.1 1215.6 702.54 1678.3 1247.7 51.769 1163.4 1181.6 1341.5 1324.4 1263.9 1121.4 1240.4 1269.4 1023.6 1051.3 610.62 435.46 1332.4 863.19 1312 1351.3 914.28 1547.1 1351 889.16 977.08 1565 758.89 1123.1 1469.7 1009.7 1651.8 1681.7 1061.4 1051.7 1650.3 907.75 1073.6 1548.5 921.58 892.05 950.69 656.54 452.77 1370.5 752.35 518.71 1034 540.44 318 1191.7 659.59 486.04 1459.6 871.85 1422 1464.9 805.31 616.48 1060.1 557.5 225.24 1509.8 809.47 1356.5 1663 981.67 1137.1 1651.3 967.15 1007 1533.1 1116.9 1196.4 1280.9 927.35 1224.8
218 1.8083 1326.8 966.73 1403.7 1485.1 1096.7 1362.3 1517.1 1173 100.31 1306.5 1060.8 1551.7 1288.3 1059.7 961.81 1114.4 1229 1143.8 1401.7 1119.8 1635 1389.5 1309.3 1002.6 1207.6 1246.2 1058.2 1454.5 1180.3 895.78 1487 1093.8 31.995 1453.5 1188.6 442.93 1599.3 1257.4 25.206 1458.8 1200 311.8 1303.5 1149.4 1428.2 1439.8 1231.4 696.6 1688.6 1234.6 49.648 1184.4 1200.4 1322 1356 1283.7 1113.6 1277.6 1288.5 1011.6 1079.7 627.51 436.73 1351.1 877.45 1303.5 1370.7 930.24 1539.8 1370.8 902.66 967.49 1578.2 769.92 1112.4 1489.9 1023.6 1644.8 1707.2 1061.9 1024.1 1668.1 911.91 1053.9 1569.2 936.77 884.07 979.46 673.26 453.36 1399.6 769.67 510.4 1063.1 554.19 321.31 1221.1 677.33 482.81 1480.4 885.94 1411.8 1490.8 822.03 607.91 1088.1 569.46 227.87 1528.3 823.3 1345.7 1685.4 989.31 1114.1 1672.2 967.92 984.98 1553.9 1131.7 1188.7 1300 941.86 1216.2
219 1.8167 1345.8 982.86 1394.8 1503.8 1111.3 1355.4 1524.7 1173.6 104.05 1326.8 1075.9 1546 1305.6 1073.2 956.12 1146.1 1247.2 1120.8 1421.8 1133.8 1630.4 1427.1 1328.9 996.86 1243.1 1265 1042.2 1473.1 1194.3 888.65 1488.6 1094.4 34.032 1478 1208 445.56 1600.4 1257.5 25.715 1480 1217.1 314.35 1321.5 1167.3 1416.4 1456.2 1247.5 693.37 1689.2 1239.3 48.46 1206.9 1220.5 1302.6 1388.6 1302.4 1105.7 1316.2 1307.4 1000 1109.8 644.57 435.63 1369 891.54 1294.8 1390.1 945.68 1533.7 1389.9 915.56 957.73 1591 781.21 1102.3 1509.9 1037.2 1639.7 1732.4 1060.8 996.09 1685.6 915.13 1035.5 1589.3 952.3 876.09 1009.5 689.55 453.11 1428.7 788.25 501.57 1093.9 569.72 322.41 1250.7 696.09 478.99 1501 899.35 1401.8 1516.6 839.6 599 1118 583.64 228.8 1546.5 837.48 1334 1706.5 995.93 1091.6 1690.7 967.15 964.18 1574.4 1146.5 1181.3 1319 956.12 1207.7
220 1.8250 1365.3 999.07 1387.7 1524.2 1126.7 1350.1 1526.9 1175.6 103.45 1347.2 1090.6 1540.9 1323.1 1084.8 951.37 1179.5 1266.2 1098.1 1441.7 1147.5 1626.6 1465.5 1348.2 992.19 1280.2 1284.5 1026.9 1490.5 1209.2 882.54 1489.1 1095.1 35.475 1500.3 1225.1 447.42 1600.4 1257.7 26.224 1497.7 1229.6 315.54 1339 1184.9 1404.6 1475.9 1261.8 692.61 1690.2 1240.3 47.526 1230.2 1240.5 1282.8 1422.6 1319.7 1098.4 1355.8 1327.1 988.88 1141.6 661.97 433.08 1386.5 906.73 1287.3 1410.1 960.2 1528.3 1408.8 927.44 948.06 1603.5 792.41 1092.3 1529.1 1050.2 1635.2 1756.7 1057.3 969.53 1703 918.61 1019.3 1609.1 967.66 868.79 1040.7 705.85 451.84 1458.4 807.86 493 1126.4 587.46 321.39 1281.2 715.1 473.99 1520.9 913.01 1392.3 1542.6 858.18 590.77 1150.1 600.7 227.45 1565 852.41 1323.3 1726.4 1002.2 1070.4 1709.5 964.95 944.92 1594.8 1161.2 1174.8 1337.9 970.97 1200.1
221 1.8333 1385 1015 1382.8 1544.9 1142.3 1345.2 1528.2 1177.5 101.84 1367.6 1105.6 1536.8 1341 1095.4 947.13 1215 1286.3 1076.6 1461.8 1161.3 1623.4 1505.5 1367.6 988.88 1319.4 1303.9 1012.2 1507.5 1225.2 878.89 1489.6 1095.1 36.578 1518.5 1237 446.66 1600.4 1257.6 26.733 1510 1238 314.27 1356.8 1202 1394.2 1498.1 1275.9 692.44 1691 1240.3 47.187 1254.2 1260.3 1262.7 1457.7 1335.4 1091.4 1395.9 1347.6 978.95 1174.7 680.47 429.09 1404.1 922.94 1282.3 1430.1 974.79 1523.6 1428.3 938.38 938.3 1615.8 803.87 1083.1 1548.3 1063.7 1631.7 1779.8 1052.1 945.17 1719.5 922.85 1003.5 1628.5 982.77 861.75 1073.1 722.23 449.12 1489.2 828.48 484.77 1160.7 606.55 318.08 1313.6 734.53 467.54 1540.9 927.44 1384.6 1569 877.62 583.38 1183.5 620.38 223.71 1583.6 867.43 1314.9 1744.7 1009.3 1049.6 1728.5 961.72 927.78 1614.8 1175.4 1168.9 1355.8 986.25 1194.3
222 1.8417 1404.6 1030.4 1379.4 1565.7 1157.4 1340.9 1530.3 1178.2 100.65 1388 1119.7 1532.7 1358.9 1106.3 944.07 1252.7 1306.9 1056.4 1482.2 1175.3 1620.4 1546 1386.9 986.59 1359.8 1323.3 999.07 1524.7 1240.1 878.81 1489.7 1094.7 37.172 1532 1244.2 443.18 1600.4 1257.5 26.733 1519.1 1244.1 311.47 1374.5 1217.3 1385.3 1517.9 1289.7 690.91 1691.2 1240.3 47.102 1280 1279.7 1243.4 1494.7 1350.2 1085.3 1436.9 1368 971.4 1209 699.57 423.24 1422.3 939.15 1280 1450.1 989.14 1519 1446.8 948.74 930.41 1628.4 816.34 1076.9 1568.3 1077.6 1628.4 1801.7 1047.9 923.02 1735.1 927.44 988.54 1647.5 997.62 855.72 1106.6 739.03 445.13 1520.7 850.8 477.04 1196.1 627.09 312.57 1347.4 754.65 460.32 1561.2 942.63 1379 1596.2 897.99 577.19 1217.9 642.79 217.94 1602.1 882.63 1308.3 1761.3 1017.1 1029.1 1746.6 957.23 913.01 1634.3 1189.3 1163.2 1373.1 1000.7 1190.7
223 1.8500 1424.2 1045.7 1377.5 1586 1172.1 1337.6 1532.2 1178.9 98.277 1408 1133.6 1529.3 1376.6 1118 942.46 1292.3 1327.2 1039.3 1502.7 1189.2 1617.9 1586.7 1405.7 985.32 1401.3 1342.4 988.03 1541.5 1252.8 880.59 1489 1094.4 37.681 1542.2 1249.9 438.77 1600.6 1257.4 27.158 1526.4 1248.9 307.82 1392.3 1231.7 1378.5 1534.8 1302.4 687.35 1691.2 1240 46.762 1307.6 1298 1227 1533.2 1364.3 1081.3 1479.6 1388.1 966.73 1244.1 719.51 415.43 1441 954.76 1279.4 1470.6 1003.3 1513.9 1464 960.2 926.76 1641.3 829.41 1073.5 1588.2 1090.8 1625.6 1821.1 1040.7 905.88 1749.5 932.19 976.91 1666.4 1012.2 851.73 1140.7 756.43 439.19 1552.9 874.56 470 1232.4 649.32 304.51 1382.2 775.78 452.52 1581.9 958.5 1376 1623.4 918.86 572.01 1253.4 667.91 210.22 1620.6 897.05 1304.2 1775.7 1025.8 1009.8 1763.4 951.88 901.47 1655 1203.9 1159.9 1390.7 1013.9 1189.9
224 1.8583 1442.5 1060.2 1377.2 1605.4 1186.6 1335 1533.8 1179.7 97.089 1428.2 1147.8 1527 1394.5 1130.9 941.44 1334 1348 1026.8 1523.1 1202.7 1616.2 1628 1423.4 986.76 1444.1 1361.8 980.82 1558.2 1264.2 881.78 1488.7 1094.1 38.445 1550.9 1256 434.86 1600.9 1257.4 27.412 1532.4 1252.8 304.17 1410.3 1246.1 1373.6 1548.5 1313 681.91 1691 1239.9 46.677 1335.3 1313.9 1214.4 1572.6 1377.5 1079.4 1523.1 1407.9 964.61 1280 740.47 406.26 1459.2 969.19 1280 1491.4 1017.6 1510.8 1481.5 973.6 925.48 1654.7 841.55 1072.4 1608.8 1104 1623.9 1839.5 1034.5 891.2 1762.7 936.6 968.94 1684.6 1026.8 849.44 1175.8 774.5 431.89 1585.4 899.09 463.89 1269.5 673.43 294.24 1417.5 797.84 444.11 1601.8 974.2 1374.8 1650.3 940.25 567.77 1290.2 695.66 200.54 1639.4 911.31 1302.7 1788.3 1034.6 992.45 1778.7 947.55 893.4 1674.6 1217.9 1157.4 1409.1 1027.2 1189.8
225 1.8667 1461 1073.7 1377.7 1624.2 1200.6 1334 1535.1 1180.2 98.022 1448.2 1162.8 1526.1 1413 1145.5 940.93 1376 1367.7 1018.8 1543.5 1216.1 1615.7 1669.6 1439.4 990.92 1487.5 1380.5 977.51 1576.2 1276.4 881.1 1488.7 1093.8 39.124 1558.8 1261.8 431.64 1601 1257.4 27.667 1537.8 1256.4 301.45 1429.1 1261 1370.7 1560.1 1322 676.31 1691 1239.9 46.677 1362.3 1327.7 1205.8 1612.7 1390.1 1079.7 1566.4 1426.5 965.12 1316 762.45 395.91 1476.9 981.84 1281.5 1511.4 1032.5 1509.1 1499.3 986.93 924.72 1667.9 852.75 1073.6 1629.4 1117.1 1622.7 1853.8 1027.4 883.14 1774.8 940.42 963.93 1702.4 1042 850.72 1211.2 793.09 422.9 1618.2 924.81 458.88 1307.6 698.63 282.02 1453 821.01 435.2 1620.5 988.8 1374.8 1677.8 962.06 564.71 1328.6 725.28 189.34 1658 925.48 1303.8 1800.6 1041.6 978.44 1791.8 945 888.48 1692.9 1231.4 1155.9 1427.4 1040.6 1189.3
226 1.8750 1479.6 1088.3 1378.1 1641.9 1214 1333.5 1535.5 1180.8 98.786 1468 1177.7 1527 1431.6 1160.3 941.35 1416.9 1385.1 1014.8 1563.6 1229.1 1617.1 1710.2 1455.2 998.13 1529.4 1396.6 977 1593 1289.8 877.79 1488.5 1093.9 40.482 1566 1267.5 429.94 1601 1257.3 27.412 1542.5 1259.9 300.09 1448.5 1276.6 1369.4 1571.4 1331.6 672.58 1690.8 1239.9 46.762 1389 1340.4 1200.6 1652.7 1402.1 1081.6 1608.2 1442.9 968.6 1352 785.11 384.54 1495.4 993.89 1283.5 1530.2 1046.8 1508.6 1515.7 999.07 925.82 1680.7 862.68 1077.1 1649.3 1129.8 1622.2 1866.1 1020.5 876.6 1786 943.31 961.3 1720.3 1055.1 850.89 1246.1 812.61 411.44 1650.9 950.86 455.57 1346.6 725.62 268.27 1488.1 844.61 426.38 1638.8 1002.6 1375.8 1705.7 984.3 563.78 1368.3 757.36 176.61 1675.5 938.72 1306.3 1815 1044.9 969.79 1801.4 944.75 885.51 1710.8 1244.3 1155.1 1445.3 1054 1190.2
227 1.8833 1497.7 1103.1 1379.4 1660 1227.3 1334.4 1535.5 1181.5 98.786 1487.2 1191.7 1529.9 1449.6 1174.8 943.22 1455.9 1400.5 1014.9 1584 1242.1 1619.4 1749.5 1471.5 1008.4 1569.5 1410.3 979.12 1609.9 1303.9 874.05 1488.2 1094.3 42.349 1573 1273.1 429.26 1601 1257.1 27.158 1547.1 1264.3 299.92 1468.6 1291.8 1370.7 1582.9 1341.2 670.46 1690.7 1239.8 46.847 1415.9 1353.8 1198.7 1691.2 1414.1 1086.1 1647.5 1457.6 975.05 1388.5 808.79 371.81 1515.6 1007.9 1285.2 1548.4 1060.6 1509.7 1532.2 1011.6 928.29 1693.2 870.91 1082.1 1668.9 1142.6 1622.4 1876 1014.3 871.85 1796.6 945.68 960.2 1738.7 1068.2 851.82 1281.2 833.49 397.61 1682.8 977.08 454.13 1386.4 755.58 252.14 1522.8 868.88 418.31 1656.2 1016.1 1378.1 1733.1 1006.6 564.46 1409.1 791.56 163.37 1692.2 951.54 1309.8 1832.1 1044 967.32 1806.9 946.87 882.97 1729 1256.6 1156.4 1463.8 1067.2 1192
228 1.8917 1515.6 1117.5 1381.7 1680.6 1241.2 1337.3 1535.7 1182.4 99.38 1506.2 1205.3 1533.6 1467 1188.6 945.94 1493 1415 1017.8 1604 1255.3 1622.4 1787 1485.4 1019 1607.7 1423.6 984.55 1632.7 1318.8 871.93 1488.1 1094.7 44.471 1579.5 1278 429.01 1600.6 1256.9 26.733 1551.2 1268.5 300.69 1489.1 1306.4 1373.8 1595.2 1350.7 669.78 1690.1 1239.6 46.677 1442.5 1370.4 1200.5 1727.5 1426.4 1092.9 1685.9 1471 981.41 1424.9 833.83 357.8 1531.4 1022.7 1287.3 1566.9 1074.3 1511.4 1549.7 1026.5 930.15 1705.3 877.36 1088.4 1688.8 1155.6 1623.9 1884.7 1009.9 869.9 1806.8 947.04 961.13 1757.3 1082.4 854.03 1316.4 855.64 381.31 1713.4 1003.1 454.38 1426.9 787.66 235.17 1557.3 893.91 410.93 1672.7 1029.7 1381.1 1760.6 1028.9 566.41 1451.2 827.97 149.54 1709 963.93 1314 1850.7 1039.7 969.53 1809.6 951.62 881.35 1747.5 1268.9 1158.4 1483 1081.5 1194
229 1.9000 1533.7 1131.6 1384.6 1700.6 1254.2 1341.1 1536.4 1183.1 101.67 1524.9 1219 1537.5 1484.7 1201.2 949.67 1527.5 1428.2 1022.5 1623.4 1268 1625.4 1822.7 1495.3 1029.1 1643.8 1436.1 992.28 1642.9 1332.6 868.03 1488.2 1095.1 48.035 1585 1282.8 429.52 1600.2 1256.1 26.309 1554.4 1271.2 302.3 1509.7 1321 1378.9 1607.8 1360.3 670.54 1689.6 1239.2 46.338 1470 1389.5 1205.9 1761.8 1438.3 1102.5 1723.8 1481.6 987.35 1461 860.56 342.27 1548.2 1036.9 1290.1 1585.5 1087.8 1513.5 1568.3 1041.7 932.36 1716.9 882.8 1096.1 1708.2 1168.6 1626.2 1893.7 1008.5 871.42 1816.1 948.14 963.84 1775.7 1097 857.17 1351.4 879.23 362.81 1743.2 1029.2 456.08 1468.3 820.08 218.53 1591.6 919.97 403.38 1690.2 1043.2 1384.7 1787.6 1050.9 569.97 1495.3 865.99 135.28 1725.6 975.3 1319.3 1868.5 1034.2 974.37 1812.5 958.41 882.71 1766.4 1281.2 1161.5 1502.3 1096.2 1196.3
230 1.9083 1551.6 1145.1 1388.4 1719.1 1265.6 1344.1 1537.5 1184.4 105.24 1543.6 1231.4 1541.8 1502.2 1213.4 954.25 1559.7 1441.7 1028.9 1642.6 1280.7 1629 1856.6 1502.5 1040.1 1677.3 1446.8 1001.7 1660.2 1345.8 872.7 1488.6 1096.1 53.806 1590.3 1288.3 431.98 1599.9 1255.8 26.224 1557.4 1273.4 304.51 1530.3 1335 1385.5 1621 1369.9 671.98 1689.2 1239 46.253 1497.7 1408.5 1212.8 1793.8 1449.3 1113.7 1759.7 1491.4 996.09 1497.2 889.33 325.55 1565.7 1051.3 1293.6 1604.2 1101.2 1517.5 1588 1055.3 934.82 1728.6 888.23 1104.7 1726.9 1180.8 1629.1 1903.2 1010.4 876.09 1825.5 950.18 969.02 1794.6 1112.3 860.39 1387.1 904.95 342.7 1773.1 1055.4 458.97 1510.3 854.28 200.88 1624.7 947.04 395.06 1708.6 1055.9 1388.6 1814.2 1073.2 574.81 1541 905.29 121.53 1741.8 986.17 1325.3 1883.8 1031.2 980.99 1818.1 965.71 887.46 1785.6 1293.6 1165.4 1520.4 1110.5 1199.7
231 1.9167 1570.3 1158.9 1392.2 1738.2 1276.8 1348 1538.3 1186.4 110.24 1562.6 1243.6 1546.6 1519.6 1225.7 958.75 1590 1457.4 1038 1661.8 1293.4 1633.1 1887.5 1510.7 1053.8 1708.1 1457.9 1012.2 1677 1358.5 878.81 1488.8 1097.6 61.02 1596.1 1294.2 435.71 1599.5 1256 26.139 1560.3 1276.6 308.75 1551.5 1348.9 1393.1 1634.1 1379.4 673.94 1689 1239 46.338 1525.1 1427 1220.9 1822.7 1459.2 1125.4 1792.1 1503.4 1008.6 1533.7 919.03 309.17 1584.2 1065.7 1298.6 1622.8 1113.6 1523.2 1608.6 1068.3 938.3 1740.3 894.08 1114.2 1745.1 1192.5 1632.9 1914.3 1015.2 883.48 1836.3 954.17 977 1811.5 1126.5 863.11 1423.2 932.78 321.06 1802.3 1081 462.78 1553.5 890.43 183.14 1656.7 975.22 386.74 1726.1 1067.6 1393.1 1841.2 1095.2 580.41 1588.1 945.68 108.97 1757.4 996.6 1332.3 1896.9 1032.9 989.05 1827.3 972.16 896.04 1805.6 1306 1170.1 1536.2 1126.7 1204.7
232 1.9250 1589.6 1173 1396.7 1759.5 1288.9 1354.2 1539.3 1189.7 116.27 1581.7 1256.1 1552.1 1538.2 1238 961.89 1618 1474.4 1048.6 1681.1 1305.9 1637.9 1916.2 1519.3 1069.2 1736.7 1471.4 1024 1693.6 1371.6 885.6 1489.5 1100.3 69.761 1602.6 1300.5 440.46 1598.7 1256 25.885 1563.4 1280.7 314.78 1573 1362.4 1401.6 1647.4 1389.5 676.91 1688.9 1239.6 46.508 1551.1 1444.1 1230.3 1849.4 1467.3 1137.4 1820.3 1516.1 1022.1 1570.7 949.42 293.3 1603.5 1081.3 1304.4 1641.9 1125.8 1529 1628.6 1081.6 942.8 1751.9 900.79 1124.7 1763.7 1204.4 1637.7 1927.4 1022 893.91 1848.3 960.03 987.52 1830.8 1143.2 869.39 1459.9 961.81 298.74 1830.6 1105.8 467.54 1598.1 928.29 166.26 1689.3 1003.2 378.85 1743 1079.2 1398.1 1867.8 1117.5 587.12 1636.4 987.1 97.513 1772.9 1007.1 1340.4 1908.1 1039.4 998.98 1840.7 977.93 907.32 1825.8 1317.9 1175.4 1537 1140.4 1219.1
233 1.9333 1608.4 1186.4 1401.8 1779.3 1300.3 1359.9 1540.7 1193.8 123.14 1600.6 1268.4 1557.2 1556.5 1250.7 962.49 1644.1 1492 1060.2 1700.1 1317.8 1642.7 1943.9 1526.9 1084.9 1763.1 1487.5 1036.6 1710.2 1385.3 891.71 1491 1103.8 79.946 1609.5 1307.4 446.32 1598.6 1255.8 25.97 1567.4 1286.3 321.48 1594.8 1375.5 1410.6 1660.6 1399.8 680.64 1688.8 1240.3 46.338 1575.3 1460 1240.3 1874.8 1473.4 1149.5 1846.9 1528.5 1036.4 1608.6 980.9 275.99 1623.9 1093.4 1306 1661.1 1137.7 1534.1 1648 1094.3 947.64 1763.7 908 1135.1 1782.8 1216.6 1643.1 1941.7 1030.2 906.3 1861.4 967.49 999.49 1848.6 1159.2 874.73 1497.5 992.7 275.48 1858.1 1130 472.8 1644.2 967.58 150.13 1721.5 1030.4 371.55 1760.4 1091 1404 1893.2 1139.8 595.52 1685.9 1029.9 86.99 1788.3 1017.9 1348.7 1918.9 1049.7 1010.7 1856.2 982.94 918.7 1846.2 1329.5 1181 1572.4 1155.6 1214.2
234 1.9417 1627.1 1199.5 1406.9 1800.2 1311.9 1366 1542.7 1199.2 131.04 1619.5 1279 1561.7 1574.9 1263.1 973.18 1668.8 1510.3 1072.8 1719 1329.1 1647.9 1970.5 1533.3 1100.1 1787.3 1503.9 1048.9 1727.2 1399.2 896.37 1493.3 1107.6 91.233 1617.2 1315.7 452.69 1599 1255.7 26.309 1572.6 1293.1 328.61 1616.5 1388.1 1419.6 1674 1410.3 684.71 1688.9 1241.4 46.508 1598.2 1475.6 1251.2 1898.7 1478.9 1160.4 1872.4 1541.4 1051 1647.6 1013.3 258.17 1641.9 1107.9 1310.1 1679.8 1148.8 1539.9 1668.1 1106.3 952.13 1775.9 915.98 1145.7 1801.7 1228.5 1648.9 1956.5 1038.9 919.29 1875.1 975.56 1011.9 1865.8 1174.3 880.17 1536.2 1025.6 251.97 1884.7 1153.7 477.89 1691.3 1008 134.52 1753.1 1057.9 364.51 1778.2 1102.7 1410.3 1917.9 1161.8 604.26 1736.2 1073.7 77.23 1803.3 1028.8 1357.5 1929.6 1060.9 1022.7 1872.1 987.86 929.56 1866.2 1341 1187.4 1591.7 1170.2 1220.6
235 1.9500 1646.1 1213 1412.1 1822 1323 1372.7 1545.4 1205.9 139.1 1638.5 1289.9 1566 1593 1276.5 980.05 1692 1529.1 1085.6 1738 1340.2 1653.1 1995.3 1539 1113.7 1809.8 1520.4 1060.8 1744.7 1413.1 899.94 1496 1113.2 103.28 1626.9 1326 458.63 1599.4 1256 27.582 1579.1 1300.9 336.42 1638 1400.4 1428.8 1688.1 1421.5 689.21 1689.4 1242.6 46.508 1620.8 1490.9 1262.2 1920.8 1484.8 1170.5 1896.5 1554.5 1064.9 1688.1 1047.2 240.77 1656.2 1124.1 1318.1 1698.8 1159.2 1546 1687.9 1118.9 957.56 1788.4 924.72 1155.9 1820.2 1239.1 1654.1 1971.1 1047.4 931.51 1888.7 983.7 1023.5 1882.7 1187.3 884.49 1576.8 1060.3 228.72 1910.7 1176.8 483.24 1738.4 1049.3 119.41 1785.5 1086.3 356.61 1796.4 1114.8 1416.8 1942 1183.1 613.34 1786.8 1118 69.337 1818.7 1040.1 1366.1 1940.8 1071 1033.9 1887.9 994.23 940.59 1886.2 1352.3 1193.4 1610.5 1182.7 1228.1
236 1.9583 1665.4 1225.8 1417.4 1841.5 1332.8 1378.2 1548.9 1213.4 146.99 1657.4 1302 1570.7 1611.4 1290.8 985.66 1713.9 1547.4 1098.4 1756.9 1351.4 1658.3 2018.9 1544.9 1124.8 1831.4 1536.6 1072.1 1762.7 1427.1 903.93 1499 1120.8 116.1 1637.4 1336.8 464.4 1600.2 1256.6 29.11 1586.9 1309.8 344.31 1659.7 1412.3 1437.3 1703 1433.7 693.96 1689.8 1243.7 46.338 1642.7 1505.6 1272.8 1941.8 1491.6 1179.1 1919.5 1567.7 1077.4 1729.1 1082.1 224.05 1668.8 1143.3 1329.5 1718.1 1170.1 1550.5 1706.9 1131.7 963.42 1800.9 934.48 1165.5 1838.7 1249.8 1659.4 1985.5 1055.5 942.46 1901.9 992.28 1034.2 1901.8 1201.5 890.69 1619 1095.9 205.63 1935.8 1198.5 488.25 1786 1091.3 105.49 1818.4 1115.1 347.96 1814.8 1127.4 1423 1965.4 1203.4 622.25 1837.6 1162.5 63.651 1834.5 1051.7 1374.6 1953.6 1079.9 1044.6 1902.9 1002.4 951.54 1906.5 1363 1199.4 1628.9 1195.2 1235.8
237 1.9667 1684.5 1238.5 1422.7 1862.8 1343.5 1384.1 1553.1 1221.8 154.97 1676.6 1313.4 1575.1 1629.6 1305 991.68 1735.8 1565.9 1110.8 1775.7 1361.9 1663.1 2041.8 1551.6 1133.2 1852 1552.1 1082.7 1780.5 1441.4 908.77 1502.2 1129.3 128.49 1648.6 1348 470.34 1601.4 1257.8 30.383 1595.3 1320.3 352.2 1681.8 1424.1 1445.7 1718.3 1446.1 698.55 1689.9 1245.3 46.253 1664.4 1520.2 1283.3 1962.1 1500 1186.6 1941.9 1580.4 1088.2 1770.9 1117.9 208.01 1678.9 1154.6 1336.7 1737 1181.7 1554.1 1726.6 1146 968.85 1813.1 945.6 1174.5 1857.7 1260.2 1664.9 1999.8 1063.4 951.96 1915.1 1001.4 1044.1 1924.3 1218.9 901.72 1662.1 1132.2 183.23 1960.2 1219.1 492.06 1834.3 1133.2 93.015 1852 1143.8 339.13 1832 1139.4 1428.9 1987.7 1222.7 630.65 1889.3 1207.8 59.153 1851.2 1063.4 1382.6 1968.3 1087.6 1055.1 1916.9 1011.9 961.38 1926.4 1373.8 1205.5 1647.6 1215.6 1238.4
238 1.9750 1703.6 1251.4 1428.3 1882 1353.7 1388.9 1556.8 1231.1 163.03 1696.3 1323.5 1579.1 1647.8 1319.2 996.43 1757.9 1584.6 1122.6 1795 1372.3 1667.1 2063.6 1560 1139.2 1872 1567.3 1091.1 1798.5 1455 914.54 1505.1 1138.9 140.97 1660.6 1359.5 476.11 1602.3 1259.9 31.656 1603.8 1332 359.76 1703.8 1435.4 1453.5 1733.8 1458.6 702.79 1690.2 1248.3 46.592 1685.8 1534.5 1293.6 1982.2 1509.9 1193.1 1963.2 1593.2 1096.7 1813.7 1154.5 192.74 1696.3 1167.7 1341.8 1755.7 1192.6 1557.8 1745.6 1161.1 974.11 1825.1 956.63 1182.6 1876.2 1270.6 1668.8 2014.6 1071.2 960.87 1928.3 1010.7 1052.7 1942 1233.6 907.24 1706.8 1169.3 161.93 1984.2 1238.8 495.2 1883.7 1175.9 81.982 1886.3 1172.5 330.73 1848.3 1151.2 1434.2 2009.1 1240.8 638.04 1940.6 1253.3 55.164 1868.5 1075.3 1389.5 1984.5 1094.6 1064.9 1930.2 1022.3 969.53 1946.1 1384.3 1211.2 1662.6 1228.6 1245
239 1.9833 1722.9 1263.7 1433.5 1901.8 1364.4 1394 1560.2 1241.3 171.43 1715.4 1333.8 1582.4 1667.6 1333.3 1000.5 1778.5 1601.5 1132.3 1814.1 1382.7 1670.9 2084.2 1569.2 1142.6 1891.7 1581.9 1098.4 1817.2 1468.6 921.16 1507.9 1149 153.53 1672.3 1371.8 481.63 1603.8 1263.3 33.947 1612.6 1343.9 367.14 1725.2 1446.4 1460.7 1749.8 1471.6 706.95 1691.2 1253.3 48.12 1706.9 1548.9 1303.6 2002.3 1520.8 1198.3 1983.4 1606.1 1103.1 1857.8 1191.5 178.56 1718.7 1181 1345.3 1774.5 1202.3 1561.6 1763.7 1174.9 978.7 1836.9 967.75 1190.2 1894.6 1280.6 1672.2 2030 1079 969.02 1941.8 1020.1 1060.4 1961 1248.3 912.58 1753.2 1207.7 142.58 2007.6 1257.4 498.26 1934.4 1220.6 73.326 1921.4 1201 322.16 1865.4 1162.9 1439.1 2029.6 1257.8 644.83 1991.1 1298.6 51.515 1885.3 1087.1 1395.7 2001.4 1101 1073.7 1943.1 1033.1 976.66 1966.2 1395 1216.6 1692.4 1242 1245.8
240 1.9917 1741.9 1275.8 1437.7 1921.9 1375.3 1398.4 1563.9 1251.9 179.92 1734.2 1344.6 1585 1688.3 1347.2 1004.1 1797.8 1616.7 1139.9 1833.1 1392.6 1673.7 2104 1579.1 1143.8 1911.1 1596.4 1104.6 1836.3 1482.2 927.35 1510.9 1159.5 165.83 1684 1384.5 486.8 1605.2 1267.6 37.596 1621.2 1355.6 374.1 1746.6 1457.4 1466.6 1766.4 1485.3 711.02 1692.2 1259.9 49.648 1728 1563 1312.4 2022.1 1532.6 1202 2002.5 1619.1 1107.6 1903.1 1228.4 165.41 1742.1 1194 1348.1 1792.8 1212.1 1565 1782.7 1188.3 982.35 1848.6 979.38 1196.3 1913.3 1290.2 1675 2045.7 1087.2 976.58 1955.6 1029.9 1067 1980.6 1262.7 917.76 1800.4 1246.6 125.94 2030 1274.5 499.96 1985 1265 64.839 1956.8 1229.1 313.42 1883.8 1175 1443.5 2049.2 1274.1 650.51 2041.6 1343.7 49.223 1901.6 1098.5 1401.2 2018.5 1107.2 1082 1956.7 1044.8 983.19 1986.2 1407.4 1220.9 1708.1 1256.6 1252.8
241 2.0000 1760.6 1288.4 1440.5 1941.5 1386.1 1401.8 1568.4 1262.4 188.49 1753.1 1355.6 1586.9 1708.6 1361.3 1006.9 1816.6 1631.8 1146.4 1852 1402.3 1676 2123.7 1590 1143.3 1929.2 1611.1 1109.1 1855.6 1496.1 931.6 1514.8 1170.5 177.2 1696 1397.4 491.55 1606.2 1274 42.773 1631 1367.4 380.63 1767.6 1468.5 1471.2 1783.2 1498.4 714.08 1692.7 1267.6 50.581 1749 1576.5 1320 2041.7 1544.7 1203.6 2021 1632.4 1111.2 1949.1 1265 153.78 1766.5 1207.8 1350.1 1811.3 1222 1566.7 1802 1203.1 985.83 1860.9 991.85 1201.1 1932.1 1299.8 1677 2062.3 1096.1 982.94 1970.1 1040.2 1072.6 2000.1 1277.1 922.6 1848.3 1285.7 111.69 2051.7 1290.6 500.3 2035.3 1307.9 56.607 1991.9 1256.6 305.61 1902.2 1187.4 1446.7 2067.9 1289 654.5 2091.9 1388 47.271 1917.2 1110.4 1406 2036 1113.6 1089.4 1971.2 1057.3 988.97 2006.4 1419.2 1224.6 1729.3 1269.7 1255.8
242 2.0083 1778.9 1300.3 1442.8 1961 1397.1 1404.2 1573.5 1273.3 196.98 1772.1 1366 1588.1 1728.1 1375.7 1009.8 1834.9 1646.7 1152.3 1871 1412 1677.3 2142 1601.5 1141.2 1946.9 1626 1112.9 1875.2 1510.2 933.55 1520.3 1180.7 187.73 1708.6 1410.3 495.97 1607.1 1282.6 48.969 1641.9 1379.6 386.57 1788.1 1479.1 1475.2 1800.9 1510.8 716.12 1692.5 1278.3 52.279 1769.6 1589.7 1326.4 2060.4 1557.3 1203.6 2039 1646.7 1113.6 1995.2 1301.2 143.77 1780.4 1219.9 1352.2 1829.8 1231.9 1567.4 1820.6 1217.9 988.63 1873.7 1004.7 1205.1 1950.7 1309.3 1678.3 2079.8 1105.6 988.12 1985.2 1050.5 1077.1 2018.8 1291.1 925.57 1897.1 1324.1 100.14 2072.6 1305.8 499.36 2085.3 1350.2 49.648 2027.1 1283.5 298.57 1919.4 1199.4 1448.8 2086.3 1303 657.3 2141 1430.8 44.301 1932.3 1122.7 1409.7 2053.7 1120.9 1095.8 1987.3 1069.8 994.06 2026.3 1431.2 1227.8 1750.9 1280.9 1257.4
243 2.0167 1797.2 1311.5 1444.4 1980.6 1408.3 1405.8 1579.1 1283.7 205.21 1790.7 1376 1588.6 1747.3 1390.5 1012.7 1852.4 1660.4 1156.4 1889.8 1421.8 1678 2160.2 1613.9 1138.3 1963.8 1641 1115.5 1895.3 1524.1 934.4 1527.5 1190 197.57 1721.5 1423.5 499.79 1607.6 1293.6 56.013 1652.5 1391.6 391.92 1808.7 1490.1 1478.6 1819.1 1523.2 717.9 1692.2 1291.9 55.334 1790.1 1602.4 1331.3 2078.9 1570.2 1202.5 2056 1661.7 1114.7 2041.7 1336.1 136.04 1807.4 1233.6 1351.3 1848.6 1241.2 1567.4 1839.3 1232.2 990.92 1886.8 1017.7 1207.6 1969.3 1319.1 1678.9 2099.1 1115.8 992.87 2000.2 1060.2 1080.5 2036.8 1304.1 926.5 1946 1361.5 91.827 2092.2 1320.9 497.92 2134.3 1390.9 44.301 2062.3 1310 292.12 1936.6 1211 1450.1 2104.2 1315.8 658.32 2188.1 1472 41.585 1948 1134.9 1412.3 2072.3 1128.5 1101.8 2005.4 1082.2 998.13 2046.2 1443.6 1230.5 1779.8 1294.5 1252.3
244 2.0250 1815.6 1322.9 1445.2 2000.1 1419.3 1406.5 1585.7 1294.2 213.36 1809.5 1385.3 1588.3 1767 1405.8 1014.9 1869.1 1673.6 1159.5 1908.4 1431.5 1678.2 2179.3 1627.2 1134.6 1981.1 1656.5 1117.2 1915.5 1537.9 934.9 1536.1 1198.7 206.23 1735.6 1436.7 503.1 1608.2 1306.3 63.396 1663.5 1402.9 397.27 1829.3 1501.7 1481 1837.6 1536 718.92 1692.3 1307.5 58.983 1810.1 1615 1334.9 2097.3 1583.3 1200 2072.5 1677.2 1114.1 2087.4 1369.3 131.04 1816.5 1243.6 1352.2 1867.3 1251.6 1566.8 1858.8 1246.3 991.85 1900.4 1030.9 1208.4 1988.3 1328.8 1679.1 2120.2 1126.6 997.88 2018.9 1071.4 1084.4 2056.3 1316.9 927.1 1994.1 1396.7 86.65 2111.2 1336.6 495.46 2182.4 1429.2 40.482 2096.7 1335.4 286.6 1954.9 1222.9 1450.3 2121.8 1327.7 656.88 2234.1 1511.2 39.973 1964.9 1147.3 1413.5 2091.6 1136.9 1107.4 2025.5 1093.9 1001.4 2066 1456.2 1231.7 1780.4 1308.6 1262.1
245 2.0333 1834 1334.4 1445.3 2019.5 1430.6 1406.4 1593.5 1304.4 221.34 1828.2 1394.7 1587.1 1786.8 1420.3 1015.4 1886.1 1687.8 1162.8 1927 1440.8 1677.4 2197.1 1641.3 1128.8 1996.5 1670.8 1116.4 1935.7 1551.6 935.24 1545.5 1207.6 214.55 1749.9 1450.5 506.58 1610.9 1320 71.204 1675 1414.2 402.7 1850 1513.2 1482.6 1856.2 1549.6 718.92 1694 1323.4 62.802 1829.7 1627.5 1337.8 2115.4 1597.2 1195.8 2088.3 1692.6 1112.4 2131 1400.4 127.73 1834.8 1255.5 1352.4 1885.8 1262.2 1565.4 1878.6 1260.8 990.32 1915 1044.4 1207.5 2006.5 1338 1677.1 2142.9 1138 1001.8 2037.6 1082.6 1086.6 2078.8 1330.2 927.1 2039.8 1429.4 83.765 2129.6 1351.9 491.81 2228.9 1464.8 38.785 2129.5 1360.1 282.53 1973.3 1234.6 1449.6 2139.2 1339 653.48 2278.4 1547.5 38.191 1981.5 1159.1 1412.8 2111.4 1146.1 1111.9 2047.4 1105.6 1004.2 2085.1 1468.7 1232 1795.8 1320.8 1264
246 2.0417 1852.6 1346.5 1443.9 2038.8 1442.2 1405.8 1602.3 1314.8 229.06 1846.9 1404.9 1585.8 1806.8 1433.4 1014.5 1902.8 1701.7 1165 1945.9 1450.3 1676.1 2212 1655.9 1120.4 2011.8 1684.6 1115.8 1955.8 1565 934.9 1555.9 1217.8 223.54 1765.1 1464 509.63 1615.6 1333.4 79.436 1687.3 1425.8 408.13 1870.4 1524.7 1483 1875.2 1564.6 719.34 1698.3 1338.5 68.064 1849 1640 1339.7 2133.4 1611.9 1190.2 2103.4 1708.4 1109.1 2171.4 1429.8 126.28 1853.1 1267.1 1350.4 1904 1272.1 1563.4 1898.4 1275.1 986.93 1931.2 1057.8 1205.1 2024.6 1347.2 1675 2166.3 1149.7 1004.7 2057.3 1093.9 1087.8 2094.5 1342.9 922.94 2082.4 1460.1 82.322 2147.9 1366.8 487.14 2271.7 1497.1 37.851 2160.1 1384.3 279.3 1991.6 1245.9 1447.4 2156.7 1350 649.07 2319.8 1581.1 36.578 1998 1170.1 1410.3 2132.1 1155.8 1115.8 2070.9 1117.6 1006.1 2104 1481.3 1231.5 1815.9 1330.6 1260.9
247 2.0500 1871.3 1358.1 1441.1 2057.9 1453.8 1404.4 1612.6 1325.7 236.87 1865.4 1415.3 1583.6 1826.7 1446.9 1012.8 1919.2 1715.4 1165.8 1964.3 1459.6 1673.7 -9999.99 -9999.99 -9999.99 2026.7 1699.6 1113.7 1975.8 1578.4 934.48 1567 1229.4 232.03 1781.8 1478.2 511.67 1623.4 1346.6 88.772 1700.9 1438.1 412.97 1890.4 1536.6 1482.5 1894.3 1580.2 719.76 1705.4 1353.3 75.278 1868.3 1652.5 1340.4 2150.8 1627.3 1183.3 2118 1724.9 1104.6 2208.9 1457.3 126.37 1876.5 1278.4 1345.4 1921.9 1282.6 1559.6 1917.4 1288.8 982.86 1948.7 1070.9 1201.2 2043.1 1356.3 1672.2 2190.4 1161.7 1006.9 2078.7 1105.6 1088 2122.5 1358.2 920.65 2121.2 1488.7 82.237 2166.6 1381.9 482.13 2308.9 1526.1 36.918 2188.3 1407.1 276.41 2010.4 1257 1443.8 2174.7 1361.5 644.15 2357.5 1611.9 34.541 2016.3 1181.2 1406.2 2153.2 1165.5 1119.4 2095.3 1129.7 1006.7 2123.6 1494.4 1230.5 1835.4 1342.3 1255.5
248 2.0583 1890.4 1369.8 1437.5 2076.9 1465.2 1401.8 1624.6 1338 244.67 1883.8 1426.5 1580.1 1845.8 1461.3 1010.3 1934.5 1727.5 1164.6 1982.5 1468.8 1671.1 -9999.99 -9999.99 -9999.99 2041.2 1714.5 1109.9 1995.6 1592 933.55 1579.3 1241.3 239.92 1800.4 1494.7 513.37 1633.6 1360 98.107 1716.2 1452.1 417.55 1910.7 1548.8 1480.9 1913.3 1595.2 719.09 1714.2 1367.4 82.661 1887 1665.5 1340.1 2168 1643.3 1175.6 2132 1741.5 1099 2241.7 1482.8 126.71 1893.2 1289.9 1341.8 1940.6 1293.1 1554.5 1935.4 1302 979.29 1967.2 1083.7 1195 2061.7 1365.5 1668.8 2216.2 1174.4 1008 2101.4 1117.7 1086.4 2138.9 1370.7 915.72 2155.1 1516.8 85.123 2185.1 1397.1 477.38 2341.3 1552 36.663 2214.3 1428.8 274.12 2029.8 1268 1439 2194.1 1373.7 638.72 2391 1639.5 31.825 2036.4 1192.3 1400.9 2175.6 1175.9 1121.6 2120.9 1141.7 1005.5 2136.4 1504.9 1225.7 1854.5 1353.5 1251.6
249 2.0667 1909.8 1381.4 1433 2095.7 1476.6 1398.4 1638.5 1351.1 252.99 1901.9 1438.7 1575.8 1864.4 1475.2 1007.5 1950 1740.1 1162.9 2001.3 1478.1 1667.7 -9999.99 -9999.99 -9999.99 2054.9 1728.6 1104.6 2014.8 1605.7 931.93 1593 1253.3 248.92 1821.4 1512.9 513.37 1644.2 1374.2 107.36 1734 1468.5 421.79 1930.9 1560.8 1478.7 1932.3 1610.6 717.13 1723.4 1381.2 89.281 1905.7 1678.3 1339 2184.6 1660 1167.3 2145.3 1758 1092.3 2269.1 1506 126.28 1911 1300.8 1337.6 1959.3 1302.2 1550.1 1953.2 1316 974.88 1986.8 1096.8 1186.7 2080.3 1374.6 1664.6 2242.8 1187.1 1007.9 2124.7 1129.7 1083.1 2161.2 1385 910.72 2183.6 1543.5 89.111 2203.7 1412.7 472.88 2369.6 1575.1 36.154 2238.5 1449.5 271.92 2049.7 1278.9 1433.8 2214.4 1385.9 632.78 2419.1 1664.1 29.279 2057.4 1203 1394.5 2199.6 1187.3 1121.3 2147.6 1154 1002.6 2152.6 1516.5 1221.1 1874.1 1365.3 1247.4
250 2.0750 1929.3 1393 1428 2114.5 1488.1 1394.3 1654.3 1364.8 261.9 1920.3 1450.1 1571.5 1882.7 1488.2 1003.7 1964.9 1752.3 1159.8 2019.9 1487.2 1663.7 -9999.99 -9999.99 -9999.99 2068 1742.8 1098.2 2033.8 1619.2 929.22 1608.9 1266.1 259.61 1844.3 1531.7 511.5 1654.8 1388.4 116.52 1754.6 1485.6 424.51 1951.2 1572.7 1475.4 1952 1627.4 714.84 1733 1395.4 95.731 1924.6 1691 1336.8 2201.6 1677.5 1158 2157.6 1774.7 1084.4 2292.3 1526.5 125.35 1930 1311.5 1332.2 1977.8 1312.1 1545.1 1971.1 1330.2 969.11 2008.7 1110.1 1176.8 2098.7 1383.7 1660 2270.9 1200.4 1007.4 2149 1142.2 1080 2178.4 1397.9 904.61 2207.2 1565 91.403 2222.9 1428.8 469.15 2392.7 1595.3 34.966 2260.4 1468.6 269.54 2068.7 1289.2 1427.3 2234.3 1398.1 626.92 2442.2 1687.2 27.667 2078.3 1213.5 1386.7 2224.3 1199 1119.2 2174.2 1167.4 999.4 2171.9 1528.9 1216.9 1892.4 1377.4 1242.4
251 2.0833 1948.7 1404.2 1422.7 2132.9 1498.9 1389.7 1672.2 1379 270.47 1938.8 1460.4 1566.7 1899.9 1501.7 998.9 1979 1763.7 1155.5 2038.4 1496.3 1659.4 -9999.99 -9999.99 -9999.99 2080.7 1757.2 1091.1 2052.2 1632.8 925.99 1627.4 1279.7 270.98 1868.7 1551.6 508.36 1666.1 1402.4 125.77 1777.1 1503.3 425.87 1971.5 1585.2 1471.2 1972.7 1645.4 711.96 1743.9 1410.7 102.35 1943.6 1703.6 1334.2 2219.2 1695.5 1147.9 2169.2 1791.3 1075.4 2310.7 1544.2 123.99 1949.2 1322.2 1326.3 1995.3 1323.3 1537.9 1989 1343.5 963.42 2032.7 1123.1 1166 2116.9 1392.7 1655 2300.8 1214.6 1006.5 2176.9 1155.9 1075 2196.7 1411.4 898.92 2225 1581.6 90.639 2242.4 1444.5 465.92 2409.5 1611.6 33.777 2279.6 1486.2 266.65 2087.2 1299.3 1419.9 2253.3 1411.9 621.66 2460.8 1706 25.46 2099.3 1224.2 1378 2250 1211.3 1116.4 2202 1182 994.74 2192.9 1541.7 1212.9 1910.3 1388.4 1237.3
252 2.0917 1968.2 1415.4 1417 2151 1509.7 1384.6 1691.5 1394.3 278.79 1957.4 1470.7 1561.2 1917 1514.8 993.8 1992.4 1775.2 1150.1 2056.6 1505.5 1654.9 -9999.99 -9999.99 -9999.99 2093 1771.9 1083.3 2070.8 1647.1 922.85 1647.9 1294.6 282.02 1894.3 1573 504.62 1679.7 1417.3 134.6 1801.3 1522.8 426.38 1993 1598.3 1466.8 1994.4 1663.2 707.97 1756.7 1426.3 108.89 1962.5 1716.9 1331 2235 1713.7 1136.5 2180 1808.6 1065.5 2322.2 1558.7 121.28 1968.9 1332.1 1319.5 2010.9 1334.9 1528.2 2006.7 1357.5 957.23 2057.5 1136 1153.9 2134.9 1401.4 1649.4 2331.5 1229.1 1005.2 2204.4 1169.6 1067.8 2214.6 1424.6 893.66 2235.5 1595.4 86.141 2261.9 1460.2 462.95 2420.3 1622.8 32.929 2295.8 1502.4 263.85 2106 1309.3 1412.1 2270.9 1426.8 617.58 2474.2 1718.2 22.151 2120.4 1235.1 1367.6 2276.8 1224.3 1113 2231.6 1196.9 989.31 2214.3 1554.6 1208.8 1929 1398.8 1231.9
253 2.1000 1988.5 1427 1410.9 2169.5 1521.2 1379.4 1713 1410.9 286.6 1976.2 1481.3 1555.6 1936.1 1527.6 986.76 2006 1787 1144.3 2074.7 1514.7 1649.9 -9999.99 -9999.99 -9999.99 2104.8 1787.5 1075.2 2089.6 1661.2 918.7 1670.3 1310.7 292.29 1921.3 1595.3 500.89 1695.5 1433.2 143.09 1827.3 1543.4 426.8 2014.7 1611.9 1461.9 2017.1 1680.7 703.05 1771.3 1442.5 115 1981.5 1730.7 1327.2 2249.4 1732.7 1124.3 2189.9 1825.3 1054 2325.9 1568.3 116.78 1986.8 1342.5 1312.1 2025.1 1346.3 1520.8 2024.3 1371.2 950.18 2083.7 1149.3 1140.5 2152.8 1410.2 1643.9 2362 1243.4 1002.5 2232.9 1183.9 1059.8 2231.3 1437.2 889.25 2238.3 1604.3 78.503 2281.5 1475.8 461.26 2424.6 1628.1 32.504 2307.6 1515.7 260.71 2125.1 1319.4 1404 2287.5 1442.2 614.78 2482.1 1724.7 18.162 2141.5 1246 1356.3 2304.3 1238.1 1108.1 2262.9 1212 983.7 2234.8 1567.1 1204.3 1946.4 1410.1 1226.2
254 2.1083 2007.9 1438.4 1405.1 2189.2 1532.6 1374.4 1736.4 1429.2 294.07 1995.2 1492 1549.6 1955.1 1541.1 978.19 2020.3 1798.9 1138.6 2092.9 1524.1 1645.2 -9999.99 -9999.99 -9999.99 2116.2 1802.2 1066.8 2108.6 1674.4 913.52 1694.5 1328.3 301.79 1949.8 1617.8 497.41 1713.3 1451.4 152.59 1854.8 1563.9 427.31 2034.7 1625.2 1456 2040.5 1697.9 698.46 1788 1460.2 120.51 2000.2 1743.7 1321.9 2264.4 1752.4 1112 2199.6 1842.4 1042.1 2324.4 1573 112.03 2004.7 1353.4 1304.9 2037.3 1357 1516.1 2040.6 1383.9 943.56 2111.1 1163 1126.5 2170.4 1419.2 1638.4 2392.9 1257.1 999.07 2261.9 1198.3 1051.4 2246.8 1449.3 885.94 2236 1606.4 69.337 2300.7 1490.8 459.98 2424.8 1629.9 32.844 2315.2 1525.1 257.83 2144.7 1329.5 1395.7 2304.8 1457.6 612.75 2483.8 1724.4 21.387 2162.3 1256.6 1344.2 2332.3 1251.8 1102.3 2295.4 1227.4 977.25 2253.2 1578.9 1199.3 1962.5 1422.5 1220.1
255 2.1167 2025.1 1450.5 1399 2208.6 1543.9 1369.3 1761.4 1448.4 300.43 2013.7 1502.5 1543.2 1971.7 1554.9 970.29 2034.1 1810.3 1132.3 2111 1533.1 1640.7 -9999.99 -9999.99 -9999.99 2127.1 1816.9 1057.8 2127.6 1687.8 908 1720.3 1347.6 310.53 1980 1640.4 494.53 1734.2 1470.7 161.42 1883.7 1585 428.58 2053.7 1638.5 1449.5 2064.7 1714.5 693.88 1807.7 1479.1 125.35 2018.5 1755.3 1315 2278.2 1771.7 1097.9 2208.3 1860.6 1030.7 2323.9 1575.3 108.38 2023.1 1364.3 1298.1 2049.5 1367.4 1512.6 2055.5 1396.5 937.28 2139.4 1176.9 1111.9 2187.8 1428 1632.9 2424.1 1271.3 995.08 2291.9 1212.8 1042.1 2262.3 1460.6 883.14 2233.5 1604.9 61.36 2318 1504.4 458.37 2424.5 1630.3 33.014 2321.7 1530.9 255.11 2164.6 1339.9 1387 2322.1 1471.8 610.37 2484.3 1724.7 22.405 2182.8 1267.4 1331.6 2360.8 1266 1095.8 2327.5 1243.1 969.45 2269.6 1590 1193.6 1978 1434.9 1214.7
256 2.1250 2041.2 1463.5 1393.3 2226.8 1554.9 1364.1 1788.1 1468.6 305.86 2032.2 1513.3 1537.6 1987.4 1569 963.25 2047.6 1821.9 1125.8 2128.6 1541.8 1636.3 -9999.99 -9999.99 -9999.99 2137.6 1832.2 1048.3 2146.9 1701 902.74 1747.2 1368.5 318.25 2010.9 1662.6 492.06 1758.5 1491.3 167.87 1913.9 1607.1 429.6 2072.6 1651.9 1443.2 2089.4 1730.5 689.21 1830.9 1499.3 130.19 2036 1766.5 1307.2 2289.9 1791.6 1082.1 2216.2 1878.4 1018.9 2325.9 1576.9 105.41 2040.2 1375 1292 2063.8 1378 1508.8 2069.2 1409.4 931.26 2168.1 1191.2 1097.1 2205 1436 1627.9 2454.7 1285.8 989.73 2323.1 1229.3 1033.3 2278.6 1471.2 880.59 2231.9 1602.6 56.607 2333.2 1515.7 456.17 2424.8 1629.8 32.844 2329.9 1535.9 252.91 2183.8 1349.7 1378.2 2339 1485.2 608.33 2484 1724.9 22.32 2203.5 1278.7 1318.6 2389.5 1280.9 1089 2359.6 1261.1 961.81 2287.4 1601.1 1188.2 1993 1447.3 1209.5
257 2.1333 2057.8 1475.3 1388.9 2244.7 1565.3 1359 1816.5 1490.3 311.04 2050.3 1524.2 1532 2002.5 1583.1 956.8 2061.4 1833.4 1119.7 2146 1550.5 1632.3 -9999.99 -9999.99 -9999.99 2147.4 1846.8 1038.7 2166.2 1713.3 898.24 1774.8 1390.9 325.04 2042.4 1684.6 490.11 1785.3 1513.5 173.55 1945.1 1628.7 429.94 2091.2 1665.2 1437.2 2114.2 1746.3 686.16 1856.9 1520.2 134.6 2052.7 1777.3 1300.1 2301.4 1812.8 1066.8 2223 1896 1006.5 2327.3 1578.4 102.27 2056.8 1384.4 1285.4 2082.3 1389.2 1503.2 2082.1 1422.6 927.1 2197.6 1206.1 1083.3 2222.1 1444.8 1623.7 2485.8 1302 988.46 2354.2 1245.5 1024.6 2294.3 1482.2 876.35 2230.5 1602.1 53.467 2347.3 1525.4 453.7 2425.1 1629.3 32.759 2338.9 1541.5 249 2202.7 1359.4 1369.8 2355.3 1497.2 606.21 2483.6 1725 21.726 2224.3 1290.6 1305.9 2418.8 1296.1 1082 2391.9 1283.5 955.19 2306.2 1612.2 1183.8 2007.5 1460.6 1204.2
258 2.1417 2076 1486.1 1386.1 2262.1 1574.8 1358.1 1846.3 1513.2 314.86 2068.7 1535.2 1526.7 2017.1 1596.7 952.05 2074.7 1843.6 1114.1 2163.1 1559.4 1629 -9999.99 -9999.99 -9999.99 2156.4 1860.1 1029.4 2184.7 1725.8 894.85 1803 1414.6 329.88 2075.3 1706.4 488.84 1814.6 1536.4 178.22 1977.3 1650.7 429.86 2108.7 1677.9 1432.2 2138.9 1763 685.14 1885.5 1541.7 138.08 2068.9 1788.8 1294.8 2312.8 1834 1051.4 2228.4 1913.5 993.72 2327.5 1579.7 99.465 2070.4 1397.6 1283.9 2107.3 1401.3 1494.2 2093.9 1436.3 924.47 2227.9 1222.3 1070.9 2238.5 1453.3 1620 2517.6 1317.8 984.55 2386.3 1261.7 1016.3 2308.5 1492.7 871.76 2229.6 1603.3 50.666 2361.1 1533.6 450.9 2425.5 1629.6 33.353 2344.6 1546.6 241.19 2221.4 1369.5 1362.3 2369.8 1507.2 604 2483.4 1724.9 21.472 2245.3 1302.7 1294.9 2448.9 1311.5 1074.5 2424.5 1299.3 946.79 2323.4 1618 1179.5 2024.1 1473 1204.4
259 2.1500 2093.9 1496.1 1384.1 2280 1584.5 1357.5 1877 1536.4 316.56 2087 1546 1522.5 2031.5 1610.5 949.59 2087.9 1854 1109.8 2179.8 1567.8 1626.5 -9999.99 -9999.99 -9999.99 2164.3 1872.9 1020.6 2202.1 1737.2 895.1 1831.7 1438.3 332.26 2108.7 1727.3 488.33 1846 1560.6 181.28 2010.4 1673.2 430.03 2124.4 1689.6 1428.2 2163.5 1780.2 685.48 1917 1564.1 140.71 2084.8 1800 1290.8 2322.5 1856.6 1036.2 2232.1 1929.5 980.48 2327.4 1581.3 97.598 2073 1410.6 1289.6 2138.7 1414.5 1481.9 2105.9 1449.5 922.6 2259.4 1239.8 1060.2 2254.5 1461.1 1617.1 2548.3 1332.9 977.76 2418.3 1278.4 1008.6 2321.7 1501.7 869.73 2229.5 1604.9 48.46 2372.9 1540.3 448.19 2425.8 1630.6 34.796 2355 1552.5 245.35 2239.9 1380.3 1356.6 2382.3 1515.1 601.88 2483.4 1724.9 21.472 2265.6 1315.2 1285.8 2479.6 1327.6 1067.3 2456.5 1313.7 940.08 2336.1 1624.3 1177.3 2040 1485.4 1202.5
260 2.1583 2110.5 1505.6 1382.8 2297.5 1594.3 1356.5 1908.8 1559.4 316.81 2104.4 1556.3 1519.4 2046 1624.7 948.99 2101.4 1864.7 1107.5 2196.1 1576.3 1624.7 -9999.99 -9999.99 -9999.99 2170.9 1885.3 1012.4 2218.6 1744.6 900.36 1861 1462.8 332.85 2141.7 1747.4 487.57 1878.6 1586.2 182.98 2043.4 1696.3 430.7 2140.1 1700.2 1425.4 2187.7 1796.4 685.9 1951.2 1586.4 142.24 2100.1 1810.6 1288 2330.2 1879.1 1021.6 2233.8 1944.6 967.92 2327 1582.6 96.834 -9999.99 -9999.99 -9999.99 2155.1 1424.9 1478.1 2117.8 1460.7 922.17 2290.8 1257.9 1052.4 2270.6 1469.1 1614.4 2577.9 1347.2 969.87 2447.8 1295.3 999.91 2334.5 1509.3 869.39 2229.6 1606.7 47.526 2381.8 1545.5 445.56 2425.9 1631.2 35.645 2355.4 1553.4 243.74 2257.7 1391.2 1352.8 2392.4 1520.7 599.42 2483.4 1724.7 21.387 2286.6 1327.8 1279.1 2510.2 1344.4 1060.6 -9999.99 -9999.99 -9999.99 2349.3 1632.6 1175.4 -9999.99 -9999.99 -9999.99
261 2.1667 2125.6 1514.8 1382.3 2314.2 1603.7 1354.8 1941.1 1582.8 316.22 2120.7 1565.9 1517.5 2059.4 1639 949.93 2112.8 1874.7 1105.7 2212.2 1584.3 1623.9 2342.1 1964.4 920.82 2175.9 1898.8 1006.1 2231 1756.1 901.13 1890.6 1487.6 331.41 2174.5 1767.5 487.65 1913.7 1606.8 179.58 2076.1 1719 430.96 2156.4 1710.1 1424.1 2211.2 1810.7 686.16 1986.8 1608.7 142.83 2115.5 1821.1 1286.9 2335.6 1899.1 1006.2 2232.8 1958.8 956.29 2326.9 1583 96.41 -9999.99 -9999.99 -9999.99 2172.1 1434.2 1474.5 2128.6 1471.1 922.94 2320.8 1276.2 1047.4 2286.3 1477.1 1612.6 2608.2 1361.8 962.83 2478.1 1313.5 995.5 2346.3 1516.3 868.45 2229.6 1607.7 47.611 2388.2 1549.8 443.43 2425.9 1631.1 35.475 2356.4 1554.4 243.06 2274.9 1402 1350.4 2400.9 1525.2 597.22 2483.3 1724.3 20.878 2308.4 1341.1 1275.1 2540.6 1361.7 1054.7 -9999.99 -9999.99 -9999.99 2362.8 1640.8 1172.6 -9999.99 -9999.99 -9999.99
262 2.1750 2139.9 1524 1382.4 2329.9 1612.1 1352.7 1973.9 1607.2 314.78 2136.5 1575.3 1516.9 2072.3 1652.2 951.28 2122.7 1884.6 1105.2 2227.8 1592.3 1624.1 2335.1 1985.9 909.7 2179.5 1911.4 1002.1 2242.3 1766.6 904.1 1920.8 1512.2 328.18 2206.6 1787.5 489.52 1950.6 1630.6 176.1 2108.6 1740.4 430.96 2172.2 1718.8 1423.7 2234.8 1824.5 686.58 2024.1 1631.4 141.73 2131.4 1831.6 1288.3 2338.4 1918 991.43 2230 1971.4 946.28 2327.1 1583.3 96.325 -9999.99 -9999.99 -9999.99 2187.6 1443.9 1472.5 2138.1 1482.8 923.79 2349.1 1293.6 1044.6 2301.7 1484.8 1611.3 2637 1377.2 957.65 2507.8 1331.4 991.85 2357.1 1522.5 867.01 2229.5 1607.3 48.29 2393.9 1553.6 442.67 2425.9 1631 35.305 2358.1 1555.5 242.72 2291.4 1412.6 1349.8 2408.9 1529.7 595.6 2483.2 1724 20.453 2328.7 1354 1273 2570.4 1379.1 1050.1 -9999.99 -9999.99 -9999.99 2376.6 1648.5 1168.8 -9999.99 -9999.99 -9999.99
263 2.1833 2153.7 1533.3 1382.8 2345.8 1619.9 1351.4 2007.1 1631.9 311.55 2151.5 1584.2 1517.4 2086.5 1663.9 952.05 2134.2 1894.5 1107.9 2243 1600.1 1624.6 2326.6 2005.4 899.85 2182.1 1921.8 1000.3 2266.6 1772.1 898.84 1951.2 1537.9 323.18 2238.1 1805.5 493.59 1990 1656.6 172.71 2141.3 1761.2 431.13 2187.8 1726.8 1424.3 2258.4 1838.7 692.01 2064 1653.5 139.78 2147.8 1841.4 1290.8 2339.3 1937.3 979.63 2225.7 1982.9 938.98 2327.7 1584.1 97.513 -9999.99 -9999.99 -9999.99 2201.3 1453.4 1471.9 2146.5 1494.8 924.98 2376.2 1310.4 1043.5 2316.9 1492.8 1610.5 2663 1393.6 953.32 2536.7 1349.2 989.73 2367.1 1527.5 865.91 2228.8 1607.7 49.139 2400.4 1557.4 442.42 2425.5 1630.7 34.372 2361 1557.1 243.23 2306.4 1422.5 1349.9 2416.4 1534 593.82 2483.4 1724.2 20.708 2346.9 1366.1 1272.1 2599.7 1396.5 1047.8 -9999.99 -9999.99 -9999.99 2392.7 1656 1165.2 -9999.99 -9999.99 -9999.99
264 2.1917 2166.9 1542.5 1383.3 2362 1627.7 1350.4 2041.4 1656.5 305.86 2165.6 1592.4 1518.5 2100.7 1674.7 953.24 2146.9 1904.7 1112.9 2257.3 1608.1 1625.1 2316.6 2023.3 892.56 2184.6 1932.4 1000.9 2285.2 1778.7 897.22 1982.5 1563.7 315.28 2268.9 1822.5 499.19 2031.3 1683.2 168.97 2174 1781.1 431.81 2203 1734.4 1425.4 2280.5 1851.2 695.83 2106.6 1677.4 138.16 2165 1851.6 1295.1 2339 1955.8 970.8 2220.6 1993.5 935.16 2328.4 1585.5 99.465 -9999.99 -9999.99 -9999.99 2214.8 1461.5 1472.5 2154.8 1506.1 926.33 2402.8 1326.3 1044.3 2331.6 1501.5 1610.5 2690.3 1411.7 953.92 2564.2 1367 989.14 2376.4 1531.3 865.65 2228.4 1610.5 50.327 2407.7 1561.5 442.25 2424.3 1630.1 33.098 2364.9 1559.1 244.67 2319.9 1431.6 1350.6 2423.8 1537.9 592.46 2483.9 1724.2 21.472 2363.5 1377.4 1272.6 2628.3 1413.6 1047.6 -9999.99 -9999.99 -9999.99 2407.4 1663.2 1161.8 -9999.99 -9999.99 -9999.99
265 2.2000 2180 1550.5 1383.9 2374.9 1635.2 1350.7 2076.1 1681.1 299.75 2179.3 1600.6 1519.8 2113.1 1686.8 954.25 2158.5 1915 1118.6 2271.2 1616.5 1625.6 2305.7 2039.8 887.04 2187.6 1943.9 1004.5 2300.5 1785.9 898.5 2015.1 1589.3 304.93 2298 1838.7 505.3 2073.1 1710.7 164.05 2205.8 1800.4 432.91 2217.6 1741.9 1427 2300.5 1863.5 699.91 2149.3 1701.4 134.18 2180.9 1862.3 1300.7 2337.5 1973.1 963 2215.3 2003.6 934.57 2328.3 1586.8 100.14 -9999.99 -9999.99 -9999.99 2227.2 1468.8 1474.2 2163.2 1516.3 926.93 2428.5 1340.9 1046.1 2345.5 1509.7 1611.4 2715.8 1430.6 955.27 2590.1 1383.9 990.41 2385.2 1535.3 865.91 2228.2 1613.7 52.194 2414.7 1565.3 442.42 2423.1 1630.1 32.165 2368.5 1561.1 245.52 2333.3 1440.2 1352.5 2430.9 1541.9 592.29 2484 1723.9 21.387 2378.6 1387.5 1274 2656.8 1430.4 1048.9 -9999.99 -9999.99 -9999.99 2420.3 1669.8 1159.8 -9999.99 -9999.99 -9999.99

View file

@ -0,0 +1,22 @@
LFHD, RFHD
RFHD, RBHD
RBHD, LBHD
LBHD, LFHD
LELB, LWRB
LWRB, LFIN
LELB, LSHO
LSHO, RSHO
RSHO, STRN
LSHO, STRN
RSHO, RELB
RELB, RWRB
RWRB, RFIN
LSHO, LFWT
RSHO, RFWT
LFWT, RFWT
LFWT, LKNE
RFWT, RKNE
LKNE, LHEE
RKNE, RHEE
RMT5, RHEE
LMT5, LHEE

Binary file not shown.

Binary file not shown.

34
GPy/util/sinc.py Normal file
View file

@ -0,0 +1,34 @@
from sympy import Function, S, oo, I, cos, sin
class sinc_grad(Function):
    """Symbolic derivative of sinc: sinc'(x) = (x*cos(x) - sin(x)) / x**2,
    with sinc'(0) = 0 (the limit as x -> 0)."""
    nargs = 1

    def fdiff(self, argindex=1):
        # Second derivative of sinc:
        # d/dx [(x*cos(x) - sin(x)) / x**2] = ((2 - x**2)*sin(x) - 2*x*cos(x)) / x**3
        # BUG FIX: the original body referenced an undefined name `x`;
        # bind it to this function's argument first.
        x = self.args[0]
        return ((2 - x*x)*sin(x) - 2*x*cos(x))/(x*x*x)

    @classmethod
    def eval(cls, x):
        # Evaluate eagerly only for numeric arguments; return None
        # (implicitly) otherwise so the expression stays symbolic.
        if x.is_Number:
            if x is S.Zero:
                return S.Zero
            else:
                return (x*cos(x) - sin(x))/(x*x)
class sinc(Function):
    """Symbolic sinc function: sinc(x) = sin(x)/x, with sinc(0) = 1."""
    nargs = 1

    def fdiff(self, argindex=1):
        # The derivative is provided by the companion sinc_grad function.
        return sinc_grad(self.args[0])

    @classmethod
    def eval(cls, x):
        # Only numeric arguments evaluate eagerly; symbolic ones stay unevaluated.
        if not x.is_Number:
            return None
        return S.One if x is S.Zero else sin(x)/x

    def _eval_is_real(self):
        # sinc is real exactly when its argument is real.
        return self.args[0].is_real

73
Untitled0.ipynb Normal file
View file

@ -0,0 +1,73 @@
{
"metadata": {
"name": ""
},
"nbformat": 3,
"nbformat_minor": 0,
"worksheets": [
{
"cells": [
{
"cell_type": "code",
"collapsed": false,
"input": [
"import numpy\n",
"np.where?"
],
"language": "python",
"metadata": {},
"outputs": [
{
"output_type": "stream",
"stream": "stdout",
"text": [
"Object `np.where` not found.\n"
]
}
],
"prompt_number": 1
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"numpy.where?"
],
"language": "python",
"metadata": {},
"outputs": [],
"prompt_number": 2
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"numpy.exp(-36)"
],
"language": "python",
"metadata": {},
"outputs": [
{
"metadata": {},
"output_type": "pyout",
"prompt_number": 3,
"text": [
"2.3195228302435696e-16"
]
}
],
"prompt_number": 3
},
{
"cell_type": "code",
"collapsed": false,
"input": [],
"language": "python",
"metadata": {},
"outputs": []
}
],
"metadata": {}
}
]
}

330
svm_gui.py Normal file
View file

@ -0,0 +1,330 @@
"""
==========
Libsvm GUI
==========
A simple graphical frontend for Libsvm mainly intended for didactic
purposes. You can create data points by point and click and visualize
the decision region induced by different kernels and parameter settings.
To create positive examples click the left mouse button; to create
negative examples click the right button.
If all examples are from the same class, it uses a one-class SVM.
"""
from __future__ import division, print_function
print(__doc__)
# Author: Peter Prettenhofer <peter.prettenhofer@gmail.com>
#
# License: BSD Style.
import matplotlib
matplotlib.use('TkAgg')
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
from matplotlib.backends.backend_tkagg import NavigationToolbar2TkAgg
from matplotlib.figure import Figure
from matplotlib.contour import ContourSet
import Tkinter as Tk
import sys
import numpy as np
from sklearn import svm
from sklearn.datasets import dump_svmlight_file
from sklearn.externals.six.moves import xrange
y_min, y_max = -50, 50
x_min, x_max = -50, 50
class Model(object):
    """Holds the clicked data points and the fitted decision surface.

    Implements the observable half of the observer pattern: every
    registered observer is notified via ``observer.update(event, self)``
    whenever :meth:`changed` is called.
    """

    def __init__(self):
        self.observers = []
        self.surface = None
        self.data = []
        self.cls = None
        self.surface_type = 0

    def changed(self, event):
        """Notify every registered observer of *event*."""
        for obs in self.observers:
            obs.update(event, self)

    def add_observer(self, observer):
        """Register *observer* to receive change notifications."""
        self.observers.append(observer)

    def set_surface(self, surface):
        """Store the (X1, X2, Z) decision-surface triple."""
        self.surface = surface

    def dump_svmlight_file(self, file):
        """Write the collected (x, y, label) points to *file* in svmlight format."""
        points = np.array(self.data)
        dump_svmlight_file(points[:, 0:2], points[:, 2], file)
class Controller(object):
    """Mediates between the Tk widgets and the Model: fits an SVM to the
    currently entered points and pushes the decision surface to the model."""

    def __init__(self, model):
        self.model = model
        self.kernel = Tk.IntVar()
        self.surface_type = Tk.IntVar()
        # True once fit() has run; refit() is a no-op before that.
        self.fitted = False

    def fit(self):
        """Fit an SVM to the current data and publish the decision surface.

        Falls back to a one-class SVM when every example has the same label.
        The numeric parameters are read from the StringVars that
        ControllBar attaches to this controller.
        """
        print("fit the model")
        samples = np.array(self.model.data)
        X = samples[:, 0:2]
        y = samples[:, 2]

        C = float(self.complexity.get())
        gamma = float(self.gamma.get())
        coef0 = float(self.coef0.get())
        degree = int(self.degree.get())
        kernel = {0: "linear", 1: "rbf", 2: "poly"}[self.kernel.get()]

        if len(np.unique(y)) == 1:
            clf = svm.OneClassSVM(kernel=kernel,
                                  gamma=gamma, coef0=coef0, degree=degree)
            clf.fit(X)
        else:
            clf = svm.SVC(kernel=kernel, C=C,
                          gamma=gamma, coef0=coef0, degree=degree)
            clf.fit(X, y)

        if hasattr(clf, 'score'):
            print("Accuracy:", clf.score(X, y) * 100)

        self.model.clf = clf
        self.model.set_surface(self.decision_surface(clf))
        self.model.surface_type = self.surface_type.get()
        self.fitted = True
        self.model.changed("surface")

    def decision_surface(self, cls):
        """Evaluate cls.decision_function on a regular grid; return (X1, X2, Z)."""
        delta = 1
        xs = np.arange(x_min, x_max + delta, delta)
        ys = np.arange(y_min, y_max + delta, delta)
        X1, X2 = np.meshgrid(xs, ys)
        Z = cls.decision_function(np.c_[X1.ravel(), X2.ravel()])
        return X1, X2, Z.reshape(X1.shape)

    def clear_data(self):
        """Drop all points and notify observers."""
        self.model.data = []
        self.fitted = False
        self.model.changed("clear")

    def add_example(self, x, y, label):
        """Append one (x, y, label) point and refresh the surface if fitted."""
        self.model.data.append((x, y, label))
        self.model.changed("example_added")
        self.refit()  # update decision surface if already fitted

    def refit(self):
        """Refit the model if it has been fitted before."""
        if self.fitted:
            self.fit()
class View(object):
    """Matplotlib view embedded in Tk: renders the data points, support
    vectors and decision surface, and forwards mouse clicks to the
    controller (observer half of the observer pattern)."""
    def __init__(self, root, controller):
        f = Figure()
        ax = f.add_subplot(111)
        ax.set_xticks([])
        ax.set_yticks([])
        ax.set_xlim((x_min, x_max))
        ax.set_ylim((y_min, y_max))
        canvas = FigureCanvasTkAgg(f, master=root)
        canvas.show()
        canvas.get_tk_widget().pack(side=Tk.TOP, fill=Tk.BOTH, expand=1)
        canvas._tkcanvas.pack(side=Tk.TOP, fill=Tk.BOTH, expand=1)
        canvas.mpl_connect('button_press_event', self.onclick)
        toolbar = NavigationToolbar2TkAgg(canvas, root)
        toolbar.update()
        self.controllbar = ControllBar(root, controller)
        self.f = f
        self.ax = ax
        self.canvas = canvas
        self.controller = controller
        # Artists belonging to the currently drawn decision surface.
        self.contours = []
        self.c_labels = None
        self.plot_kernels()
    def plot_kernels(self):
        # Legend of the three kernel formulas along the bottom edge.
        self.ax.text(-50, -60, "Linear: $u^T v$")
        self.ax.text(-20, -60, "RBF: $\exp (-\gamma \| u-v \|^2)$")
        self.ax.text(10, -60, "Poly: $(\gamma \, u^T v + r)^d$")
    def onclick(self, event):
        # Left button (1) adds a positive example, right button (3) a negative one.
        if event.xdata and event.ydata:
            if event.button == 1:
                self.controller.add_example(event.xdata, event.ydata, 1)
            elif event.button == 3:
                self.controller.add_example(event.xdata, event.ydata, -1)
    def update_example(self, model, idx):
        """Draw the data point at *idx*: white circle for +1, black for -1."""
        x, y, l = model.data[idx]
        if l == 1:
            color = 'w'
        elif l == -1:
            color = 'k'
        self.ax.plot([x], [y], "%so" % color, scalex=0.0, scaley=0.0)
    def update(self, event, model):
        """Observer callback: redraw according to the model *event*."""
        if event == "examples_loaded":
            for i in xrange(len(model.data)):
                self.update_example(model, i)
        if event == "example_added":
            # Newly added point is always last in model.data.
            self.update_example(model, -1)
        if event == "clear":
            self.ax.clear()
            self.ax.set_xticks([])
            self.ax.set_yticks([])
            self.contours = []
            self.c_labels = None
            self.plot_kernels()
        if event == "surface":
            self.remove_surface()
            self.plot_support_vectors(model.clf.support_vectors_)
            self.plot_decision_surface(model.surface, model.surface_type)
        self.canvas.draw()
    def remove_surface(self):
        """Remove old decision surface."""
        if len(self.contours) > 0:
            for contour in self.contours:
                # ContourSet artists must be removed line-collection by
                # line-collection; plain artists expose remove() directly.
                if isinstance(contour, ContourSet):
                    for lineset in contour.collections:
                        lineset.remove()
                else:
                    contour.remove()
            self.contours = []
    def plot_support_vectors(self, support_vectors):
        """Plot the support vectors by placing circles over the
        corresponding data points and adds the circle collection
        to the contours list."""
        cs = self.ax.scatter(support_vectors[:, 0], support_vectors[:, 1],
                             s=80, edgecolors="k", facecolors="none")
        self.contours.append(cs)
    def plot_decision_surface(self, surface, type):
        """Draw the surface: type 0 = margin/decision contour lines,
        type 1 = filled contour map plus the decision boundary."""
        X1, X2, Z = surface
        if type == 0:
            levels = [-1.0, 0.0, 1.0]
            linestyles = ['dashed', 'solid', 'dashed']
            colors = 'k'
            self.contours.append(self.ax.contour(X1, X2, Z, levels,
                                                 colors=colors,
                                                 linestyles=linestyles))
        elif type == 1:
            self.contours.append(self.ax.contourf(X1, X2, Z, 10,
                                                  cmap=matplotlib.cm.bone,
                                                  origin='lower', alpha=0.85))
            self.contours.append(self.ax.contour(X1, X2, Z, [0.0], colors='k',
                                                 linestyles=['solid']))
        else:
            raise ValueError("surface type unknown")
class ControllBar(object):
    """Row of Tk widgets for kernel choice, SVM parameters and surface
    style; each widget is wired to the controller's refit/fit/clear."""
    def __init__(self, root, controller):
        fm = Tk.Frame(root)
        # Kernel selection radio buttons (value maps to controller.kernel).
        kernel_group = Tk.Frame(fm)
        Tk.Radiobutton(kernel_group, text="Linear", variable=controller.kernel,
                       value=0, command=controller.refit).pack(anchor=Tk.W)
        Tk.Radiobutton(kernel_group, text="RBF", variable=controller.kernel,
                       value=1, command=controller.refit).pack(anchor=Tk.W)
        Tk.Radiobutton(kernel_group, text="Poly", variable=controller.kernel,
                       value=2, command=controller.refit).pack(anchor=Tk.W)
        kernel_group.pack(side=Tk.LEFT)
        # Text entries for the numeric SVM parameters; the StringVars are
        # stored on the controller, which parses them in fit().
        valbox = Tk.Frame(fm)
        controller.complexity = Tk.StringVar()
        controller.complexity.set("1.0")
        c = Tk.Frame(valbox)
        Tk.Label(c, text="C:", anchor="e", width=7).pack(side=Tk.LEFT)
        Tk.Entry(c, width=6, textvariable=controller.complexity).pack(
            side=Tk.LEFT)
        c.pack()
        controller.gamma = Tk.StringVar()
        controller.gamma.set("0.01")
        g = Tk.Frame(valbox)
        Tk.Label(g, text="gamma:", anchor="e", width=7).pack(side=Tk.LEFT)
        Tk.Entry(g, width=6, textvariable=controller.gamma).pack(side=Tk.LEFT)
        g.pack()
        controller.degree = Tk.StringVar()
        controller.degree.set("3")
        d = Tk.Frame(valbox)
        Tk.Label(d, text="degree:", anchor="e", width=7).pack(side=Tk.LEFT)
        Tk.Entry(d, width=6, textvariable=controller.degree).pack(side=Tk.LEFT)
        d.pack()
        controller.coef0 = Tk.StringVar()
        controller.coef0.set("0")
        r = Tk.Frame(valbox)
        Tk.Label(r, text="coef0:", anchor="e", width=7).pack(side=Tk.LEFT)
        Tk.Entry(r, width=6, textvariable=controller.coef0).pack(side=Tk.LEFT)
        r.pack()
        valbox.pack(side=Tk.LEFT)
        # Surface rendering style (value maps to controller.surface_type).
        cmap_group = Tk.Frame(fm)
        Tk.Radiobutton(cmap_group, text="Hyperplanes",
                       variable=controller.surface_type, value=0,
                       command=controller.refit).pack(anchor=Tk.W)
        Tk.Radiobutton(cmap_group, text="Surface",
                       variable=controller.surface_type, value=1,
                       command=controller.refit).pack(anchor=Tk.W)
        cmap_group.pack(side=Tk.LEFT)
        train_button = Tk.Button(fm, text='Fit', width=5,
                                 command=controller.fit)
        train_button.pack()
        fm.pack(side=Tk.LEFT)
        Tk.Button(fm, text='Clear', width=5,
                  command=controller.clear_data).pack(side=Tk.LEFT)
def get_parser():
    """Build the command-line option parser (only the --output option)."""
    from optparse import OptionParser

    parser = OptionParser()
    parser.add_option("--output", action="store", type="str",
                      dest="output", help="Path where to dump data.")
    return parser
def main(argv):
    """Launch the GUI; optionally dump the entered data once it closes."""
    opts, args = get_parser().parse_args(argv[1:])

    root = Tk.Tk()
    model = Model()
    controller = Controller(model)
    root.wm_title("Scikit-learn Libsvm GUI")
    view = View(root, controller)
    model.add_observer(view)
    Tk.mainloop()

    # mainloop() returns when the window is closed; dump afterwards.
    if opts.output:
        model.dump_svmlight_file(opts.output)
if __name__ == "__main__":
    main(sys.argv)

95
test6.py Normal file
View file

@ -0,0 +1,95 @@
from matplotlib import pyplot as plt
import GPy
import numpy as np
class lvm_visualise:
def __init__(self, visualise, ax):
self.cid = ax.figure.canvas.mpl_connect('button_press_event', self.on_click)
self.cid = ax.figure.canvas.mpl_connect('motion_notify_event', self.on_move)
self.visualise = visualise
self.ax = ax
# This is vectorDisplay code
self.called = False
self.move_on = False
def on_click(self, event):
print 'click', event.xdata, event.ydata
if event.inaxes!=self.ax: return
self.move_on = not self.move_on
print
if self.called:
self.xs.append(event.xdata)
self.ys.append(event.ydata)
self.line.set_data(self.xs, self.ys)
self.line.figure.canvas.draw()
else:
self.xs = [event.xdata]
self.ys = [event.ydata]
self.line, = ax.plot(event.xdata, event.ydata)
self.called = True
def on_move(self, event):
if event.inaxes!=self.ax: return
if self.called and self.move_on:
# This is vectorModify code
#print 'move', event.xdata, event.ydata
latent_values = np.array((event.xdata, event.ydata))
self.visualise.modify(latent_values)
#print 'y', y
class data_visualiser:
    """Abstract base class for visualisers driven by a latent-variable
    model's predictions. Subclasses must implement modify()."""
    def __init__(self, model):
        self.model = model
    def modify(self, latent_values):
        """Update the display for the given latent coordinates.

        Raises NotImplementedError: subclasses must override this.
        """
        # BUG FIX: `raise E, "msg"` is Python-2-only syntax; the call form
        # is equivalent and also valid in Python 3.
        raise NotImplementedError("this needs to be implemented to use the data_visualiser class")
class vector_visualise(data_visualiser):
    """Shows the model's predicted output vector as a line plot."""
    def __init__(self, model):
        data_visualiser.__init__(self, model)
        self.fig_display = plt.figure()
        self.model = model
        # Initialise the display with the prediction at the latent origin.
        self.y = model.predict(np.zeros((1, model.input_dim)))[0]
        self.handle = plt.plot(np.arange(0, model.output_dim)[:, None], self.y.T)[0]
    def modify(self, latent_values):
        """Redraw the line with the prediction at latent_values."""
        prediction = self.model.predict(latent_values)[0]
        old_x, _ = self.handle.get_data()
        self.handle.set_data(old_x, prediction.T)
        plt.show()
class image_visualise(data_visualiser):
    """Shows the model's predicted output vector reshaped as an image."""
    def __init__(self, model, dimensions=(16,16), transpose=False, invert=False):
        data_visualiser.__init__(self, model)
        self.fig_display = plt.figure()
        self.model = model
        # BUG FIX: store the display options before first use; `dimensions`
        # was never kept on the instance and get_array() read it as an
        # unbound name.
        self.dimensions = dimensions
        self.transpose = transpose
        self.invert = invert
        # BUG FIX: get_array takes only latent_values; a stray module-level
        # `model` was being passed as an extra positional argument.
        self.imvals = self.get_array(np.zeros((1, model.input_dim)))
        self.handle = plt.imshow(self.imvals)
    def modify(self, latent_values):
        """Redraw the image with the prediction at latent_values."""
        self.imvals = self.get_array(latent_values)
        self.handle.set_array(self.imvals)
        plt.show()
    def get_array(self, latent_values):
        """Predict at latent_values and reshape to the image dimensions."""
        self.y = self.model.predict(latent_values)[0]
        imvals = np.reshape(self.y, self.dimensions)
        if self.transpose:
            imvals = imvals.T
        if self.invert:
            imvals = -imvals
        # BUG FIX: the computed array was never returned.
        return imvals
# Demo script: fit the GP-LVM oil-flow example and open an interactive
# latent-space browser alongside the output-vector display.
model = GPy.examples.dimensionality_reduction.oil_100()
visualise = vector_visualise(model)
fig = plt.figure()
# NOTE(review): the event handlers above appear to read this module-level
# `ax` as a global — keep the name as-is; verify before renaming.
ax = fig.add_subplot(111)
plt.ylim((-1, 1))
plt.xlim((-1, 1))
ax.set_title('latent space')
linebuilder = lvm_visualise(visualise, ax)
plt.show()