merge changes from devel

Zhenwen Dai 2014-05-15 18:02:33 +01:00
commit 4ec8f464e2
64 changed files with 2592 additions and 10697 deletions

View file

@@ -10,7 +10,7 @@ from model import Model
 from parameterization import ObsAr
 from .. import likelihoods
 from ..likelihoods.gaussian import Gaussian
-from ..inference.latent_function_inference import exact_gaussian_inference, expectation_propagation
+from ..inference.latent_function_inference import exact_gaussian_inference, expectation_propagation, LatentFunctionInference
 from parameterization.variational import VariationalPosterior

 class GP(Model):
@@ -21,6 +21,7 @@ class GP(Model):
     :param Y: output observations
     :param kernel: a GPy kernel, defaults to rbf+white
     :param likelihood: a GPy likelihood
+    :param :class:`~GPy.inference.latent_function_inference.LatentFunctionInference` inference_method: The inference method to use for this GP
     :rtype: model object

     .. Note:: Multiple independent outputs are allowed using columns of Y
@@ -220,3 +221,20 @@ class GP(Model):
         """
         return self.kern.input_sensitivity()
+
+    def optimize(self, optimizer=None, start=None, **kwargs):
+        """
+        Optimize the model using self.log_likelihood and self.log_likelihood_gradient, as well as self.priors.
+        kwargs are passed to the optimizer. They can be:
+
+        :param max_f_eval: maximum number of function evaluations
+        :type max_f_eval: int
+        :param messages: whether to display messages during optimisation
+        :type messages: bool
+        :param optimizer: which optimizer to use (defaults to self.preferred_optimizer)
+        :type optimizer: string
+
+        TODO: valid args
+        """
+        self.inference_method.on_optimization_start()
+        super(GP, self).optimize(optimizer, start, **kwargs)
+        self.inference_method.on_optimization_end()
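
The new GP.optimize() above brackets the parent class's optimizer run with on_optimization_start()/on_optimization_end() hooks, so iterative inference schemes (EP, for instance) can switch into and out of an optimization mode. A minimal sketch of the hook pattern, with hypothetical DummyInference/DummyModel names that are not part of GPy:

    class DummyInference(object):
        def on_optimization_start(self):
            print "inference: entering optimization"
        def on_optimization_end(self):
            print "inference: leaving optimization"

    class DummyModel(object):
        def __init__(self):
            self.inference_method = DummyInference()
        def optimize(self):
            self.inference_method.on_optimization_start()
            try:
                pass  # the optimizer loop would run here
            finally:
                self.inference_method.on_optimization_end()

    DummyModel().optimize()

The sketch uses try/finally so the end hook fires even if the optimizer raises; the committed code calls the hooks unguarded.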

View file

@@ -220,7 +220,7 @@ class Model(Parameterized):
         if self.is_fixed:
             raise RuntimeError, "Cannot optimize, when everything is fixed"
         if self.size == 0:
-            raise RuntimeError, "Model without parameters cannot be minimized"
+            raise RuntimeError, "Model without parameters cannot be optimized"
         if optimizer is None:
             optimizer = self.preferred_optimizer

View file

@@ -38,7 +38,12 @@ class ArrayList(list):
         raise ValueError, "{} is not in list".format(item)
     pass

-class ObservablesList(object):
+class ObserverList(object):
+    """
+    A list which contains the observers.
+    It only holds weak references to observers, such that unbound
+    observers don't dangle in memory.
+    """
     def __init__(self):
         self._poc = []
@@ -46,25 +51,30 @@ class ObservablesList(object):
         p,o,c = self._poc[ind]
         return p, o(), c

-    def remove(self, priority, observable, callble):
+    def remove(self, priority, observer, callble):
         """
+        Remove one observer, which had priority and callble.
         """
         self.flush()
         for i in range(len(self) - 1, -1, -1):
             p,o,c = self[i]
-            if priority==p and observable==o and callble==c:
+            if priority==p and observer==o and callble==c:
                 del self._poc[i]

     def __repr__(self):
         return self._poc.__repr__()

-    def add(self, priority, observable, callble):
-        ins = 0
-        for pr, _, _ in self:
-            if priority > pr:
-                break
-            ins += 1
-        self._poc.insert(ins, (priority, weakref.ref(observable), callble))
+    def add(self, priority, observer, callble):
+        """
+        Add an observer with priority and callble
+        """
+        if observer is not None:
+            ins = 0
+            for pr, _, _ in self:
+                if priority > pr:
+                    break
+                ins += 1
+            self._poc.insert(ins, (priority, weakref.ref(observer), callble))

     def __str__(self):
         ret = []
@@ -81,12 +91,14 @@ class ObservablesList(object):
         return '\n'.join(ret)

     def flush(self):
+        """
+        Make sure all weak references which point to nothing are flushed (deleted)
+        """
         self._poc = [(p,o,c) for p,o,c in self._poc if o() is not None]

     def __iter__(self):
         self.flush()
         for p, o, c in self._poc:
-            yield p, o(), c
+            if o() is not None:
+                yield p, o(), c

     def __len__(self):
@@ -94,10 +106,11 @@ class ObservablesList(object):
         return self._poc.__len__()

     def __deepcopy__(self, memo):
-        self.flush()
-        s = ObservablesList()
-        import copy
-        s._poc = copy.deepcopy(self._poc, memo)
+        s = ObserverList()
+        import copy
+        for p,o,c in self:
+            s.add(p, copy.deepcopy(o, memo), copy.deepcopy(c, memo))
+        s.flush()
         return s

     def __getstate__(self):
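
The point of storing weakref.ref(observer) rather than the observer itself is that the list never keeps an observer alive: once the last strong reference is gone, the weak reference starts returning None and flush() can drop the entry. A small standalone demonstration (plain Python, not GPy code):

    import weakref

    class Listener(object):
        pass

    l = Listener()
    r = weakref.ref(l)   # a weak reference does not keep l alive
    print r() is l       # True: the referent is still reachable
    del l
    print r()            # None: the entry is now dead and can be flushed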

View file

@@ -1,7 +1,7 @@
 # Copyright (c) 2012, GPy authors (see AUTHORS.txt).
 # Licensed under the BSD 3-clause license (see LICENSE.txt)

-__updated__ = '2014-04-15'
+__updated__ = '2014-05-12'

 import numpy as np
 from parameter_core import Observable, Pickleable
@@ -15,10 +15,10 @@ class ObsAr(np.ndarray, Pickleable, Observable):
     """
     __array_priority__ = -1 # Never give back ObsAr

     def __new__(cls, input_array, *a, **kw):
+        # always make a copy of input parameters, as we need it to be in C order:
         if not isinstance(input_array, ObsAr):
-            obj = np.atleast_1d(np.require(input_array, dtype=np.float64, requirements=['W', 'C'])).view(cls)
+            obj = np.atleast_1d(np.require(np.copy(input_array), dtype=np.float64, requirements=['W', 'C'])).view(cls)
         else: obj = input_array
-        #cls.__name__ = "ObsAr" # because of fixed printing of `array` in np printing
         super(ObsAr, obj).__init__(*a, **kw)
         return obj
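
The reason for the added np.copy() is that np.require() returns its input unchanged whenever the array already satisfies the requested dtype and flags, so ObsAr would silently alias the caller's buffer. A short sketch of the difference:

    import numpy as np

    x = np.arange(3, dtype=np.float64)  # already float64, writable, C-ordered
    y = np.require(x, dtype=np.float64, requirements=['W', 'C'])
    print y is x                        # True: no copy was made, y aliases x

    z = np.require(np.copy(x), dtype=np.float64, requirements=['W', 'C'])
    z[0] = 42.
    print x[0]                          # 0.0: the copy shields the caller's data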

View file

@@ -4,7 +4,7 @@
 import itertools
 import numpy
 np = numpy
-from parameter_core import OptimizationHandlable, adjust_name_for_printing
+from parameter_core import Parameterizable, adjust_name_for_printing
 from observable_array import ObsAr

 ###### printing
@@ -16,7 +16,7 @@ __precision__ = numpy.get_printoptions()['precision'] # numpy printing precision
 __print_threshold__ = 5
 ######

-class Param(OptimizationHandlable, ObsAr):
+class Param(Parameterizable, ObsAr):
     """
     Parameter object for GPy models.
@@ -42,10 +42,9 @@ class Param(OptimizationHandlable, ObsAr):
     """
     __array_priority__ = -1 # Never give back Param
     _fixes_ = None
-    _parameters_ = []
+    parameters = []

     def __new__(cls, name, input_array, default_constraint=None):
         obj = numpy.atleast_1d(super(Param, cls).__new__(cls, input_array=input_array))
-        cls.__name__ = "Param"
         obj._current_slice_ = (slice(obj.shape[0]),)
         obj._realshape_ = obj.shape
         obj._realsize_ = obj.size
@@ -58,9 +57,9 @@ class Param(OptimizationHandlable, ObsAr):
     def build_pydot(self,G):
         import pydot
-        node = pydot.Node(id(self), shape='record', label=self.name)
+        node = pydot.Node(id(self), shape='trapezium', label=self.name)#, fontcolor='white', color='white')
         G.add_node(node)
-        for o in self.observers.keys():
+        for _, o, _ in self.observers:
             label = o.name if hasattr(o, 'name') else str(o)
             observed_node = pydot.Node(id(o), label=label)
             G.add_node(observed_node)
@@ -88,8 +87,18 @@ class Param(OptimizationHandlable, ObsAr):
     @property
     def param_array(self):
+        """
+        As we are a leaf, this just returns self
+        """
         return self

+    @property
+    def values(self):
+        """
+        Return self as numpy array view
+        """
+        return self.view(np.ndarray)
+
     @property
     def gradient(self):
         """
@@ -100,11 +109,11 @@ class Param(OptimizationHandlable, ObsAr):
         """
         if getattr(self, '_gradient_array_', None) is None:
             self._gradient_array_ = numpy.empty(self._realshape_, dtype=numpy.float64)
-        return self._gradient_array_[self._current_slice_]
+        return self._gradient_array_#[self._current_slice_]

     @gradient.setter
     def gradient(self, val):
-        self._gradient_array_[self._current_slice_] = val
+        self._gradient_array_[:] = val

     #===========================================================================
     # Array operations -> done
@@ -112,10 +121,13 @@ class Param(OptimizationHandlable, ObsAr):
     def __getitem__(self, s, *args, **kwargs):
         if not isinstance(s, tuple):
             s = (s,)
-        if not reduce(lambda a, b: a or numpy.any(b is Ellipsis), s, False) and len(s) <= self.ndim:
-            s += (Ellipsis,)
+        #if not reduce(lambda a, b: a or numpy.any(b is Ellipsis), s, False) and len(s) <= self.ndim:
+        #    s += (Ellipsis,)
         new_arr = super(Param, self).__getitem__(s, *args, **kwargs)
-        try: new_arr._current_slice_ = s; new_arr._original_ = self.base is new_arr.base
+        try:
+            new_arr._current_slice_ = s
+            new_arr._gradient_array_ = self.gradient[s]
+            new_arr._original_ = self.base is new_arr.base
         except AttributeError: pass # returning 0d array or float, double etc
         return new_arr
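
The new __getitem__ hands the sliced Param its own gradient slice (self.gradient[s]). This relies on numpy's basic slicing returning views that share memory with the parent array, so writing a sub-parameter's gradient updates the full gradient in place; fancy and boolean indexing return copies instead, which is why the following hunk rejects boolean indices. A quick illustration:

    import numpy as np

    grad = np.zeros(4)
    view = grad[1:3]      # basic slicing: a view into grad
    view[:] = 7.
    print grad            # [ 0.  7.  7.  0.]: the write went through

    cpy = grad[np.array([True, False, False, True])]
    cpy[:] = -1.
    print grad            # unchanged: boolean indexing made a copy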
@@ -130,6 +142,9 @@ class Param(OptimizationHandlable, ObsAr):
     def _raveled_index_for(self, obj):
         return self._raveled_index()

+    #===========================================================================
+    # Index recreation
+    #===========================================================================
     def _expand_index(self, slice_index=None):
         # this calculates the full indexing arrays from the slicing objects given by get_item for _real..._ attributes
         # it basically translates slices to their respective index arrays and turns negative indices around
@@ -138,6 +153,8 @@ class Param(OptimizationHandlable, ObsAr):
             slice_index = self._current_slice_
         def f(a):
             a, b = a
+            if isinstance(a, numpy.ndarray) and a.dtype == bool:
+                raise ValueError, "Boolean indexing not implemented, use Param[np.where(index)] to index by boolean arrays!"
             if a not in (slice(None), Ellipsis):
                 if isinstance(a, slice):
                     start, stop, step = a.indices(b)
@@ -156,6 +173,36 @@ class Param(OptimizationHandlable, ObsAr):
     def _ensure_fixes(self):
         if not self._has_fixes(): self._fixes_ = numpy.ones(self._realsize_, dtype=bool)

+    #===========================================================================
+    # parameterizable
+    #===========================================================================
+    def traverse(self, visit, *args, **kwargs):
+        """
+        Traverse the hierarchy performing visit(self, *args, **kwargs) at every node passed by.
+        See "visitor pattern" in literature. This is implemented in pre-order fashion.
+
+        This function will just call visit on self, as Param objects are leaf nodes.
+        """
+        self.__visited = True
+        visit(self, *args, **kwargs)
+        self.__visited = False
+
+    def traverse_parents(self, visit, *args, **kwargs):
+        """
+        Traverse the hierarchy upwards, visiting all parents and their children, except self.
+        See "visitor pattern" in literature. This is implemented in pre-order fashion.
+
+        Example:
+
+        parents = []
+        self.traverse_parents(parents.append)
+        print parents
+        """
+        if self.has_parent():
+            self.__visited = True
+            self._parent_._traverse_parents(visit, *args, **kwargs)
+            self.__visited = False
+
     #===========================================================================
     # Convenience
     #===========================================================================
@@ -228,9 +275,16 @@ class Param(OptimizationHandlable, ObsAr):
                 and len(set(map(len, clean_curr_slice))) <= 1):
             return numpy.fromiter(itertools.izip(*clean_curr_slice),
                 dtype=[('', int)] * self._realndim_, count=len(clean_curr_slice[0])).view((int, self._realndim_))
-        expanded_index = list(self._expand_index(slice_index))
-        return numpy.fromiter(itertools.product(*expanded_index),
-            dtype=[('', int)] * self._realndim_, count=reduce(lambda a, b: a * b.size, expanded_index, 1)).view((int, self._realndim_))
+        try:
+            expanded_index = list(self._expand_index(slice_index))
+            indices = numpy.fromiter(itertools.product(*expanded_index),
+                dtype=[('', int)] * self._realndim_, count=reduce(lambda a, b: a * b.size, expanded_index, 1)).view((int, self._realndim_))
+        except:
+            print "Warning: extended indexing was used"
+            indices = np.indices(self._realshape_, dtype=int)
+            indices = indices[(slice(None),)+slice_index]
+            indices = np.rollaxis(indices, 0, indices.ndim)
+        return indices

     def _max_len_names(self, gen, header):
         gen = map(lambda x: " ".join(map(str, x)), gen)
         return reduce(lambda a, b:max(a, len(b)), gen, len(header))
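
The fallback branch rebuilds the index grid with np.indices and moves the axis-coordinate dimension to the back with np.rollaxis, yielding one (row, column, ...) tuple per selected element. What that pipeline produces for a small shape:

    import numpy as np

    idx = np.indices((2, 3), dtype=int)            # shape (2, 2, 3): one coordinate grid per axis
    idx = idx[(slice(None),) + (slice(1, None),)]  # apply the stored slice, here [1:]
    idx = np.rollaxis(idx, 0, idx.ndim)            # coordinates last: shape (1, 3, 2)
    print idx[0, 0]                                # [1 0]: element (1, 0) of the original array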
@@ -272,7 +326,7 @@ class Param(OptimizationHandlable, ObsAr):
 class ParamConcatenation(object):
     def __init__(self, params):
         """
-        Parameter concatenation for convienience of printing regular expression matched arrays
+        Parameter concatenation for convenience of printing regular expression matched arrays
         you can index this concatenation as if it was the flattened concatenation
         of all the parameters it contains, same for setting parameters (Broadcasting enabled).
@@ -316,8 +370,8 @@ class ParamConcatenation(object):
             val = val.values()
         ind = numpy.zeros(sum(self._param_sizes), dtype=bool); ind[s] = True;
         vals = self.values(); vals[s] = val
-        [numpy.copyto(p, vals[ps], where=ind[ps])
-            for p, ps in zip(self.params, self._param_slices_)]
+        for p, ps in zip(self.params, self._param_slices_):
+            p.flat[ind[ps]] = vals[ps]
         if update:
             self.update_all_params()

     def values(self):
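
The rewritten assignment loop writes through each parameter's flat iterator with a boolean mask, placing the matched values at the right flattened positions. A simplified, standalone form of the same pattern (here selecting the values with the same mask):

    import numpy as np

    p = np.zeros((2, 2))
    mask = np.array([True, False, False, True])  # which flattened entries to set
    vals = np.array([1., 2., 3., 4.])
    p.flat[mask] = vals[mask]                    # write only the masked entries, in flat order
    print p                                      # [[ 1.  0.] [ 0.  4.]]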

View file

@@ -17,7 +17,7 @@ from transformations import Logexp, NegativeLogexp, Logistic, __fixed__, FIXED,
 import numpy as np
 import re

-__updated__ = '2014-04-16'
+__updated__ = '2014-05-15'

 class HierarchyError(Exception):
     """
@@ -52,13 +52,23 @@ class Observable(object):
     _updated = True
     def __init__(self, *args, **kwargs):
         super(Observable, self).__init__()
-        from lists_and_dicts import ObservablesList
-        self.observers = ObservablesList()
+        from lists_and_dicts import ObserverList
+        self.observers = ObserverList()

     def add_observer(self, observer, callble, priority=0):
+        """
+        Add an observer `observer` with the callback `callble`
+        and priority `priority` to this observers list.
+        """
         self.observers.add(priority, observer, callble)

     def remove_observer(self, observer, callble=None):
+        """
+        Either (if callble is None) remove all callables,
+        which were added alongside observer,
+        or remove callable `callble` which was added alongside
+        the observer `observer`.
+        """
         to_remove = []
         for poc in self.observers:
             _, obs, clble = poc
@@ -91,10 +101,6 @@ class Observable(object):
                 break
             callble(self, which=which)

-#===============================================================================
-# Foundation framework for parameterized and param objects:
-#===============================================================================
-
 class Parentable(object):
     """
     Enable an Object to have a parent.
@@ -124,7 +130,7 @@ class Parentable(object):
         """
         Disconnect this object from its parent
         """
-        raise NotImplementedError, "Abstaract superclass"
+        raise NotImplementedError, "Abstract superclass"

     @property
     def _highest_parent_(self):
@@ -162,7 +168,6 @@ class Pickleable(object):
         :param protocol: pickling protocol to use, python-pickle for details.
         """
         import cPickle as pickle
-        import pickle #TODO: cPickle
         if isinstance(f, str):
             with open(f, 'w') as f:
                 pickle.dump(self, f, protocol)
@@ -173,38 +178,42 @@ class Pickleable(object):
     # copy and pickling
     #===========================================================================
     def copy(self):
-        """Returns a (deep) copy of the current model"""
+        """
+        Returns a (deep) copy of the current parameter handle.
+        All connections to parents of the copy will be cut.
+        """
         #raise NotImplementedError, "Copy is not yet implemented, TODO: Observable hierarchy"
         import copy
         memo = {}
-        memo[id(self._parent_)] = None
-        memo[id(self.gradient)] = None
-        memo[id(self.param_array)] = None
-        memo[id(self._fixes_)] = None
-        c = copy.deepcopy(self, memo)
+        # the next part makes sure that we do not include parents in any form:
+        parents = []
+        self.traverse_parents(parents.append) # collect parents
+        for p in parents:
+            memo[id(p)] = None # set all parents to be None, so they will not be copied
+        memo[id(self.gradient)] = None # reset the gradient
+        memo[id(self.param_array)] = None # and param_array
+        memo[id(self._fixes_)] = None # fixes have to be reset, as this is now highest parent
+        c = copy.deepcopy(self, memo) # and start the copy
         c._parent_index_ = None
         return c

     def __deepcopy__(self, memo):
-        s = self.__new__(self.__class__)
-        memo[id(self)] = s
+        s = self.__new__(self.__class__) # fresh instance
+        memo[id(self)] = s # be sure to break all cycles --> self is already done
         import copy
-        s.__dict__.update(copy.deepcopy(self.__dict__, memo))
+        s.__dict__.update(copy.deepcopy(self.__dict__, memo)) # standard copy
         return s

     def __getstate__(self):
-        ignore_list = ([#'_parent_', '_parent_index_',
-                        #'observers',
-                        '_param_array_', '_gradient_array_', '_fixes_',
-                        '_Cacher_wrap__cachers']
-                        #+ self.parameter_names(recursive=False)
-                        )
+        ignore_list = ['_param_array_', # parameters get set from bottom to top
+                       '_gradient_array_', # as well as gradients
+                       '_fixes_', # and fixes
+                       '_Cacher_wrap__cachers', # never pickle cachers
+                       ]
         dc = dict()
         for k,v in self.__dict__.iteritems():
             if k not in ignore_list:
-                #if hasattr(v, "__getstate__"):
-                    #dc[k] = v.__getstate__()
-                #else:
                 dc[k] = v
         return dc
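
The rewritten copy() exploits how copy.deepcopy consults its memo dict: pre-seeding memo[id(obj)] = None makes every reference to obj in the copied graph come out as None, which is exactly how the parent links are cut here. Standalone demonstration:

    import copy

    class Node(object):
        def __init__(self, parent=None):
            self.parent = parent

    root = Node()
    child = Node(root)
    memo = {id(root): None}          # pretend root was "already copied" -- to None
    c = copy.deepcopy(child, memo)
    print c.parent                   # None: the parent link was cut, not duplicated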
@@ -212,12 +221,6 @@ class Pickleable(object):
         self.__dict__.update(state)
         return self

-    #def __getstate__(self, memo):
-    #    raise NotImplementedError, "get state must be implemented to be able to pickle objects"
-    #def __setstate__(self, memo):
-    #    raise NotImplementedError, "set state must be implemented to be able to pickle objects"
-
 class Gradcheckable(Pickleable, Parentable):
     """
     Adds the functionality for an object to be gradcheckable.
@@ -252,7 +255,6 @@ class Gradcheckable(Pickleable, Parentable):
         """
         raise HierarchyError, "This parameter is not in a model with a likelihood, and, therefore, cannot be gradient checked!"

-
 class Nameable(Gradcheckable):
     """
     Make an object nameable inside the hierarchy.
@@ -291,41 +293,8 @@ class Nameable(Gradcheckable):
             return self._parent_.hierarchy_name() + "." + adjust(self.name)
         return adjust(self.name)

-class Indexable(object):
-    """
-    Enable enraveled indexes and offsets for this object.
-    The raveled index of an object is the index for its parameters in a flattened int array.
-    """
-    def __init__(self, *a, **kw):
-        super(Indexable, self).__init__()
-
-    def _raveled_index(self):
-        """
-        Flattened array of ints, specifying the index of this object.
-        This has to account for shaped parameters!
-        """
-        raise NotImplementedError, "Need to be able to get the raveled Index"
-
-    def _offset_for(self, param):
-        """
-        Return the offset of the param inside this parameterized object.
-        This does not need to account for shaped parameters, as it
-        basically just sums up the parameter sizes which come before param.
-        """
-        return 0
-        #raise NotImplementedError, "shouldnt happen, offset required from non parameterization object?"
-
-    def _raveled_index_for(self, param):
-        """
-        get the raveled index for a param
-        that is an int array, containing the indexes for the flattened
-        param inside this parameterized logic.
-        """
-        return param._raveled_index()
-        #raise NotImplementedError, "shouldnt happen, raveld index transformation required from non parameterization object?"
-
-class Constrainable(Nameable, Indexable, Observable):
+class Indexable(Nameable, Observable):
     """
     Make an object constrainable with Priors and Transformations.
     TODO: Mappings!!
@@ -336,7 +305,7 @@ class Constrainable(Nameable, Indexable, Observable):
     :func:`constrain()` and :func:`unconstrain()` are main methods here
     """
     def __init__(self, name, default_constraint=None, *a, **kw):
-        super(Constrainable, self).__init__(name=name, *a, **kw)
+        super(Indexable, self).__init__(name=name, *a, **kw)
         self._default_constraint_ = default_constraint
         from index_operations import ParameterIndexOperations
         self.constraints = ParameterIndexOperations()
@@ -358,6 +327,39 @@ class Constrainable(Nameable, Indexable, Observable):
         self._connect_fixes()
         self._notify_parent_change()

+    #===========================================================================
+    # Indexable
+    #===========================================================================
+    def _offset_for(self, param):
+        """
+        Return the offset of the param inside this parameterized object.
+        This does not need to account for shaped parameters, as it
+        basically just sums up the parameter sizes which come before param.
+        """
+        if param.has_parent():
+            if param._parent_._get_original(param) in self.parameters:
+                return self._param_slices_[param._parent_._get_original(param)._parent_index_].start
+            return self._offset_for(param._parent_) + param._parent_._offset_for(param)
+        return 0
+
+    def _raveled_index_for(self, param):
+        """
+        get the raveled index for a param
+        that is an int array, containing the indexes for the flattened
+        param inside this parameterized logic.
+        """
+        from param import ParamConcatenation
+        if isinstance(param, ParamConcatenation):
+            return np.hstack((self._raveled_index_for(p) for p in param.params))
+        return param._raveled_index() + self._offset_for(param)
+
+    def _raveled_index(self):
+        """
+        Flattened array of ints, specifying the index of this object.
+        This has to account for shaped parameters!
+        """
+        return np.r_[:self.size]
+
     #===========================================================================
     # Fixing Parameters:
     #===========================================================================
@@ -412,9 +414,24 @@ class Constrainable(Nameable, Indexable, Observable):
             self._fixes_ = None
             del self.constraints[__fixed__]

+    #===========================================================================
+    # Convenience for fixed
+    #===========================================================================
     def _has_fixes(self):
         return hasattr(self, "_fixes_") and self._fixes_ is not None and self._fixes_.size == self.size

+    @property
+    def is_fixed(self):
+        for p in self.parameters:
+            if not p.is_fixed: return False
+        return True
+
+    def _get_original(self, param):
+        # if advanced indexing is activated it happens that the array is a copy
+        # you can retrieve the original param through this method, by passing
+        # the copy here
+        return self.parameters[param._parent_index_]
+
     #===========================================================================
     # Prior Operations
     #===========================================================================
@@ -438,8 +455,7 @@ class Constrainable(Nameable, Indexable, Observable):
     def unset_priors(self, *priors):
         """
-        Un-set all priors given from this parameter handle.
-
+        Un-set all priors given (in *priors) from this parameter handle.
         """
         return self._remove_from_index_operations(self.priors, priors)
@@ -541,13 +557,13 @@ class Constrainable(Nameable, Indexable, Observable):
         self.constraints = ParameterIndexOperationsView(parent.constraints, parent._offset_for(self), self.size)
         self.priors = ParameterIndexOperationsView(parent.priors, parent._offset_for(self), self.size)
         self._fixes_ = None
-        for p in self._parameters_:
+        for p in self.parameters:
             p._parent_changed(parent)

     def _add_to_index_operations(self, which, reconstrained, what, warning):
         """
         Helper preventing copy code.
-        This addes the given what (transformation, prior etc) to parameter index operations which.
+        This adds the given what (transformation, prior etc) to parameter index operations which.
         reconstrained are reconstrained indices.
         warn when reconstraining parameters if warning is True.
         TODO: find out which parameters have changed specifically
@@ -569,14 +585,13 @@ class Constrainable(Nameable, Indexable, Observable):
         removed = np.empty((0,), dtype=int)
         for t in transforms:
             unconstrained = which.remove(t, self._raveled_index())
-            print unconstrained
             removed = np.union1d(removed, unconstrained)
             if t is __fixed__:
                 self._highest_parent_._set_unfixed(self, unconstrained)
         return removed

-class OptimizationHandlable(Constrainable):
+class OptimizationHandlable(Indexable):
     """
     This enables optimization handles on an Object as done in GPy 0.4.
@@ -585,14 +600,8 @@ class OptimizationHandlable(Constrainable):
     def __init__(self, name, default_constraint=None, *a, **kw):
         super(OptimizationHandlable, self).__init__(name, default_constraint=default_constraint, *a, **kw)

-    def transform(self):
-        [np.put(self.param_array, ind, c.finv(self.param_array.flat[ind])) for c, ind in self.constraints.iteritems() if c != __fixed__]
-
-    def untransform(self):
-        [np.put(self.param_array, ind, c.f(self.param_array.flat[ind])) for c, ind in self.constraints.iteritems() if c != __fixed__]
-
     def _get_params_transformed(self):
-        # transformed parameters (apply transformation rules)
+        # transformed parameters (apply un-transformation rules)
         p = self.param_array.copy()
         [np.put(p, ind, c.finv(p[ind])) for c, ind in self.constraints.iteritems() if c != __fixed__]
         if self.has_parent() and self.constraints[__fixed__].size != 0:
@@ -604,29 +613,55 @@ class OptimizationHandlable(Constrainable):
         return p

     def _set_params_transformed(self, p):
-        if p is self.param_array:
-            p = p.copy()
+        """
+        Set parameters p, but make sure they get transformed before setting.
+        This means the optimizer sees p, whereas the model sees transformed(p),
+        such that the parameters the model sees are in the right domain.
+        """
+        if not(p is self.param_array):
             if self.has_parent() and self.constraints[__fixed__].size != 0:
                 fixes = np.ones(self.size).astype(bool)
                 fixes[self.constraints[__fixed__]] = FIXED
                 self.param_array.flat[fixes] = p
             elif self._has_fixes(): self.param_array.flat[self._fixes_] = p
             else: self.param_array.flat = p
-        self.untransform()
+        [np.put(self.param_array, ind, c.f(self.param_array.flat[ind]))
+            for c, ind in self.constraints.iteritems() if c != __fixed__]
         self._trigger_params_changed()

     def _trigger_params_changed(self, trigger_parent=True):
-        [p._trigger_params_changed(trigger_parent=False) for p in self._parameters_]
+        """
+        First tell all children to update,
+        then update yourself.
+        If trigger_parent is True, we will tell the parent, otherwise not.
+        """
+        [p._trigger_params_changed(trigger_parent=False) for p in self.parameters]
         self.notify_observers(None, None if trigger_parent else -np.inf)

     def _size_transformed(self):
+        """
+        As fixes are not passed to the optimiser, the size of the model for the optimiser
+        is the size of all parameters minus the size of the fixes.
+        """
         return self.size - self.constraints[__fixed__].size

+    def _transform_gradients(self, g):
+        """
+        Transform the gradients by multiplying the gradient factor for each
+        constraint to it.
+        """
+        if self.has_parent():
+            return g
+        [np.put(g, i, g[i] * c.gradfactor(self.param_array[i])) for c, i in self.constraints.iteritems() if c != __fixed__]
+        if self._has_fixes(): return g[self._fixes_]
+        return g
+
     @property
     def num_params(self):
         """
         Return the number of parameters of this parameter_handle.
-        Param objects will allways return 0.
+        Param objects will always return 0.
         """
         raise NotImplemented, "Abstract, please implement in respective classes"
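
_get_params_transformed/_set_params_transformed translate between the optimizer's unconstrained view (via each constraint's finv) and the model's constrained view (via f), while _transform_gradients applies the matching chain-rule factor. A sketch with a positivity mapping in the style of GPy's Logexp (the lambdas here are illustrative, not the library's implementation):

    import numpy as np

    f = lambda x: np.log(1. + np.exp(x))     # optimizer space -> model space (positive)
    finv = lambda p: np.log(np.exp(p) - 1.)  # model space -> optimizer space

    p_model = np.array([0.5, 2.0])           # constrained parameters the model sees
    p_opt = finv(p_model)                    # unconstrained values the optimizer sees
    print np.allclose(f(p_opt), p_model)     # True: the round trip restores the parameters

The gradfactor used above then supplies the derivative of the mapping at the current value, so gradients computed in model space come out in the optimizer's space.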
@@ -640,10 +675,11 @@ class OptimizationHandlable(Constrainable):
         """
         if adjust_for_printing: adjust = lambda x: adjust_name_for_printing(x)
         else: adjust = lambda x: x
-        if recursive: names = [xi for x in self._parameters_ for xi in x.parameter_names(add_self=True, adjust_for_printing=adjust_for_printing)]
-        else: names = [adjust(x.name) for x in self._parameters_]
+        if recursive: names = [xi for x in self.parameters for xi in x.parameter_names(add_self=True, adjust_for_printing=adjust_for_printing)]
+        else: names = [adjust(x.name) for x in self.parameters]
         if add_self: names = map(lambda x: adjust(self.name) + "." + x, names)
         return names

     def _get_param_names(self):
         n = np.array([p.hierarchy_name() + '[' + str(i) + ']' for p in self.flattened_parameters for i in p._indices()])
         return n
@@ -662,7 +698,7 @@ class OptimizationHandlable(Constrainable):
         Randomize the model.
         Make this draw from the prior if one exists, else draw from given random generator

-        :param rand_gen: numpy random number generator which takes args and kwargs
+        :param rand_gen: np random number generator which takes args and kwargs
         :param float loc: loc parameter for random number generator
         :param float scale: scale parameter for random number generator
         :param args, kwargs: will be passed through to random number generator
@@ -678,7 +714,7 @@ class OptimizationHandlable(Constrainable):
     # for all parameterized objects
     #===========================================================================
     @property
-    def full_gradient(self):
+    def gradient_full(self):
         """
         Note to users:
         This does not return the gradient in the right shape! Use self.gradient
@@ -692,30 +728,53 @@ class OptimizationHandlable(Constrainable):
         return self._gradient_array_

     def _propagate_param_grad(self, parray, garray):
+        """
+        For propagating the param_array and gradient_array.
+        This ensures the in memory view of each subsequent array.
+
+        1.) connect param_array of children to self.param_array
+        2.) tell all children to propagate further
+        """
         pi_old_size = 0
-        for pi in self._parameters_:
+        for pi in self.parameters:
             pislice = slice(pi_old_size, pi_old_size + pi.size)
             self.param_array[pislice] = pi.param_array.flat # , requirements=['C', 'W']).flat
-            self.full_gradient[pislice] = pi.full_gradient.flat # , requirements=['C', 'W']).flat
+            self.gradient_full[pislice] = pi.gradient_full.flat # , requirements=['C', 'W']).flat
             pi.param_array.data = parray[pislice].data
-            pi.full_gradient.data = garray[pislice].data
+            pi.gradient_full.data = garray[pislice].data
             pi._propagate_param_grad(parray[pislice], garray[pislice])
             pi_old_size += pi.size

 class Parameterizable(OptimizationHandlable):
+    """
+    A parameterisable class.
+
+    This class provides the parameters list (ArrayList) and standard parameter handling,
+    such as {add|remove}_parameter(), traverse hierarchy and param_array, gradient_array
+    and the empty parameters_changed().
+
+    This class is abstract and should not be instantiated.
+    Use GPy.core.Parameterized() as node (or leaf) in the parameterized hierarchy.
+    Use GPy.core.Param() for a leaf in the parameterized hierarchy.
+    """
     def __init__(self, *args, **kwargs):
         super(Parameterizable, self).__init__(*args, **kwargs)
         from GPy.core.parameterization.lists_and_dicts import ArrayList
-        self._parameters_ = ArrayList()
-        self.size = 0
+        self.parameters = ArrayList()
+        self._param_array_ = None
         self._added_names_ = set()
+        self.__visited = False # for traversing in reverse order we need to know if we were here already

     @property
     def param_array(self):
-        if not hasattr(self, '_param_array_'):
+        """
+        Array representing the parameters of this class.
+        There is only one copy of all parameters in memory, two during optimization.
+        """
+        if self.__dict__.get('_param_array_', None) is None:
             self._param_array_ = np.empty(self.size, dtype=np.float64)
         return self._param_array_
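
_propagate_param_grad (and _connect_parameters below) achieve the "only one copy of all parameters in memory" promise by rebinding each child's array buffer (.data) into a slice of the parent's array. Rebinding .data this way worked on the numpy versions GPy targeted at the time; later numpy releases made the attribute effectively read-only. The effect, sketched:

    import numpy as np

    parent = np.arange(4, dtype=np.float64)
    child = np.empty(2)              # starts with its own memory
    child.data = parent[1:3].data    # rebind child's buffer into the parent's slice
    parent[1] = 99.
    print child[0]                   # 99.0: child now reads and writes the parent's memory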
@@ -723,12 +782,58 @@ class Parameterizable(OptimizationHandlable):
     def param_array(self, arr):
         self._param_array_ = arr

+    def traverse(self, visit, *args, **kwargs):
+        """
+        Traverse the hierarchy performing visit(self, *args, **kwargs)
+        at every node passed by downwards. This function includes self!
+        See "visitor pattern" in literature. This is implemented in pre-order fashion.
+
+        Example:
+
+        Collect all children:
+
+        children = []
+        self.traverse(children.append)
+        print children
+        """
+        if not self.__visited:
+            visit(self, *args, **kwargs)
+            self.__visited = True
+            for c in self.parameters:
+                c.traverse(visit, *args, **kwargs)
+            self.__visited = False
+
+    def traverse_parents(self, visit, *args, **kwargs):
+        """
+        Traverse the hierarchy upwards, visiting all parents and their children except self.
+        See "visitor pattern" in literature. This is implemented in pre-order fashion.
+
+        Example:
+
+        parents = []
+        self.traverse_parents(parents.append)
+        print parents
+        """
+        if self.has_parent():
+            self.__visited = True
+            self._parent_._traverse_parents(visit, *args, **kwargs)
+            self.__visited = False
+
+    def _traverse_parents(self, visit, *args, **kwargs):
+        if not self.__visited:
+            self.__visited = True
+            visit(self, *args, **kwargs)
+            if self.has_parent():
+                self._parent_._traverse_parents(visit, *args, **kwargs)
+                self._parent_.traverse(visit, *args, **kwargs)
+            self.__visited = False
+
     #=========================================================================
     # Gradient handling
     #=========================================================================
     @property
     def gradient(self):
-        if not hasattr(self, '_gradient_array_'):
+        if self.__dict__.get('_gradient_array_', None) is None:
             self._gradient_array_ = np.empty(self.size, dtype=np.float64)
         return self._gradient_array_
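
traverse() is a textbook pre-order visitor over the parameter tree, with the __visited flag guarding against re-entry when _traverse_parents() walks back down through a parent. Stripped of that bookkeeping, the recursion looks like this (hypothetical Node class, not GPy's):

    class Node(object):
        def __init__(self, name, children=()):
            self.name = name
            self.children = list(children)

        def traverse(self, visit, *args, **kwargs):
            visit(self, *args, **kwargs)            # pre-order: self first,
            for c in self.children:
                c.traverse(visit, *args, **kwargs)  # then each subtree

    root = Node('model', [Node('kern', [Node('lengthscale')]), Node('noise')])
    names = []
    root.traverse(lambda n: names.append(n.name))
    print names   # ['model', 'kern', 'lengthscale', 'noise']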
@@ -738,7 +843,7 @@ class Parameterizable(OptimizationHandlable):
     @property
     def num_params(self):
-        return len(self._parameters_)
+        return len(self.parameters)

     def _add_parameter_name(self, param, ignore_added_names=False):
         pname = adjust_name_for_printing(param.name)
@@ -771,130 +876,6 @@ class Parameterizable(OptimizationHandlable):
         self._remove_parameter_name(None, old_name)
         self._add_parameter_name(param)

-    def add_parameter(self, param, index=None, _ignore_added_names=False):
-        """
-        :param parameters: the parameters to add
-        :type parameters: list of or one :py:class:`GPy.core.param.Param`
-        :param [index]: index of where to put parameters
-        :param bool _ignore_added_names: whether the name of the parameter overrides a possibly existing field
-
-        Add all parameters to this param class, you can insert parameters
-        at any given index using the :func:`list.insert` syntax
-        """
-        if param in self._parameters_ and index is not None:
-            self.remove_parameter(param)
-            self.add_parameter(param, index)
-        # elif param.has_parent():
-        #    raise HierarchyError, "parameter {} already in another model ({}), create new object (or copy) for adding".format(param._short(), param._highest_parent_._short())
-        elif param not in self._parameters_:
-            if param.has_parent():
-                parent = param._parent_
-                while parent is not None:
-                    if parent is self:
-                        raise HierarchyError, "You cannot add a parameter twice into the hierarchy"
-                    parent = parent._parent_
-                param._parent_.remove_parameter(param)
-            # make sure the size is set
-            if index is None:
-                self.constraints.update(param.constraints, self.size)
-                self.priors.update(param.priors, self.size)
-                self._parameters_.append(param)
-            else:
-                start = sum(p.size for p in self._parameters_[:index])
-                self.constraints.shift_right(start, param.size)
-                self.priors.shift_right(start, param.size)
-                self.constraints.update(param.constraints, start)
-                self.priors.update(param.priors, start)
-                self._parameters_.insert(index, param)
-
-            param.add_observer(self, self._pass_through_notify_observers, -np.inf)
-
-            parent = self
-            while parent is not None:
-                parent.size += param.size
-                parent = parent._parent_
-
-            self._connect_parameters()
-
-            self._highest_parent_._connect_parameters(ignore_added_names=_ignore_added_names)
-            self._highest_parent_._notify_parent_change()
-            self._highest_parent_._connect_fixes()
-        else:
-            raise HierarchyError, """Parameter exists already and no copy made"""
-
-    def add_parameters(self, *parameters):
-        """
-        convenience method for adding several
-        parameters without gradient specification
-        """
-        [self.add_parameter(p) for p in parameters]
-
-    def remove_parameter(self, param):
-        """
-        :param param: param object to remove from being a parameter of this parameterized object.
-        """
-        if not param in self._parameters_:
-            raise RuntimeError, "Parameter {} does not belong to this object, remove parameters directly from their respective parents".format(param._short())
-
-        start = sum([p.size for p in self._parameters_[:param._parent_index_]])
-        self._remove_parameter_name(param)
-        self.size -= param.size
-        del self._parameters_[param._parent_index_]
-
-        param._disconnect_parent()
-        param.remove_observer(self, self._pass_through_notify_observers)
-        self.constraints.shift_left(start, param.size)
-
-        self._connect_parameters()
-        self._notify_parent_change()
-
-        parent = self._parent_
-        while parent is not None:
-            parent.size -= param.size
-            parent = parent._parent_
-
-        self._highest_parent_._connect_parameters()
-        self._highest_parent_._connect_fixes()
-        self._highest_parent_._notify_parent_change()
-
-    def _connect_parameters(self, ignore_added_names=False):
-        # connect parameterlist to this parameterized object
-        # This just sets up the right connection for the params objects
-        # to be used as parameters
-        # it also sets the constraints for each parameter to the constraints
-        # of their respective parents
-        if not hasattr(self, "_parameters_") or len(self._parameters_) < 1:
-            # no parameters for this class
-            return
-        old_size = 0
-        self.param_array = np.empty(self.size, dtype=np.float64)
-        self._gradient_array_ = np.empty(self.size, dtype=np.float64)
-        self._param_slices_ = []
-        for i, p in enumerate(self._parameters_):
-            p._parent_ = self
-            p._parent_index_ = i
-            pslice = slice(old_size, old_size + p.size)
-            # first connect all children
-            p._propagate_param_grad(self.param_array[pslice], self.full_gradient[pslice])
-            # then connect children to self
-            self.param_array[pslice] = p.param_array.flat # , requirements=['C', 'W']).ravel(order='C')
-            self.full_gradient[pslice] = p.full_gradient.flat # , requirements=['C', 'W']).ravel(order='C')
-            if not p.param_array.flags['C_CONTIGUOUS']:
-                raise ValueError, "This should not happen! Please write an email to the developers with the code, which reproduces this error. All parameter arrays must be C_CONTIGUOUS"
-            p.param_array.data = self.param_array[pslice].data
-            p.full_gradient.data = self.full_gradient[pslice].data
-            self._param_slices_.append(pslice)
-            self._add_parameter_name(p, ignore_added_names=ignore_added_names)
-            old_size += p.size
-
     #===========================================================================
     # notification system
     #===========================================================================
@@ -904,30 +885,13 @@ class Parameterizable(OptimizationHandlable):
         self.notify_observers(which=which)

     #===========================================================================
-    # Pickling
-    #===========================================================================
-    def __setstate__(self, state):
-        super(Parameterizable, self).__setstate__(state)
-        self._connect_parameters()
-        self._connect_fixes()
-        self._notify_parent_change()
-        self.parameters_changed()
-
-    def copy(self):
-        c = super(Parameterizable, self).copy()
-        c._connect_parameters()
-        c._connect_fixes()
-        c._notify_parent_change()
-        return c
-
-    #===========================================================================
     # From being parentable, we have to define the parent_change notification
     #===========================================================================
     def _notify_parent_change(self):
         """
         Notify all parameters that the parent has changed
         """
-        for p in self._parameters_:
+        for p in self.parameters:
             p._parent_changed(self)

     def parameters_changed(self):

View file

@@ -3,13 +3,10 @@
 import numpy; np = numpy
-import cPickle
 import itertools
 from re import compile, _pattern_type
 from param import ParamConcatenation
-from parameter_core import Pickleable, Parameterizable, adjust_name_for_printing
-from transformations import __fixed__
-from lists_and_dicts import ArrayList
+from parameter_core import HierarchyError, Parameterizable, adjust_name_for_printing

 class ParametersChangedMeta(type):
     def __call__(self, *args, **kw):
@@ -68,8 +65,7 @@ class Parameterized(Parameterizable):
     def __init__(self, name=None, parameters=[], *a, **kw):
         super(Parameterized, self).__init__(name=name, *a, **kw)
         self._in_init_ = True
-        self._parameters_ = ArrayList()
-        self.size = sum(p.size for p in self._parameters_)
+        self.size = sum(p.size for p in self.parameters)
         self.add_observer(self, self._parameters_changed_notification, -100)
         if not self._has_fixes():
             self._fixes_ = None
@@ -82,15 +78,15 @@ class Parameterized(Parameterizable):
         import pydot # @UnresolvedImport
         iamroot = False
         if G is None:
-            G = pydot.Dot(graph_type='digraph')
+            G = pydot.Dot(graph_type='digraph', bgcolor=None)
             iamroot=True
-        node = pydot.Node(id(self), shape='record', label=self.name)
+        node = pydot.Node(id(self), shape='box', label=self.name)#, color='white')
         G.add_node(node)
-        for child in self._parameters_:
+        for child in self.parameters:
             child_node = child.build_pydot(G)
-            G.add_edge(pydot.Edge(node, child_node))
-        for o in self.observers.keys():
+            G.add_edge(pydot.Edge(node, child_node))#, color='white'))
+        for _, o, _ in self.observers:
             label = o.name if hasattr(o, 'name') else str(o)
             observed_node = pydot.Node(id(o), label=label)
             G.add_node(observed_node)
@ -102,58 +98,133 @@ class Parameterized(Parameterizable):
return node return node
#=========================================================================== #===========================================================================
# Gradient control # Add remove parameters:
#=========================================================================== #===========================================================================
def _transform_gradients(self, g): def add_parameter(self, param, index=None, _ignore_added_names=False):
if self.has_parent(): """
return g :param parameters: the parameters to add
[numpy.put(g, i, g[i] * c.gradfactor(self.param_array[i])) for c, i in self.constraints.iteritems() if c != __fixed__] :type parameters: list of or one :py:class:`GPy.core.param.Param`
if self._has_fixes(): return g[self._fixes_] :param [index]: index of where to put parameters
return g
:param bool _ignore_added_names: whether the name of the parameter overrides a possibly existing field
#=========================================================================== Add all parameters to this param class, you can insert parameters
# Indexable at any given index using the :func:`list.insert` syntax
#=========================================================================== """
def _offset_for(self, param): if param in self.parameters and index is not None:
# get the offset in the parameterized index array for param self.remove_parameter(param)
self.add_parameter(param, index)
# elif param.has_parent():
# raise HierarchyError, "parameter {} already in another model ({}), create new object (or copy) for adding".format(param._short(), param._highest_parent_._short())
elif param not in self.parameters:
if param.has_parent(): if param.has_parent():
if param._parent_._get_original(param) in self._parameters_: def visit(parent, self):
return self._param_slices_[param._parent_._get_original(param)._parent_index_].start if parent is self:
return self._offset_for(param._parent_) + param._parent_._offset_for(param) raise HierarchyError, "You cannot add a parameter twice into the hierarchy"
return 0 param.traverse_parents(visit, self)
param._parent_.remove_parameter(param)
# make sure the size is set
if index is None:
self.constraints.update(param.constraints, self.size)
self.priors.update(param.priors, self.size)
self.parameters.append(param)
else:
start = sum(p.size for p in self.parameters[:index])
self.constraints.shift_right(start, param.size)
self.priors.shift_right(start, param.size)
self.constraints.update(param.constraints, start)
self.priors.update(param.priors, start)
self.parameters.insert(index, param)
def _raveled_index_for(self, param): param.add_observer(self, self._pass_through_notify_observers, -np.inf)
"""
get the raveled index for a param
that is an int array, containing the indexes for the flattened
param inside this parameterized logic.
"""
if isinstance(param, ParamConcatenation):
return numpy.hstack((self._raveled_index_for(p) for p in param.params))
return param._raveled_index() + self._offset_for(param)
def _raveled_index(self): parent = self
""" while parent is not None:
get the raveled index for this object, parent.size += param.size
this is not in the global view of things! parent = parent._parent_
"""
return numpy.r_[:self.size]
#=========================================================================== self._connect_parameters()
# Convenience for fixed, tied checking of param:
#===========================================================================
@property
def is_fixed(self):
for p in self._parameters_:
if not p.is_fixed: return False
return True
def _get_original(self, param): self._highest_parent_._connect_parameters(ignore_added_names=_ignore_added_names)
# if advanced indexing is activated it happens that the array is a copy self._highest_parent_._notify_parent_change()
# you can retrieve the original param through this method, by passing self._highest_parent_._connect_fixes()
# the copy here
return self._parameters_[param._parent_index_] else:
raise HierarchyError, """Parameter exists already and no copy made"""
def add_parameters(self, *parameters):
"""
convenience method for adding several
parameters without gradient specification
"""
[self.add_parameter(p) for p in parameters]
def remove_parameter(self, param):
"""
:param param: param object to remove from being a parameter of this parameterized object.
"""
if not param in self.parameters:
raise RuntimeError, "Parameter {} does not belong to this object {}, remove parameters directly from their respective parents".format(param._short(), self.name)
start = sum([p.size for p in self.parameters[:param._parent_index_]])
self._remove_parameter_name(param)
self.size -= param.size
del self.parameters[param._parent_index_]
param._disconnect_parent()
param.remove_observer(self, self._pass_through_notify_observers)
self.constraints.shift_left(start, param.size)
self._connect_parameters()
self._notify_parent_change()
parent = self._parent_
while parent is not None:
parent.size -= param.size
parent = parent._parent_
self._highest_parent_._connect_parameters()
self._highest_parent_._connect_fixes()
self._highest_parent_._notify_parent_change()
def _connect_parameters(self, ignore_added_names=False):
# connect parameterlist to this parameterized object
# This just sets up the right connection for the params objects
# to be used as parameters
# it also sets the constraints for each parameter to the constraints
# of their respective parents
if not hasattr(self, "parameters") or len(self.parameters) < 1:
# no parameters for this class
return
if self.param_array.size != self.size:
self.param_array = np.empty(self.size, dtype=np.float64)
if self.gradient.size != self.size:
self._gradient_array_ = np.empty(self.size, dtype=np.float64)
old_size = 0
self._param_slices_ = []
for i, p in enumerate(self.parameters):
p._parent_ = self
p._parent_index_ = i
pslice = slice(old_size, old_size + p.size)
# first connect all children
p._propagate_param_grad(self.param_array[pslice], self.gradient_full[pslice])
# then connect children to self
self.param_array[pslice] = p.param_array.flat # , requirements=['C', 'W']).ravel(order='C')
self.gradient_full[pslice] = p.gradient_full.flat # , requirements=['C', 'W']).ravel(order='C')
if not p.param_array.flags['C_CONTIGUOUS']:
raise ValueError, "This should not happen! Please write an email to the developers with the code, which reproduces this error. All parameter arrays must be C_CONTIGUOUS"
p.param_array.data = self.param_array[pslice].data
p.gradient_full.data = self.gradient_full[pslice].data
self._param_slices_.append(pslice)
self._add_parameter_name(p, ignore_added_names=ignore_added_names)
old_size += p.size
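The slice wiring above is what makes the parameter handling cheap: every child's param_array becomes a view into one flat buffer owned by the parent, so reads and writes propagate without copying. A minimal numpy sketch of the idea (an illustration, not GPy code; the actual implementation repoints .data as shown above):

import numpy as np

flat = np.empty(5)                        # the parent's single flat buffer
flat[0:2] = [1., 2.]                      # child a's values
flat[2:5] = [3., 4., 5.]                  # child b's values
child_a, child_b = flat[0:2], flat[2:5]   # children as views into slices

child_b[0] = 42.                          # writing through a child view...
assert flat[2] == 42.                     # ...is immediately visible to the parent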
#=========================================================================== #===========================================================================
# Get/set parameters: # Get/set parameters:
@ -200,10 +271,28 @@ class Parameterized(Parameterizable):
def __setattr__(self, name, val): def __setattr__(self, name, val):
# override the default behaviour, if setting a param, so broadcasting can be used # override the default behaviour, if setting a param, so broadcasting can be used
if hasattr(self, "_parameters_"): if hasattr(self, "parameters"):
pnames = self.parameter_names(False, adjust_for_printing=True, recursive=False) pnames = self.parameter_names(False, adjust_for_printing=True, recursive=False)
if name in pnames: self._parameters_[pnames.index(name)][:] = val; return if name in pnames: self.parameters[pnames.index(name)][:] = val; return
object.__setattr__(self, name, val); object.__setattr__(self, name, val);
#===========================================================================
# Pickling
#===========================================================================
def __setstate__(self, state):
super(Parameterized, self).__setstate__(state)
self._connect_parameters()
self._connect_fixes()
self._notify_parent_change()
self.parameters_changed()
def copy(self):
c = super(Parameterized, self).copy()
c._connect_parameters()
c._connect_fixes()
c._notify_parent_change()
return c
#=========================================================================== #===========================================================================
# Printing: # Printing:
#=========================================================================== #===========================================================================
@ -211,22 +300,22 @@ class Parameterized(Parameterizable):
return self.hierarchy_name() return self.hierarchy_name()
@property @property
def flattened_parameters(self): def flattened_parameters(self):
return [xi for x in self._parameters_ for xi in x.flattened_parameters] return [xi for x in self.parameters for xi in x.flattened_parameters]
@property @property
def _parameter_sizes_(self): def _parameter_sizes_(self):
return [x.size for x in self._parameters_] return [x.size for x in self.parameters]
@property @property
def parameter_shapes(self): def parameter_shapes(self):
return [xi for x in self._parameters_ for xi in x.parameter_shapes] return [xi for x in self.parameters for xi in x.parameter_shapes]
@property @property
def _constraints_str(self): def _constraints_str(self):
return [cs for p in self._parameters_ for cs in p._constraints_str] return [cs for p in self.parameters for cs in p._constraints_str]
@property @property
def _priors_str(self): def _priors_str(self):
return [cs for p in self._parameters_ for cs in p._priors_str] return [cs for p in self.parameters for cs in p._priors_str]
@property @property
def _description_str(self): def _description_str(self):
return [xi for x in self._parameters_ for xi in x._description_str] return [xi for x in self.parameters for xi in x._description_str]
@property @property
def _ties_str(self): def _ties_str(self):
return [','.join(x._ties_str) for x in self.flattened_parameters] return [','.join(x._ties_str) for x in self.flattened_parameters]
@ -246,7 +335,7 @@ class Parameterized(Parameterizable):
to_print = [] to_print = []
for n, d, c, t, p in itertools.izip(names, desc, constrs, ts, prirs): for n, d, c, t, p in itertools.izip(names, desc, constrs, ts, prirs):
to_print.append(format_spec.format(name=n, desc=d, const=c, t=t, pri=p)) to_print.append(format_spec.format(name=n, desc=d, const=c, t=t, pri=p))
# to_print = [format_spec.format(p=p, const=c, t=t) if isinstance(p, Param) else p.__str__(header=False) for p, c, t in itertools.izip(self._parameters_, constrs, ts)] # to_print = [format_spec.format(p=p, const=c, t=t) if isinstance(p, Param) else p.__str__(header=False) for p, c, t in itertools.izip(self.parameters, constrs, ts)]
sep = '-' * (nl + sl + cl + + pl + tl + 8 * 2 + 3) sep = '-' * (nl + sl + cl + + pl + tl + 8 * 2 + 3)
if header: if header:
header = " {{0:<{0}s}} | {{1:^{1}s}} | {{2:^{2}s}} | {{3:^{3}s}} | {{4:^{4}s}}".format(nl, sl, cl, pl, tl).format(name, "Value", "Constraint", "Prior", "Tied to") header = " {{0:<{0}s}} | {{1:^{1}s}} | {{2:^{2}s}} | {{3:^{3}s}} | {{4:^{4}s}}".format(nl, sl, cl, pl, tl).format(name, "Value", "Constraint", "Prior", "Tied to")

View file

@ -81,7 +81,7 @@ class VariationalPosterior(Parameterized):
def _raveled_index(self): def _raveled_index(self):
index = np.empty(dtype=int, shape=0) index = np.empty(dtype=int, shape=0)
size = 0 size = 0
for p in self._parameters_: for p in self.parameters:
index = np.hstack((index, p._raveled_index()+size)) index = np.hstack((index, p._raveled_index()+size))
size += p._realsize_ if hasattr(p, '_realsize_') else p.size size += p._realsize_ if hasattr(p, '_realsize_') else p.size
return index return index
@ -96,10 +96,13 @@ class VariationalPosterior(Parameterized):
dc = self.__dict__.copy() dc = self.__dict__.copy()
dc['mean'] = self.mean[s] dc['mean'] = self.mean[s]
dc['variance'] = self.variance[s] dc['variance'] = self.variance[s]
dc['_parameters_'] = copy.copy(self._parameters_) dc['parameters'] = copy.copy(self.parameters)
n.__dict__.update(dc) n.__dict__.update(dc)
n._parameters_[dc['mean']._parent_index_] = dc['mean'] n.parameters[dc['mean']._parent_index_] = dc['mean']
n._parameters_[dc['variance']._parent_index_] = dc['variance'] n.parameters[dc['variance']._parent_index_] = dc['variance']
n._gradient_array_ = None
oversize = self.size - self.mean.size - self.variance.size
n.size = n.mean.size + n.variance.size + oversize
n.ndim = n.mean.ndim n.ndim = n.mean.ndim
n.shape = n.mean.shape n.shape = n.mean.shape
n.num_data = n.mean.shape[0] n.num_data = n.mean.shape[0]
@ -147,11 +150,11 @@ class SpikeAndSlabPosterior(VariationalPosterior):
dc['mean'] = self.mean[s] dc['mean'] = self.mean[s]
dc['variance'] = self.variance[s] dc['variance'] = self.variance[s]
dc['binary_prob'] = self.binary_prob[s] dc['binary_prob'] = self.binary_prob[s]
dc['_parameters_'] = copy.copy(self._parameters_) dc['parameters'] = copy.copy(self.parameters)
n.__dict__.update(dc) n.__dict__.update(dc)
n._parameters_[dc['mean']._parent_index_] = dc['mean'] n.parameters[dc['mean']._parent_index_] = dc['mean']
n._parameters_[dc['variance']._parent_index_] = dc['variance'] n.parameters[dc['variance']._parent_index_] = dc['variance']
n._parameters_[dc['binary_prob']._parent_index_] = dc['binary_prob'] n.parameters[dc['binary_prob']._parent_index_] = dc['binary_prob']
n.ndim = n.mean.ndim n.ndim = n.mean.ndim
n.shape = n.mean.shape n.shape = n.mean.shape
n.num_data = n.mean.shape[0] n.num_data = n.mean.shape[0]

View file

@ -79,29 +79,32 @@ class SparseGP(GP):
self.Z.gradient = self.kern.gradients_X(self.grad_dict['dL_dKmm'], self.Z) self.Z.gradient = self.kern.gradients_X(self.grad_dict['dL_dKmm'], self.Z)
self.Z.gradient += self.kern.gradients_X(self.grad_dict['dL_dKnm'].T, self.Z, self.X) self.Z.gradient += self.kern.gradients_X(self.grad_dict['dL_dKnm'].T, self.Z, self.X)
def _raw_predict(self, Xnew, full_cov=False): def _raw_predict(self, Xnew, full_cov=False, kern=None):
""" """
Make a prediction for the latent function values Make a prediction for the latent function values
""" """
if kern is None: kern = self.kern
if not isinstance(Xnew, VariationalPosterior): if not isinstance(Xnew, VariationalPosterior):
Kx = self.kern.K(self.Z, Xnew) Kx = kern.K(self.Z, Xnew)
mu = np.dot(Kx.T, self.posterior.woodbury_vector) mu = np.dot(Kx.T, self.posterior.woodbury_vector)
if full_cov: if full_cov:
Kxx = self.kern.K(Xnew) Kxx = kern.K(Xnew)
var = Kxx - np.dot(Kx.T, np.dot(self.posterior.woodbury_inv, Kx)) var = Kxx - np.dot(Kx.T, np.dot(self.posterior.woodbury_inv, Kx))
#var = Kxx[:,:,None] - np.tensordot(np.dot(np.atleast_3d(self.posterior.woodbury_inv).T, Kx).T, Kx, [1,0]).swapaxes(1,2) #var = Kxx[:,:,None] - np.tensordot(np.dot(np.atleast_3d(self.posterior.woodbury_inv).T, Kx).T, Kx, [1,0]).swapaxes(1,2)
var = var.squeeze() var = var.squeeze()
else: else:
Kxx = self.kern.Kdiag(Xnew) Kxx = kern.Kdiag(Xnew)
var = (Kxx - np.sum(np.dot(np.atleast_3d(self.posterior.woodbury_inv).T, Kx) * Kx[None,:,:], 1)).T var = (Kxx - np.sum(np.dot(np.atleast_3d(self.posterior.woodbury_inv).T, Kx) * Kx[None,:,:], 1)).T
else: else:
Kx = self.kern.psi1(self.Z, Xnew) Kx = kern.psi1(self.Z, Xnew)
mu = np.dot(Kx, self.posterior.woodbury_vector) mu = np.dot(Kx, self.posterior.woodbury_vector)
if full_cov: if full_cov:
raise NotImplementedError, "TODO" raise NotImplementedError, "TODO"
else: else:
Kxx = self.kern.psi0(self.Z, Xnew) Kxx = kern.psi0(self.Z, Xnew)
psi2 = self.kern.psi2(self.Z, Xnew) psi2 = kern.psi2(self.Z, Xnew)
var = Kxx - np.sum(np.sum(psi2 * Kmmi_LmiBLmi[None, :, :], 1), 1) var = Kxx - np.sum(np.sum(psi2 * Kmmi_LmiBLmi[None, :, :], 1), 1)
return mu, var return mu, var
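The new optional kern argument lets callers predict with a covariance other than self.kern, for instance a single component of an additive kernel. A hedged usage sketch (the attribute path to a sub-kernel is an assumption and varies between versions):

# mu, var     = m._raw_predict(Xnew)                    # default: self.kern
# mu_k, var_k = m._raw_predict(Xnew, kern=m.kern.rbf)   # one additive part only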

View file

@ -96,15 +96,11 @@ def toy_linear_1d_classification_laplace(seed=default_seed, optimize=True, plot=
# Optimize # Optimize
if optimize: if optimize:
#m.update_likelihood_approximation()
# Parameters optimization:
try: try:
m.optimize('scg', messages=1) m.optimize('scg', messages=1)
except Exception as e: except Exception as e:
return m return m
#m.pseudo_EM()
# Plot # Plot
if plot: if plot:
fig, axes = pb.subplots(2, 1) fig, axes = pb.subplots(2, 1)
@ -133,10 +129,7 @@ def sparse_toy_linear_1d_classification(num_inducing=10, seed=default_seed, opti
# Optimize # Optimize
if optimize: if optimize:
#m.update_likelihood_approximation() m.optimize()
# Parameters optimization:
#m.optimize()
m.pseudo_EM()
# Plot # Plot
if plot: if plot:

View file

@ -161,6 +161,7 @@ def bgplvm_oil(optimize=True, verbose=1, plot=True, N=200, Q=7, num_inducing=40,
import GPy import GPy
from matplotlib import pyplot as plt from matplotlib import pyplot as plt
from ..util.misc import param_to_array from ..util.misc import param_to_array
import numpy as np
_np.random.seed(0) _np.random.seed(0)
data = GPy.util.datasets.oil() data = GPy.util.datasets.oil()
@ -174,11 +175,10 @@ def bgplvm_oil(optimize=True, verbose=1, plot=True, N=200, Q=7, num_inducing=40,
m.optimize('scg', messages=verbose, max_iters=max_iters, gtol=.05) m.optimize('scg', messages=verbose, max_iters=max_iters, gtol=.05)
if plot: if plot:
y = m.Y
fig, (latent_axes, sense_axes) = plt.subplots(1, 2) fig, (latent_axes, sense_axes) = plt.subplots(1, 2)
m.plot_latent(ax=latent_axes, labels=m.data_labels) m.plot_latent(ax=latent_axes, labels=m.data_labels)
data_show = GPy.plotting.matplot_dep.visualize.vector_show(y) data_show = GPy.plotting.matplot_dep.visualize.vector_show((m.Y[0,:]))
lvm_visualizer = GPy.plotting.matplot_dep.visualize.lvm_dimselect(param_to_array(m.X.mean), # @UnusedVariable lvm_visualizer = GPy.plotting.matplot_dep.visualize.lvm_dimselect(param_to_array(m.X.mean)[0:1,:], # @UnusedVariable
m, data_show, latent_axes=latent_axes, sense_axes=sense_axes) m, data_show, latent_axes=latent_axes, sense_axes=sense_axes)
raw_input('Press enter to finish') raw_input('Press enter to finish')
plt.close(fig) plt.close(fig)
@ -408,15 +408,16 @@ def stick(kernel=None, optimize=True, verbose=True, plot=True):
data = GPy.util.datasets.osu_run1() data = GPy.util.datasets.osu_run1()
# optimize # optimize
m = GPy.models.GPLVM(data['Y'], 2, kernel=kernel) m = GPy.models.GPLVM(data['Y'], 2, kernel=kernel)
if optimize: m.optimize(messages=verbose, max_f_eval=10000) if optimize: m.optimize('bfgs', messages=verbose, max_f_eval=10000)
if plot: if plot:
plt.clf() plt.clf()
ax = m.plot_latent() ax = m.plot_latent()
y = m.Y[0, :] y = m.Y[0, :]
data_show = GPy.plotting.matplot_dep.visualize.stick_show(y[None, :], connect=data['connect']) data_show = GPy.plotting.matplot_dep.visualize.stick_show(y[None, :], connect=data['connect'])
vis = GPy.plotting.matplot_dep.visualize.lvm(m.X[0, :].copy(), m, data_show, latent_axes=ax) lvm_visualizer = GPy.plotting.matplot_dep.visualize.lvm(m.X[:1, :].copy(), m, data_show, latent_axes=ax)
raw_input('Press enter to finish') raw_input('Press enter to finish')
lvm_visualizer.close()
data_show.close()
return m return m
def bcgplvm_linear_stick(kernel=None, optimize=True, verbose=True, plot=True): def bcgplvm_linear_stick(kernel=None, optimize=True, verbose=True, plot=True):
@ -475,24 +476,28 @@ def robot_wireless(optimize=True, verbose=True, plot=True):
def stick_bgplvm(model=None, optimize=True, verbose=True, plot=True): def stick_bgplvm(model=None, optimize=True, verbose=True, plot=True):
from GPy.models import BayesianGPLVM from GPy.models import BayesianGPLVM
from matplotlib import pyplot as plt from matplotlib import pyplot as plt
import numpy as np
import GPy import GPy
data = GPy.util.datasets.osu_run1() data = GPy.util.datasets.osu_run1()
Q = 6 Q = 6
kernel = GPy.kern.RBF(Q, ARD=True) + GPy.kern.Bias(Q, _np.exp(-2)) + GPy.kern.White(Q, _np.exp(-2)) kernel = GPy.kern.RBF(Q, lengthscale=np.repeat(.5, Q), ARD=True)
m = BayesianGPLVM(data['Y'], Q, init="PCA", num_inducing=20, kernel=kernel) m = BayesianGPLVM(data['Y'], Q, init="PCA", num_inducing=20, kernel=kernel)
m.data = data
m.likelihood.variance = 0.001
# optimize # optimize
m.ensure_default_constraints() if optimize: m.optimize('bfgs', messages=verbose, max_iters=800, xtol=1e-300, ftol=1e-300)
if optimize: m.optimize('scg', messages=verbose, max_iters=200, xtol=1e-300, ftol=1e-300)
m._set_params(m._get_params())
if plot: if plot:
fig, (latent_axes, sense_axes) = plt.subplots(1, 2) fig, (latent_axes, sense_axes) = plt.subplots(1, 2)
plt.sca(latent_axes) plt.sca(latent_axes)
m.plot_latent() m.plot_latent(ax=latent_axes)
y = m.likelihood.Y[0, :].copy() y = m.Y[:1, :].copy()
data_show = GPy.plotting.matplot_dep.visualize.stick_show(y[None, :], connect=data['connect']) data_show = GPy.plotting.matplot_dep.visualize.stick_show(y, connect=data['connect'])
GPy.plotting.matplot_dep.visualize.lvm_dimselect(m.X[0, :].copy(), m, data_show, latent_axes=latent_axes, sense_axes=sense_axes) GPy.plotting.matplot_dep.visualize.lvm_dimselect(m.X.mean[:1, :].copy(), m, data_show, latent_axes=latent_axes, sense_axes=sense_axes)
raw_input('Press enter to finish') plt.draw()
#raw_input('Press enter to finish')
return m return m
@ -509,11 +514,12 @@ def cmu_mocap(subject='35', motion=['01'], in_place=True, optimize=True, verbose
if optimize: m.optimize(messages=verbose, max_f_eval=10000) if optimize: m.optimize(messages=verbose, max_f_eval=10000)
if plot: if plot:
ax = m.plot_latent() ax = m.plot_latent()
y = m.likelihood.Y[0, :] y = m.Y[0, :]
data_show = GPy.plotting.matplot_dep.visualize.skeleton_show(y[None, :], data['skel']) data_show = GPy.plotting.matplot_dep.visualize.skeleton_show(y[None, :], data['skel'])
lvm_visualizer = GPy.plotting.matplot_dep.visualize.lvm(m.X[0, :].copy(), m, data_show, ax) lvm_visualizer = GPy.plotting.matplot_dep.visualize.lvm(m.X[0].copy(), m, data_show, latent_axes=ax)
raw_input('Press enter to finish') raw_input('Press enter to finish')
lvm_visualizer.close() lvm_visualizer.close()
data_show.close()
return m return m

View file

@ -6,6 +6,10 @@
# some platforms, hence this option. # some platforms, hence this option.
openmp=False openmp=False
[datasets]
# location for the local data cache
dir=$HOME/tmp/GPy-datasets/
[anaconda] [anaconda]
# if you have an anaconda python installation please specify it here. # if you have an anaconda python installation please specify it here.
installed = False installed = False

View file

@ -25,6 +25,20 @@ etc.
""" """
class LatentFunctionInference(object):
def on_optimization_start(self):
"""
This function gets called just before the optimization loop starts.
"""
pass
def on_optimization_end(self):
"""
This function gets called just after the optimization loop has ended.
"""
pass
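These two no-op hooks define the whole contract: GP.optimize brackets the optimization loop with them, and subclasses override whichever they need. A hypothetical subclass as a sketch (not part of the commit):

class CacheAwareInference(LatentFunctionInference):
    """Toy example: drop parameter-dependent caches when optimization starts."""
    def __init__(self):
        self._cache = None
    def on_optimization_start(self):
        self._cache = None   # parameters are about to change
    def on_optimization_end(self):
        pass                 # nothing to finalize in this sketch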
from exact_gaussian_inference import ExactGaussianInference from exact_gaussian_inference import ExactGaussianInference
from laplace import Laplace from laplace import Laplace
from GPy.inference.latent_function_inference.var_dtc import VarDTC from GPy.inference.latent_function_inference.var_dtc import VarDTC
@ -38,11 +52,26 @@ from var_dtc_gpu import VarDTC_GPU
# class FullLatentFunctionData(object): # class FullLatentFunctionData(object):
# #
# #
# class LatentFunctionInference(object):
# def inference(self, kern, X, likelihood, Y, Y_metadata=None): # class EMLikeLatentFunctionInference(LatentFunctionInference):
# def update_approximation(self):
# """
# This function gets called when the approximation should be updated.
# """
#
# def inference(self, kern, X, Z, likelihood, Y, Y_metadata=None):
# """ # """
# Do inference on the latent functions given a covariance function `kern`, # Do inference on the latent functions given a covariance function `kern`,
# inputs and outputs `X` and `Y`, and a likelihood `likelihood`. # inputs and outputs `X` and `Y`, inducing_inputs `Z`, and a likelihood `likelihood`.
# Additional metadata for the outputs `Y` can be given in `Y_metadata`.
# """
# raise NotImplementedError, "Abstract base class for full inference"
#
# class VariationalLatentFunctionInference(LatentFunctionInference):
# def inference(self, kern, X, Z, likelihood, Y, Y_metadata=None):
# """
# Do inference on the latent functions given a covariance function `kern`,
# inputs and outputs `X` and `Y`, inducing_inputs `Z`, and a likelihood `likelihood`.
# Additional metadata for the outputs `Y` can be given in `Y_metadata`. # Additional metadata for the outputs `Y` can be given in `Y_metadata`.
# """ # """
# raise NotImplementedError, "Abstract base class for full inference" # raise NotImplementedError, "Abstract base class for full inference"
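# For reference, the interface sketched in the comments above would be filled in
# roughly like this (a hypothetical minimal implementation, not part of the commit;
# the keys in the gradient dict follow the concrete classes elsewhere in this diff):
#
# class MyInference(LatentFunctionInference):
#     def inference(self, kern, X, Z, likelihood, Y, Y_metadata=None):
#         # compute a posterior over f, the log marginal and its gradients
#         # return Posterior(...), log_marginal, {'dL_dK': ..., 'dL_dthetaL': ...}
#         raise NotImplementedError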

View file

@ -4,9 +4,10 @@
from posterior import Posterior from posterior import Posterior
from ...util.linalg import jitchol, tdot, dtrtrs, dpotri, pdinv from ...util.linalg import jitchol, tdot, dtrtrs, dpotri, pdinv
import numpy as np import numpy as np
from . import LatentFunctionInference
log_2_pi = np.log(2*np.pi) log_2_pi = np.log(2*np.pi)
class DTC(object): class DTC(LatentFunctionInference):
""" """
An object for inference when the likelihood is Gaussian, but we want to do sparse inference. An object for inference when the likelihood is Gaussian, but we want to do sparse inference.

View file

@ -5,10 +5,11 @@ from posterior import Posterior
from ...util.linalg import pdinv, dpotrs, tdot from ...util.linalg import pdinv, dpotrs, tdot
from ...util import diag from ...util import diag
import numpy as np import numpy as np
from . import LatentFunctionInference
log_2_pi = np.log(2*np.pi) log_2_pi = np.log(2*np.pi)
class ExactGaussianInference(object): class ExactGaussianInference(LatentFunctionInference):
""" """
An object for inference when the likelihood is Gaussian. An object for inference when the likelihood is Gaussian.

View file

@ -1,9 +1,10 @@
import numpy as np import numpy as np
from ...util.linalg import pdinv,jitchol,DSYR,tdot,dtrtrs, dpotrs from ...util.linalg import pdinv,jitchol,DSYR,tdot,dtrtrs, dpotrs
from posterior import Posterior from posterior import Posterior
from . import LatentFunctionInference
log_2_pi = np.log(2*np.pi) log_2_pi = np.log(2*np.pi)
class EP(object): class EP(LatentFunctionInference):
def __init__(self, epsilon=1e-6, eta=1., delta=1.): def __init__(self, epsilon=1e-6, eta=1., delta=1.):
""" """
The expectation-propagation algorithm. The expectation-propagation algorithm.
@ -21,6 +22,14 @@ class EP(object):
def reset(self): def reset(self):
self.old_mutilde, self.old_vtilde = None, None self.old_mutilde, self.old_vtilde = None, None
self._ep_approximation = None
def on_optimization_start(self):
self._ep_approximation = None
def on_optimization_end(self):
# TODO: update approximation in the end as well? Maybe even with a switch?
pass
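Resetting _ep_approximation here (rather than recomputing it) means EP's expensive moment matching runs once per optimization, on the first inference() call after the hook, and every later gradient evaluation reuses the cached site parameters. The generic shape of this pattern, as a sketch:

# if self._ep_approximation is None:          # first call after on_optimization_start
#     self._ep_approximation = expensive()    # run full EP once
# result = self._ep_approximation             # subsequent calls: reuse the cache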
def inference(self, kern, X, likelihood, Y, Y_metadata=None, Z=None): def inference(self, kern, X, likelihood, Y, Y_metadata=None, Z=None):
num_data, output_dim = X.shape num_data, output_dim = X.shape
@ -28,7 +37,10 @@ class EP(object):
K = kern.K(X) K = kern.K(X)
mu, Sigma, mu_tilde, tau_tilde, Z_hat = self.expectation_propagation(K, Y, likelihood, Y_metadata) if self._ep_approximation is None:
mu, Sigma, mu_tilde, tau_tilde, Z_hat = self._ep_approximation = self.expectation_propagation(K, Y, likelihood, Y_metadata)
else:
mu, Sigma, mu_tilde, tau_tilde, Z_hat = self._ep_approximation
Wi, LW, LWi, W_logdet = pdinv(K + np.diag(1./tau_tilde)) Wi, LW, LWi, W_logdet = pdinv(K + np.diag(1./tau_tilde))
@ -42,8 +54,6 @@ class EP(object):
return Posterior(woodbury_inv=Wi, woodbury_vector=alpha, K=K), log_marginal, {'dL_dK':dL_dK, 'dL_dthetaL':dL_dthetaL} return Posterior(woodbury_inv=Wi, woodbury_vector=alpha, K=K), log_marginal, {'dL_dK':dL_dK, 'dL_dthetaL':dL_dthetaL}
def expectation_propagation(self, K, Y, likelihood, Y_metadata): def expectation_propagation(self, K, Y, likelihood, Y_metadata):
num_data, data_dim = Y.shape num_data, data_dim = Y.shape
@ -108,4 +118,3 @@ class EP(object):
mu_tilde = v_tilde/tau_tilde mu_tilde = v_tilde/tau_tilde
return mu, Sigma, mu_tilde, tau_tilde, Z_hat return mu, Sigma, mu_tilde, tau_tilde, Z_hat

View file

@ -1,11 +1,59 @@
import numpy as np import numpy as np
from ...util.linalg import pdinv,jitchol,DSYR,tdot,dtrtrs, dpotrs from ...util import diag
from expectation_propagation import EP from ...util.linalg import mdot, jitchol, backsub_both_sides, tdot, dtrtrs, dtrtri, dpotri, dpotrs, symmetrify, DSYR
from ...util.misc import param_to_array
from ...core.parameterization.variational import VariationalPosterior
from . import LatentFunctionInference
from posterior import Posterior from posterior import Posterior
log_2_pi = np.log(2*np.pi) log_2_pi = np.log(2*np.pi)
class EPDTC(EP): class EPDTC(LatentFunctionInference):
#def __init__(self, epsilon=1e-6, eta=1., delta=1.): const_jitter = 1e-6
def __init__(self, epsilon=1e-6, eta=1., delta=1., limit=1):
from ...util.caching import Cacher
self.limit = limit
self.get_trYYT = Cacher(self._get_trYYT, limit)
self.get_YYTfactor = Cacher(self._get_YYTfactor, limit)
self.epsilon, self.eta, self.delta = epsilon, eta, delta
self.reset()
def set_limit(self, limit):
self.get_trYYT.limit = limit
self.get_YYTfactor.limit = limit
def _get_trYYT(self, Y):
return param_to_array(np.sum(np.square(Y)))
def __getstate__(self):
# has to be overridden, as Cacher objects cannot be pickled.
return self.limit
def __setstate__(self, state):
# has to be overridden, as Cacher objects cannot be pickled.
self.limit = state
from ...util.caching import Cacher
self.get_trYYT = Cacher(self._get_trYYT, self.limit)
self.get_YYTfactor = Cacher(self._get_YYTfactor, self.limit)
def _get_YYTfactor(self, Y):
"""
find a matrix L which satisfies LLT = YYT.
Note that L may have fewer columns than Y.
"""
N, D = Y.shape
if (N>=D):
return param_to_array(Y)
else:
return jitchol(tdot(Y))
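A quick numerical check of the trick in _get_YYTfactor: when N < D, the N x N Cholesky factor of Y Y^T carries the same information as Y but with fewer columns (plain numpy, with np.linalg.cholesky standing in for jitchol):

import numpy as np

np.random.seed(0)
Y = np.random.randn(3, 10)                  # N=3 < D=10
L = np.linalg.cholesky(Y.dot(Y.T))          # 3x3 factor, fewer columns than Y
assert np.allclose(L.dot(L.T), Y.dot(Y.T))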
def get_VVTfactor(self, Y, prec):
return Y * prec # TODO: cache this, and make it efficient
def reset(self):
self.old_mutilde, self.old_vtilde = None, None
self._ep_approximation = None
def inference(self, kern, X, Z, likelihood, Y, Y_metadata=None): def inference(self, kern, X, Z, likelihood, Y, Y_metadata=None):
num_data, output_dim = X.shape num_data, output_dim = X.shape
@ -14,26 +62,131 @@ class EPDTC(EP):
Kmm = kern.K(Z) Kmm = kern.K(Z)
Kmn = kern.K(Z,X) Kmn = kern.K(Z,X)
if self._ep_approximation is None:
mu, Sigma, mu_tilde, tau_tilde, Z_hat = self._ep_approximation = self.expectation_propagation(Kmm, Kmn, Y, likelihood, Y_metadata)
else:
mu, Sigma, mu_tilde, tau_tilde, Z_hat = self._ep_approximation
if isinstance(X, VariationalPosterior):
uncertain_inputs = True
psi0 = kern.psi0(Z, X)
psi1 = Kmn.T#kern.psi1(Z, X)
psi2 = kern.psi2(Z, X)
else:
uncertain_inputs = False
psi0 = kern.Kdiag(X)
psi1 = Kmn.T#kern.K(X, Z)
psi2 = None
#see whether we're using variational uncertain inputs
_, output_dim = Y.shape
#see whether we've got a different noise variance for each datum
#beta = 1./np.fmax(likelihood.gaussian_variance(Y_metadata), 1e-6)
beta = tau_tilde
VVT_factor = beta[:,None]*mu_tilde[:,None]
trYYT = self.get_trYYT(mu_tilde[:,None])
# do the inference:
het_noise = beta.size > 1
num_inducing = Z.shape[0]
num_data = Y.shape[0]
# kernel computations, using BGPLVM notation
Kmm = kern.K(Z).copy()
diag.add(Kmm, self.const_jitter)
Lm = jitchol(Kmm) Lm = jitchol(Kmm)
Lmi = dtrtrs(Lm,np.eye(Lm.shape[0]))[0]
Kmmi = np.dot(Lmi.T,Lmi) # The rather complex computations of A
KmmiKmn = np.dot(Kmmi,Kmn) if uncertain_inputs:
K = np.dot(Kmn.T,KmmiKmn) if het_noise:
psi2_beta = psi2 * (beta.flatten().reshape(num_data, 1, 1)).sum(0)
else:
psi2_beta = psi2.sum(0) * beta
LmInv = dtrtri(Lm)
A = LmInv.dot(psi2_beta.dot(LmInv.T))
else:
if het_noise:
tmp = psi1 * (np.sqrt(beta.reshape(num_data, 1)))
else:
tmp = psi1 * (np.sqrt(beta))
tmp, _ = dtrtrs(Lm, tmp.T, lower=1)
A = tdot(tmp) #print A.sum()
# factor B
B = np.eye(num_inducing) + A
LB = jitchol(B)
psi1Vf = np.dot(psi1.T, VVT_factor)
# back-substitute C into psi1Vf
tmp, _ = dtrtrs(Lm, psi1Vf, lower=1, trans=0)
_LBi_Lmi_psi1Vf, _ = dtrtrs(LB, tmp, lower=1, trans=0)
tmp, _ = dtrtrs(LB, _LBi_Lmi_psi1Vf, lower=1, trans=1)
Cpsi1Vf, _ = dtrtrs(Lm, tmp, lower=1, trans=1)
# data fit and derivative of L w.r.t. Kmm
delit = tdot(_LBi_Lmi_psi1Vf)
data_fit = np.trace(delit)
DBi_plus_BiPBi = backsub_both_sides(LB, output_dim * np.eye(num_inducing) + delit)
delit = -0.5 * DBi_plus_BiPBi
delit += -0.5 * B * output_dim
delit += output_dim * np.eye(num_inducing)
# Compute dL_dKmm
dL_dKmm = backsub_both_sides(Lm, delit)
# derivatives of L w.r.t. psi
dL_dpsi0, dL_dpsi1, dL_dpsi2 = _compute_dL_dpsi(num_inducing, num_data, output_dim, beta, Lm,
VVT_factor, Cpsi1Vf, DBi_plus_BiPBi,
psi1, het_noise, uncertain_inputs)
# log marginal likelihood
log_marginal = _compute_log_marginal_likelihood(likelihood, num_data, output_dim, beta, het_noise,
psi0, A, LB, trYYT, data_fit, VVT_factor)
#put the gradients in the right places
dL_dR = _compute_dL_dR(likelihood,
het_noise, uncertain_inputs, LB,
_LBi_Lmi_psi1Vf, DBi_plus_BiPBi, Lm, A,
psi0, psi1, beta,
data_fit, num_data, output_dim, trYYT, mu_tilde[:,None])
dL_dthetaL = 0#likelihood.exact_inference_gradients(dL_dR,Y_metadata)
if uncertain_inputs:
grad_dict = {'dL_dKmm': dL_dKmm,
'dL_dpsi0':dL_dpsi0,
'dL_dpsi1':dL_dpsi1,
'dL_dpsi2':dL_dpsi2,
'dL_dthetaL':dL_dthetaL}
else:
grad_dict = {'dL_dKmm': dL_dKmm,
'dL_dKdiag':dL_dpsi0,
'dL_dKnm':dL_dpsi1,
'dL_dthetaL':dL_dthetaL}
#get sufficient things for posterior prediction
#TODO: do we really want to do this in the loop?
if VVT_factor.shape[1] == Y.shape[1]:
woodbury_vector = Cpsi1Vf # == Cpsi1V
else:
print 'foobar'
psi1V = np.dot(mu_tilde[:,None].T*beta, psi1).T
tmp, _ = dtrtrs(Lm, psi1V, lower=1, trans=0)
tmp, _ = dpotrs(LB, tmp, lower=1)
woodbury_vector, _ = dtrtrs(Lm, tmp, lower=1, trans=1)
Bi, _ = dpotri(LB, lower=1)
symmetrify(Bi)
Bi = -dpotri(LB, lower=1)[0]
diag.add(Bi, 1)
woodbury_inv = backsub_both_sides(Lm, Bi)
#construct a posterior object
post = Posterior(woodbury_inv=woodbury_inv, woodbury_vector=woodbury_vector, K=Kmm, mean=None, cov=None, K_chol=Lm)
return post, log_marginal, grad_dict
mu, Sigma, mu_tilde, tau_tilde, Z_hat = self.expectation_propagation(Kmm, Kmn, Y, likelihood, Y_metadata)
Wi, LW, LWi, W_logdet = pdinv(K + np.diag(1./tau_tilde))
alpha, _ = dpotrs(LW, mu_tilde, lower=1)
log_marginal = 0.5*(-num_data * log_2_pi - W_logdet - np.sum(alpha * mu_tilde)) # TODO: add log Z_hat??
dL_dK = 0.5 * (tdot(alpha[:,None]) - Wi)
dL_dthetaL = np.zeros(likelihood.size)#TODO: derivatives of the likelihood parameters
return Posterior(woodbury_inv=Wi, woodbury_vector=alpha, K=K), log_marginal, {'dL_dK':dL_dK, 'dL_dthetaL':dL_dthetaL}
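The chain of dtrtrs calls that produces Cpsi1Vf above applies (Lm LB LB^T Lm^T)^{-1} to psi1Vf using four triangular solves, never forming an explicit inverse. An equivalent sketch with scipy (assuming Lm and LB are lower triangular, as returned by jitchol):

import numpy as np
from scipy.linalg import solve_triangular

def apply_woodbury_inverse(Lm, LB, rhs):
    """Compute (Lm LB LB^T Lm^T)^{-1} rhs via four triangular solves."""
    tmp = solve_triangular(Lm, rhs, lower=True)               # Lm^{-1} rhs
    tmp = solve_triangular(LB, tmp, lower=True)               # LB^{-1} ...
    tmp = solve_triangular(LB, tmp, lower=True, trans='T')    # LB^{-T} ...
    return solve_triangular(Lm, tmp, lower=True, trans='T')   # Lm^{-T} ...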
@ -121,3 +274,69 @@ class EPDTC(EP):
mu_tilde = v_tilde/tau_tilde mu_tilde = v_tilde/tau_tilde
return mu, Sigma, mu_tilde, tau_tilde, Z_hat return mu, Sigma, mu_tilde, tau_tilde, Z_hat
def _compute_dL_dpsi(num_inducing, num_data, output_dim, beta, Lm, VVT_factor, Cpsi1Vf, DBi_plus_BiPBi, psi1, het_noise, uncertain_inputs):
dL_dpsi0 = -0.5 * output_dim * (beta[:,None] * np.ones([num_data, 1])).flatten()
dL_dpsi1 = np.dot(VVT_factor, Cpsi1Vf.T)
dL_dpsi2_beta = 0.5 * backsub_both_sides(Lm, output_dim * np.eye(num_inducing) - DBi_plus_BiPBi)
if het_noise:
if uncertain_inputs:
dL_dpsi2 = beta[:, None, None] * dL_dpsi2_beta[None, :, :]
else:
dL_dpsi1 += 2.*np.dot(dL_dpsi2_beta, (psi1 * beta.reshape(num_data, 1)).T).T
dL_dpsi2 = None
else:
dL_dpsi2 = beta * dL_dpsi2_beta
if uncertain_inputs:
# repeat for each of the N psi_2 matrices
dL_dpsi2 = np.repeat(dL_dpsi2[None, :, :], num_data, axis=0)
else:
# subsume back into psi1 (==Kmn)
dL_dpsi1 += 2.*np.dot(psi1, dL_dpsi2)
dL_dpsi2 = None
return dL_dpsi0, dL_dpsi1, dL_dpsi2
def _compute_dL_dR(likelihood, het_noise, uncertain_inputs, LB, _LBi_Lmi_psi1Vf, DBi_plus_BiPBi, Lm, A, psi0, psi1, beta, data_fit, num_data, output_dim, trYYT, Y):
# the partial derivative vector for the likelihood
if likelihood.size == 0:
# save computation here.
dL_dR = None
elif het_noise:
if uncertain_inputs:
raise NotImplementedError, "heteroscedastic derivatives with uncertain inputs not implemented"
else:
#from ...util.linalg import chol_inv
#LBi = chol_inv(LB)
LBi, _ = dtrtrs(LB,np.eye(LB.shape[0]))
Lmi_psi1, nil = dtrtrs(Lm, psi1.T, lower=1, trans=0)
_LBi_Lmi_psi1, _ = dtrtrs(LB, Lmi_psi1, lower=1, trans=0)
dL_dR = -0.5 * beta + 0.5 * (beta*Y)**2
dL_dR += 0.5 * output_dim * (psi0 - np.sum(Lmi_psi1**2,0))[:,None] * beta**2
dL_dR += 0.5*np.sum(mdot(LBi.T,LBi,Lmi_psi1)*Lmi_psi1,0)[:,None]*beta**2
dL_dR += -np.dot(_LBi_Lmi_psi1Vf.T,_LBi_Lmi_psi1).T * Y * beta**2
dL_dR += 0.5*np.dot(_LBi_Lmi_psi1Vf.T,_LBi_Lmi_psi1).T**2 * beta**2
else:
# likelihood is not heteroscedastic
dL_dR = -0.5 * num_data * output_dim * beta + 0.5 * trYYT * beta ** 2
dL_dR += 0.5 * output_dim * (psi0.sum() * beta ** 2 - np.trace(A) * beta)
dL_dR += beta * (0.5 * np.sum(A * DBi_plus_BiPBi) - data_fit)
return dL_dR
def _compute_log_marginal_likelihood(likelihood, num_data, output_dim, beta, het_noise, psi0, A, LB, trYYT, data_fit,Y):
#compute log marginal likelihood
if het_noise:
lik_1 = -0.5 * num_data * output_dim * np.log(2. * np.pi) + 0.5 * np.sum(np.log(beta)) - 0.5 * np.sum(beta * np.square(Y).sum(axis=-1))
lik_2 = -0.5 * output_dim * (np.sum(beta.flatten() * psi0) - np.trace(A))
else:
lik_1 = -0.5 * num_data * output_dim * (np.log(2. * np.pi) - np.log(beta)) - 0.5 * beta * trYYT
lik_2 = -0.5 * output_dim * (np.sum(beta * psi0) - np.trace(A))
lik_3 = -output_dim * (np.sum(np.log(np.diag(LB))))
lik_4 = 0.5 * data_fit
log_marginal = lik_1 + lik_2 + lik_3 + lik_4
return log_marginal
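Transcribing the homoscedastic branch above into maths (N = num_data, D = output_dim, A and L_B as computed above), the four terms of the bound read:

\log Z \approx -\tfrac{ND}{2}\bigl(\log 2\pi - \log\beta\bigr) - \tfrac{\beta}{2}\,\mathrm{tr}(YY^\top) - \tfrac{D}{2}\bigl(\beta \sum_n \psi_0^{(n)} - \mathrm{tr}\,A\bigr) - D\sum_m \log\,[L_B]_{mm} + \tfrac{1}{2}\,\mathrm{data\_fit}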

View file

@ -5,9 +5,10 @@ from posterior import Posterior
from ...util.linalg import jitchol, tdot, dtrtrs, dpotri, pdinv from ...util.linalg import jitchol, tdot, dtrtrs, dpotri, pdinv
from ...util import diag from ...util import diag
import numpy as np import numpy as np
from . import LatentFunctionInference
log_2_pi = np.log(2*np.pi) log_2_pi = np.log(2*np.pi)
class FITC(object): class FITC(LatentFunctionInference):
""" """
An object for inference when the likelihood is Gaussian, but we want to do sparse inference. An object for inference when the likelihood is Gaussian, but we want to do sparse inference.

View file

@ -16,8 +16,9 @@ from ...util.misc import param_to_array
from posterior import Posterior from posterior import Posterior
import warnings import warnings
from scipy import optimize from scipy import optimize
from . import LatentFunctionInference
class Laplace(object): class Laplace(LatentFunctionInference):
def __init__(self): def __init__(self):
""" """

View file

@ -7,9 +7,10 @@ from ...util import diag
from ...core.parameterization.variational import VariationalPosterior from ...core.parameterization.variational import VariationalPosterior
import numpy as np import numpy as np
from ...util.misc import param_to_array from ...util.misc import param_to_array
from . import LatentFunctionInference
log_2_pi = np.log(2*np.pi) log_2_pi = np.log(2*np.pi)
class VarDTC(object): class VarDTC(LatentFunctionInference):
""" """
An object for inference when the likelihood is Gaussian, but we want to do sparse inference. An object for inference when the likelihood is Gaussian, but we want to do sparse inference.
@ -190,7 +191,7 @@ class VarDTC(object):
post = Posterior(woodbury_inv=woodbury_inv, woodbury_vector=woodbury_vector, K=Kmm, mean=None, cov=None, K_chol=Lm) post = Posterior(woodbury_inv=woodbury_inv, woodbury_vector=woodbury_vector, K=Kmm, mean=None, cov=None, K_chol=Lm)
return post, log_marginal, grad_dict return post, log_marginal, grad_dict
class VarDTCMissingData(object): class VarDTCMissingData(LatentFunctionInference):
const_jitter = 1e-6 const_jitter = 1e-6
def __init__(self, limit=1, inan=None): def __init__(self, limit=1, inan=None):
from ...util.caching import Cacher from ...util.caching import Cacher

View file

@ -7,6 +7,7 @@ from ...util import diag
from ...core.parameterization.variational import VariationalPosterior from ...core.parameterization.variational import VariationalPosterior
import numpy as np import numpy as np
from ...util.misc import param_to_array from ...util.misc import param_to_array
from . import LatentFunctionInference
log_2_pi = np.log(2*np.pi) log_2_pi = np.log(2*np.pi)
from ...util import gpu_init from ...util import gpu_init
@ -19,7 +20,7 @@ try:
except: except:
pass pass
class VarDTC_GPU(object): class VarDTC_GPU(LatentFunctionInference):
""" """
An object for inference when the likelihood is Gaussian, but we want to do sparse inference. An object for inference when the likelihood is Gaussian, but we want to do sparse inference.

View file

@ -7,6 +7,7 @@ from ...util import diag
from ...core.parameterization.variational import VariationalPosterior from ...core.parameterization.variational import VariationalPosterior
import numpy as np import numpy as np
from ...util.misc import param_to_array from ...util.misc import param_to_array
from . import LatentFunctionInference
log_2_pi = np.log(2*np.pi) log_2_pi = np.log(2*np.pi)
try: try:
@ -14,7 +15,7 @@ try:
except: except:
pass pass
class VarDTC_minibatch(object): class VarDTC_minibatch(LatentFunctionInference):
""" """
An object for inference when the likelihood is Gaussian, but we want to do sparse inference. An object for inference when the likelihood is Gaussian, but we want to do sparse inference.

View file

@ -32,7 +32,7 @@ def print_out(len_maxiters, fnow, current_grad, beta, iteration):
sys.stdout.flush() sys.stdout.flush()
def exponents(fnow, current_grad): def exponents(fnow, current_grad):
exps = [np.abs(fnow), current_grad] exps = [np.abs(np.float(fnow)), current_grad]
return np.sign(exps) * np.log10(exps).astype(int) return np.sign(exps) * np.log10(exps).astype(int)
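The added np.float cast guards against fnow arriving as a zero-dimensional or length-1 array rather than a scalar, which would otherwise leak array formatting into the progress display; e.g.:

import numpy as np
fnow = np.array([1e-3])        # objective value wrapped in an array
exp = float(np.abs(fnow))      # cast back to a plain scalar: 0.001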
def SCG(f, gradf, x, optargs=(), maxiters=500, max_f_eval=np.inf, display=True, xtol=None, ftol=None, gtol=None): def SCG(f, gradf, x, optargs=(), maxiters=500, max_f_eval=np.inf, display=True, xtol=None, ftol=None, gtol=None):

View file

@ -3,15 +3,18 @@ from _src.rbf import RBF
from _src.linear import Linear, LinearFull from _src.linear import Linear, LinearFull
from _src.static import Bias, White from _src.static import Bias, White
from _src.brownian import Brownian from _src.brownian import Brownian
from _src.stationary import Exponential, Matern32, Matern52, ExpQuad, RatQuad, Cosine from _src.stationary import Exponential, OU, Matern32, Matern52, ExpQuad, RatQuad, Cosine
from _src.mlp import MLP from _src.mlp import MLP
from _src.periodic import PeriodicExponential, PeriodicMatern32, PeriodicMatern52 from _src.periodic import PeriodicExponential, PeriodicMatern32, PeriodicMatern52
from _src.independent_outputs import IndependentOutputs, Hierarchical from _src.independent_outputs import IndependentOutputs, Hierarchical
from _src.coregionalize import Coregionalize from _src.coregionalize import Coregionalize
from _src.ssrbf import SSRBF # TODO: ZD: did you remove this? from _src.ssrbf import SSRBF # TODO: ZD: did you remove this?
from _src.ODE_UY import ODE_UY from _src.ODE_UY import ODE_UY
#from _src.ODE_UYC import ODE_UYC ADD THIS FILE TO THE REPO!! from _src.ODE_UYC import ODE_UYC
#from _src.ODE_st import ODE_st from _src.ODE_st import ODE_st
from _src.ODE_t import ODE_t
from _src.poly import Poly
# TODO: put this in an init file somewhere # TODO: put this in an init file somewhere
#I'm commenting this out because the files were not added. JH. Remember to add the files before committing #I'm commenting this out because the files were not added. JH. Remember to add the files before committing
try: try:

GPy/kern/_src/ODE_UYC.py Normal file
View file

@ -0,0 +1,290 @@
# Copyright (c) 2013, GPy authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt)
from kern import Kern
from ...core.parameterization import Param
from ...core.parameterization.transformations import Logexp
import numpy as np
from independent_outputs import index_to_slices
class ODE_UYC(Kern):
def __init__(self, input_dim, variance_U=3., variance_Y=1., lengthscale_U=1., lengthscale_Y=1., ubias =1. ,active_dims=None, name='ode_uyc'):
assert input_dim ==2, "only defined for 2 input dims"
super(ODE_UYC, self).__init__(input_dim, active_dims, name)
self.variance_Y = Param('variance_Y', variance_Y, Logexp())
self.variance_U = Param('variance_U', variance_U, Logexp())
self.lengthscale_Y = Param('lengthscale_Y', lengthscale_Y, Logexp())
self.lengthscale_U = Param('lengthscale_U', lengthscale_U, Logexp())
self.ubias = Param('ubias', ubias, Logexp())
self.add_parameters(self.variance_Y, self.variance_U, self.lengthscale_Y, self.lengthscale_U, self.ubias)
def K(self, X, X2=None):
# model : a * dy/dt + b * y = U
#lu=sqrt(3)/theta1 ly=1/theta2 theta2= a/b :thetay sigma2=1/(2ab) :sigmay
X,slices = X[:,:-1],index_to_slices(X[:,-1])
if X2 is None:
X2,slices2 = X,slices
K = np.zeros((X.shape[0], X.shape[0]))
else:
X2,slices2 = X2[:,:-1],index_to_slices(X2[:,-1])
K = np.zeros((X.shape[0], X2.shape[0]))
#stop
#rdist = X[:,0][:,None] - X2[:,0][:,None].T
rdist = X - X2.T
ly=1/self.lengthscale_Y
lu=np.sqrt(3)/self.lengthscale_U
#iu=self.input_lengthU #dimension of U
Vu=self.variance_U
Vy=self.variance_Y
#Vy=ly/2
#stop
# kernel for kuu matern3/2
kuu = lambda dist:Vu * (1 + lu* np.abs(dist)) * np.exp(-lu * np.abs(dist)) +self.ubias
# kernel for kyy
k1 = lambda dist:np.exp(-ly*np.abs(dist))*(2*lu+ly)/(lu+ly)**2
k2 = lambda dist:(np.exp(-lu*dist)*(ly-2*lu+lu*ly*dist-lu**2*dist) + np.exp(-ly*dist)*(2*lu-ly) ) / (ly-lu)**2
k3 = lambda dist:np.exp(-lu*dist) * ( (1+lu*dist)/(lu+ly) + (lu)/(lu+ly)**2 )
kyy = lambda dist:Vu*Vy*(k1(dist) + k2(dist) + k3(dist))
# cross covariance function
kyu3 = lambda dist:np.exp(-lu*dist)/(lu+ly)*(1+lu*(dist+1/(lu+ly)))
#kyu3 = lambda dist: 0
k1cros = lambda dist:np.exp(ly*dist)/(lu-ly) * ( 1- np.exp( (lu-ly)*dist) + lu* ( dist*np.exp( (lu-ly)*dist ) + (1- np.exp( (lu-ly)*dist ) ) /(lu-ly) ) )
#k1cros = lambda dist:0
k2cros = lambda dist:np.exp(ly*dist)*( 1/(lu+ly) + lu/(lu+ly)**2 )
#k2cros = lambda dist:0
Vyu=np.sqrt(Vy*ly*2)
# cross covariance kuy
kuyp = lambda dist:Vu*Vyu*(kyu3(dist)) #t>0 kuy
kuyn = lambda dist:Vu*Vyu*(k1cros(dist)+k2cros(dist)) #t<0 kuy
# cross covariance kyu
kyup = lambda dist:Vu*Vyu*(k1cros(-dist)+k2cros(-dist)) #t>0 kyu
kyun = lambda dist:Vu*Vyu*(kyu3(-dist)) #t<0 kyu
for i, s1 in enumerate(slices):
for j, s2 in enumerate(slices2):
for ss1 in s1:
for ss2 in s2:
if i==0 and j==0:
K[ss1,ss2] = kuu(np.abs(rdist[ss1,ss2]))
elif i==0 and j==1:
#K[ss1,ss2]= np.where( rdist[ss1,ss2]>0 , kuyp(np.abs(rdist[ss1,ss2])), kuyn(np.abs(rdist[ss1,ss2]) ) )
K[ss1,ss2]= np.where( rdist[ss1,ss2]>0 , kuyp(rdist[ss1,ss2]), kuyn(rdist[ss1,ss2] ) )
elif i==1 and j==1:
K[ss1,ss2] = kyy(np.abs(rdist[ss1,ss2]))
else:
#K[ss1,ss2]= 0
#K[ss1,ss2]= np.where( rdist[ss1,ss2]>0 , kyup(np.abs(rdist[ss1,ss2])), kyun(np.abs(rdist[ss1,ss2]) ) )
K[ss1,ss2] = np.where( rdist[ss1,ss2]>0 , kyup(rdist[ss1,ss2]), kyun(rdist[ss1,ss2] ) )
return K
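A hedged usage sketch of the layout K expects: the last column of X is an output index (0 for the driving process U, 1 for the output Y), which index_to_slices turns into per-output row blocks. Hypothetical data; the kernel calls are left commented:

import numpy as np

t = np.linspace(0., 5., 6)[:, None]
X = np.vstack([np.hstack([t, np.zeros_like(t)]),   # rows observed on U
               np.hstack([t, np.ones_like(t)])])   # rows observed on Y
# k = GPy.kern.ODE_UYC(input_dim=2)
# K = k.K(X)    # 12x12 block covariance [[Kuu, Kuy], [Kyu, Kyy]]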
def Kdiag(self, X):
"""Compute the diagonal of the covariance matrix associated to X."""
Kdiag = np.zeros(X.shape[0])
ly=1/self.lengthscale_Y
lu=np.sqrt(3)/self.lengthscale_U
Vu = self.variance_U
Vy=self.variance_Y
k1 = (2*lu+ly)/(lu+ly)**2
k2 = (ly-2*lu + 2*lu-ly ) / (ly-lu)**2
k3 = 1/(lu+ly) + (lu)/(lu+ly)**2
slices = index_to_slices(X[:,-1])
for i, ss1 in enumerate(slices):
for s1 in ss1:
if i==0:
Kdiag[s1]+= self.variance_U + self.ubias
elif i==1:
Kdiag[s1]+= Vu*Vy*(k1+k2+k3)
else:
raise ValueError, "invalid input/output index"
#Kdiag[slices[0][0]]+= self.variance_U #matern32 diag
#Kdiag[slices[1][0]]+= self.variance_U*self.variance_Y*(k1+k2+k3) # diag
return Kdiag
def update_gradients_full(self, dL_dK, X, X2=None):
"""derivative of the covariance matrix with respect to the parameters."""
X,slices = X[:,:-1],index_to_slices(X[:,-1])
if X2 is None:
X2,slices2 = X,slices
else:
X2,slices2 = X2[:,:-1],index_to_slices(X2[:,-1])
#rdist = X[:,0][:,None] - X2[:,0][:,None].T
rdist = X - X2.T
ly=1/self.lengthscale_Y
lu=np.sqrt(3)/self.lengthscale_U
Vu=self.variance_U
Vy=self.variance_Y
Vyu = np.sqrt(Vy*ly*2)
dVdly = 0.5/np.sqrt(ly)*np.sqrt(2*Vy)
dVdVy = 0.5/np.sqrt(Vy)*np.sqrt(2*ly)
rd=rdist.shape[0]
dktheta1 = np.zeros([rd,rd])
dktheta2 = np.zeros([rd,rd])
dkUdvar = np.zeros([rd,rd])
dkYdvar = np.zeros([rd,rd])
dkdubias = np.zeros([rd,rd])
# dk dtheta for UU
UUdtheta1 = lambda dist: np.exp(-lu* dist)*dist + (-dist)*np.exp(-lu* dist)*(1+lu*dist)
UUdtheta2 = lambda dist: 0
#UUdvar = lambda dist: (1 + lu*dist)*np.exp(-lu*dist)
UUdvar = lambda dist: (1 + lu* np.abs(dist)) * np.exp(-lu * np.abs(dist))
# dk dtheta for YY
dk1theta1 = lambda dist: np.exp(-ly*dist)*2*(-lu)/(lu+ly)**3
dk2theta1 = lambda dist: (1.0)*(
np.exp(-lu*dist)*dist*(-ly+2*lu-lu*ly*dist+dist*lu**2)*(ly-lu)**(-2) + np.exp(-lu*dist)*(-2+ly*dist-2*dist*lu)*(ly-lu)**(-2)
+np.exp(-dist*lu)*(ly-2*lu+ly*lu*dist-dist*lu**2)*2*(ly-lu)**(-3)
+np.exp(-dist*ly)*2*(ly-lu)**(-2)
+np.exp(-dist*ly)*2*(2*lu-ly)*(ly-lu)**(-3)
)
dk3theta1 = lambda dist: np.exp(-dist*lu)*(lu+ly)**(-2)*((2*lu+ly+dist*lu**2+lu*ly*dist)*(-dist-2/(lu+ly))+2+2*lu*dist+ly*dist)
#dktheta1 = lambda dist: self.variance_U*self.variance_Y*(dk1theta1+dk2theta1+dk3theta1)
dk1theta2 = lambda dist: np.exp(-ly*dist) * ((lu+ly)**(-2)) * ( (-dist)*(2*lu+ly) + 1 + (-2)*(2*lu+ly)/(lu+ly) )
dk2theta2 =lambda dist: 1*(
np.exp(-dist*lu)*(ly-lu)**(-2) * ( 1+lu*dist+(-2)*(ly-2*lu+lu*ly*dist-dist*lu**2)*(ly-lu)**(-1) )
+np.exp(-dist*ly)*(ly-lu)**(-2) * ( (-dist)*(2*lu-ly) -1+(2*lu-ly)*(-2)*(ly-lu)**(-1) )
)
dk3theta2 = lambda dist: np.exp(-dist*lu) * (-3*lu-ly-dist*lu**2-lu*ly*dist)/(lu+ly)**3
#dktheta2 = lambda dist: self.variance_U*self.variance_Y*(dk1theta2 + dk2theta2 +dk3theta2)
# kyy kernel
k1 = lambda dist: np.exp(-ly*dist)*(2*lu+ly)/(lu+ly)**2
k2 = lambda dist: (np.exp(-lu*dist)*(ly-2*lu+lu*ly*dist-lu**2*dist) + np.exp(-ly*dist)*(2*lu-ly) ) / (ly-lu)**2
k3 = lambda dist: np.exp(-lu*dist) * ( (1+lu*dist)/(lu+ly) + (lu)/(lu+ly)**2 )
#dkdvar = k1+k2+k3
# cross covariance function
kyu3 = lambda dist:np.exp(-lu*dist)/(lu+ly)*(1+lu*(dist+1/(lu+ly)))
k1cros = lambda dist:np.exp(ly*dist)/(lu-ly) * ( 1- np.exp( (lu-ly)*dist) + lu* ( dist*np.exp( (lu-ly)*dist ) + (1- np.exp( (lu-ly)*dist ) ) /(lu-ly) ) )
k2cros = lambda dist:np.exp(ly*dist)*( 1/(lu+ly) + lu/(lu+ly)**2 )
# cross covariance kuy
kuyp = lambda dist:(kyu3(dist)) #t>0 kuy
kuyn = lambda dist:(k1cros(dist)+k2cros(dist)) #t<0 kuy
# cross covariance kyu
kyup = lambda dist:(k1cros(-dist)+k2cros(-dist)) #t>0 kyu
kyun = lambda dist:(kyu3(-dist)) #t<0 kyu
# dk dtheta for UY
dkyu3dtheta2 = lambda dist: np.exp(-lu*dist) * ( (-1)*(lu+ly)**(-2)*(1+lu*dist+lu*(lu+ly)**(-1)) + (lu+ly)**(-1)*(-lu)*(lu+ly)**(-2) )
dkyu3dtheta1 = lambda dist: np.exp(-lu*dist)*(lu+ly)**(-1)* ( (-dist)*(1+dist*lu+lu*(lu+ly)**(-1)) -\
(lu+ly)**(-1)*(1+dist*lu+lu*(lu+ly)**(-1)) +dist+(lu+ly)**(-1)-lu*(lu+ly)**(-2) )
dkcros2dtheta1 = lambda dist: np.exp(ly*dist)* ( -(ly+lu)**(-2) + (ly+lu)**(-2) + (-2)*lu*(lu+ly)**(-3) )
dkcros2dtheta2 = lambda dist: np.exp(ly*dist)*dist* ( (ly+lu)**(-1) + lu*(lu+ly)**(-2) ) + \
np.exp(ly*dist)*( -(lu+ly)**(-2) + lu*(-2)*(lu+ly)**(-3) )
dkcros1dtheta1 = lambda dist: np.exp(ly*dist)*( -(lu-ly)**(-2)*( 1-np.exp((lu-ly)*dist) + lu*dist*np.exp((lu-ly)*dist)+ \
lu*(1-np.exp((lu-ly)*dist))/(lu-ly) ) + (lu-ly)**(-1)*( -np.exp( (lu-ly)*dist )*dist + dist*np.exp( (lu-ly)*dist)+\
lu*dist**2*np.exp((lu-ly)*dist)+(1-np.exp((lu-ly)*dist))/(lu-ly) - lu*np.exp((lu-ly)*dist)*dist/(lu-ly) -\
lu*(1-np.exp((lu-ly)*dist))/(lu-ly)**2 ) )
dkcros1dtheta2 = lambda t: np.exp(ly*t)*t/(lu-ly)*( 1-np.exp((lu-ly)*t) +lu*t*np.exp((lu-ly)*t)+\
lu*(1-np.exp((lu-ly)*t))/(lu-ly) )+\
np.exp(ly*t)/(lu-ly)**2* ( 1-np.exp((lu-ly)*t) +lu*t*np.exp((lu-ly)*t) + lu*( 1-np.exp((lu-ly)*t) )/(lu-ly) )+\
np.exp(ly*t)/(lu-ly)*( np.exp((lu-ly)*t)*t -lu*t*t*np.exp((lu-ly)*t) +lu*t*np.exp((lu-ly)*t)/(lu-ly)+\
lu*( 1-np.exp((lu-ly)*t) )/(lu-ly)**2 )
dkuypdtheta1 = lambda dist:(dkyu3dtheta1(dist)) #t>0 kuy
dkuyndtheta1 = lambda dist:(dkcros1dtheta1(dist)+dkcros2dtheta1(dist)) #t<0 kuy
# cross covariance kyu
dkyupdtheta1 = lambda dist:(dkcros1dtheta1(-dist)+dkcros2dtheta1(-dist)) #t>0 kyu
dkyundtheta1 = lambda dist:(dkyu3dtheta1(-dist)) #t<0 kyu
dkuypdtheta2 = lambda dist:(dkyu3dtheta2(dist)) #t>0 kuy
dkuyndtheta2 = lambda dist:(dkcros1dtheta2(dist)+dkcros2dtheta2(dist)) #t<0 kuy
# cross covariance kyu
dkyupdtheta2 = lambda dist:(dkcros1dtheta2(-dist)+dkcros2dtheta2(-dist)) #t>0 kyu
dkyundtheta2 = lambda dist:(dkyu3dtheta2(-dist)) #t<0 kyu
for i, s1 in enumerate(slices):
for j, s2 in enumerate(slices2):
for ss1 in s1:
for ss2 in s2:
if i==0 and j==0:
#target[ss1,ss2] = kuu(np.abs(rdist[ss1,ss2]))
dktheta1[ss1,ss2] = Vu*UUdtheta1(np.abs(rdist[ss1,ss2]))
dktheta2[ss1,ss2] = 0
dkUdvar[ss1,ss2] = UUdvar(np.abs(rdist[ss1,ss2]))
dkYdvar[ss1,ss2] = 0
dkdubias[ss1,ss2] = 1
elif i==0 and j==1:
########target[ss1,ss2] = np.where( rdist[ss1,ss2]>0 , kuyp(np.abs(rdist[ss1,ss2])), kuyn(np.abs(rdist[s1[0],s2[0]]) ) )
#np.where( rdist[ss1,ss2]>0 , kuyp(np.abs(rdist[ss1,ss2])), kuyn(np.abs(rdist[s1[0],s2[0]]) ) )
#dktheta1[ss1,ss2] = np.where( rdist[ss1,ss2]>0 , self.variance_U*self.variance_Y*dkcrtheta1(np.abs(rdist[ss1,ss2])) ,self.variance_U*self.variance_Y*(dk1theta1(np.abs(rdist[ss1,ss2]))+dk2theta1(np.abs(rdist[ss1,ss2]))) )
#dktheta2[ss1,ss2] = np.where( rdist[ss1,ss2]>0 , self.variance_U*self.variance_Y*dkcrtheta2(np.abs(rdist[ss1,ss2])) ,self.variance_U*self.variance_Y*(dk1theta2(np.abs(rdist[ss1,ss2]))+dk2theta2(np.abs(rdist[ss1,ss2]))) )
dktheta1[ss1,ss2] = np.where( rdist[ss1,ss2]>0 , Vu*Vyu*dkuypdtheta1(rdist[ss1,ss2]),Vu*Vyu*dkuyndtheta1(rdist[ss1,ss2]) )
dkUdvar[ss1,ss2] = np.where( rdist[ss1,ss2]>0 , Vyu*kuyp(rdist[ss1,ss2]), Vyu* kuyn(rdist[ss1,ss2]) )
dktheta2[ss1,ss2] = np.where( rdist[ss1,ss2]>0 , Vu*Vyu*dkuypdtheta2(rdist[ss1,ss2])+Vu*dVdly*kuyp(rdist[ss1,ss2]),Vu*Vyu*dkuyndtheta2(rdist[ss1,ss2])+Vu*dVdly*kuyn(rdist[ss1,ss2]) )
dkYdvar[ss1,ss2] = np.where( rdist[ss1,ss2]>0 , Vu*dVdVy*kuyp(rdist[ss1,ss2]), Vu*dVdVy* kuyn(rdist[ss1,ss2]) )
dkdubias[ss1,ss2] = 0
elif i==1 and j==1:
#target[ss1,ss2] = kyy(np.abs(rdist[ss1,ss2]))
dktheta1[ss1,ss2] = self.variance_U*self.variance_Y*(dk1theta1(np.abs(rdist[ss1,ss2]))+dk2theta1(np.abs(rdist[ss1,ss2]))+dk3theta1(np.abs(rdist[ss1,ss2])))
dktheta2[ss1,ss2] = self.variance_U*self.variance_Y*(dk1theta2(np.abs(rdist[ss1,ss2])) + dk2theta2(np.abs(rdist[ss1,ss2])) +dk3theta2(np.abs(rdist[ss1,ss2])))
dkUdvar[ss1,ss2] = self.variance_Y*(k1(np.abs(rdist[ss1,ss2]))+k2(np.abs(rdist[ss1,ss2]))+k3(np.abs(rdist[ss1,ss2])) )
dkYdvar[ss1,ss2] = self.variance_U*(k1(np.abs(rdist[ss1,ss2]))+k2(np.abs(rdist[ss1,ss2]))+k3(np.abs(rdist[ss1,ss2])) )
dkdubias[ss1,ss2] = 0
else:
#######target[ss1,ss2] = np.where( rdist[ss1,ss2]>0 , kyup(np.abs(rdist[ss1,ss2])), kyun(np.abs(rdist[s1[0],s2[0]]) ) )
#dktheta1[ss1,ss2] = np.where( rdist[ss1,ss2]>0 ,self.variance_U*self.variance_Y*(dk1theta1(np.abs(rdist[ss1,ss2]))+dk2theta1(np.abs(rdist[ss1,ss2]))) , self.variance_U*self.variance_Y*dkcrtheta1(np.abs(rdist[ss1,ss2])) )
#dktheta2[ss1,ss2] = np.where( rdist[ss1,ss2]>0 ,self.variance_U*self.variance_Y*(dk1theta2(np.abs(rdist[ss1,ss2]))+dk2theta2(np.abs(rdist[ss1,ss2]))) , self.variance_U*self.variance_Y*dkcrtheta2(np.abs(rdist[ss1,ss2])) )
dktheta1[ss1,ss2] = np.where( rdist[ss1,ss2]>0 , Vu*Vyu*dkyupdtheta1(rdist[ss1,ss2]),Vu*Vyu*dkyundtheta1(rdist[ss1,ss2]) )
dkUdvar[ss1,ss2] = np.where( rdist[ss1,ss2]>0 , Vyu*kyup(rdist[ss1,ss2]),Vyu*kyun(rdist[ss1,ss2]))
dktheta2[ss1,ss2] = np.where( rdist[ss1,ss2]>0 , Vu*Vyu*dkyupdtheta2(rdist[ss1,ss2])+Vu*dVdly*kyup(rdist[ss1,ss2]),Vu*Vyu*dkyundtheta2(rdist[ss1,ss2])+Vu*dVdly*kyun(rdist[ss1,ss2]) )
dkYdvar[ss1,ss2] = np.where( rdist[ss1,ss2]>0 , Vu*dVdVy*kyup(rdist[ss1,ss2]), Vu*dVdVy*kyun(rdist[ss1,ss2]))
dkdubias[ss1,ss2] = 0
#stop
self.variance_U.gradient = np.sum(dkUdvar * dL_dK) # Vu
self.variance_Y.gradient = np.sum(dkYdvar * dL_dK) # Vy
self.lengthscale_U.gradient = np.sum(dktheta1*(-np.sqrt(3)*self.lengthscale_U**(-2))* dL_dK) #lu
self.lengthscale_Y.gradient = np.sum(dktheta2*(-self.lengthscale_Y**(-2)) * dL_dK) #ly
self.ubias.gradient = np.sum(dkdubias * dL_dK)
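The two lengthscale gradients above are a chain rule through the reparameterization used throughout this kernel, l_u = \sqrt{3}/\ell_U and l_y = 1/\ell_Y:

\frac{\partial K}{\partial \ell_U} = \frac{\partial K}{\partial l_u}\cdot\Bigl(-\frac{\sqrt{3}}{\ell_U^{2}}\Bigr), \qquad \frac{\partial K}{\partial \ell_Y} = \frac{\partial K}{\partial l_y}\cdot\Bigl(-\frac{1}{\ell_Y^{2}}\Bigr)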

GPy/kern/_src/ODE_st.py Normal file
View file

@ -0,0 +1,267 @@
# Copyright (c) 2012, GPy authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt)
from kern import Kern
from ...core.parameterization import Param
from ...core.parameterization.transformations import Logexp
import numpy as np
from independent_outputs import index_to_slices
class ODE_st(Kern):
"""
kernel resulting from a first-order ODE with an OU driving GP
:param input_dim: the number of input dimensions, has to be equal to three
:type input_dim: int
:param varianceU: variance of the driving GP
:type varianceU: float
:param lengthscaleU: lengthscale of the driving GP (sqrt(3)/lengthscaleU)
:type lengthscaleU: float
:param varianceY: 'variance' of the transfer function
:type varianceY: float
:param lengthscaleY: 'lengthscale' of the transfer function (1/lengthscaleY)
:type lengthscaleY: float
:rtype: kernel object
"""
def __init__(self, input_dim, a=1.,b=1., c=1.,variance_Yx=3.,variance_Yt=1.5, lengthscale_Yx=1.5, lengthscale_Yt=1.5, active_dims=None, name='ode_st'):
assert input_dim ==3, "only defined for 3 input dims"
super(ODE_st, self).__init__(input_dim, active_dims, name)
self.variance_Yt = Param('variance_Yt', variance_Yt, Logexp())
self.variance_Yx = Param('variance_Yx', variance_Yx, Logexp())
self.lengthscale_Yt = Param('lengthscale_Yt', lengthscale_Yt, Logexp())
self.lengthscale_Yx = Param('lengthscale_Yx', lengthscale_Yx, Logexp())
self.a= Param('a', a, Logexp())
self.b = Param('b', b, Logexp())
self.c = Param('c', c, Logexp())
self.add_parameters(self.a, self.b, self.c, self.variance_Yt, self.variance_Yx, self.lengthscale_Yt,self.lengthscale_Yx)
def K(self, X, X2=None):
# model : -a d^2y/dx^2 + b dy/dt + c * y = U
# kernel Kyy rbf spatial temporal
# vyt Y temporal variance vyx Y spatial variance lyt Y temporal lengthscale lyx Y spatial lengthscale
# kernel Kuu doper( doper(Kyy))
# a b c lyt lyx vyx*vyt
"""Compute the covariance matrix between X and X2."""
X,slices = X[:,:-1],index_to_slices(X[:,-1])
if X2 is None:
X2,slices2 = X,slices
K = np.zeros((X.shape[0], X.shape[0]))
else:
X2,slices2 = X2[:,:-1],index_to_slices(X2[:,-1])
K = np.zeros((X.shape[0], X2.shape[0]))
tdist = (X[:,0][:,None] - X2[:,0][None,:])**2
xdist = (X[:,1][:,None] - X2[:,1][None,:])**2
ttdist = (X[:,0][:,None] - X2[:,0][None,:])
#rdist = [tdist,xdist]
#dist = np.abs(X - X2.T)
vyt = self.variance_Yt
vyx = self.variance_Yx
lyt=1/(2*self.lengthscale_Yt)
lyx=1/(2*self.lengthscale_Yx)
a = self.a ## -a is used in the model, negative diffusion
b = self.b
c = self.c
kyy = lambda tdist,xdist: np.exp(-lyt*(tdist) -lyx*(xdist))
k1 = lambda tdist: (2*lyt - 4*lyt**2 * (tdist) )
k2 = lambda xdist: ( 4*lyx**2 * (xdist) - 2*lyx )
k3 = lambda xdist: ( 3*4*lyx**2 - 6*8*xdist*lyx**3 + 16*xdist**2*lyx**4 )
k4 = lambda ttdist: 2*lyt*(ttdist)
for i, s1 in enumerate(slices):
for j, s2 in enumerate(slices2):
for ss1 in s1:
for ss2 in s2:
if i==0 and j==0:
K[ss1,ss2] = vyt*vyx*kyy(tdist[ss1,ss2],xdist[ss1,ss2])
elif i==0 and j==1:
K[ss1,ss2] = (-a*k2(xdist[ss1,ss2]) + b*k4(ttdist[ss1,ss2]) + c)*vyt*vyx*kyy(tdist[ss1,ss2],xdist[ss1,ss2])
#K[ss1,ss2]= np.where( rdist[ss1,ss2]>0 , kuyp(np.abs(rdist[ss1,ss2])), kuyn(np.abs(rdist[ss1,ss2]) ) )
#K[ss1,ss2]= np.where( rdist[ss1,ss2]>0 , kuyp(rdist[ss1,ss2]), kuyn(rdist[ss1,ss2] ) )
elif i==1 and j==1:
K[ss1,ss2] = ( b**2*k1(tdist[ss1,ss2]) - 2*a*c*k2(xdist[ss1,ss2]) + a**2*k3(xdist[ss1,ss2]) + c**2 )* vyt*vyx* kyy(tdist[ss1,ss2],xdist[ss1,ss2])
else:
K[ss1,ss2] = (-a*k2(xdist[ss1,ss2]) - b*k4(ttdist[ss1,ss2]) + c)*vyt*vyx*kyy(tdist[ss1,ss2],xdist[ss1,ss2])
#K[ss1,ss2]= np.where( rdist[ss1,ss2]>0 , kyup(np.abs(rdist[ss1,ss2])), kyun(np.abs(rdist[ss1,ss2]) ) )
#K[ss1,ss2] = np.where( rdist[ss1,ss2]>0 , kyup(rdist[ss1,ss2]), kyun(rdist[ss1,ss2] ) )
#stop
return K
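In operator form, the blocks assembled above come from pushing \mathcal{L} = -a\,\partial_{xx} + b\,\partial_t + c through the separable RBF prior on Y. With the k1 to k4 shorthands defined above (a transcription of the code, not a new derivation):

K_{yy} = v\,k_{yy}, \qquad K_{yu} = \mathcal{L}' k_{yy} = (-a k_2 + b k_4 + c)\,v\,k_{yy}, \qquad K_{uu} = \mathcal{L}\mathcal{L}' k_{yy} = (b^2 k_1 - 2ac\,k_2 + a^2 k_3 + c^2)\,v\,k_{yy},

with v = v_{yt} v_{yx}; the sign of the b k_4 term flips between K_{yu} and K_{uy} because k_4 is odd in t - t'.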
def Kdiag(self, X):
"""Compute the diagonal of the covariance matrix associated to X."""
vyt = self.variance_Yt
vyx = self.variance_Yx
lyt = 1./(2*self.lengthscale_Yt)
lyx = 1./(2*self.lengthscale_Yx)
a = self.a
b = self.b
c = self.c
## dk^2/dtdt'
k1 = (2*lyt )*vyt*vyx
## dk^2/dx^2
k2 = ( - 2*lyx )*vyt*vyx
## dk^4/dx^2dx'^2
k3 = ( 4*3*lyx**2 )*vyt*vyx
Kdiag = np.zeros(X.shape[0])
slices = index_to_slices(X[:,-1])
for i, ss1 in enumerate(slices):
for s1 in ss1:
if i==0:
Kdiag[s1]+= vyt*vyx
elif i==1:
#i=1
Kdiag[s1]+= b**2*k1 - 2*a*c*k2 + a**2*k3 + c**2*vyt*vyx
#Kdiag[s1]+= Vu*Vy*(k1+k2+k3)
else:
raise ValueError, "invalid input/output index"
return Kdiag
def update_gradients_full(self, dL_dK, X, X2=None):
#def dK_dtheta(self, dL_dK, X, X2, target):
"""derivative of the covariance matrix with respect to the parameters."""
X,slices = X[:,:-1],index_to_slices(X[:,-1])
if X2 is None:
X2,slices2 = X,slices
K = np.zeros((X.shape[0], X.shape[0]))
else:
X2,slices2 = X2[:,:-1],index_to_slices(X2[:,-1])
vyt = self.variance_Yt
vyx = self.variance_Yx
lyt = 1./(2*self.lengthscale_Yt)
lyx = 1./(2*self.lengthscale_Yx)
a = self.a
b = self.b
c = self.c
tdist = (X[:,0][:,None] - X2[:,0][None,:])**2
xdist = (X[:,1][:,None] - X2[:,1][None,:])**2
#rdist = [tdist,xdist]
ttdist = (X[:,0][:,None] - X2[:,0][None,:])
rd=tdist.shape[0]
dka = np.zeros([rd,rd])
dkb = np.zeros([rd,rd])
dkc = np.zeros([rd,rd])
dkYdvart = np.zeros([rd,rd])
dkYdvarx = np.zeros([rd,rd])
dkYdlent = np.zeros([rd,rd])
dkYdlenx = np.zeros([rd,rd])
kyy = lambda tdist,xdist: np.exp(-lyt*(tdist) -lyx*(xdist))
#k1 = lambda tdist: (lyt - lyt**2 * (tdist) )
#k2 = lambda xdist: ( lyx**2 * (xdist) - lyx )
#k3 = lambda xdist: ( 3*lyx**2 - 6*xdist*lyx**3 + xdist**2*lyx**4 )
#k4 = lambda tdist: -lyt*np.sqrt(tdist)
k1 = lambda tdist: (2*lyt - 4*lyt**2 * (tdist) )
k2 = lambda xdist: ( 4*lyx**2 * (xdist) - 2*lyx )
k3 = lambda xdist: ( 3*4*lyx**2 - 6*8*xdist*lyx**3 + 16*xdist**2*lyx**4 )
k4 = lambda ttdist: 2*lyt*(ttdist)
dkyydlyx = lambda tdist,xdist: kyy(tdist,xdist)*(-xdist)
dkyydlyt = lambda tdist,xdist: kyy(tdist,xdist)*(-tdist)
dk1dlyt = lambda tdist: 2. - 4*2.*lyt*tdist
dk2dlyx = lambda xdist: (4.*2.*lyx*xdist -2.)
dk3dlyx = lambda xdist: (6.*4.*lyx - 18.*8*xdist*lyx**2 + 4*16*xdist**2*lyx**3)
dk4dlyt = lambda ttdist: 2*(ttdist)
for i, s1 in enumerate(slices):
for j, s2 in enumerate(slices2):
for ss1 in s1:
for ss2 in s2:
if i==0 and j==0:
dka[ss1,ss2] = 0
dkb[ss1,ss2] = 0
dkc[ss1,ss2] = 0
dkYdvart[ss1,ss2] = vyx*kyy(tdist[ss1,ss2],xdist[ss1,ss2])
dkYdvarx[ss1,ss2] = vyt*kyy(tdist[ss1,ss2],xdist[ss1,ss2])
dkYdlenx[ss1,ss2] = vyt*vyx*dkyydlyx(tdist[ss1,ss2],xdist[ss1,ss2])
dkYdlent[ss1,ss2] = vyt*vyx*dkyydlyt(tdist[ss1,ss2],xdist[ss1,ss2])
elif i==0 and j==1:
dka[ss1,ss2] = -k2(xdist[ss1,ss2])*vyt*vyx*kyy(tdist[ss1,ss2],xdist[ss1,ss2])
dkb[ss1,ss2] = k4(ttdist[ss1,ss2])*vyt*vyx*kyy(tdist[ss1,ss2],xdist[ss1,ss2])
dkc[ss1,ss2] = vyt*vyx*kyy(tdist[ss1,ss2],xdist[ss1,ss2])
#dkYdvart[ss1,ss2] = 0
#dkYdvarx[ss1,ss2] = 0
#dkYdlent[ss1,ss2] = 0
#dkYdlenx[ss1,ss2] = 0
dkYdvart[ss1,ss2] = (-a*k2(xdist[ss1,ss2])+b*k4(ttdist[ss1,ss2])+c)*vyx*kyy(tdist[ss1,ss2],xdist[ss1,ss2])
dkYdvarx[ss1,ss2] = (-a*k2(xdist[ss1,ss2])+b*k4(ttdist[ss1,ss2])+c)*vyt*kyy(tdist[ss1,ss2],xdist[ss1,ss2])
dkYdlent[ss1,ss2] = vyt*vyx*dkyydlyt(tdist[ss1,ss2],xdist[ss1,ss2])* (-a*k2(xdist[ss1,ss2])+b*k4(ttdist[ss1,ss2])+c)+\
vyt*vyx*kyy(tdist[ss1,ss2],xdist[ss1,ss2])*b*dk4dlyt(ttdist[ss1,ss2])
dkYdlenx[ss1,ss2] = vyt*vyx*dkyydlyx(tdist[ss1,ss2],xdist[ss1,ss2])*(-a*k2(xdist[ss1,ss2])+b*k4(ttdist[ss1,ss2])+c)+\
vyt*vyx*kyy(tdist[ss1,ss2],xdist[ss1,ss2])*(-a*dk2dlyx(xdist[ss1,ss2]))
elif i==1 and j==1:
dka[ss1,ss2] = (2*a*k3(xdist[ss1,ss2]) - 2*c*k2(xdist[ss1,ss2]))*vyt*vyx* kyy(tdist[ss1,ss2],xdist[ss1,ss2])
dkb[ss1,ss2] = 2*b*k1(tdist[ss1,ss2])*vyt*vyx* kyy(tdist[ss1,ss2],xdist[ss1,ss2])
dkc[ss1,ss2] = (-2*a*k2(xdist[ss1,ss2]) + 2*c )*vyt*vyx* kyy(tdist[ss1,ss2],xdist[ss1,ss2])
dkYdvart[ss1,ss2] = ( b**2*k1(tdist[ss1,ss2]) - 2*a*c*k2(xdist[ss1,ss2]) + a**2*k3(xdist[ss1,ss2]) + c**2 )*vyx* kyy(tdist[ss1,ss2],xdist[ss1,ss2])
dkYdvarx[ss1,ss2] = ( b**2*k1(tdist[ss1,ss2]) - 2*a*c*k2(xdist[ss1,ss2]) + a**2*k3(xdist[ss1,ss2]) + c**2 )*vyt* kyy(tdist[ss1,ss2],xdist[ss1,ss2])
dkYdlent[ss1,ss2] = vyt*vyx*dkyydlyt(tdist[ss1,ss2],xdist[ss1,ss2])*( b**2*k1(tdist[ss1,ss2]) - 2*a*c*k2(xdist[ss1,ss2]) + a**2*k3(xdist[ss1,ss2]) + c**2 ) +\
vyx*vyt*kyy(tdist[ss1,ss2],xdist[ss1,ss2])*b**2*dk1dlyt(tdist[ss1,ss2])
dkYdlenx[ss1,ss2] = vyt*vyx*dkyydlyx(tdist[ss1,ss2],xdist[ss1,ss2])*( b**2*k1(tdist[ss1,ss2]) - 2*a*c*k2(xdist[ss1,ss2]) + a**2*k3(xdist[ss1,ss2]) + c**2 ) +\
vyx*vyt*kyy(tdist[ss1,ss2],xdist[ss1,ss2])* (-2*a*c*dk2dlyx(xdist[ss1,ss2]) + a**2*dk3dlyx(xdist[ss1,ss2]) )
else:
dka[ss1,ss2] = -k2(xdist[ss1,ss2])*vyt*vyx*kyy(tdist[ss1,ss2],xdist[ss1,ss2])
dkb[ss1,ss2] = -k4(ttdist[ss1,ss2])*vyt*vyx*kyy(tdist[ss1,ss2],xdist[ss1,ss2])
dkc[ss1,ss2] = vyt*vyx*kyy(tdist[ss1,ss2],xdist[ss1,ss2])
dkYdvart[ss1,ss2] = (-a*k2(xdist[ss1,ss2])-b*k4(ttdist[ss1,ss2])+c)*vyx*kyy(tdist[ss1,ss2],xdist[ss1,ss2])
dkYdvarx[ss1,ss2] = (-a*k2(xdist[ss1,ss2])-b*k4(ttdist[ss1,ss2])+c)*vyt*kyy(tdist[ss1,ss2],xdist[ss1,ss2])
dkYdlent[ss1,ss2] = vyt*vyx*dkyydlyt(tdist[ss1,ss2],xdist[ss1,ss2])* (-a*k2(xdist[ss1,ss2])-b*k4(ttdist[ss1,ss2])+c)+\
vyt*vyx*kyy(tdist[ss1,ss2],xdist[ss1,ss2])*(-1)*b*dk4dlyt(ttdist[ss1,ss2])
dkYdlenx[ss1,ss2] = vyt*vyx*dkyydlyx(tdist[ss1,ss2],xdist[ss1,ss2])*(-a*k2(xdist[ss1,ss2])-b*k4(ttdist[ss1,ss2])+c)+\
vyt*vyx*kyy(tdist[ss1,ss2],xdist[ss1,ss2])*(-a*dk2dlyx(xdist[ss1,ss2]))
self.a.gradient = np.sum(dka * dL_dK)
self.b.gradient = np.sum(dkb * dL_dK)
self.c.gradient = np.sum(dkc * dL_dK)
self.variance_Yt.gradient = np.sum(dkYdvart * dL_dK)
self.variance_Yx.gradient = np.sum(dkYdvarx * dL_dK)
# chain rule: dkYdlent/dkYdlenx are gradients w.r.t. lyt/lyx = 1/(2*lengthscale),
# hence the extra -0.5*lengthscale**(-2) factor
self.lengthscale_Yt.gradient = np.sum(dkYdlent*(-0.5*self.lengthscale_Yt**(-2)) * dL_dK)
self.lengthscale_Yx.gradient = np.sum(dkYdlenx*(-0.5*self.lengthscale_Yx**(-2)) * dL_dK)
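Both ODE kernels in this commit split their stacked inputs with index_to_slices (defined in independent_outputs.py, which also appears further down in this diff). A minimal sketch of the assumed behaviour, where each contiguous run of equal output indices becomes a slice:

import numpy as np
from GPy.kern._src.independent_outputs import index_to_slices

idx = np.array([0, 0, 0, 1, 1, 0, 2])
print index_to_slices(idx)
# expected: [[slice(0, 3), slice(5, 6)], [slice(3, 5)], [slice(6, 7)]]
# i.e. one list of slices per output index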

165
GPy/kern/_src/ODE_t.py Normal file
View file

@ -0,0 +1,165 @@
from kern import Kern
from ...core.parameterization import Param
from ...core.parameterization.transformations import Logexp
import numpy as np
from independent_outputs import index_to_slices
class ODE_t(Kern):
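"""
A hedged reading of the blocks built in K below: covariance for a first-order
ODE model on the time axis, Y(t) = dU/dt + U for a latent U with an RBF
covariance, plus a constant bias on the Y-Y block. The last column of X is
the integer output index (0: U, 1: Y). Note the parameters a and c are
registered but not yet used in K.
"""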
def __init__(self, input_dim, a=1., c=1., variance_Yt=3., lengthscale_Yt=1.5, ubias=1., active_dims=None, name='ode_t'):
assert input_dim ==2, "only defined for 2 input dims"
super(ODE_t, self).__init__(input_dim, active_dims, name)
self.variance_Yt = Param('variance_Yt', variance_Yt, Logexp())
self.lengthscale_Yt = Param('lengthscale_Yt', lengthscale_Yt, Logexp())
self.a= Param('a', a, Logexp())
self.c = Param('c', c, Logexp())
self.ubias = Param('ubias', ubias, Logexp())
self.add_parameters(self.a, self.c, self.variance_Yt, self.lengthscale_Yt,self.ubias)
def K(self, X, X2=None):
"""Compute the covariance matrix between X and X2."""
X,slices = X[:,:-1],index_to_slices(X[:,-1])
if X2 is None:
X2,slices2 = X,slices
K = np.zeros((X.shape[0], X.shape[0]))
else:
X2,slices2 = X2[:,:-1],index_to_slices(X2[:,-1])
K = np.zeros((X.shape[0], X2.shape[0]))
tdist = (X[:,0][:,None] - X2[:,0][None,:])**2
ttdist = (X[:,0][:,None] - X2[:,0][None,:])
vyt = self.variance_Yt
lyt = 1./(2*self.lengthscale_Yt)
kyy = lambda tdist: np.exp(-lyt*(tdist))
k1 = lambda tdist: (2*lyt - 4*lyt**2 *(tdist) )
k4 = lambda tdist: 2*lyt*(tdist)
for i, s1 in enumerate(slices):
for j, s2 in enumerate(slices2):
for ss1 in s1:
for ss2 in s2:
if i==0 and j==0:
K[ss1,ss2] = vyt*kyy(tdist[ss1,ss2])
elif i==0 and j==1:
K[ss1,ss2] = (k4(ttdist[ss1,ss2])+1)*vyt*kyy(tdist[ss1,ss2])
elif i==1 and j==1:
K[ss1,ss2] = ( k1(tdist[ss1,ss2]) + 1. )*vyt* kyy(tdist[ss1,ss2])+self.ubias
else:
K[ss1,ss2] = (-k4(ttdist[ss1,ss2])+1)*vyt*kyy(tdist[ss1,ss2])
return K
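A hedged reading of the four blocks above, writing k(t,t') = exp(-(t-t')**2/(2*lengthscale_Yt)) and sigma_t**2 = variance_Yt, so that k1*kyy and k4*kyy are exactly the derivatives of k:

.. math::
    K_{UU} = \sigma_t^2\,k, \qquad
    K_{UY} = \sigma_t^2\Big(\frac{\partial}{\partial t'} + 1\Big)k, \qquad
    K_{YY} = \sigma_t^2\Big(\frac{\partial^2}{\partial t\,\partial t'} + 1\Big)k + \text{ubias}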
def Kdiag(self, X):
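"""Compute the diagonal of the covariance matrix associated to X."""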
vyt = self.variance_Yt
lyt = 1./(2*self.lengthscale_Yt)
k1 = (2*lyt )*vyt
Kdiag = np.zeros(X.shape[0])
slices = index_to_slices(X[:,-1])
for i, ss1 in enumerate(slices):
for s1 in ss1:
if i==0:
Kdiag[s1]+= vyt
elif i==1:
Kdiag[s1]+= k1 + vyt+self.ubias
else:
raise ValueError, "invalid input/output index"
return Kdiag
def update_gradients_full(self, dL_dK, X, X2=None):
"""derivative of the covariance matrix with respect to the parameters."""
X,slices = X[:,:-1],index_to_slices(X[:,-1])
if X2 is None:
X2,slices2 = X,slices
else:
X2,slices2 = X2[:,:-1],index_to_slices(X2[:,-1])
vyt = self.variance_Yt
lyt = 1./(2*self.lengthscale_Yt)
tdist = (X[:,0][:,None] - X2[:,0][None,:])**2
ttdist = (X[:,0][:,None] - X2[:,0][None,:])
rd = tdist.shape  # (N1, N2): the gradient buffers must match K's shape, also when X2 is not X
dka = np.zeros(rd)
dkc = np.zeros(rd)
dkYdvart = np.zeros(rd)
dkYdlent = np.zeros(rd)
dkdubias = np.zeros(rd)
kyy = lambda tdist: np.exp(-lyt*(tdist))
dkyydlyt = lambda tdist: kyy(tdist)*(-tdist)
k1 = lambda tdist: (2*lyt - 4*lyt**2 * (tdist) )
k4 = lambda ttdist: 2*lyt*(ttdist)
dk1dlyt = lambda tdist: 2. - 4*2.*lyt*tdist
dk4dlyt = lambda ttdist: 2*(ttdist)
for i, s1 in enumerate(slices):
for j, s2 in enumerate(slices2):
for ss1 in s1:
for ss2 in s2:
if i==0 and j==0:
dkYdvart[ss1,ss2] = kyy(tdist[ss1,ss2])
dkYdlent[ss1,ss2] = vyt*dkyydlyt(tdist[ss1,ss2])
dkdubias[ss1,ss2] = 0
elif i==0 and j==1:
dkYdvart[ss1,ss2] = (k4(ttdist[ss1,ss2])+1)*kyy(tdist[ss1,ss2])
dkYdlent[ss1,ss2] = vyt*dkyydlyt(tdist[ss1,ss2])* (k4(ttdist[ss1,ss2])+1.)+\
vyt*kyy(tdist[ss1,ss2])*(dk4dlyt(ttdist[ss1,ss2]))
dkdubias[ss1,ss2] = 0
elif i==1 and j==1:
dkYdvart[ss1,ss2] = (k1(tdist[ss1,ss2]) + 1. )* kyy(tdist[ss1,ss2])
dkYdlent[ss1,ss2] = vyt*dkyydlyt(tdist[ss1,ss2])*( k1(tdist[ss1,ss2]) + 1. ) +\
vyt*kyy(tdist[ss1,ss2])*dk1dlyt(tdist[ss1,ss2])
dkdubias[ss1,ss2] = 1
else:
dkYdvart[ss1,ss2] = (-k4(ttdist[ss1,ss2])+1)*kyy(tdist[ss1,ss2])
dkYdlent[ss1,ss2] = vyt*dkyydlyt(tdist[ss1,ss2])* (-k4(ttdist[ss1,ss2])+1.)+\
vyt*kyy(tdist[ss1,ss2])*(-dk4dlyt(ttdist[ss1,ss2]) )
dkdubias[ss1,ss2] = 0
self.variance_Yt.gradient = np.sum(dkYdvart * dL_dK)
self.lengthscale_Yt.gradient = np.sum(dkYdlent*(-0.5*self.lengthscale_Yt**(-2)) * dL_dK)
self.ubias.gradient = np.sum(dkdubias * dL_dK)
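A usage sketch for the kernel above. The export name GPy.kern.ODE_t is an assumption; the last input column is the 0/1 output index:

import numpy as np
import GPy

t = np.linspace(0, 5, 10)[:, None]
Xu = np.hstack([t, np.zeros_like(t)])  # block 0: latent U
Xy = np.hstack([t, np.ones_like(t)])   # block 1: output Y
X = np.vstack([Xu, Xy])
k = GPy.kern.ODE_t(input_dim=2)
K = k.K(X)  # 2x2 block covariance over (U, Y)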

View file

@ -141,10 +141,10 @@ class Add(CombinationKernel):
from static import White, Bias from static import White, Bias
target_mu = np.zeros(variational_posterior.shape) target_mu = np.zeros(variational_posterior.shape)
target_S = np.zeros(variational_posterior.shape) target_S = np.zeros(variational_posterior.shape)
for p1 in self._parameters_: for p1 in self.parameters:
#compute the effective dL_dpsi1. extra terms appear because of the cross terms in psi2! #compute the effective dL_dpsi1. extra terms appear because of the cross terms in psi2!
eff_dL_dpsi1 = dL_dpsi1.copy() eff_dL_dpsi1 = dL_dpsi1.copy()
for p2 in self._parameters_: for p2 in self.parameters:
if p2 is p1: if p2 is p1:
continue continue
if isinstance(p2, White): if isinstance(p2, White):
@ -160,7 +160,7 @@ class Add(CombinationKernel):
def add(self, other, name='sum'): def add(self, other, name='sum'):
if isinstance(other, Add): if isinstance(other, Add):
other_params = other._parameters_[:] other_params = other.parameters[:]
for p in other_params: for p in other_params:
other.remove_parameter(p) other.remove_parameter(p)
self.add_parameters(*other_params) self.add_parameters(*other_params)
@ -170,7 +170,4 @@ class Add(CombinationKernel):
return self return self
def input_sensitivity(self): def input_sensitivity(self):
in_sen = np.zeros(self.input_dim) return reduce(np.add, [k.input_sensitivity() for k in self.parts])
for i, p in enumerate(self.parts):
in_sen[p.active_dims] += p.input_sensitivity()
return in_sen

View file

@ -32,7 +32,7 @@ def index_to_slices(index):
[ret[ind_i].append(slice(*indexes_i)) for ind_i,indexes_i in zip(ind[switchpoints[:-1]],zip(switchpoints,switchpoints[1:]))] [ret[ind_i].append(slice(*indexes_i)) for ind_i,indexes_i in zip(ind[switchpoints[:-1]],zip(switchpoints,switchpoints[1:]))]
return ret return ret
class IndependentOutputs(Kern): class IndependentOutputs(CombinationKernel):
""" """
A kernel which can represent several independent functions. This kernel A kernel which can represent several independent functions. This kernel
'switches off' parts of the matrix where the output indexes are different. 'switches off' parts of the matrix where the output indexes are different.
@ -180,6 +180,9 @@ class Hierarchical(CombinationKernel):
def Kdiag(self,X): def Kdiag(self,X):
return np.diag(self.K(X)) return np.diag(self.K(X))
def gradients_X(self, dL_dK, X, X2=None):
raise NotImplementedError
def update_gradients_full(self,dL_dK,X,X2=None): def update_gradients_full(self,dL_dK,X,X2=None):
slices = [index_to_slices(X[:,i]) for i in self.extra_dims] slices = [index_to_slices(X[:,i]) for i in self.extra_dims]
if X2 is None: if X2 is None:
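A sketch of the wrapping pattern this class provides, mirroring the tests later in this commit; the last column of X carries the output index and covariances between different indices are zeroed:

import GPy

k = GPy.kern.RBF(2, active_dims=range(2))
kern = GPy.kern.IndependentOutputs(k, -1, 'ind')  # -1: the index column of X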

View file

@ -34,40 +34,28 @@ class Kern(Parameterized):
is the active_dimensions of inputs X we will work on. is the active_dimensions of inputs X we will work on.
All kernels will get sliced Xes as inputs, if active_dims is not None All kernels will get sliced Xes as inputs, if active_dims is not None
Only positive integers are allowed in active_dims!
if active_dims is None, slicing is switched off and all X will be passed through as given. if active_dims is None, slicing is switched off and all X will be passed through as given.
:param int input_dim: the number of input dimensions to the function :param int input_dim: the number of input dimensions to the function
:param array-like|slice|None active_dims: list of indices on which dimensions this kernel works on, or none if no slicing :param array-like|None active_dims: list of indices on which dimensions this kernel works on, or none if no slicing
Do not instantiate. Do not instantiate.
""" """
super(Kern, self).__init__(name=name, *a, **kw) super(Kern, self).__init__(name=name, *a, **kw)
try:
self.input_dim = int(input_dim) self.input_dim = int(input_dim)
self.active_dims = active_dims# if active_dims is not None else slice(0, input_dim, 1)
except TypeError:
# input_dim is something else then an integer
self.input_dim = input_dim
if active_dims is not None:
print "WARNING: given input_dim={} is not an integer and active_dims={} is given, switching off slicing"
self.active_dims = None
if self.active_dims is not None and self.input_dim is not None: if active_dims is None:
assert isinstance(self.active_dims, (slice, list, tuple, np.ndarray)), 'active_dims needs to be an array-like or slice object over dimensions, {} given'.format(self.active_dims.__class__) active_dims = np.arange(input_dim)
if isinstance(self.active_dims, slice):
self.active_dims = slice(self.active_dims.start or 0, self.active_dims.stop or self.input_dim, self.active_dims.step or 1) self.active_dims = np.array(active_dims, dtype=int)
active_dim_size = int(np.round((self.active_dims.stop-self.active_dims.start)/self.active_dims.step))
elif isinstance(self.active_dims, np.ndarray): assert self.active_dims.size == self.input_dim, "input_dim={} does not match len(active_dim)={}, active_dims={}".format(self.input_dim, self.active_dims.size, self.active_dims)
#assert np.all(self.active_dims >= 0), 'active dimensions need to be positive. negative indexing is not allowed'
assert self.active_dims.ndim == 1, 'only flat indices allowed, given active_dims.shape={}, provide only indexes to the dimensions (columns) of the input'.format(self.active_dims.shape)
active_dim_size = self.active_dims.size
else:
active_dim_size = len(self.active_dims)
assert active_dim_size == self.input_dim, "input_dim={} does not match len(active_dim)={}, active_dims={}".format(self.input_dim, active_dim_size, self.active_dims)
self._sliced_X = 0 self._sliced_X = 0
self.useGPU = self._support_GPU and useGPU self.useGPU = self._support_GPU and useGPU
@Cache_this(limit=10) @Cache_this(limit=20)
def _slice_X(self, X): def _slice_X(self, X):
return X[:, self.active_dims] return X[:, self.active_dims]
@ -176,8 +164,8 @@ class Kern(Parameterized):
""" """
Shortcut for tensor `prod`. Shortcut for tensor `prod`.
""" """
assert self.active_dims == range(self.input_dim), "Can only use kernels which have their input_dims defined from 0" assert np.all(self.active_dims == range(self.input_dim)), "Can only use kernels which have their input_dims defined from 0"
assert other.active_dims == range(other.input_dim), "Can only use kernels, which have their input_dims defined from 0" assert np.all(other.active_dims == range(other.input_dim)), "Can only use kernels, which have their input_dims defined from 0"
other.active_dims += self.input_dim other.active_dims += self.input_dim
return self.prod(other) return self.prod(other)
@ -195,17 +183,17 @@ class Kern(Parameterized):
assert isinstance(other, Kern), "only kernels can be added to kernels..." assert isinstance(other, Kern), "only kernels can be added to kernels..."
from prod import Prod from prod import Prod
#kernels = [] #kernels = []
#if isinstance(self, Prod): kernels.extend(self._parameters_) #if isinstance(self, Prod): kernels.extend(self.parameters)
#else: kernels.append(self) #else: kernels.append(self)
#if isinstance(other, Prod): kernels.extend(other._parameters_) #if isinstance(other, Prod): kernels.extend(other.parameters)
#else: kernels.append(other) #else: kernels.append(other)
return Prod([self, other], name) return Prod([self, other], name)
def _check_input_dim(self, X): def _check_input_dim(self, X):
assert X.shape[1] == self.input_dim, "You did not specify active_dims and X has wrong shape: X_dim={}, whereas input_dim={}".format(X.shape[1], self.input_dim) assert X.shape[1] == self.input_dim, "{} did not specify active_dims and X has wrong shape: X_dim={}, whereas input_dim={}".format(self.name, X.shape[1], self.input_dim)
def _check_active_dims(self, X): def _check_active_dims(self, X):
assert X.shape[1] >= len(np.r_[self.active_dims]), "At least {} dimensional X needed, X.shape={!s}".format(len(np.r_[self.active_dims]), X.shape) assert X.shape[1] >= len(self.active_dims), "At least {} dimensional X needed, X.shape={!s}".format(len(self.active_dims), X.shape)
class CombinationKernel(Kern): class CombinationKernel(Kern):
@ -222,9 +210,10 @@ class CombinationKernel(Kern):
:param list kernels: List of kernels to combine (can be only one element) :param list kernels: List of kernels to combine (can be only one element)
:param str name: name of the combination kernel :param str name: name of the combination kernel
:param array-like|slice extra_dims: if needed extra dimensions for the combination kernel to work on :param array-like extra_dims: if needed extra dimensions for the combination kernel to work on
""" """
assert all([isinstance(k, Kern) for k in kernels]) assert all([isinstance(k, Kern) for k in kernels])
extra_dims = np.array(extra_dims, dtype=int)
input_dim, active_dims = self.get_input_dim_active_dims(kernels, extra_dims) input_dim, active_dims = self.get_input_dim_active_dims(kernels, extra_dims)
# initialize the kernel with the full input_dim # initialize the kernel with the full input_dim
super(CombinationKernel, self).__init__(input_dim, active_dims, name) super(CombinationKernel, self).__init__(input_dim, active_dims, name)
@ -233,21 +222,23 @@ class CombinationKernel(Kern):
@property @property
def parts(self): def parts(self):
return self._parameters_ return self.parameters
def get_input_dim_active_dims(self, kernels, extra_dims = None): def get_input_dim_active_dims(self, kernels, extra_dims = None):
#active_dims = reduce(np.union1d, (np.r_[x.active_dims] for x in kernels), np.array([], dtype=int)) #active_dims = reduce(np.union1d, (np.r_[x.active_dims] for x in kernels), np.array([], dtype=int))
#active_dims = np.array(np.concatenate((active_dims, extra_dims if extra_dims is not None else [])), dtype=int) #active_dims = np.array(np.concatenate((active_dims, extra_dims if extra_dims is not None else [])), dtype=int)
input_dim = np.array([k.input_dim for k in kernels]) input_dim = reduce(max, (k.active_dims.max() for k in kernels)) + 1
if np.all(input_dim[0]==input_dim):
input_dim = input_dim[0] if extra_dims is not None:
active_dims = None input_dim += extra_dims.size
active_dims = np.arange(input_dim)
return input_dim, active_dims return input_dim, active_dims
def input_sensitivity(self): def input_sensitivity(self):
raise NotImplementedError("Choose the kernel you want to get the sensitivity for. You need to override the default behaviour for getting the input sensitivity to be able to get the input sensitivity. For sum kernel it is the sum of all sensitivities, TODO: product kernel? Other kernels?, also TODO: shall we return all the sensitivities here in the combination kernel? So we can combine them however we want? This could lead to just plot all the sensitivities here...") raise NotImplementedError("Choose the kernel you want to get the sensitivity for. You need to override the default behaviour for getting the input sensitivity to be able to get the input sensitivity. For sum kernel it is the sum of all sensitivities, TODO: product kernel? Other kernels?, also TODO: shall we return all the sensitivities here in the combination kernel? So we can combine them however we want? This could lead to just plot all the sensitivities here...")
def _check_input_dim(self, X): def _check_active_dims(self, X):
return return
def _check_input_dim(self, X): def _check_input_dim(self, X):
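A sketch of the convention after this change: active_dims is always a flat integer array of length input_dim, and None expands to np.arange(input_dim):

import numpy as np
import GPy

k1 = GPy.kern.RBF(2)                                        # active_dims becomes np.array([0, 1])
k2 = GPy.kern.Matern32(3, active_dims=np.array([1, 7, 9]))  # operates on columns 1, 7 and 9 of X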

View file

@ -12,6 +12,7 @@ from ...core.parameterization.transformations import Logexp
from ...util.caching import Cache_this from ...util.caching import Cache_this
from ...core.parameterization import variational from ...core.parameterization import variational
from psi_comp import linear_psi_comp from psi_comp import linear_psi_comp
from ...util.config import *
class Linear(Kern): class Linear(Kern):
""" """
@ -188,12 +189,23 @@ class Linear(Kern):
AZZA = ZA.T[:, None, :, None] * ZA[None, :, None, :] AZZA = ZA.T[:, None, :, None] * ZA[None, :, None, :]
AZZA = AZZA + AZZA.swapaxes(1, 2) AZZA = AZZA + AZZA.swapaxes(1, 2)
AZZA_2 = AZZA/2. AZZA_2 = AZZA/2.
if config.getboolean('parallel', 'openmp'):
pragma_string = '#pragma omp parallel for private(m,mm,q,qq,factor,tmp)'
header_string = '#include <omp.h>'
weave_options = {'headers' : ['<omp.h>'],
'extra_compile_args': ['-fopenmp -O3'],
'extra_link_args' : ['-lgomp'],
'libraries': ['gomp']}
else:
pragma_string = ''
header_string = ''
weave_options = {'extra_compile_args': ['-O3']}
#Using weave, we can exploit the symmetry of this problem: #Using weave, we can exploit the symmetry of this problem:
code = """ code = """
int n, m, mm,q,qq; int n, m, mm,q,qq;
double factor,tmp; double factor,tmp;
#pragma omp parallel for private(m,mm,q,qq,factor,tmp) %s
for(n=0;n<N;n++){ for(n=0;n<N;n++){
for(m=0;m<num_inducing;m++){ for(m=0;m<num_inducing;m++){
for(mm=0;mm<=m;mm++){ for(mm=0;mm<=m;mm++){
@ -217,26 +229,36 @@ class Linear(Kern):
} }
} }
} }
""" """ % pragma_string
support_code = """ support_code = """
#include <omp.h> %s
#include <math.h> #include <math.h>
""" """ % header_string
weave_options = {'headers' : ['<omp.h>'],
'extra_compile_args': ['-fopenmp -O3'], #-march=native'],
'extra_link_args' : ['-lgomp']}
mu = vp.mean mu = vp.mean
N,num_inducing,input_dim,mu = mu.shape[0],Z.shape[0],mu.shape[1],param_to_array(mu) N,num_inducing,input_dim,mu = mu.shape[0],Z.shape[0],mu.shape[1],param_to_array(mu)
weave.inline(code, support_code=support_code, libraries=['gomp'], weave.inline(code, support_code=support_code,
arg_names=['N','num_inducing','input_dim','mu','AZZA','AZZA_2','target_mu','target_S','dL_dpsi2'], arg_names=['N','num_inducing','input_dim','mu','AZZA','AZZA_2','target_mu','target_S','dL_dpsi2'],
type_converters=weave.converters.blitz,**weave_options) type_converters=weave.converters.blitz,**weave_options)
def _weave_dpsi2_dZ(self, dL_dpsi2, Z, vp, target): def _weave_dpsi2_dZ(self, dL_dpsi2, Z, vp, target):
AZA = self.variances*self._ZAinner(vp, Z) AZA = self.variances*self._ZAinner(vp, Z)
if config.getboolean('parallel', 'openmp'):
pragma_string = '#pragma omp parallel for private(n,mm,q)'
header_string = '#include <omp.h>'
weave_options = {'headers' : ['<omp.h>'],
'extra_compile_args': ['-fopenmp -O3'],
'extra_link_args' : ['-lgomp'],
'libraries': ['gomp']}
else:
pragma_string = ''
header_string = ''
weave_options = {'extra_compile_args': ['-O3']}
code=""" code="""
int n,m,mm,q; int n,m,mm,q;
#pragma omp parallel for private(n,mm,q) %s
for(m=0;m<num_inducing;m++){ for(m=0;m<num_inducing;m++){
for(q=0;q<input_dim;q++){ for(q=0;q<input_dim;q++){
for(mm=0;mm<num_inducing;mm++){ for(mm=0;mm<num_inducing;mm++){
@ -246,18 +268,15 @@ class Linear(Kern):
} }
} }
} }
""" """ % pragma_string
support_code = """ support_code = """
#include <omp.h> %s
#include <math.h> #include <math.h>
""" """ % header_string
weave_options = {'headers' : ['<omp.h>'],
'extra_compile_args': ['-fopenmp -O3'], #-march=native'],
'extra_link_args' : ['-lgomp']}
N,num_inducing,input_dim = vp.mean.shape[0],Z.shape[0],vp.mean.shape[1] N,num_inducing,input_dim = vp.mean.shape[0],Z.shape[0],vp.mean.shape[1]
mu = param_to_array(vp.mean) mu = param_to_array(vp.mean)
weave.inline(code, support_code=support_code, libraries=['gomp'], weave.inline(code, support_code=support_code,
arg_names=['N','num_inducing','input_dim','AZA','target','dL_dpsi2'], arg_names=['N','num_inducing','input_dim','AZA','target','dL_dpsi2'],
type_converters=weave.converters.blitz,**weave_options) type_converters=weave.converters.blitz,**weave_options)
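The guard introduced above is the same around every weave call touched by this commit; in isolation (config section and key as used in the diff):

from GPy.util.config import config

if config.getboolean('parallel', 'openmp'):
    weave_options = {'headers': ['<omp.h>'],
                     'extra_compile_args': ['-fopenmp -O3'],
                     'extra_link_args': ['-lgomp'],
                     'libraries': ['gomp']}
else:
    weave_options = {'extra_compile_args': ['-O3']}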

42
GPy/kern/_src/poly.py Normal file
View file

@ -0,0 +1,42 @@
# Copyright (c) 2014, James Hensman
# Licensed under the BSD 3-clause license (see LICENSE.txt)
import numpy as np
from kern import Kern
from ...util.misc import param_to_array
from ...core.parameterization import Param
from ...core.parameterization.transformations import Logexp
class Poly(Kern):
"""
Polynomial kernel
"""
def __init__(self, input_dim, variance=1., order=3., active_dims=None, name='poly'):
super(Poly, self).__init__(input_dim, active_dims, name)
self.variance = Param('variance', variance, Logexp())
self.add_parameter(self.variance)
self.order=order
def K(self, X, X2=None):
return (self._dot_product(X, X2) + 1.)**self.order * self.variance
def _dot_product(self, X, X2=None):
if X2 is None:
return np.dot(X, X.T)
else:
return np.dot(X, X2.T)
def Kdiag(self, X):
return self.variance*(np.square(X).sum(1) + 1.)**self.order
def update_gradients_full(self, dL_dK, X, X2=None):
self.variance.gradient = np.sum(dL_dK * (self._dot_product(X, X2) + 1.)**self.order)
def update_gradients_diag(self, dL_dKdiag, X):
raise NotImplementedError
def gradients_X(self, dL_dK, X, X2=None):
raise NotImplementedError
def gradients_X_diag(self, dL_dKdiag, X):
raise NotImplementedError
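A quick sketch of the new kernel (gradients w.r.t. X and the diagonal are left NotImplemented above); the export name GPy.kern.Poly is an assumption:

import numpy as np
import GPy

X = np.random.randn(20, 3)
k = GPy.kern.Poly(3, variance=1., order=3.)
K = k.K(X)  # variance * (X.dot(X.T) + 1)**order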

View file

@ -10,6 +10,7 @@ from GPy.util.caching import Cache_this
from ...core.parameterization import variational from ...core.parameterization import variational
from psi_comp import ssrbf_psi_comp from psi_comp import ssrbf_psi_comp
from psi_comp import ssrbf_psi_gpucomp from psi_comp import ssrbf_psi_gpucomp
from ...util.config import *
class RBF(Stationary): class RBF(Stationary):
""" """
@ -32,7 +33,6 @@ class RBF(Stationary):
else: else:
self.psicomp = ssrbf_psi_comp self.psicomp = ssrbf_psi_comp
def K_of_r(self, r): def K_of_r(self, r):
return self.variance * np.exp(-0.5 * r**2) return self.variance * np.exp(-0.5 * r**2)
@ -246,6 +246,16 @@ class RBF(Stationary):
@Cache_this(limit=1) @Cache_this(limit=1)
def _psi2computations(self, Z, vp): def _psi2computations(self, Z, vp):
if config.getboolean('parallel', 'openmp'):
pragma_string = '#pragma omp parallel for private(tmp, exponent_tmp)'
header_string = '#include <omp.h>'
libraries = ['gomp']
else:
pragma_string = ''
header_string = ''
libraries = []
mu, S = vp.mean, vp.variance mu, S = vp.mean, vp.variance
N, Q = mu.shape N, Q = mu.shape
@ -268,8 +278,7 @@ class RBF(Stationary):
variance_sq = float(np.square(self.variance)) variance_sq = float(np.square(self.variance))
code = """ code = """
double tmp, exponent_tmp; double tmp, exponent_tmp;
%s
#pragma omp parallel for private(tmp, exponent_tmp)
for (int n=0; n<N; n++) for (int n=0; n<N; n++)
{ {
for (int m=0; m<M; m++) for (int m=0; m<M; m++)
@ -293,20 +302,20 @@ class RBF(Stationary):
tmp = -Zdist_sq(m,mm,q) - tmp - half_log_denom(n,q); tmp = -Zdist_sq(m,mm,q) - tmp - half_log_denom(n,q);
exponent_tmp += tmp; exponent_tmp += tmp;
} }
//compute psi2 by exponontiating //compute psi2 by exponentiating
psi2(n,m,mm) = variance_sq * exp(exponent_tmp); psi2(n,m,mm) = variance_sq * exp(exponent_tmp);
psi2(n,mm,m) = psi2(n,m,mm); psi2(n,mm,m) = psi2(n,m,mm);
} }
} }
} }
""" """ % pragma_string
support_code = """ support_code = """
#include <omp.h> %s
#include <math.h> #include <math.h>
""" """ % header_string
mu = param_to_array(mu) mu = param_to_array(mu)
weave.inline(code, support_code=support_code, libraries=['gomp'], weave.inline(code, support_code=support_code, libraries=libraries,
arg_names=['N', 'M', 'Q', 'mu', 'Zhat', 'mudist_sq', 'mudist', 'denom_l2', 'Zdist_sq', 'half_log_denom', 'psi2', 'variance_sq'], arg_names=['N', 'M', 'Q', 'mu', 'Zhat', 'mudist_sq', 'mudist', 'denom_l2', 'Zdist_sq', 'half_log_denom', 'psi2', 'variance_sq'],
type_converters=weave.converters.blitz, **self.weave_options) type_converters=weave.converters.blitz, **self.weave_options)
@ -318,12 +327,20 @@ class RBF(Stationary):
#return 2.*np.einsum( 'ijk,ijk,ijkl,il->l', dL_dpsi2, psi2, Zdist_sq * (2.*S[:,None,None,:]/l2 + 1.) + mudist_sq + S[:, None, None, :] / l2, 1./(2.*S + l2))*self.lengthscale #return 2.*np.einsum( 'ijk,ijk,ijkl,il->l', dL_dpsi2, psi2, Zdist_sq * (2.*S[:,None,None,:]/l2 + 1.) + mudist_sq + S[:, None, None, :] / l2, 1./(2.*S + l2))*self.lengthscale
result = np.zeros(self.input_dim) result = np.zeros(self.input_dim)
if config.getboolean('parallel', 'openmp'):
pragma_string = '#pragma omp parallel for reduction(+:tmp)'
header_string = '#include <omp.h>'
libraries = ['gomp']
else:
pragma_string = ''
header_string = ''
libraries = []
code = """ code = """
double tmp; double tmp;
for(int q=0; q<Q; q++) for(int q=0; q<Q; q++)
{ {
tmp = 0.0; tmp = 0.0;
#pragma omp parallel for reduction(+:tmp) %s
for(int n=0; n<N; n++) for(int n=0; n<N; n++)
{ {
for(int m=0; m<M; m++) for(int m=0; m<M; m++)
@ -341,16 +358,16 @@ class RBF(Stationary):
result(q) = tmp; result(q) = tmp;
} }
""" """ % pragma_string
support_code = """ support_code = """
#include <omp.h> %s
#include <math.h> #include <math.h>
""" """ % header_string
N,Q = S.shape N,Q = S.shape
M = psi2.shape[-1] M = psi2.shape[-1]
S = param_to_array(S) S = param_to_array(S)
weave.inline(code, support_code=support_code, libraries=['gomp'], weave.inline(code, support_code=support_code, libraries=libraries,
arg_names=['psi2', 'dL_dpsi2', 'N', 'M', 'Q', 'mudist_sq', 'l2', 'Zdist_sq', 'S', 'result'], arg_names=['psi2', 'dL_dpsi2', 'N', 'M', 'Q', 'mudist_sq', 'l2', 'Zdist_sq', 'S', 'result'],
type_converters=weave.converters.blitz, **self.weave_options) type_converters=weave.converters.blitz, **self.weave_options)

View file

@ -192,6 +192,27 @@ class Exponential(Stationary):
def dK_dr(self, r): def dK_dr(self, r):
return -0.5*self.K_of_r(r) return -0.5*self.K_of_r(r)
class OU(Stationary):
"""
OU kernel:
.. math::
k(r) = \\sigma^2 \\exp(-r) \\qquad \\text{where } r = \\sqrt{ \\sum_{i=1}^{\\text{input\\_dim}} \\frac{(x_i-y_i)^2}{\\ell_i^2} }
"""
def __init__(self, input_dim, variance=1., lengthscale=None, ARD=False, active_dims=None, name='OU'):
super(OU, self).__init__(input_dim, variance, lengthscale, ARD, active_dims, name)
def K_of_r(self, r):
return self.variance * np.exp(-r)
def dK_dr(self,r):
return -1.*self.variance*np.exp(-r)
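A sketch of the new OU kernel, i.e. the exponential (Matern-1/2) covariance; the export name GPy.kern.OU follows the class name above:

import numpy as np
import GPy

X = np.linspace(0, 10, 50)[:, None]
k = GPy.kern.OU(1, variance=1., lengthscale=2.)
K = k.K(X)  # variance * exp(-|x - x'| / lengthscale)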
class Matern32(Stationary): class Matern32(Stationary):
""" """
Matern 3/2 kernel: Matern 3/2 kernel:

View file

@ -227,3 +227,6 @@ class Bernoulli(Likelihood):
ns = np.ones_like(gp, dtype=int) ns = np.ones_like(gp, dtype=int)
Ysim = np.random.binomial(ns, self.gp_link.transf(gp)) Ysim = np.random.binomial(ns, self.gp_link.transf(gp))
return Ysim.reshape(orig_shape) return Ysim.reshape(orig_shape)
def exact_inference_gradients(self, dL_dKdiag,Y_metadata=None):
pass

View file

@ -0,0 +1,48 @@
# Copyright (c) 2014 The GPy authors (see AUTHORS.txt)
# Licensed under the BSD 3-clause license (see LICENSE.txt)
import sympy as sym
from GPy.util.symbolic import gammaln, normcdfln, normcdf, IndMatrix, create_matrix
import numpy as np
from ..util.univariate_Gaussian import std_norm_pdf, std_norm_cdf
import link_functions
from symbolic import Symbolic
from scipy import stats
class Ordinal(Symbolic):
"""
Ordinal likelihood for ordered categorical outputs. Category probabilities
are differences of Gaussian CDFs evaluated at cumulative-width thresholds
shifted by the latent function (see the construction of log_pdf below).
.. Note::
Y takes integer values in {0, ..., categories-1}.
The default link function is the identity.
.. See also::
symbolic.py, for the parent class
"""
def __init__(self, categories=3, gp_link=None):
if gp_link is None:
gp_link = link_functions.Identity()
dispersion = sym.Symbol('width', positive=True, real=True)
y_0 = sym.Symbol('y_0', nonnegative=True, integer=True)
f_0 = sym.Symbol('f_0', positive=True, real=True)
log_pdf = create_matrix('log_pdf', 1, categories)
log_pdf[0] = normcdfln(-f_0)
if categories>2:
w = create_matrix('w', 1, categories)
log_pdf[categories-1] = normcdfln(w.sum() + f_0)
for i in range(1, categories-1):
log_pdf[i] = sym.log(normcdf(w[0, 0:i-1].sum() + f_0) - normcdf(w[0, 0:i].sum()-f_0) )
else:
log_pdf[1] = normcdfln(f_0)
log_pdf.index_var = y_0
super(Ordinal, self).__init__(log_pdf=log_pdf, gp_link=gp_link, name='Ordinal')
# TODO: Check this.
self.log_concave = True
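A construction sketch (hypothetical usage; requires the sympy-based Symbolic likelihood machinery added in this commit):

lik = Ordinal(categories=4)  # four ordered categories with the identity link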

View file

@ -42,7 +42,7 @@ class BayesianGPLVM(SparseGP):
assert Z.shape[1] == X.shape[1] assert Z.shape[1] == X.shape[1]
if kernel is None: if kernel is None:
kernel = kern.RBF(input_dim, lengthscale=fracs, ARD=True) # + kern.white(input_dim) kernel = kern.RBF(input_dim, lengthscale=1./fracs, ARD=True) # + kern.white(input_dim)
if likelihood is None: if likelihood is None:
likelihood = Gaussian() likelihood = Gaussian()

View file

@ -97,7 +97,7 @@ def plot_latent(model, labels=None, which_indices=None,
elif type(ul) is np.int64: elif type(ul) is np.int64:
this_label = 'class %i' % ul this_label = 'class %i' % ul
else: else:
this_label = 'class %i' % i this_label = unicode(ul)
m = marker.next() m = marker.next()
index = np.nonzero(labels == ul)[0] index = np.nonzero(labels == ul)[0]

View file

@ -68,7 +68,7 @@ def plot_ARD(kernel, fignum=None, ax=None, title='', legend=False):
for i in range(ard_params.shape[0]): for i in range(ard_params.shape[0]):
c = Tango.nextMedium() c = Tango.nextMedium()
bars.append(plot_bars(fig, ax, x, ard_params[i,:], c, kernel._parameters_[i].name, bottom=bottom)) bars.append(plot_bars(fig, ax, x, ard_params[i,:], c, kernel.parameters[i].name, bottom=bottom))
bottom += ard_params[i,:] bottom += ard_params[i,:]
ax.set_xlim(-.5, kernel.input_dim - .5) ax.set_xlim(-.5, kernel.input_dim - .5)

View file

@ -14,7 +14,7 @@ def plot_fit(model, plot_limits=None, which_data_rows='all',
which_data_ycols='all', fixed_inputs=[], which_data_ycols='all', fixed_inputs=[],
levels=20, samples=0, fignum=None, ax=None, resolution=None, levels=20, samples=0, fignum=None, ax=None, resolution=None,
plot_raw=False, plot_raw=False,
linecol=Tango.colorsHex['darkBlue'],fillcol=Tango.colorsHex['lightBlue'], Y_metadata=None): linecol=Tango.colorsHex['darkBlue'],fillcol=Tango.colorsHex['lightBlue'], Y_metadata=None, data_symbol='kx'):
""" """
Plot the posterior of the GP. Plot the posterior of the GP.
- In one dimension, the function is plotted with a shaded region identifying two standard deviations. - In one dimension, the function is plotted with a shaded region identifying two standard deviations.
@ -97,7 +97,7 @@ def plot_fit(model, plot_limits=None, which_data_rows='all',
for d in which_data_ycols: for d in which_data_ycols:
plots['gpplot'] = gpplot(Xnew, m[:, d], lower[:, d], upper[:, d], ax=ax, edgecol=linecol, fillcol=fillcol) plots['gpplot'] = gpplot(Xnew, m[:, d], lower[:, d], upper[:, d], ax=ax, edgecol=linecol, fillcol=fillcol)
plots['dataplot'] = ax.plot(X[which_data_rows,free_dims], Y[which_data_rows, d], 'kx', mew=1.5) plots['dataplot'] = ax.plot(X[which_data_rows,free_dims], Y[which_data_rows, d], data_symbol, mew=1.5)
#optionally plot some samples #optionally plot some samples
if samples: #NOTE not tested with fixed_inputs if samples: #NOTE not tested with fixed_inputs

View file

@ -74,13 +74,16 @@ class vector_show(matplotlib_show):
""" """
def __init__(self, vals, axes=None): def __init__(self, vals, axes=None):
matplotlib_show.__init__(self, vals, axes) matplotlib_show.__init__(self, vals, axes)
self.handle = self.axes.plot(np.arange(0, len(vals))[:, None], self.vals) #assert vals.ndim == 2, "Please give a vector in [n x 1] to plot"
#assert vals.shape[1] == 1, "only showing a vector in one dimension"
self.size = vals.size
self.handle = self.axes.plot(np.arange(0, vals.size)[:, None], vals)[0]
def modify(self, vals): def modify(self, vals):
self.vals = vals.copy() self.vals = vals.copy()
for handle, vals in zip(self.handle, self.vals.T): xdata, ydata = self.handle.get_data()
xdata, ydata = handle.get_data() assert vals.size == self.size, "values passed into modify changed size! vals.size:{} != in.size:{}".format(vals.size, self.size)
handle.set_data(xdata, vals) self.handle.set_data(xdata, self.vals)
self.axes.figure.canvas.draw() self.axes.figure.canvas.draw()
@ -94,13 +97,13 @@ class lvm(matplotlib_show):
:type data_visualize: visualize.data_show type. :type data_visualize: visualize.data_show type.
:param latent_axes: the axes where the latent visualization should be plotted. :param latent_axes: the axes where the latent visualization should be plotted.
""" """
if vals == None: if vals is None:
if isinstance(model.X, VariationalPosterior): if isinstance(model.X, VariationalPosterior):
vals = param_to_array(model.X.mean) vals = param_to_array(model.X.mean)
else: else:
vals = param_to_array(model.X) vals = param_to_array(model.X)
if len(vals.shape)==1:
vals = param_to_array(vals) vals = vals[None,:]
matplotlib_show.__init__(self, vals, axes=latent_axes) matplotlib_show.__init__(self, vals, axes=latent_axes)
if isinstance(latent_axes,mpl.axes.Axes): if isinstance(latent_axes,mpl.axes.Axes):
@ -391,14 +394,13 @@ class mocap_data_show_vpython(vpython_show):
def process_values(self): def process_values(self):
raise NotImplementedError, "this needs to be implemented to use the data_show class" raise NotImplementedError, "this needs to be implemented to use the data_show class"
class mocap_data_show(matplotlib_show): class mocap_data_show(matplotlib_show):
"""Base class for visualizing motion capture data.""" """Base class for visualizing motion capture data."""
def __init__(self, vals, axes=None, connect=None): def __init__(self, vals, axes=None, connect=None):
if axes==None: if axes==None:
fig = plt.figure() fig = plt.figure()
axes = fig.add_subplot(111, projection='3d') axes = fig.add_subplot(111, projection='3d',aspect='equal')
matplotlib_show.__init__(self, vals, axes) matplotlib_show.__init__(self, vals, axes)
self.connect = connect self.connect = connect
@ -443,11 +445,12 @@ class mocap_data_show(matplotlib_show):
def process_values(self): def process_values(self):
raise NotImplementedError, "this needs to be implemented to use the data_show class" raise NotImplementedError, "this needs to be implemented to use the data_show class"
def initialize_axes(self): def initialize_axes(self, boundary=0.05):
"""Set up the axes with the right limits and scaling.""" """Set up the axes with the right limits and scaling."""
self.x_lim = np.array([self.vals[:, 0].min(), self.vals[:, 0].max()]) bs = [(self.vals[:, i].max()-self.vals[:, i].min())*boundary for i in xrange(3)]
self.y_lim = np.array([self.vals[:, 1].min(), self.vals[:, 1].max()]) self.x_lim = np.array([self.vals[:, 0].min()-bs[0], self.vals[:, 0].max()+bs[0]])
self.z_lim = np.array([self.vals[:, 2].min(), self.vals[:, 2].max()]) self.y_lim = np.array([self.vals[:, 1].min()-bs[1], self.vals[:, 1].max()+bs[1]])
self.z_lim = np.array([self.vals[:, 2].min()-bs[2], self.vals[:, 2].max()+bs[2]])
def initialize_axes_modify(self): def initialize_axes_modify(self):
self.points_handle.remove() self.points_handle.remove()
@ -470,6 +473,8 @@ class mocap_data_show(matplotlib_show):
class stick_show(mocap_data_show): class stick_show(mocap_data_show):
"""Show a three dimensional point cloud as a figure. Connect elements of the figure together using the matrix connect.""" """Show a three dimensional point cloud as a figure. Connect elements of the figure together using the matrix connect."""
def __init__(self, vals, connect=None, axes=None): def __init__(self, vals, connect=None, axes=None):
if len(vals.shape)==1:
vals = vals[None,:]
mocap_data_show.__init__(self, vals, axes=axes, connect=connect) mocap_data_show.__init__(self, vals, axes=axes, connect=connect)
def process_values(self): def process_values(self):

View file

@ -304,23 +304,13 @@ class KernelTestsMiscellaneous(unittest.TestCase):
def setUp(self): def setUp(self):
N, D = 100, 10 N, D = 100, 10
self.X = np.linspace(-np.pi, +np.pi, N)[:,None] * np.random.uniform(-10,10,D) self.X = np.linspace(-np.pi, +np.pi, N)[:,None] * np.random.uniform(-10,10,D)
self.rbf = GPy.kern.RBF(2, active_dims=slice(0,4,2)) self.rbf = GPy.kern.RBF(2, active_dims=np.arange(0,4,2))
self.linear = GPy.kern.Linear(2, active_dims=(3,9)) self.linear = GPy.kern.Linear(2, active_dims=(3,9))
self.matern = GPy.kern.Matern32(3, active_dims=np.array([1,7,9])) self.matern = GPy.kern.Matern32(3, active_dims=np.array([1,7,9]))
self.sumkern = self.rbf + self.linear self.sumkern = self.rbf + self.linear
self.sumkern += self.matern self.sumkern += self.matern
self.sumkern.randomize() self.sumkern.randomize()
def test_active_dims(self):
# test the automatic dim detection expression for slices:
start, stop = 0, 277
for i in range(start,stop,7):
for j in range(1,4):
GPy.kern.Kern(int(np.round((i+1)/j)), slice(0, i+1, j), "testkern")
# test the ability to have only one dim
sk = GPy.kern.RBF(2) + GPy.kern.Matern32(2)
self.assertEqual(sk.input_dim, 2)
def test_which_parts(self): def test_which_parts(self):
self.assertTrue(np.allclose(self.sumkern.K(self.X, which_parts=[self.linear, self.matern]), self.linear.K(self.X)+self.matern.K(self.X))) self.assertTrue(np.allclose(self.sumkern.K(self.X, which_parts=[self.linear, self.matern]), self.linear.K(self.X)+self.matern.K(self.X)))
self.assertTrue(np.allclose(self.sumkern.K(self.X, which_parts=[self.linear, self.rbf]), self.linear.K(self.X)+self.rbf.K(self.X))) self.assertTrue(np.allclose(self.sumkern.K(self.X, which_parts=[self.linear, self.rbf]), self.linear.K(self.X)+self.rbf.K(self.X)))
@ -344,10 +334,15 @@ class KernelTestsNonContinuous(unittest.TestCase):
self.X2[(N0*2):, -1] = 1 self.X2[(N0*2):, -1] = 1
def test_IndependentOutputs(self): def test_IndependentOutputs(self):
k = GPy.kern.RBF(self.D) k = GPy.kern.RBF(self.D, active_dims=range(self.D))
kern = GPy.kern.IndependentOutputs(k, -1, 'ind_single') kern = GPy.kern.IndependentOutputs(k, -1, 'ind_single')
self.assertTrue(check_kernel_gradient_functions(kern, X=self.X, X2=self.X2, verbose=verbose, fixed_X_dims=-1)) self.assertTrue(check_kernel_gradient_functions(kern, X=self.X, X2=self.X2, verbose=verbose, fixed_X_dims=-1))
k = [GPy.kern.RBF(1, active_dims=[1], name='rbf1'), GPy.kern.RBF(self.D, name='rbf012'), GPy.kern.RBF(2, active_dims=[0,2], name='rbf02')] k = [GPy.kern.RBF(1, active_dims=[1], name='rbf1'), GPy.kern.RBF(self.D, active_dims=range(self.D), name='rbf012'), GPy.kern.RBF(2, active_dims=[0,2], name='rbf02')]
kern = GPy.kern.IndependentOutputs(k, -1, name='ind_split')
self.assertTrue(check_kernel_gradient_functions(kern, X=self.X, X2=self.X2, verbose=verbose, fixed_X_dims=-1))
def test_Hierarchical(self):
k = [GPy.kern.RBF(2, active_dims=[0,2], name='rbf1'), GPy.kern.RBF(2, active_dims=[0,2], name='rbf2')]
kern = GPy.kern.IndependentOutputs(k, -1, name='ind_split') kern = GPy.kern.IndependentOutputs(k, -1, name='ind_split')
self.assertTrue(check_kernel_gradient_functions(kern, X=self.X, X2=self.X2, verbose=verbose, fixed_X_dims=-1)) self.assertTrue(check_kernel_gradient_functions(kern, X=self.X, X2=self.X2, verbose=verbose, fixed_X_dims=-1))

View file

@ -27,11 +27,11 @@ class ArrayCoreTest(unittest.TestCase):
class ParameterizedTest(unittest.TestCase): class ParameterizedTest(unittest.TestCase):
def setUp(self): def setUp(self):
self.rbf = GPy.kern.RBF(1) self.rbf = GPy.kern.RBF(20)
self.white = GPy.kern.White(1) self.white = GPy.kern.White(1)
from GPy.core.parameterization import Param from GPy.core.parameterization import Param
from GPy.core.parameterization.transformations import Logistic from GPy.core.parameterization.transformations import Logistic
self.param = Param('param', np.random.rand(25,2), Logistic(0, 1)) self.param = Param('param', np.random.uniform(0,1,(25,2)), Logistic(0, 1))
self.test1 = GPy.core.Parameterized("test model") self.test1 = GPy.core.Parameterized("test model")
self.test1.param = self.param self.test1.param = self.param
@ -95,7 +95,7 @@ class ParameterizedTest(unittest.TestCase):
self.assertListEqual(self.test1.kern.param_array.tolist(), val[:2].tolist()) self.assertListEqual(self.test1.kern.param_array.tolist(), val[:2].tolist())
def test_add_parameter_already_in_hirarchy(self): def test_add_parameter_already_in_hirarchy(self):
self.assertRaises(HierarchyError, self.test1.add_parameter, self.white._parameters_[0]) self.assertRaises(HierarchyError, self.test1.add_parameter, self.white.parameters[0])
def test_default_constraints(self): def test_default_constraints(self):
self.assertIs(self.rbf.variance.constraints._param_index_ops, self.rbf.constraints._param_index_ops) self.assertIs(self.rbf.variance.constraints._param_index_ops, self.rbf.constraints._param_index_ops)
@ -142,6 +142,8 @@ class ParameterizedTest(unittest.TestCase):
self.testmodel.randomize() self.testmodel.randomize()
self.assertEqual(val, self.testmodel.kern.lengthscale) self.assertEqual(val, self.testmodel.kern.lengthscale)
def test_regular_expression_misc(self): def test_regular_expression_misc(self):
self.testmodel.kern.lengthscale.fix() self.testmodel.kern.lengthscale.fix()
val = float(self.testmodel.kern.lengthscale) val = float(self.testmodel.kern.lengthscale)

View file

@ -89,28 +89,28 @@ class Test(ListDictTestCase):
self.assertIs(pcopy.constraints, pcopy.rbf.lengthscale.constraints._param_index_ops) self.assertIs(pcopy.constraints, pcopy.rbf.lengthscale.constraints._param_index_ops)
self.assertIs(pcopy.constraints, pcopy.linear.constraints._param_index_ops) self.assertIs(pcopy.constraints, pcopy.linear.constraints._param_index_ops)
self.assertListEqual(par.param_array.tolist(), pcopy.param_array.tolist()) self.assertListEqual(par.param_array.tolist(), pcopy.param_array.tolist())
self.assertListEqual(par.full_gradient.tolist(), pcopy.full_gradient.tolist()) self.assertListEqual(par.gradient_full.tolist(), pcopy.gradient_full.tolist())
self.assertSequenceEqual(str(par), str(pcopy)) self.assertSequenceEqual(str(par), str(pcopy))
self.assertIsNot(par.param_array, pcopy.param_array) self.assertIsNot(par.param_array, pcopy.param_array)
self.assertIsNot(par.full_gradient, pcopy.full_gradient) self.assertIsNot(par.gradient_full, pcopy.gradient_full)
with tempfile.TemporaryFile('w+b') as f: with tempfile.TemporaryFile('w+b') as f:
par.pickle(f) par.pickle(f)
f.seek(0) f.seek(0)
pcopy = pickle.load(f) pcopy = pickle.load(f)
self.assertListEqual(par.param_array.tolist(), pcopy.param_array.tolist()) self.assertListEqual(par.param_array.tolist(), pcopy.param_array.tolist())
pcopy.gradient = 10 pcopy.gradient = 10
np.testing.assert_allclose(par.linear.full_gradient, pcopy.linear.full_gradient) np.testing.assert_allclose(par.linear.gradient_full, pcopy.linear.gradient_full)
np.testing.assert_allclose(pcopy.linear.full_gradient, 10) np.testing.assert_allclose(pcopy.linear.gradient_full, 10)
self.assertSequenceEqual(str(par), str(pcopy)) self.assertSequenceEqual(str(par), str(pcopy))
def test_model(self): def test_model(self):
par = toy_rbf_1d_50(optimize=0, plot=0) par = toy_rbf_1d_50(optimize=0, plot=0)
pcopy = par.copy() pcopy = par.copy()
self.assertListEqual(par.param_array.tolist(), pcopy.param_array.tolist()) self.assertListEqual(par.param_array.tolist(), pcopy.param_array.tolist())
self.assertListEqual(par.full_gradient.tolist(), pcopy.full_gradient.tolist()) self.assertListEqual(par.gradient_full.tolist(), pcopy.gradient_full.tolist())
self.assertSequenceEqual(str(par), str(pcopy)) self.assertSequenceEqual(str(par), str(pcopy))
self.assertIsNot(par.param_array, pcopy.param_array) self.assertIsNot(par.param_array, pcopy.param_array)
self.assertIsNot(par.full_gradient, pcopy.full_gradient) self.assertIsNot(par.gradient_full, pcopy.gradient_full)
self.assertTrue(pcopy.checkgrad()) self.assertTrue(pcopy.checkgrad())
self.assert_(np.any(pcopy.gradient!=0.0)) self.assert_(np.any(pcopy.gradient!=0.0))
with tempfile.TemporaryFile('w+b') as f: with tempfile.TemporaryFile('w+b') as f:
@ -118,7 +118,7 @@ class Test(ListDictTestCase):
f.seek(0) f.seek(0)
pcopy = pickle.load(f) pcopy = pickle.load(f)
self.assertListEqual(par.param_array.tolist(), pcopy.param_array.tolist()) self.assertListEqual(par.param_array.tolist(), pcopy.param_array.tolist())
np.testing.assert_allclose(par.full_gradient, pcopy.full_gradient) np.testing.assert_allclose(par.gradient_full, pcopy.gradient_full)
self.assertSequenceEqual(str(par), str(pcopy)) self.assertSequenceEqual(str(par), str(pcopy))
self.assert_(pcopy.checkgrad()) self.assert_(pcopy.checkgrad())
@ -126,18 +126,21 @@ class Test(ListDictTestCase):
par = toy_rbf_1d_50(optimize=0, plot=0) par = toy_rbf_1d_50(optimize=0, plot=0)
pcopy = GPRegression(par.X.copy(), par.Y.copy(), kernel=par.kern.copy()) pcopy = GPRegression(par.X.copy(), par.Y.copy(), kernel=par.kern.copy())
self.assertListEqual(par.param_array.tolist(), pcopy.param_array.tolist()) self.assertListEqual(par.param_array.tolist(), pcopy.param_array.tolist())
self.assertListEqual(par.full_gradient.tolist(), pcopy.full_gradient.tolist()) self.assertListEqual(par.gradient_full.tolist(), pcopy.gradient_full.tolist())
self.assertSequenceEqual(str(par), str(pcopy)) self.assertSequenceEqual(str(par), str(pcopy))
self.assertIsNot(par.param_array, pcopy.param_array) self.assertIsNot(par.param_array, pcopy.param_array)
self.assertIsNot(par.full_gradient, pcopy.full_gradient) self.assertIsNot(par.gradient_full, pcopy.gradient_full)
self.assertTrue(pcopy.checkgrad()) self.assertTrue(pcopy.checkgrad())
self.assert_(np.any(pcopy.gradient!=0.0)) self.assert_(np.any(pcopy.gradient!=0.0))
pcopy.optimize('bfgs')
par.optimize('bfgs')
np.testing.assert_allclose(pcopy.param_array, par.param_array, atol=.001)
with tempfile.TemporaryFile('w+b') as f: with tempfile.TemporaryFile('w+b') as f:
par.pickle(f) par.pickle(f)
f.seek(0) f.seek(0)
pcopy = pickle.load(f) pcopy = pickle.load(f)
self.assertListEqual(par.param_array.tolist(), pcopy.param_array.tolist()) self.assertListEqual(par.param_array.tolist(), pcopy.param_array.tolist())
np.testing.assert_allclose(par.full_gradient, pcopy.full_gradient) np.testing.assert_allclose(par.gradient_full, pcopy.gradient_full)
self.assertSequenceEqual(str(par), str(pcopy)) self.assertSequenceEqual(str(par), str(pcopy))
self.assert_(pcopy.checkgrad()) self.assert_(pcopy.checkgrad())
@ -148,18 +151,18 @@ class Test(ListDictTestCase):
par.gradient = 10 par.gradient = 10
pcopy = par.copy() pcopy = par.copy()
self.assertListEqual(par.param_array.tolist(), pcopy.param_array.tolist()) self.assertListEqual(par.param_array.tolist(), pcopy.param_array.tolist())
self.assertListEqual(par.full_gradient.tolist(), pcopy.full_gradient.tolist()) self.assertListEqual(par.gradient_full.tolist(), pcopy.gradient_full.tolist())
self.assertSequenceEqual(str(par), str(pcopy)) self.assertSequenceEqual(str(par), str(pcopy))
self.assertIsNot(par.param_array, pcopy.param_array) self.assertIsNot(par.param_array, pcopy.param_array)
self.assertIsNot(par.full_gradient, pcopy.full_gradient) self.assertIsNot(par.gradient_full, pcopy.gradient_full)
with tempfile.TemporaryFile('w+b') as f: with tempfile.TemporaryFile('w+b') as f:
par.pickle(f) par.pickle(f)
f.seek(0) f.seek(0)
pcopy = pickle.load(f) pcopy = pickle.load(f)
self.assertListEqual(par.param_array.tolist(), pcopy.param_array.tolist()) self.assertListEqual(par.param_array.tolist(), pcopy.param_array.tolist())
pcopy.gradient = 10 pcopy.gradient = 10
np.testing.assert_allclose(par.full_gradient, pcopy.full_gradient) np.testing.assert_allclose(par.gradient_full, pcopy.gradient_full)
np.testing.assert_allclose(pcopy.mean.full_gradient, 10) np.testing.assert_allclose(pcopy.mean.gradient_full, 10)
self.assertSequenceEqual(str(par), str(pcopy)) self.assertSequenceEqual(str(par), str(pcopy))
def test_model_concat(self): def test_model_concat(self):
@ -167,10 +170,10 @@ class Test(ListDictTestCase):
par.randomize() par.randomize()
pcopy = par.copy() pcopy = par.copy()
self.assertListEqual(par.param_array.tolist(), pcopy.param_array.tolist()) self.assertListEqual(par.param_array.tolist(), pcopy.param_array.tolist())
self.assertListEqual(par.full_gradient.tolist(), pcopy.full_gradient.tolist()) self.assertListEqual(par.gradient_full.tolist(), pcopy.gradient_full.tolist())
self.assertSequenceEqual(str(par), str(pcopy)) self.assertSequenceEqual(str(par), str(pcopy))
self.assertIsNot(par.param_array, pcopy.param_array) self.assertIsNot(par.param_array, pcopy.param_array)
self.assertIsNot(par.full_gradient, pcopy.full_gradient) self.assertIsNot(par.gradient_full, pcopy.gradient_full)
self.assertTrue(pcopy.checkgrad()) self.assertTrue(pcopy.checkgrad())
self.assert_(np.any(pcopy.gradient!=0.0)) self.assert_(np.any(pcopy.gradient!=0.0))
with tempfile.TemporaryFile('w+b') as f: with tempfile.TemporaryFile('w+b') as f:
@ -178,7 +181,7 @@ class Test(ListDictTestCase):
f.seek(0) f.seek(0)
pcopy = pickle.load(f) pcopy = pickle.load(f)
self.assertListEqual(par.param_array.tolist(), pcopy.param_array.tolist()) self.assertListEqual(par.param_array.tolist(), pcopy.param_array.tolist())
np.testing.assert_allclose(par.full_gradient, pcopy.full_gradient) np.testing.assert_allclose(par.gradient_full, pcopy.gradient_full)
self.assertSequenceEqual(str(par), str(pcopy)) self.assertSequenceEqual(str(par), str(pcopy))
self.assert_(pcopy.checkgrad()) self.assert_(pcopy.checkgrad())

View file

@ -1,84 +1,108 @@
from ..core.parameterization.parameter_core import Observable from ..core.parameterization.parameter_core import Observable
import itertools import itertools, collections, weakref
class Cacher(object): class Cacher(object):
"""
"""
def __init__(self, operation, limit=5, ignore_args=(), force_kwargs=()): def __init__(self, operation, limit=5, ignore_args=(), force_kwargs=()):
"""
Parameters:
***********
:param callable operation: function to cache
:param int limit: depth of cacher
:param [int] ignore_args: list of indices, pointing at arguments to ignore in *args of operation(*args). This includes self!
:param [str] force_kwargs: list of kwarg names (strings). If a kwarg with that name is given, the cacher will force recompute and won't cache anything.
"""
self.limit = int(limit) self.limit = int(limit)
self.ignore_args = ignore_args self.ignore_args = ignore_args
self.force_kwargs = force_kwargs self.force_kwargs = force_kwargs
self.operation=operation self.operation=operation
self.cached_inputs = [] self.order = collections.deque()
self.cached_outputs = [] self.cached_inputs = {} # maps cache_ids to the list of ind_ids used in that cache entry
self.inputs_changed = []
#=======================================================================
# point from each ind_id to [ref(obj), cache_ids]
# 0: a weak reference to the object itself
# 1: the cache_ids in which this ind_id is used (len will be how many times we have seen this ind_id)
self.cached_input_ids = {}
#=======================================================================
self.cached_outputs = {} # point from cache_ids to outputs
self.inputs_changed = {} # point from cache_ids to bools
def combine_inputs(self, args, kw):
"Combines the args and kw in a unique way, such that ordering of kwargs does not lead to recompute"
return args + tuple(c[1] for c in sorted(kw.items(), key=lambda x: x[0]))
def prepare_cache_id(self, combined_args_kw, ignore_args):
"get the cacheid (conc. string of argument ids in order) ignoring ignore_args"
return "".join(str(id(a)) for i,a in enumerate(combined_args_kw) if i not in ignore_args)
def ensure_cache_length(self, cache_id):
"Ensures the cache is within its limits and has one place free"
if len(self.order) == self.limit:
# we have reached the limit, so lets release one element
cache_id = self.order.popleft()
combined_args_kw = self.cached_inputs[cache_id]
for ind in combined_args_kw:
ind_id = id(ind)
ref, cache_ids = self.cached_input_ids[ind_id]
if len(cache_ids) == 1 and ref() is not None:
ref().remove_observer(self, self.on_cache_changed)
del self.cached_input_ids[ind_id]
else:
cache_ids.remove(cache_id)
self.cached_input_ids[ind_id] = [ref, cache_ids]
del self.cached_outputs[cache_id]
del self.inputs_changed[cache_id]
del self.cached_inputs[cache_id]
def add_to_cache(self, cache_id, combined_args_kw, output):
self.inputs_changed[cache_id] = False
self.cached_outputs[cache_id] = output
self.order.append(cache_id)
self.cached_inputs[cache_id] = combined_args_kw
for a in combined_args_kw:
ind_id = id(a)
v = self.cached_input_ids.get(ind_id, [weakref.ref(a), []])
v[1].append(cache_id)
if len(v[1]) == 1:
a.add_observer(self, self.on_cache_changed)
self.cached_input_ids[ind_id] = v
def __call__(self, *args, **kw): def __call__(self, *args, **kw):
""" """
A wrapper function for self.operation, A wrapper function for self.operation,
""" """
#ensure that specified arguments are ignored # 1: Check whether we have forced recompute arguments:
items = sorted(kw.items(), key=lambda x: x[0])
oa_all = args + tuple(a for _,a in items)
if len(self.ignore_args) != 0:
oa = [a for i,a in itertools.chain(enumerate(args), items) if i not in self.ignore_args and i not in self.force_kwargs]
else:
oa = oa_all
# this makes sure we only add an observer once, and that None can be in args
observable_args = []
for a in oa:
if (not any(a is ai for ai in observable_args)) and a is not None:
observable_args.append(a)
#make sure that all the found argument really are observable:
#otherswise don't cache anything, pass args straight though
if not all([isinstance(arg, Observable) for arg in observable_args]):
return self.operation(*args, **kw)
if len(self.force_kwargs) != 0: if len(self.force_kwargs) != 0:
# check if there are force args, which force reloading
for k in self.force_kwargs: for k in self.force_kwargs:
if k in kw and kw[k] is not None: if k in kw and kw[k] is not None:
return self.operation(*args, **kw) return self.operation(*args, **kw)
# TODO: WARNING !!! Cache OFFSWITCH !!! WARNING
# return self.operation(*args, **kw)
#if the result is cached, return the cached computation # 2: prepare_cache_id and get the unique id string for this call
state = [all(a is b for a, b in itertools.izip_longest(args, cached_i)) for cached_i in self.cached_inputs] inputs = self.combine_inputs(args, kw)
cache_id = self.prepare_cache_id(inputs, self.ignore_args)
# 2: if anything is not cachable, we will just return the operation, without caching
if reduce(lambda a,b: a or (not isinstance(b, Observable)), inputs, False):
return self.operation(*args, **kw)
# 3&4: check whether this cache_id has been cached, then has it changed?
try: try:
if any(state): if(self.inputs_changed[cache_id]):
i = state.index(True) # 4: This happens, when elements have changed for this cache id
if self.inputs_changed[i]: self.inputs_changed[cache_id] = False
#(elements of) the args have changed since we last computed: update self.cached_outputs[cache_id] = self.operation(*args, **kw)
self.cached_outputs[i] = self.operation(*args, **kw) except KeyError:
self.inputs_changed[i] = False # 3: This is when we never saw this chache_id:
return self.cached_outputs[i] self.ensure_cache_length(cache_id)
else: self.add_to_cache(cache_id, inputs, self.operation(*args, **kw))
#first time we've seen these arguments: compute
#first make sure the depth limit isn't exceeded
if len(self.cached_inputs) == self.limit:
args_ = self.cached_inputs.pop(0)
args_ = [a for i,a in enumerate(args_) if i not in self.ignore_args and i not in self.force_kwargs]
[a.remove_observer(self, self.on_cache_changed) for a in args_ if a is not None]
self.inputs_changed.pop(0)
self.cached_outputs.pop(0)
#compute
self.cached_inputs.append(oa_all)
self.cached_outputs.append(self.operation(*args, **kw))
self.inputs_changed.append(False)
[a.add_observer(self, self.on_cache_changed) for a in observable_args]
return self.cached_outputs[-1]#return
except: except:
self.reset() self.reset()
raise raise
# 5: We have seen this cache_id and it is cached:
return self.cached_outputs[cache_id]
def on_cache_changed(self, direct, which=None): def on_cache_changed(self, direct, which=None):
""" """
@ -86,17 +110,19 @@ class Cacher(object):
this function gets 'hooked up' to the inputs when we cache them, and upon their elements being changed we update here. this function gets 'hooked up' to the inputs when we cache them, and upon their elements being changed we update here.
""" """
self.inputs_changed = [any([a is direct or a is which for a in args]) or old_ic for args, old_ic in zip(self.cached_inputs, self.inputs_changed)] for ind_id in [id(direct), id(which)]:
_, cache_ids = self.cached_input_ids.get(ind_id, [None, []])
for cache_id in cache_ids:
self.inputs_changed[cache_id] = True
def reset(self): def reset(self):
""" """
Totally reset the cache Totally reset the cache
""" """
[[a.remove_observer(self, self.on_cache_changed) for a in args if isinstance(a, Observable)] for args in self.cached_inputs] [a().remove_observer(self, self.on_cache_changed) if (a() is not None) else None for [a, _] in self.cached_input_ids.values()]
[[a.remove_observer(self, self.reset) for a in args if isinstance(a, Observable)] for args in self.cached_inputs] self.cached_input_ids = {}
self.cached_inputs = [] self.cached_outputs = {}
self.cached_outputs = [] self.inputs_changed = {}
self.inputs_changed = []
def __deepcopy__(self, memo=None): def __deepcopy__(self, memo=None):
return Cacher(self.operation, self.limit, self.ignore_args, self.force_kwargs) return Cacher(self.operation, self.limit, self.ignore_args, self.force_kwargs)
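For reference, the merged cache keys each call by the concatenated ids of its (observable) arguments and lets the arguments themselves invalidate their entries through the observer hooks. A minimal, self-contained sketch of that mechanism follows; MiniCacher and ToyObservable are illustrative stand-ins, not GPy API:

    from collections import deque

    class ToyObservable(object):
        "Stand-in for GPy's Observable: notifies registered observers on change."
        def __init__(self, value):
            self.value = value
            self._observers = []
        def add_observer(self, obs, callble):
            self._observers.append(callble)
        def set(self, value):
            self.value = value
            for callble in self._observers:
                callble(self)

    class MiniCacher(object):
        "id-keyed cache with observer-based invalidation, in the spirit of Cacher."
        def __init__(self, operation, limit=5):
            self.operation, self.limit = operation, limit
            self.order = deque()
            self.cached_outputs, self.inputs_changed = {}, {}
        def _stale(self, which, cache_id):
            self.inputs_changed[cache_id] = True
        def __call__(self, *args):
            cache_id = "".join(str(id(a)) for a in args)
            if cache_id not in self.cached_outputs:
                if len(self.order) == self.limit:      # evict the oldest entry
                    old = self.order.popleft()
                    del self.cached_outputs[old], self.inputs_changed[old]
                self.order.append(cache_id)
                self.inputs_changed[cache_id] = False
                self.cached_outputs[cache_id] = self.operation(*args)
                for a in args:                         # hook up invalidation
                    a.add_observer(self, lambda which, c=cache_id: self._stale(which, c))
            elif self.inputs_changed[cache_id]:        # recompute a stale entry
                self.inputs_changed[cache_id] = False
                self.cached_outputs[cache_id] = self.operation(*args)
            return self.cached_outputs[cache_id]

    square = MiniCacher(lambda o: o.value ** 2)
    x = ToyObservable(3)
    print square(x), square(x)   # 9 9   (second call is a cache hit)
    x.set(4)                     # observer flags the cached entry as changed
    print square(x)              # 16    (recomputed)

The point of keying on ids rather than values is that inputs are large arrays: equality checks would be expensive, while id plus change-notification is O(1) per lookup.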

View file

@@ -1,65 +1,354 @@
{
"rogers_girolami_data":{
"files":[
[
"firstcoursemldata.tar.gz"
]
],
"license":null,
"citation":"A First Course in Machine Learning. Simon Rogers and Mark Girolami: Chapman & Hall/CRC, ISBN-13: 978-1439824146",
"details":"Data from the textbook 'A First Course in Machine Learning'. Available from http://www.dcs.gla.ac.uk/~srogers/firstcourseml/.",
"urls":[
"https://www.dropbox.com/sh/7p6tu1t29idgliq/_XqlH_3nt9/"
],
"suffices":[
[
"?dl=1"
]
],
"size":21949154
},
"ankur_pose_data": { "ankur_pose_data": {
"citation": "3D Human Pose from Silhouettes by Relevance Vector Regression (In CVPR'04). A. Agarwal and B. Triggs.",
"details": "Artificially generated data of silhouettes given poses. Note that the data does not display a left/right ambiguity because across the entire data set one of the arms sticks out more the the other, disambiguating the pose as to which way the individual is facing.",
"files": [ "files": [
[ [
"ankurDataPoseSilhouette.mat" "ankurDataPoseSilhouette.mat"
] ]
], ],
"citation":"3D Human Pose from Silhouettes by Relevance Vector Regression (In CVPR'04). A. Agarwal and B. Triggs.",
"license": null, "license": null,
"size": 1,
"urls": [ "urls": [
"http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/dataset_mirror/ankur_pose_data/" "http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/dataset_mirror/ankur_pose_data/"
]
},
"boston_housing": {
"citation": "Harrison, D. and Rubinfeld, D.L. 'Hedonic prices and the demand for clean air', J. Environ. Economics & Management, vol.5, 81-102, 1978.",
"details": "The Boston Housing data relates house values in Boston to a range of input variables.",
"files": [
[
"Index",
"housing.data",
"housing.names"
]
], ],
"details":"Artificially generated data of silhouettes given poses. Note that the data does not display a left/right ambiguity because across the entire data set one of the arms sticks out more the the other, disambiguating the pose as to which way the individual is facing.", "license": null,
"size":1 "size": 51276,
"urls": [
"http://archive.ics.uci.edu/ml/machine-learning-databases/housing/"
]
},
"boxjenkins_airline": {
"citation": "Box & Jenkins (1976), in file: data/airpass, Description: International airline passengers: monthly totals in thousands. Jan 49 \\u2013 Dec 60",
"details": "International airline passengers, monthly totals from January 1949 to December 1960.",
"files": [
[
"boxjenkins_airline.csv"
]
],
"license": "You may copy and redistribute the data. You may make derivative works from the data. You may use the data for commercial purposes. You may not sublicence the data when redistributing it. You may not redistribute the data under a different license. Source attribution on any use of this data: Must refer source.",
"size": 46779,
"urls": [
"http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/dataset_mirror/boxjenkins_airline/"
]
},
"brendan_faces": {
"citation": "Frey, B. J., Colmenarez, A and Huang, T. S. Mixtures of Local Linear Subspaces for Face Recognition. Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition 1998, 32-37, June 1998. Computer Society Press, Los Alamitos, CA.",
"details": "A video of Brendan Frey's face popularized as a benchmark for visualization by the Locally Linear Embedding.",
"files": [
[
"frey_rawface.mat"
]
],
"license": null,
"size": 1100584,
"urls": [
"http://www.cs.nyu.edu/~roweis/data/"
]
},
"cmu_mocap_full": {
"citation": "Please include this in your acknowledgements: The data used in this project was obtained from mocap.cs.cmu.edu.\\nThe database was created with funding from NSF EIA-0196217.",
"details": "CMU Motion Capture data base. Captured by a Vicon motion capture system consisting of 12 infrared MX-40 cameras, each of which is capable of recording at 120 Hz with images of 4 megapixel resolution. Motions are captured in a working volume of approximately 3m x 8m. The capture subject wears 41 markers and a stylish black garment.",
"files": [
[
"allasfamc.zip"
]
],
"license": "From http://mocap.cs.cmu.edu. This data is free for use in research projects. You may include this data in commercially-sold products, but you may not resell this data directly, even in converted form. If you publish results obtained using this data, we would appreciate it if you would send the citation to your published paper to jkh+mocap@cs.cmu.edu, and also would add this text to your acknowledgments section: The data used in this project was obtained from mocap.cs.cmu.edu. The database was created with funding from NSF EIA-0196217.",
"size": null,
"urls": [
"http://mocap.cs.cmu.edu/subjects"
]
},
"creep_rupture": {
"citation": "Materials Algorithms Project Data Library: MAP_DATA_CREEP_RUPTURE. F. Brun and T. Yoshida.",
"details": "Provides 2066 creep rupture test results of steels (mainly of two kinds of steels: 2.25Cr and 9-12 wt% Cr ferritic steels). See http://www.msm.cam.ac.uk/map/data/materials/creeprupt-b.html.",
"files": [
[
"creeprupt.tar"
]
],
"license": null,
"size": 602797,
"urls": [
"http://www.msm.cam.ac.uk/map/data/tar/"
]
},
"decampos_characters": {
"citation": "T. de Campos, B. R. Babu, and M. Varma. Character recognition in natural images. VISAPP 2009.",
"details": "Examples of hand written digits taken from the de Campos et al paper on Character Recognition in Natural Images.",
"files": [
[
"characters.npy",
"digits.npy"
]
],
"license": null,
"size": 2031872,
"urls": [
"http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/dataset_mirror/decampos_digits/"
]
},
"della_gatta": {
"citation": "Direct targets of the TRP63 transcription factor revealed by a combination of gene expression profiling and reverse engineering. Giusy Della Gatta, Mukesh Bansal, Alberto Ambesi-Impiombato, Dario Antonini, Caterina Missero, and Diego di Bernardo, Genome Research 2008",
"details": "The full gene expression data set from della Gatta et al (http://www.ncbi.nlm.nih.gov/pmc/articles/PMC2413161/) processed by RMA.",
"files": [
[
"DellaGattadata.mat"
]
],
"license": null,
"size": 3729650,
"urls": [
"http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/dataset_mirror/della_gatta/"
]
},
"drosophila_protein": {
"citation": "Becker K, Balsa-Canto E, Cicin-Sain D, Hoermann A, Janssens H, et al. (2013) Reverse-Engineering Post-Transcriptional Regulation of Gap Genes in Drosophila melanogaster. PLoS Comput Biol 9(10): e1003281. doi:10.1371/journal.pcbi.1003281",
"details": "Expression of the gap genes Krüppel, knirps, and giant in Drosophila melanogaster. Data includes quantitative datasets of gap gene mRNA and protein expression to solve and fit a model of post-transcriptional regulation, and establish its structural and practical identifiability",
"files": [
[
"becker_et_al.csv"
]
],
"license": null,
"size": 0,
"urls": [
"http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/dataset_mirror/drosophila_protein/"
]
},
"epomeo_gpx": {
"citation": "",
"details": "Five different GPS traces of the same run up Mount Epomeo in Ischia. The traces are from different sources. endomondo_1 and endomondo_2 are traces from the mobile phone app Endomondo, with a split in the middle. garmin_watch_via_endomondo is the trace from a Garmin watch, with a segment missing about 4 kilometers in. viewranger_phone and viewranger_tablet are traces from a phone and a tablet through the viewranger app. The viewranger_phone data comes from the same mobile phone as the Endomondo data (i.e. there are 3 GPS devices, but one device recorded two traces).",
"files": [
[
"endomondo_1.gpx",
"endomondo_2.gpx",
"garmin_watch_via_endomondo.gpx",
"viewranger_phone.gpx",
"viewranger_tablet.gpx"
]
],
"license": null,
"size": 2031872,
"urls": [
"http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/dataset_mirror/epomeo_gpx/"
]
    },
"football_data": { "football_data": {
"citation": "",
"details": "Results of English football matches since 1993/94 season.",
"files": [ "files": [
[ [
"E0.csv", "E1.csv", "E2.csv", "E3.csv" "E0.csv",
"E1.csv",
"E2.csv",
"E3.csv"
] ]
], ],
"citation":"",
"license": null, "license": null,
"size": 1,
"urls": [ "urls": [
"http://www.football-data.co.uk/mmz4281/" "http://www.football-data.co.uk/mmz4281/"
], ]
"details":"Results of English football matches since 1993/94 season.",
"size":1
}, },
"google_trends":{ "fruitfly_tomancak": {
"citation": "",
"details": "",
"files": [ "files": [
[ [
"tomancak_exprs.csv",
"tomancak_se.csv",
"tomancak_prctile5.csv",
"tomancak_prctile25.csv",
"tomancak_prctile50.csv",
"tomancak_prctile75.csv",
"tomancak_prctile95.csv"
] ]
], ],
"citation":"",
"license": null, "license": null,
"size": 59000000,
"urls": [
"http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/dataset_mirror/fruitfly_tomancak/"
]
},
"fruitfly_tomancak_cel_files": {
"citation": "'Systematic determination of patterns of gene expression during Drosophila embryogenesis' Pavel Tomancak, Amy Beaton, Richard Weiszmann, Elaine Kwan, ShengQiang Shu, Suzanna E Lewis, Stephen Richards, Michael Ashburner, Volker Hartenstein, Susan E Celniker, and Gerald M Rubin",
"details": "Gene expression results from blastoderm development in Drosophila Melanogaster.",
"files": [
[
"embryo_tc_4_1.CEL",
"embryo_tc_4_2.CEL",
"embryo_tc_4_3.CEL",
"embryo_tc_4_4.CEL",
"embryo_tc_4_5.CEL",
"embryo_tc_4_6.CEL",
"embryo_tc_4_7.CEL",
"embryo_tc_4_8.CEL",
"embryo_tc_4_9.CEL",
"embryo_tc_4_10.CEL",
"embryo_tc_4_11.CEL",
"embryo_tc_4_12.CEL",
"embryo_tc_6_1.CEL",
"embryo_tc_6_2.CEL",
"embryo_tc_6_3.CEL",
"embryo_tc_6_4.CEL",
"embryo_tc_6_5.CEL",
"embryo_tc_6_6.CEL",
"embryo_tc_6_7.CEL",
"embryo_tc_6_8.CEL",
"embryo_tc_6_9.CEL",
"embryo_tc_6_10.CEL",
"embryo_tc_6_11.CEL",
"embryo_tc_6_12.CEL",
"embryo_tc_8_1.CEL",
"embryo_tc_8_2.CEL",
"embryo_tc_8_3.CEL",
"embryo_tc_8_4.CEL",
"embryo_tc_8_5.CEL",
"embryo_tc_8_6.CEL",
"embryo_tc_8_7.CEL",
"embryo_tc_8_8.CEL",
"embryo_tc_8_9.CEL",
"embryo_tc_8_10.CEL",
"embryo_tc_8_11.CEL",
"embryo_tc_8_12.CEL",
"CG_AffyOligo_Gadfly3_01_13_03",
"embryo_tc_rma_release2.txt",
"embryo_tc_rma_release3.txt",
"na_affy_oligo.dros",
"README.TXT"
]
],
"license": null,
"size": 389000000,
"urls": [
"ftp://ftp.fruitfly.org/pub/embryo_tc_array_data/"
]
},
"google_trends": {
"citation": "",
"details": "Google trends results.",
"files": [
[
]
],
"license": null,
"size": 0,
"urls": [ "urls": [
"http://www.google.com/trends/" "http://www.google.com/trends/"
]
},
"hapmap3": {
"citation": "Gibbs, Richard A., et al. 'The international HapMap project.' Nature 426.6968 (2003): 789-796.",
"details": "HapMap Project: Single Nucleotide Polymorphism sequenced in all human populations. \n The HapMap phase three SNP dataset - 1184 samples out of 11 populations.\n See http://www.nature.com/nature/journal/v426/n6968/abs/nature02168.html for details.\n\n SNP_matrix (A) encoding [see Paschou et all. 2007 (PCA-Correlated SNPs...)]:\n Let (B1,B2) be the alphabetically sorted bases, which occur in the j-th SNP, then\n\n / 1, iff SNPij==(B1,B1)\n Aij = | 0, iff SNPij==(B1,B2)\n \\\\ -1, iff SNPij==(B2,B2)\n\n The SNP data and the meta information (such as iid, sex and phenotype) are\n stored in the dataframe datadf, index is the Individual ID, \n with following columns for metainfo:\n\n * family_id -> Family ID\n * paternal_id -> Paternal ID\n * maternal_id -> Maternal ID\n * sex -> Sex (1=male; 2=female; other=unknown)\n * phenotype -> Phenotype (-9, or 0 for unknown)\n * population -> Population string (e.g. 'ASW' - 'YRI')\n * rest are SNP rs (ids)\n\n More information is given in infodf:\n\n * Chromosome:\n - autosomal chromosemes -> 1-22\n - X X chromosome -> 23\n - Y Y chromosome -> 24\n - XY Pseudo-autosomal region of X -> 25\n - MT Mitochondrial -> 26\n * Relative Positon (to Chromosome) [base pairs]\n\n ",
"files": [
[
"hapmap3_r2_b36_fwd.consensus.qc.poly.map.bz2",
"hapmap3_r2_b36_fwd.consensus.qc.poly.ped.bz2",
"relationships_w_pops_121708.txt"
]
        ],
        "license": "International HapMap Project Public Access License (http://hapmap.ncbi.nlm.nih.gov/cgi-perl/registration#licence)",
        "size": 3458246739,
"urls": [
"http://hapmap.ncbi.nlm.nih.gov/downloads/genotypes/latest_phaseIII_ncbi_b36/plink_format/"
]
},
"isomap_face_data": {
"citation": "A Global Geometric Framework for Nonlinear Dimensionality Reduction, J. B. Tenenbaum, V. de Silva and J. C. Langford, Science 290 (5500): 2319-2323, 22 December 2000",
"details": "Face data made available by Tenenbaum, de Silva and Langford to demonstrate isomap, available from http://isomap.stanford.edu/datasets.html.",
"files": [
[
"face_data.mat"
]
],
"license": null,
"size": 24229368,
"urls": [
"http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/dataset_mirror/isomap_face_data/"
]
},
"mauna_loa": {
"citation": "Mauna Loa Data. Dr. Pieter Tans, NOAA/ESRL (www.esrl.noaa.gov/gmd/ccgg/trends/) and Dr. Ralph Keeling, Scripps Institution of Oceanography (scrippsco2.ucsd.edu/).",
"details": "The 'average' column contains the monthly mean CO2 mole fraction determined from daily averages. The mole fraction of CO2, expressed as parts per million (ppm) is the number of molecules of CO2 in every one million molecules of dried air (water vapor removed). If there are missing days concentrated either early or late in the month, the monthly mean is corrected to the middle of the month using the average seasonal cycle. Missing months are denoted by -99.99. The 'interpolated' column includes average values from the preceding column and interpolated values where data are missing. Interpolated values are computed in two steps. First, we compute for each month the average seasonal cycle in a 7-year window around each monthly value. In this way the seasonal cycle is allowed to change slowly over time. We then determine the 'trend' value for each month by removing the seasonal cycle; this result is shown in the 'trend' column. Trend values are linearly interpolated for missing months. The interpolated monthly mean is then the sum of the average seasonal cycle value and the trend value for the missing month.\n\nNOTE: In general, the data presented for the last year are subject to change, depending on recalibration of the reference gas mixtures used, and other quality control procedures. Occasionally, earlier years may also be changed for the same reasons. Usually these changes are minor.\n\nCO2 expressed as a mole fraction in dry air, micromol/mol, abbreviated as ppm \n\n (-99.99 missing data; -1 no data for daily means in month)",
"files": [
[
"co2_mm_mlo.txt"
]
],
"license": "-------------------------------------------------------------------- USE OF NOAA ESRL DATA\n\n These data are made freely available to the public and the scientific community in the belief that their wide dissemination will lead to greater understanding and new scientific insights. The availability of these data does not constitute publication of the data. NOAA relies on the ethics and integrity of the user to insure that ESRL receives fair credit for their work. If the data are obtained for potential use in a publication or presentation, ESRL should be informed at the outset of the nature of this work. If the ESRL data are essential to the work, or if an important result or conclusion depends on the ESRL data, co-authorship may be appropriate. This should be discussed at an early stage in the work. Manuscripts using the ESRL data should be sent to ESRL for review before they are submitted for publication so we can insure that the quality and limitations of the data are accurately represented.\n\n Contact: Pieter Tans (303 497 6678; pieter.tans@noaa.gov)\n\n RECIPROCITY Use of these data implies an agreement to reciprocate. Laboratories making similar measurements agree to make their own data available to the general public and to the scientific community in an equally complete and easily accessible form. Modelers are encouraged to make available to the community, upon request, their own tools used in the interpretation of the ESRL data, namely well documented model code, transport fields, and additional information necessary for other scientists to repeat the work and to run modified versions. Model availability includes collaborative support for new users of the models.\n --------------------------------------------------------------------\n\n See www.esrl.noaa.gov/gmd/ccgg/trends/ for additional details.",
"size": 46779,
"urls": [
"ftp://aftp.cmdl.noaa.gov/products/trends/co2/"
]
},
"olivetti_faces": {
"citation": "Ferdinando Samaria and Andy Harter, Parameterisation of a Stochastic Model for Human Face Identification. Proceedings of 2nd IEEE Workshop on Applications of Computer Vision, Sarasota FL, December 1994",
"details": "Olivetti Research Labs Face data base, acquired between December 1992 and December 1994 in the Olivetti Research Lab, Cambridge (which later became AT&T Laboratories, Cambridge). When using these images please give credit to AT&T Laboratories, Cambridge. ",
"files": [
[
"att_faces.zip"
],
[
"olivettifaces.mat"
]
],
"license": null,
"size": 8561331,
"urls": [
"http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/dataset_mirror/olivetti_faces/",
"http://www.cs.nyu.edu/~roweis/data/"
]
},
"olivetti_glasses": {
"citation": "Information recorded in olivetti_faces entry. Should be used from there.",
"details": "Information recorded in olivetti_faces entry. Should be used from there.",
"files": [
[
"has_glasses.np"
],
[
"olivettifaces.mat"
]
],
"license": null,
"size": 4261047,
"urls": [
"http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/dataset_mirror/olivetti_faces/",
"http://www.cs.nyu.edu/~roweis/data/"
]
},
"olympic_marathon_men": {
"citation": null,
"details": "Olympic mens' marathon gold medal winning times from 1896 to 2012. Time given in pace (minutes per kilometer). Data is originally downloaded and collated from Wikipedia, we are not responsible for errors in the data",
"files": [
[
"olympicMarathonTimes.csv"
]
],
"license": null,
"size": 584,
"urls": [
"http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/dataset_mirror/olympic_marathon_men/"
]
}, },
"osu_accad": { "osu_accad": {
"citation": "The Open Motion Data Project by The Ohio State University Advanced Computing Center for the Arts and Design, http://accad.osu.edu/research/mocap/mocap_data.htm.",
"details": "Motion capture data of different motions from the Open Motion Data Project at Ohio State University.",
"files": [ "files": [
[ [
"swagger1TXT.ZIP", "swagger1TXT.ZIP",
@ -82,101 +371,47 @@
] ]
], ],
"license": "Data is licensed under a Creative Commons Attribution-NonCommercial-ShareAlike 3.0 Unported License (http://creativecommons.org/licenses/by-nc-sa/3.0/).", "license": "Data is licensed under a Creative Commons Attribution-NonCommercial-ShareAlike 3.0 Unported License (http://creativecommons.org/licenses/by-nc-sa/3.0/).",
"citation":"The Open Motion Data Project by The Ohio State University Advanced Computing Center for the Arts and Design, http://accad.osu.edu/research/mocap/mocap_data.htm.", "size": 15922790,
"details":"Motion capture data of different motions from the Open Motion Data Project at Ohio State University.",
"urls": [ "urls": [
"http://accad.osu.edu/research/mocap/data/", "http://accad.osu.edu/research/mocap/data/",
"http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/dataset_mirror/stick/" "http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/dataset_mirror/stick/"
], ]
"size":15922790
}, },
"isomap_face_data":{ "osu_run1": {
"citation": "The Open Motion Data Project by The Ohio State University Advanced Computing Center for the Arts and Design, http://accad.osu.edu/research/mocap/mocap_data.htm.",
"details": "Motion capture data of a stick man running from the Open Motion Data Project at Ohio State University.",
"files": [ "files": [
[ [
"face_data.mat" "run1TXT.ZIP"
]
], ],
"license":null,
"citation":"A Global Geometric Framework for Nonlinear Dimensionality Reduction, J. B. Tenenbaum, V. de Silva and J. C. Langford, Science 290 (5500): 2319-2323, 22 December 2000",
"details":"Face data made available by Tenenbaum, de Silva and Langford to demonstrate isomap, available from http://isomap.stanford.edu/datasets.html.",
"urls":[
"http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/dataset_mirror/isomap_face_data/"
],
"size":24229368
},
"boston_housing":{
"files":[
[ [
"Index", "connections.txt"
"housing.data",
"housing.names"
] ]
], ],
"license":null, "license": "Data is licensed under a Creative Commons Attribution-NonCommercial-ShareAlike 3.0 Unported License (http://creativecommons.org/licenses/by-nc-sa/3.0/).",
"citation":"Harrison, D. and Rubinfeld, D.L. 'Hedonic prices and the demand for clean air', J. Environ. Economics & Management, vol.5, 81-102, 1978.", "size": 338103,
"details":"The Boston Housing data relates house values in Boston to a range of input variables.",
"urls": [ "urls": [
"http://archive.ics.uci.edu/ml/machine-learning-databases/housing/" "http://accad.osu.edu/research/mocap/data/",
], "http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/dataset_mirror/stick/"
"size":51276
},
"cmu_mocap_full":{
"files":[
[
"allasfamc.zip"
] ]
],
"license":"From http://mocap.cs.cmu.edu. This data is free for use in research projects. You may include this data in commercially-sold products, but you may not resell this data directly, even in converted form. If you publish results obtained using this data, we would appreciate it if you would send the citation to your published paper to jkh+mocap@cs.cmu.edu, and also would add this text to your acknowledgments section: The data used in this project was obtained from mocap.cs.cmu.edu. The database was created with funding from NSF EIA-0196217.",
"citation":"Please include this in your acknowledgements: The data used in this project was obtained from mocap.cs.cmu.edu.\nThe database was created with funding from NSF EIA-0196217.",
"details":"CMU Motion Capture data base. Captured by a Vicon motion capture system consisting of 12 infrared MX-40 cameras, each of which is capable of recording at 120 Hz with images of 4 megapixel resolution. Motions are captured in a working volume of approximately 3m x 8m. The capture subject wears 41 markers and a stylish black garment.",
"urls":[
"http://mocap.cs.cmu.edu/subjects"
],
"size":null
},
"brendan_faces":{
"files":[
[
"frey_rawface.mat"
]
],
"license":null,
"citation":"Frey, B. J., Colmenarez, A and Huang, T. S. Mixtures of Local Linear Subspaces for Face Recognition. Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition 1998, 32-37, June 1998. Computer Society Press, Los Alamitos, CA.",
"details":"A video of Brendan Frey's face popularized as a benchmark for visualization by the Locally Linear Embedding.",
"urls":[
"http://www.cs.nyu.edu/~roweis/data/"
],
"size":1100584
},
"olympic_marathon_men":{
"files":[
[
"olympicMarathonTimes.csv"
]
],
"license":null,
"citation":null,
"details":"Olympic mens' marathon gold medal winning times from 1896 to 2012. Time given in pace (minutes per kilometer). Data is originally downloaded and collated from Wikipedia, we are not responsible for errors in the data",
"urls":[
"http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/dataset_mirror/olympic_marathon_men/"
],
"size":584
}, },
"pumadyn-32nm": { "pumadyn-32nm": {
"citation": "Created by Zoubin Ghahramani using the Matlab Robotics Toolbox of Peter Corke. Corke, P. I. (1996). A Robotics Toolbox for MATLAB. IEEE Robotics and Automation Magazine, 3 (1): 24-32.",
"details": "Pumadyn non linear 32 input data set with moderate noise. See http://www.cs.utoronto.ca/~delve/data/pumadyn/desc.html for details.",
"files": [ "files": [
[ [
"pumadyn-32nm.tar.gz" "pumadyn-32nm.tar.gz"
] ]
], ],
"license": "Data is made available by the Delve system at the University of Toronto", "license": "Data is made available by the Delve system at the University of Toronto",
"citation":"Created by Zoubin Ghahramani using the Matlab Robotics Toolbox of Peter Corke. Corke, P. I. (1996). A Robotics Toolbox for MATLAB. IEEE Robotics and Automation Magazine, 3 (1): 24-32.", "size": 5861646,
"details":"Pumadyn non linear 32 input data set with moderate noise. See http://www.cs.utoronto.ca/~delve/data/pumadyn/desc.html for details.",
"urls": [ "urls": [
"ftp://ftp.cs.toronto.edu/pub/neuron/delve/data/tarfiles/pumadyn-family/" "ftp://ftp.cs.toronto.edu/pub/neuron/delve/data/tarfiles/pumadyn-family/"
], ]
"size":5861646
}, },
"ripley_prnn_data": { "ripley_prnn_data": {
"citation": "Pattern Recognition and Neural Networks by B.D. Ripley (1996) Cambridge University Press ISBN 0 521 46986 7",
"details": "Data sets from Brian Ripley's Pattern Recognition and Neural Networks",
"files": [ "files": [
[ [
"Cushings.dat", "Cushings.dat",
@ -194,14 +429,90 @@
] ]
], ],
"license": null, "license": null,
"citation":"Pattern Recognition and Neural Networks by B.D. Ripley (1996) Cambridge University Press ISBN 0 521 46986 7", "size": 93565,
"details":"Data sets from Brian Ripley's Pattern Recognition and Neural Networks",
"urls": [ "urls": [
"http://www.stats.ox.ac.uk/pub/PRNN/" "http://www.stats.ox.ac.uk/pub/PRNN/"
]
},
"robot_wireless": {
"citation": "WiFi-SLAM using Gaussian Process Latent Variable Models by Brian Ferris, Dieter Fox and Neil Lawrence in IJCAI'07 Proceedings pages 2480-2485. Data used in A Unifying Probabilistic Perspective for Spectral Dimensionality Reduction: Insights and New Models by Neil D. Lawrence, JMLR 13 pg 1609--1638, 2012.",
"details": "Data created by Brian Ferris and Dieter Fox. Consists of WiFi access point strengths taken during a circuit of the Paul Allen building at the University of Washington.",
"files": [
[
"uw-floor.txt"
]
], ],
"size":93565 "license": null,
"size": 284390,
"urls": [
"http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/dataset_mirror/robot_wireless/"
]
},
"rogers_girolami_data": {
"citation": "A First Course in Machine Learning. Simon Rogers and Mark Girolami: Chapman & Hall/CRC, ISBN-13: 978-1439824146",
"details": "Data from the textbook 'A First Course in Machine Learning'. Available from http://www.dcs.gla.ac.uk/~srogers/firstcourseml/.",
"files": [
[
"firstcoursemldata.tar.gz"
]
],
"license": null,
"size": 21949154,
"suffices": [
[
"?dl=1"
]
],
"urls": [
"https://www.dropbox.com/sh/7p6tu1t29idgliq/_XqlH_3nt9/"
]
},
"singlecell": {
"citation": "Guoji Guo, Mikael Huss, Guo Qing Tong, Chaoyang Wang, Li Li Sun, Neil D. Clarke, Paul Robson, Resolution of Cell Fate Decisions Revealed by Single-Cell Gene Expression Analysis from Zygote to Blastocyst, Developmental Cell, Volume 18, Issue 4, 20 April 2010, Pages 675-685, ISSN 1534-5807, http://dx.doi.org/10.1016/j.devcel.2010.02.012. (http://www.sciencedirect.com/science/article/pii/S1534580710001103) Keywords: DEVBIO",
"details": "qPCR TaqMan array single cell experiment in mouse. The data is taken from the early stages of development when the Blastocyst is forming. At the 32 cell stage the data is already separated into the trophectoderm (TE) which goes onto form the placenta and the inner cellular mass (ICM). The ICM further differentiates into the epiblast (EPI)---which gives rise to the endoderm, mesoderm and ectoderm---and the primitive endoderm (PE) which develops into the amniotic sack. Guo et al selected 48 genes for expression measurement. They labelled the resulting cells and their labels are included as an aide to visualization.",
"files": [
[
"singlecell.csv"
]
],
"license": "ScienceDirect: http://www.elsevier.com/locate/termsandconditions?utm_source=sciencedirect&utm_medium=link&utm_campaign=terms",
"size": 233.1,
"urls": [
"http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/dataset_mirror/singlecell/"
]
},
"sod1_mouse": {
"citation": "Transcriptomic indices of fast and slow disease progression in two mouse models of amyotrophic lateral sclerosis' Nardo G1, Iennaco R, Fusi N, Heath PR, Marino M, Trolese MC, Ferraiuolo L, Lawrence N, Shaw PJ, Bendotti C Brain. 2013 Nov;136(Pt 11):3305-32. doi: 10.1093/brain/awt250. Epub 2013 Sep 24.",
"details": "Gene expression data from two separate strains of mice: C57 and 129Sv in wild type and SOD1 mutant strains.",
"files": [
[
"sod1_C57_129_exprs.csv",
"sod1_C57_129_se.csv"
]
],
"license": null,
"size": 0,
"urls": [
"http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/dataset_mirror/sod1_mouse/"
]
},
"swiss_roll": {
"citation": "A Global Geometric Framework for Nonlinear Dimensionality Reduction, J. B. Tenenbaum, V. de Silva and J. C. Langford, Science 290 (5500): 2319-2323, 22 December 2000",
"details": "Swiss roll data made available by Tenenbaum, de Silva and Langford to demonstrate isomap, available from http://isomap.stanford.edu/datasets.html.",
"files": [
[
"swiss_roll_data.mat"
]
],
"license": null,
"size": 800256,
"urls": [
"http://isomap.stanford.edu/"
]
}, },
"three_phase_oil_flow": { "three_phase_oil_flow": {
"citation": "Bishop, C. M. and G. D. James (1993). Analysis of multiphase flows using dual-energy gamma densitometry and neural networks. Nuclear Instruments and Methods in Physics Research A327, 580-593",
"details": "The three phase oil data used initially for demonstrating the Generative Topographic mapping.",
"files": [ "files": [
[ [
"DataTrnLbls.txt", "DataTrnLbls.txt",
@ -213,197 +524,23 @@
] ]
], ],
"license": null, "license": null,
"citation":"Bishop, C. M. and G. D. James (1993). Analysis of multiphase flows using dual-energy gamma densitometry and neural networks. Nuclear Instruments and Methods in Physics Research A327, 580-593", "size": 712796,
"details":"The three phase oil data used initially for demonstrating the Generative Topographic mapping.",
"urls": [ "urls": [
"http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/dataset_mirror/three_phase_oil_flow/" "http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/dataset_mirror/three_phase_oil_flow/"
],
"size":712796
},
"robot_wireless":{
"files":[
[
"uw-floor.txt"
] ]
],
"license":null,
"citation":"WiFi-SLAM using Gaussian Process Latent Variable Models by Brian Ferris, Dieter Fox and Neil Lawrence in IJCAI'07 Proceedings pages 2480-2485. Data used in A Unifying Probabilistic Perspective for Spectral Dimensionality Reduction: Insights and New Models by Neil D. Lawrence, JMLR 13 pg 1609--1638, 2012.",
"details":"Data created by Brian Ferris and Dieter Fox. Consists of WiFi access point strengths taken during a circuit of the Paul Allen building at the University of Washington.",
"urls":[
"http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/dataset_mirror/robot_wireless/"
],
"size":284390
}, },
"xw_pen": { "xw_pen": {
"citation": "Michael E. Tipping and Neil D. Lawrence. Variational inference for Student-t models: Robust Bayesian interpolation and generalised component analysis. Neurocomputing, 69:123--141, 2005",
"details": "Accelerometer pen data used for robust regression by Tipping and Lawrence.",
"files": [ "files": [
[ [
"xw_pen_15.csv" "xw_pen_15.csv"
] ]
], ],
"license": null, "license": null,
"citation":"Michael E. Tipping and Neil D. Lawrence. Variational inference for Student-t models: Robust Bayesian interpolation and generalised component analysis. Neurocomputing, 69:123--141, 2005", "size": 3410,
"details":"Accelerometer pen data used for robust regression by Tipping and Lawrence.",
"urls": [ "urls": [
"http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/dataset_mirror/xw_pen/" "http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/dataset_mirror/xw_pen/"
],
"size":3410
},
"swiss_roll":{
"files":[
[
"swiss_roll_data.mat"
] ]
],
"license":null,
"citation":"A Global Geometric Framework for Nonlinear Dimensionality Reduction, J. B. Tenenbaum, V. de Silva and J. C. Langford, Science 290 (5500): 2319-2323, 22 December 2000",
"details":"Swiss roll data made available by Tenenbaum, de Silva and Langford to demonstrate isomap, available from http://isomap.stanford.edu/datasets.html.",
"urls":[
"http://isomap.stanford.edu/"
],
"size":800256
},
"osu_run1":{
"files":[
[
"run1TXT.ZIP"
],
[
"connections.txt"
]
],
"license":"Data is licensed under a Creative Commons Attribution-NonCommercial-ShareAlike 3.0 Unported License (http://creativecommons.org/licenses/by-nc-sa/3.0/).",
"citation":"The Open Motion Data Project by The Ohio State University Advanced Computing Center for the Arts and Design, http://accad.osu.edu/research/mocap/mocap_data.htm.",
"details":"Motion capture data of a stick man running from the Open Motion Data Project at Ohio State University.",
"urls":[
"http://accad.osu.edu/research/mocap/data/",
"http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/dataset_mirror/stick/"
],
"size":338103
},
"creep_rupture":{
"files":[
[
"creeprupt.tar"
]
],
"license":null,
"citation":"Materials Algorithms Project Data Library: MAP_DATA_CREEP_RUPTURE. F. Brun and T. Yoshida.",
"details":"Provides 2066 creep rupture test results of steels (mainly of two kinds of steels: 2.25Cr and 9-12 wt% Cr ferritic steels). See http://www.msm.cam.ac.uk/map/data/materials/creeprupt-b.html.",
"urls":[
"http://www.msm.cam.ac.uk/map/data/tar/"
],
"size":602797
},
"olivetti_faces":{
"files":[
[
"att_faces.zip"
],
[
"olivettifaces.mat"
]
],
"license":null,
"citation":"Ferdinando Samaria and Andy Harter, Parameterisation of a Stochastic Model for Human Face Identification. Proceedings of 2nd IEEE Workshop on Applications of Computer Vision, Sarasota FL, December 1994",
"details":"Olivetti Research Labs Face data base, acquired between December 1992 and December 1994 in the Olivetti Research Lab, Cambridge (which later became AT&T Laboratories, Cambridge). When using these images please give credit to AT&T Laboratories, Cambridge. ",
"urls":[
"http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/dataset_mirror/olivetti_faces/",
"http://www.cs.nyu.edu/~roweis/data/"
],
"size":8561331
},
"olivetti_glasses":{
"files":[
[
"has_glasses.np"
],
[
"olivettifaces.mat"
]
],
"license":null,
"citation":"Information recorded in olivetti_faces entry. Should be used from there.",
"details":"Information recorded in olivetti_faces entry. Should be used from there.",
"urls":[
"http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/dataset_mirror/olivetti_faces/",
"http://www.cs.nyu.edu/~roweis/data/"
],
"size":4261047
},
"della_gatta":{
"files":[
[
"DellaGattadata.mat"
]
],
"license":null,
"citation":"Direct targets of the TRP63 transcription factor revealed by a combination of gene expression profiling and reverse engineering. Giusy Della Gatta, Mukesh Bansal, Alberto Ambesi-Impiombato, Dario Antonini, Caterina Missero, and Diego di Bernardo, Genome Research 2008",
"details":"The full gene expression data set from della Gatta et al (http://www.ncbi.nlm.nih.gov/pmc/articles/PMC2413161/) processed by RMA.",
"urls":[
"http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/dataset_mirror/della_gatta/"
],
"size":3729650
},
"epomeo_gpx":{
"files":[
[
"endomondo_1.gpx",
"endomondo_2.gpx",
"garmin_watch_via_endomondo.gpx",
"viewranger_phone.gpx",
"viewranger_tablet.gpx"
]
],
"license":null,
"citation":"",
"details":"Five different GPS traces of the same run up Mount Epomeo in Ischia. The traces are from different sources. endomondo_1 and endomondo_2 are traces from the mobile phone app Endomondo, with a split in the middle. garmin_watch_via_endomondo is the trace from a Garmin watch, with a segment missing about 4 kilometers in. viewranger_phone and viewranger_tablet are traces from a phone and a tablet through the viewranger app. The viewranger_phone data comes from the same mobile phone as the Endomondo data (i.e. there are 3 GPS devices, but one device recorded two traces).",
"urls":[
"http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/dataset_mirror/epomeo_gpx/"
],
"size":2031872
},
"mauna_loa":{
"files":[
[
"co2_mm_mlo.txt"
]
],
"license":"-------------------------------------------------------------------- USE OF NOAA ESRL DATA\n\n These data are made freely available to the public and the scientific community in the belief that their wide dissemination will lead to greater understanding and new scientific insights. The availability of these data does not constitute publication of the data. NOAA relies on the ethics and integrity of the user to insure that ESRL receives fair credit for their work. If the data are obtained for potential use in a publication or presentation, ESRL should be informed at the outset of the nature of this work. If the ESRL data are essential to the work, or if an important result or conclusion depends on the ESRL data, co-authorship may be appropriate. This should be discussed at an early stage in the work. Manuscripts using the ESRL data should be sent to ESRL for review before they are submitted for publication so we can insure that the quality and limitations of the data are accurately represented.\n\n Contact: Pieter Tans (303 497 6678; pieter.tans@noaa.gov)\n\n RECIPROCITY Use of these data implies an agreement to reciprocate. Laboratories making similar measurements agree to make their own data available to the general public and to the scientific community in an equally complete and easily accessible form. Modelers are encouraged to make available to the community, upon request, their own tools used in the interpretation of the ESRL data, namely well documented model code, transport fields, and additional information necessary for other scientists to repeat the work and to run modified versions. Model availability includes collaborative support for new users of the models.\n --------------------------------------------------------------------\n\n See www.esrl.noaa.gov/gmd/ccgg/trends/ for additional details.",
"citation":"Mauna Loa Data. Dr. Pieter Tans, NOAA/ESRL (www.esrl.noaa.gov/gmd/ccgg/trends/) and Dr. Ralph Keeling, Scripps Institution of Oceanography (scrippsco2.ucsd.edu/).",
"details":"The 'average' column contains the monthly mean CO2 mole fraction determined from daily averages. The mole fraction of CO2, expressed as parts per million (ppm) is the number of molecules of CO2 in every one million molecules of dried air (water vapor removed). If there are missing days concentrated either early or late in the month, the monthly mean is corrected to the middle of the month using the average seasonal cycle. Missing months are denoted by -99.99. The 'interpolated' column includes average values from the preceding column and interpolated values where data are missing. Interpolated values are computed in two steps. First, we compute for each month the average seasonal cycle in a 7-year window around each monthly value. In this way the seasonal cycle is allowed to change slowly over time. We then determine the 'trend' value for each month by removing the seasonal cycle; this result is shown in the 'trend' column. Trend values are linearly interpolated for missing months. The interpolated monthly mean is then the sum of the average seasonal cycle value and the trend value for the missing month.\n\nNOTE: In general, the data presented for the last year are subject to change, depending on recalibration of the reference gas mixtures used, and other quality control procedures. Occasionally, earlier years may also be changed for the same reasons. Usually these changes are minor.\n\nCO2 expressed as a mole fraction in dry air, micromol/mol, abbreviated as ppm \n\n (-99.99 missing data; -1 no data for daily means in month)",
"urls":[
"ftp://aftp.cmdl.noaa.gov/products/trends/co2/"
],
"size":46779
},
"boxjenkins_airline":{
"files":[
[
"boxjenkins_airline.csv"
]
],
"license":"You may copy and redistribute the data. You may make derivative works from the data. You may use the data for commercial purposes. You may not sublicence the data when redistributing it. You may not redistribute the data under a different license. Source attribution on any use of this data: Must refer source.",
"citation":"Box & Jenkins (1976), in file: data/airpass, Description: International airline passengers: monthly totals in thousands. Jan 49 Dec 60",
"details":"International airline passengers, monthly totals from January 1949 to December 1960.",
"urls":[
"http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/dataset_mirror/boxjenkins_airline/"
],
"size":46779
},
"decampos_characters":{
"files":[
[
"characters.npy",
"digits.npy"
]
],
"license":null,
"citation":"T. de Campos, B. R. Babu, and M. Varma. Character recognition in natural images. VISAPP 2009.",
"details":"Examples of hand written digits taken from the de Campos et al paper on Character Recognition in Natural Images.",
"urls":[
"http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/dataset_mirror/decampos_digits/"
],
"size":2031872
}
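Each entry in this index pairs download URLs with file lists, plus license, citation and size metadata. A minimal sketch of resolving one entry to a concrete URL, assuming the index above is saved as data_resources.json (a hypothetical file name):

    import json

    resources = json.load(open('data_resources.json'))
    entry = resources['boxjenkins_airline']
    url = entry['urls'][0] + entry['files'][0][0]
    print url            # .../dataset_mirror/boxjenkins_airline/boxjenkins_airline.csv
    print entry['size']  # 46779, the expected size on disk in bytes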

View file

@@ -12,6 +12,8 @@ import datetime
 import json
 import re
+from config import *
+
 ipython_available=True
 try:
     import IPython
@@ -29,7 +31,8 @@ def reporthook(a,b,c):
     sys.stdout.flush()

 # Global variables
-data_path = os.path.join(os.path.dirname(__file__), 'datasets')
+data_path = os.path.expandvars(config.get('datasets', 'dir'))
+#data_path = os.path.join(os.path.dirname(__file__), 'datasets')
 default_seed = 10000
 overide_manual_authorize=False
 neil_url = 'http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/dataset_mirror/'
@@ -108,7 +111,11 @@ def download_url(url, store_directory, save_name = None, messages = True, suffix
        raise ValueError('Tried url ' + url + suffix + ' and received server error ' + str(response.code))
    with open(save_name, 'wb') as f:
        meta = response.info()
-        file_size = int(meta.getheaders("Content-Length")[0])
+        content_length_str = meta.getheaders("Content-Length")
+        if content_length_str:
+            file_size = int(content_length_str[0])
+        else:
+            file_size = None
        status = ""
        file_size_dl = 0
        block_sz = 8192
@@ -120,9 +127,15 @@ def download_url(url, store_directory, save_name = None, messages = True, suffix
            file_size_dl += len(buff)
            f.write(buff)
            sys.stdout.write(" "*(len(status)) + "\r")
-            status = r"[{perc: <{ll}}] {dl:7.3f}/{full:.3f}MB".format(dl=file_size_dl/(1.*1e6),
-                                                                      full=file_size/(1.*1e6), ll=line_length,
-                                                                      perc="="*int(line_length*float(file_size_dl)/file_size))
+            if file_size:
+                status = r"[{perc: <{ll}}] {dl:7.3f}/{full:.3f}MB".format(dl=file_size_dl/(1048576.),
+                                                                          full=file_size/(1048576.), ll=line_length,
+                                                                          perc="="*int(line_length*float(file_size_dl)/file_size))
+            else:
+                status = r"[{perc: <{ll}}] {dl:7.3f}MB".format(dl=file_size_dl/(1048576.),
+                                                               ll=line_length,
+                                                               perc="."*int(line_length*float(file_size_dl/(10*1048576.))))
            sys.stdout.write(status)
            sys.stdout.flush()
    sys.stdout.write(" "*(len(status)) + "\r")
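The two status formats above differ only in whether a total size is known: a `=` bar scaled by the fraction downloaded, or a `.` bar that simply grows with bytes received. A standalone check of the known-size branch (the byte counts are invented):

    line_length = 30
    file_size, file_size_dl = 2 * 1048576, 1048576   # pretend 1MB of a 2MB file fetched
    print r"[{perc: <{ll}}] {dl:7.3f}/{full:.3f}MB".format(
        dl=file_size_dl / (1048576.), full=file_size / (1048576.), ll=line_length,
        perc="=" * int(line_length * float(file_size_dl) / file_size))
    # prints: [===============               ]   1.000/2.000MB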
@@ -350,6 +363,68 @@ def football_data(season='1314', data_set='football_data'):
     Y = table[:, 4:]
     return data_details_return({'X': X, 'Y': Y}, data_set)

+def sod1_mouse(data_set='sod1_mouse'):
+    if not data_available(data_set):
+        download_data(data_set)
+    from pandas import read_csv
+    dirpath = os.path.join(data_path, data_set)
+    filename = os.path.join(dirpath, 'sod1_C57_129_exprs.csv')
+    Y = read_csv(filename, header=0, index_col=0)
+    num_repeats = 4
+    num_time = 4
+    num_cond = 4
+    X = 1
+    return data_details_return({'X': X, 'Y': Y}, data_set)
+
+def fruitfly_tomancak(data_set='fruitfly_tomancak', gene_number=None):
+    if not data_available(data_set):
+        download_data(data_set)
+    from pandas import read_csv
+    dirpath = os.path.join(data_path, data_set)
+    filename = os.path.join(dirpath, 'tomancak_exprs.csv')
+    Y = read_csv(filename, header=0, index_col=0).T
+    num_repeats = 3
+    num_time = 12
+    xt = np.linspace(0, num_time - 1, num_time)
+    xr = np.linspace(0, num_repeats - 1, num_repeats)
+    xtime, xrepeat = np.meshgrid(xt, xr)
+    X = np.vstack((xtime.flatten(), xrepeat.flatten())).T
+    return data_details_return({'X': X, 'Y': Y, 'gene_number': gene_number}, data_set)
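For reference, fruitfly_tomancak lays its inputs out as all (time, repeat) pairs. Shrunk to toy sizes of 3 time points and 2 repeats, the grid construction above gives:

    import numpy as np
    xt = np.linspace(0, 2, 3)                 # 3 time points
    xr = np.linspace(0, 1, 2)                 # 2 repeats
    xtime, xrepeat = np.meshgrid(xt, xr)
    X = np.vstack((xtime.flatten(), xrepeat.flatten())).T
    print X   # rows: (0,0) (1,0) (2,0) (0,1) (1,1) (2,1), time first, repeat second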
+def drosophila_protein(data_set='drosophila_protein'):
+    if not data_available(data_set):
+        download_data(data_set)
+    from pandas import read_csv
+    dirpath = os.path.join(data_path, data_set)
+    filename = os.path.join(dirpath, 'becker_et_al.csv')
+    Y = read_csv(filename, header=0)
+    return data_details_return({'Y': Y}, data_set)
+
+def drosophila_knirps(data_set='drosophila_protein'):
+    if not data_available(data_set):
+        download_data(data_set)
+    from pandas import read_csv
+    dirpath = os.path.join(data_path, data_set)
+    filename = os.path.join(dirpath, 'becker_et_al.csv')
+    # in the csv file we have facts_kni and ext_kni. We treat facts_kni as protein and ext_kni as mRNA
+    df = read_csv(filename, header=0)
+    t = df['t'][:, None]
+    x = df['x'][:, None]
+    g = df['expression1'][:, None]
+    p = df['expression2'][:, None]
+    leng = x.shape[0]
+    T = np.vstack([t, t])
+    S = np.vstack([x, x])
+    inx = np.zeros(leng * 2)[:, None]
+    inx[leng:leng * 2] = 1
+    X = np.hstack([T, S, inx])
+    Y = np.vstack([g, p])
+    return data_details_return({'Y': Y, 'X': X}, data_set)
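drosophila_knirps stacks the two expression channels into one regression problem: the (t, x) inputs are duplicated and a third 0/1 column records which channel each row belongs to. A toy version with two rows per channel (the array values here are invented):

    import numpy as np
    t = np.array([[0.], [1.]])                 # times
    x = np.array([[10.], [20.]])               # positions
    g = np.array([[.3], [.5]])                 # channel 0 (expression1)
    p = np.array([[1.3], [1.5]])               # channel 1 (expression2)
    leng = x.shape[0]
    inx = np.zeros((2 * leng, 1))
    inx[leng:] = 1                             # second half of the rows is channel 1
    X = np.hstack([np.vstack([t, t]), np.vstack([x, x]), inx])
    Y = np.vstack([g, p])
    print X.shape, Y.shape                     # (4, 3) (4, 1)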
 # This will be for downloading google trends data.
 def google_trends(query_terms=['big data', 'machine learning', 'data science'], data_set='google_trends'):
     """Data downloaded from Google trends for given query terms. Warning, if you use this function multiple times in a row you get blocked due to terms of service violations."""
@@ -718,6 +793,21 @@ def hapmap3(data_set='hapmap3'):
                   populations=populations)
     return hapmap

+def singlecell(data_set='singlecell'):
+    if not data_available(data_set):
+        download_data(data_set)
+    from pandas import read_csv
+    dirpath = os.path.join(data_path, data_set)
+    filename = os.path.join(dirpath, 'singlecell.csv')
+    Y = read_csv(filename, header=0, index_col=0)
+    genes = Y.columns
+    labels = Y.index
+    # data = np.loadtxt(os.path.join(dirpath, 'singlecell.csv'), delimiter=",", dtype=str)
+    return data_details_return({'Y': Y, 'info': "qPCR singlecell experiment in Mouse, measuring 48 gene expressions in 1-64 cell states. The labels have been created as in Guo et al. [2010]",
+                                'genes': genes, 'labels': labels,
+                                }, data_set)
+
 def swiss_roll_1000():
     return swiss_roll(num_samples=1000)
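With the config-driven data_path in place, the new loaders are called like the existing ones. A sketch, assuming the module is importable as GPy.util.datasets and the data has already been fetched and authorized through the usual prompts:

    import GPy
    d = GPy.util.datasets.fruitfly_tomancak()
    print d['X'].shape, d['Y'].shape   # X rows are (time, repeat) pairs; Y holds the expression table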

View file

@@ -1 +0,0 @@
-These datasets are reproduced for educational purposes only. No copyright infringement intended!

File diff suppressed because it is too large

View file

@@ -1,310 +0,0 @@
63.03 22.55 39.61 40.48 98.67 -0.25 1
39.06 10.06 25.02 29 114.41 4.56 1
68.83 22.22 50.09 46.61 105.99 -3.53 1
69.3 24.65 44.31 44.64 101.87 11.21 1
49.71 9.65 28.32 40.06 108.17 7.92 1
40.25 13.92 25.12 26.33 130.33 2.23 1
53.43 15.86 37.17 37.57 120.57 5.99 1
45.37 10.76 29.04 34.61 117.27 -10.68 1
43.79 13.53 42.69 30.26 125 13.29 1
36.69 5.01 41.95 31.68 84.24 0.66 1
49.71 13.04 31.33 36.67 108.65 -7.83 1
31.23 17.72 15.5 13.52 120.06 0.5 1
48.92 19.96 40.26 28.95 119.32 8.03 1
53.57 20.46 33.1 33.11 110.97 7.04 1
57.3 24.19 47 33.11 116.81 5.77 1
44.32 12.54 36.1 31.78 124.12 5.42 1
63.83 20.36 54.55 43.47 112.31 -0.62 1
31.28 3.14 32.56 28.13 129.01 3.62 1
38.7 13.44 31 25.25 123.16 1.43 1
41.73 12.25 30.12 29.48 116.59 -1.24 1
43.92 14.18 37.83 29.74 134.46 6.45 1
54.92 21.06 42.2 33.86 125.21 2.43 1
63.07 24.41 54 38.66 106.42 15.78 1
45.54 13.07 30.3 32.47 117.98 -4.99 1
36.13 22.76 29 13.37 115.58 -3.24 1
54.12 26.65 35.33 27.47 121.45 1.57 1
26.15 10.76 14 15.39 125.2 -10.09 1
43.58 16.51 47 27.07 109.27 8.99 1
44.55 21.93 26.79 22.62 111.07 2.65 1
66.88 24.89 49.28 41.99 113.48 -2.01 1
50.82 15.4 42.53 35.42 112.19 10.87 1
46.39 11.08 32.14 35.31 98.77 6.39 1
44.94 17.44 27.78 27.49 117.98 5.57 1
38.66 12.99 40 25.68 124.91 2.7 1
59.6 32 46.56 27.6 119.33 1.47 1
31.48 7.83 24.28 23.66 113.83 4.39 1
32.09 6.99 36 25.1 132.26 6.41 1
35.7 19.44 20.7 16.26 137.54 -0.26 1
55.84 28.85 47.69 27 123.31 2.81 1
52.42 19.01 35.87 33.41 116.56 1.69 1
35.49 11.7 15.59 23.79 106.94 -3.46 1
46.44 8.4 29.04 38.05 115.48 2.05 1
53.85 19.23 32.78 34.62 121.67 5.33 1
66.29 26.33 47.5 39.96 121.22 -0.8 1
56.03 16.3 62.28 39.73 114.02 -2.33 1
50.91 23.02 47 27.9 117.42 -2.53 1
48.33 22.23 36.18 26.1 117.38 6.48 1
41.35 16.58 30.71 24.78 113.27 -4.5 1
40.56 17.98 34 22.58 121.05 -1.54 1
41.77 17.9 20.03 23.87 118.36 2.06 1
55.29 20.44 34 34.85 115.88 3.56 1
74.43 41.56 27.7 32.88 107.95 5 1
50.21 29.76 36.1 20.45 128.29 5.74 1
30.15 11.92 34 18.23 112.68 11.46 1
41.17 17.32 33.47 23.85 116.38 -9.57 1
47.66 13.28 36.68 34.38 98.25 6.27 1
43.35 7.47 28.07 35.88 112.78 5.75 1
46.86 15.35 38 31.5 116.25 1.66 1
43.2 19.66 35 23.54 124.85 -2.92 1
48.11 14.93 35.56 33.18 124.06 7.95 1
74.38 32.05 78.77 42.32 143.56 56.13 1
89.68 32.7 83.13 56.98 129.96 92.03 1
44.53 9.43 52 35.1 134.71 29.11 1
77.69 21.38 64.43 56.31 114.82 26.93 1
76.15 21.94 82.96 54.21 123.93 10.43 1
83.93 41.29 62 42.65 115.01 26.59 1
78.49 22.18 60 56.31 118.53 27.38 1
75.65 19.34 64.15 56.31 95.9 69.55 1
72.08 18.95 51 53.13 114.21 1.01 1
58.6 -0.26 51.5 58.86 102.04 28.06 1
72.56 17.39 52 55.18 119.19 32.11 1
86.9 32.93 47.79 53.97 135.08 101.72 1
84.97 33.02 60.86 51.95 125.66 74.33 1
55.51 20.1 44 35.42 122.65 34.55 1
72.22 23.08 91 49.14 137.74 56.8 1
70.22 39.82 68.12 30.4 148.53 145.38 1
86.75 36.04 69.22 50.71 139.41 110.86 1
58.78 7.67 53.34 51.12 98.5 51.58 1
67.41 17.44 60.14 49.97 111.12 33.16 1
47.74 12.09 39 35.66 117.51 21.68 1
77.11 30.47 69.48 46.64 112.15 70.76 1
74.01 21.12 57.38 52.88 120.21 74.56 1
88.62 29.09 47.56 59.53 121.76 51.81 1
81.1 24.79 77.89 56.31 151.84 65.21 1
76.33 42.4 57.2 33.93 124.27 50.13 1
45.44 9.91 45 35.54 163.07 20.32 1
59.79 17.88 59.21 41.91 119.32 22.12 1
44.91 10.22 44.63 34.7 130.08 37.36 1
56.61 16.8 42 39.81 127.29 24.02 1
71.19 23.9 43.7 47.29 119.86 27.28 1
81.66 28.75 58.23 52.91 114.77 30.61 1
70.95 20.16 62.86 50.79 116.18 32.52 1
85.35 15.84 71.67 69.51 124.42 76.02 1
58.1 14.84 79.65 43.26 113.59 50.24 1
94.17 15.38 67.71 78.79 114.89 53.26 1
57.52 33.65 50.91 23.88 140.98 148.75 1
96.66 19.46 90.21 77.2 120.67 64.08 1
74.72 19.76 82.74 54.96 109.36 33.31 1
77.66 22.43 93.89 55.22 123.06 61.21 1
58.52 13.92 41.47 44.6 115.51 30.39 1
84.59 30.36 65.48 54.22 108.01 25.12 1
79.94 18.77 63.31 61.16 114.79 38.54 1
70.4 13.47 61.2 56.93 102.34 25.54 1
49.78 6.47 53 43.32 110.86 25.34 1
77.41 29.4 63.23 48.01 118.45 93.56 1
65.01 27.6 50.95 37.41 116.58 7.02 1
65.01 9.84 57.74 55.18 94.74 49.7 1
78.43 33.43 76.28 45 138.55 77.16 1
63.17 6.33 63 56.84 110.64 42.61 1
68.61 15.08 63.01 53.53 123.43 39.5 1
63.9 13.71 62.12 50.19 114.13 41.42 1
85 29.61 83.35 55.39 126.91 71.32 1
42.02 -6.55 67.9 48.58 111.59 27.34 1
69.76 19.28 48.5 50.48 96.49 51.17 1
80.99 36.84 86.96 44.14 141.09 85.87 1
129.83 8.4 48.38 121.43 107.69 418.54 1
70.48 12.49 62.42 57.99 114.19 56.9 1
86.04 38.75 47.87 47.29 122.09 61.99 1
65.54 24.16 45.78 41.38 136.44 16.38 1
60.75 15.75 43.2 45 113.05 31.69 1
54.74 12.1 41 42.65 117.64 40.38 1
83.88 23.08 87.14 60.8 124.65 80.56 1
80.07 48.07 52.4 32.01 110.71 67.73 1
65.67 10.54 56.49 55.12 109.16 53.93 1
74.72 14.32 32.5 60.4 107.18 37.02 1
48.06 5.69 57.06 42.37 95.44 32.84 1
70.68 21.7 59.18 48.97 103.01 27.81 1
80.43 17 66.54 63.43 116.44 57.78 1
90.51 28.27 69.81 62.24 100.89 58.82 1
77.24 16.74 49.78 60.5 110.69 39.79 1
50.07 9.12 32.17 40.95 99.71 26.77 1
69.78 13.78 58 56 118.93 17.91 1
69.63 21.12 52.77 48.5 116.8 54.82 1
81.75 20.12 70.56 61.63 119.43 55.51 1
52.2 17.21 78.09 34.99 136.97 54.94 1
77.12 30.35 77.48 46.77 110.61 82.09 1
88.02 39.84 81.77 48.18 116.6 56.77 1
83.4 34.31 78.42 49.09 110.47 49.67 1
72.05 24.7 79.87 47.35 107.17 56.43 1
85.1 21.07 91.73 64.03 109.06 38.03 1
69.56 15.4 74.44 54.16 105.07 29.7 1
89.5 48.9 72 40.6 134.63 118.35 1
85.29 18.28 100.74 67.01 110.66 58.88 1
60.63 20.6 64.54 40.03 117.23 104.86 1
60.04 14.31 58.04 45.73 105.13 30.41 1
85.64 42.69 78.75 42.95 105.14 42.89 1
85.58 30.46 78.23 55.12 114.87 68.38 1
55.08 -3.76 56 58.84 109.92 31.77 1
65.76 9.83 50.82 55.92 104.39 39.31 1
79.25 23.94 40.8 55.3 98.62 36.71 1
81.11 20.69 60.69 60.42 94.02 40.51 1
48.03 3.97 58.34 44.06 125.35 35 1
63.4 14.12 48.14 49.29 111.92 31.78 1
57.29 15.15 64 42.14 116.74 30.34 1
41.19 5.79 42.87 35.39 103.35 27.66 1
66.8 14.55 72.08 52.25 82.46 41.69 1
79.48 26.73 70.65 52.74 118.59 61.7 1
44.22 1.51 46.11 42.71 108.63 42.81 1
57.04 0.35 49.2 56.69 103.05 52.17 1
64.27 12.51 68.7 51.77 95.25 39.41 1
92.03 35.39 77.42 56.63 115.72 58.06 1
67.26 7.19 51.7 60.07 97.8 42.14 1
118.14 38.45 50.84 79.7 81.02 74.04 1
115.92 37.52 76.8 78.41 104.7 81.2 1
53.94 9.31 43.1 44.64 124.4 25.08 1
83.7 20.27 77.11 63.43 125.48 69.28 1
56.99 6.87 57.01 50.12 109.98 36.81 1
72.34 16.42 59.87 55.92 70.08 12.07 1
95.38 24.82 95.16 70.56 89.31 57.66 1
44.25 1.1 38 43.15 98.27 23.91 1
64.81 15.17 58.84 49.64 111.68 21.41 1
78.4 14.04 79.69 64.36 104.73 12.39 1
56.67 13.46 43.77 43.21 93.69 21.11 1
50.83 9.06 56.3 41.76 79 23.04 1
61.41 25.38 39.1 36.03 103.4 21.84 1
56.56 8.96 52.58 47.6 98.78 50.7 1
67.03 13.28 66.15 53.75 100.72 33.99 1
80.82 19.24 61.64 61.58 89.47 44.17 1
80.65 26.34 60.9 54.31 120.1 52.47 1
68.72 49.43 68.06 19.29 125.02 54.69 1
37.9 4.48 24.71 33.42 157.85 33.61 1
64.62 15.23 67.63 49.4 90.3 31.33 1
75.44 31.54 89.6 43.9 106.83 54.97 1
71 37.52 84.54 33.49 125.16 67.77 1
81.06 20.8 91.78 60.26 125.43 38.18 1
91.47 24.51 84.62 66.96 117.31 52.62 1
81.08 21.26 78.77 59.83 90.07 49.16 1
60.42 5.27 59.81 55.15 109.03 30.27 1
85.68 38.65 82.68 47.03 120.84 61.96 1
82.41 29.28 77.05 53.13 117.04 62.77 1
43.72 9.81 52 33.91 88.43 40.88 1
86.47 40.3 61.14 46.17 97.4 55.75 1
74.47 33.28 66.94 41.19 146.47 124.98 1
70.25 10.34 76.37 59.91 119.24 32.67 1
72.64 18.93 68 53.71 116.96 25.38 1
71.24 5.27 86 65.97 110.7 38.26 1
63.77 12.76 65.36 51.01 89.82 56 1
58.83 37.58 125.74 21.25 135.63 117.31 1
74.85 13.91 62.69 60.95 115.21 33.17 1
75.3 16.67 61.3 58.63 118.88 31.58 1
63.36 20.02 67.5 43.34 131 37.56 1
67.51 33.28 96.28 34.24 145.6 88.3 1
76.31 41.93 93.28 34.38 132.27 101.22 1
73.64 9.71 63 63.92 98.73 26.98 1
56.54 14.38 44.99 42.16 101.72 25.77 1
80.11 33.94 85.1 46.17 125.59 100.29 1
95.48 46.55 59 48.93 96.68 77.28 1
74.09 18.82 76.03 55.27 128.41 73.39 1
87.68 20.37 93.82 67.31 120.94 76.73 1
48.26 16.42 36.33 31.84 94.88 28.34 1
38.51 16.96 35.11 21.54 127.63 7.99 -1
54.92 18.97 51.6 35.95 125.85 2 -1
44.36 8.95 46.9 35.42 129.22 4.99 -1
48.32 17.45 48 30.87 128.98 -0.91 -1
45.7 10.66 42.58 35.04 130.18 -3.39 -1
30.74 13.35 35.9 17.39 142.41 -2.01 -1
50.91 6.68 30.9 44.24 118.15 -1.06 -1
38.13 6.56 50.45 31.57 132.11 6.34 -1
51.62 15.97 35 35.66 129.39 1.01 -1
64.31 26.33 50.96 37.98 106.18 3.12 -1
44.49 21.79 31.47 22.7 113.78 -0.28 -1
54.95 5.87 53 49.09 126.97 -0.63 -1
56.1 13.11 62.64 43 116.23 31.17 -1
69.4 18.9 75.97 50.5 103.58 -0.44 -1
89.83 22.64 90.56 67.2 100.5 3.04 -1
59.73 7.72 55.34 52 125.17 3.24 -1
63.96 16.06 63.12 47.9 142.36 6.3 -1
61.54 19.68 52.89 41.86 118.69 4.82 -1
38.05 8.3 26.24 29.74 123.8 3.89 -1
43.44 10.1 36.03 33.34 137.44 -3.11 -1
65.61 23.14 62.58 42.47 124.13 -4.08 -1
53.91 12.94 39 40.97 118.19 5.07 -1
43.12 13.82 40.35 29.3 128.52 0.97 -1
40.68 9.15 31.02 31.53 139.12 -2.51 -1
37.73 9.39 42 28.35 135.74 13.68 -1
63.93 19.97 40.18 43.96 113.07 -11.06 -1
61.82 13.6 64 48.22 121.78 1.3 -1
62.14 13.96 58 48.18 133.28 4.96 -1
69 13.29 55.57 55.71 126.61 10.83 -1
56.45 19.44 43.58 37 139.19 -1.86 -1
41.65 8.84 36.03 32.81 116.56 -6.05 -1
51.53 13.52 35 38.01 126.72 13.93 -1
39.09 5.54 26.93 33.55 131.58 -0.76 -1
34.65 7.51 43 27.14 123.99 -4.08 -1
63.03 27.34 51.61 35.69 114.51 7.44 -1
47.81 10.69 54 37.12 125.39 -0.4 -1
46.64 15.85 40 30.78 119.38 9.06 -1
49.83 16.74 28 33.09 121.44 1.91 -1
47.32 8.57 35.56 38.75 120.58 1.63 -1
50.75 20.24 37 30.52 122.34 2.29 -1
36.16 -0.81 33.63 36.97 135.94 -2.09 -1
40.75 1.84 50 38.91 139.25 0.67 -1
42.92 -5.85 58 48.76 121.61 -3.36 -1
63.79 21.35 66 42.45 119.55 12.38 -1
72.96 19.58 61.01 53.38 111.23 0.81 -1
67.54 14.66 58 52.88 123.63 25.97 -1
54.75 9.75 48 45 123.04 8.24 -1
50.16 -2.97 42 53.13 131.8 -8.29 -1
40.35 10.19 37.97 30.15 128.01 0.46 -1
63.62 16.93 49.35 46.68 117.09 -0.36 -1
54.14 11.94 43 42.21 122.21 0.15 -1
74.98 14.92 53.73 60.05 105.65 1.59 -1
42.52 14.38 25.32 28.14 128.91 0.76 -1
33.79 3.68 25.5 30.11 128.33 -1.78 -1
54.5 6.82 47 47.68 111.79 -4.41 -1
48.17 9.59 39.71 38.58 135.62 5.36 -1
46.37 10.22 42.7 36.16 121.25 -0.54 -1
52.86 9.41 46.99 43.45 123.09 1.86 -1
57.15 16.49 42.84 40.66 113.81 5.02 -1
37.14 16.48 24 20.66 125.01 7.37 -1
51.31 8.88 57 42.44 126.47 -2.14 -1
42.52 16.54 42 25.97 120.63 7.88 -1
39.36 7.01 37 32.35 117.82 1.9 -1
35.88 1.11 43.46 34.77 126.92 -1.63 -1
43.19 9.98 28.94 33.22 123.47 1.74 -1
67.29 16.72 51 50.57 137.59 4.96 -1
51.33 13.63 33.26 37.69 131.31 1.79 -1
65.76 13.21 44 52.55 129.39 -1.98 -1
40.41 -1.33 30.98 41.74 119.34 -6.17 -1
48.8 18.02 52 30.78 139.15 10.44 -1
50.09 13.43 34.46 36.66 119.13 3.09 -1
64.26 14.5 43.9 49.76 115.39 5.95 -1
53.68 13.45 41.58 40.24 113.91 2.74 -1
49 13.11 51.87 35.88 126.4 0.54 -1
59.17 14.56 43.2 44.6 121.04 2.83 -1
67.8 16.55 43.26 51.25 119.69 4.87 -1
61.73 17.11 46.9 44.62 120.92 3.09 -1
33.04 -0.32 19.07 33.37 120.39 9.35 -1
74.57 15.72 58.62 58.84 105.42 0.6 -1
44.43 14.17 32.24 30.26 131.72 -3.6 -1
36.42 13.88 20.24 22.54 126.08 0.18 -1
51.08 14.21 35.95 36.87 115.8 6.91 -1
34.76 2.63 29.5 32.12 127.14 -0.46 -1
48.9 5.59 55.5 43.32 137.11 19.85 -1
46.24 10.06 37 36.17 128.06 -5.1 -1
46.43 6.62 48.1 39.81 130.35 2.45 -1
39.66 16.21 36.67 23.45 131.92 -4.97 -1
45.58 18.76 33.77 26.82 116.8 3.13 -1
66.51 20.9 31.73 45.61 128.9 1.52 -1
82.91 29.89 58.25 53.01 110.71 6.08 -1
50.68 6.46 35 44.22 116.59 -0.21 -1
89.01 26.08 69.02 62.94 111.48 6.06 -1
54.6 21.49 29.36 33.11 118.34 -1.47 -1
34.38 2.06 32.39 32.32 128.3 -3.37 -1
45.08 12.31 44.58 32.77 147.89 -8.94 -1
47.9 13.62 36 34.29 117.45 -4.25 -1
53.94 20.72 29.22 33.22 114.37 -0.42 -1
61.45 22.69 46.17 38.75 125.67 -2.71 -1
45.25 8.69 41.58 36.56 118.55 0.21 -1
33.84 5.07 36.64 28.77 123.95 -0.2 -1

View file

@@ -1,22 +0,0 @@
LFHD, RFHD
RFHD, RBHD
RBHD, LBHD
LBHD, LFHD
LELB, LWRB
LWRB, LFIN
LELB, LSHO
LSHO, RSHO
RSHO, STRN
LSHO, STRN
RSHO, RELB
RELB, RWRB
RWRB, RFIN
LSHO, LFWT
RSHO, RFWT
LFWT, RFWT
LFWT, LKNE
RFWT, RKNE
LKNE, LHEE
RKNE, RHEE
RMT5, RHEE
LMT5, LHEE

View file

@@ -1,168 +0,0 @@
import json
neil_url = 'http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/dataset_mirror/'
sam_url = 'http://www.cs.nyu.edu/~roweis/data/'
cmu_url = 'http://mocap.cs.cmu.edu/subjects/'
data_resources = {'ankur_pose_data' : {'urls' : [neil_url + 'ankur_pose_data/'],
'files' : [['ankurDataPoseSilhouette.mat']],
'license' : None,
'citation' : """3D Human Pose from Silhouettes by Relevance Vector Regression (In CVPR'04). A. Agarwal and B. Triggs.""",
'details' : """Artificially generated data of silhouettes given poses. Note that the data does not display a left/right ambiguity because across the entire data set one of the arms sticks out more the the other, disambiguating the pose as to which way the individual is facing."""},
'boston_housing' : {'urls' : ['http://archive.ics.uci.edu/ml/machine-learning-databases/housing/'],
'files' : [['Index', 'housing.data', 'housing.names']],
'citation' : """Harrison, D. and Rubinfeld, D.L. 'Hedonic prices and the demand for clean air', J. Environ. Economics & Management, vol.5, 81-102, 1978.""",
'details' : """The Boston Housing data relates house values in Boston to a range of input variables.""",
'license' : None,
'size' : 51276
},
'brendan_faces' : {'urls' : [sam_url],
'files': [['frey_rawface.mat']],
'citation' : 'Frey, B. J., Colmenarez, A and Huang, T. S. Mixtures of Local Linear Subspaces for Face Recognition. Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition 1998, 32-37, June 1998. Computer Society Press, Los Alamitos, CA.',
'details' : """A video of Brendan Frey's face popularized as a benchmark for visualization by the Locally Linear Embedding.""",
'license': None,
'size' : 1100584},
'cmu_mocap_full' : {'urls' : ['http://mocap.cs.cmu.edu'],
'files' : [['allasfamc.zip']],
'citation' : """Please include this in your acknowledgements: The data used in this project was obtained from mocap.cs.cmu.edu.'
'The database was created with funding from NSF EIA-0196217.""",
'details' : """CMU Motion Capture data base. Captured by a Vicon motion capture system consisting of 12 infrared MX-40 cameras, each of which is capable of recording at 120 Hz with images of 4 megapixel resolution. Motions are captured in a working volume of approximately 3m x 8m. The capture subject wears 41 markers and a stylish black garment.""",
'license' : """From http://mocap.cs.cmu.edu. This data is free for use in research projects. You may include this data in commercially-sold products, but you may not resell this data directly, even in converted form. If you publish results obtained using this data, we would appreciate it if you would send the citation to your published paper to jkh+mocap@cs.cmu.edu, and also would add this text to your acknowledgments section: The data used in this project was obtained from mocap.cs.cmu.edu. The database was created with funding from NSF EIA-0196217.""",
'size' : None},
'creep_rupture' : {'urls' : ['http://www.msm.cam.ac.uk/map/data/tar/'],
'files' : [['creeprupt.tar']],
'citation' : 'Materials Algorithms Project Data Library: MAP_DATA_CREEP_RUPTURE. F. Brun and T. Yoshida.',
'details' : """Provides 2066 creep rupture test results of steels (mainly of two kinds of steels: 2.25Cr and 9-12 wt% Cr ferritic steels). See http://www.msm.cam.ac.uk/map/data/materials/creeprupt-b.html.""",
'license' : None,
'size' : 602797},
'della_gatta' : {'urls' : [neil_url + 'della_gatta/'],
'files': [['DellaGattadata.mat']],
'citation' : 'Direct targets of the TRP63 transcription factor revealed by a combination of gene expression profiling and reverse engineering. Giusy Della Gatta, Mukesh Bansal, Alberto Ambesi-Impiombato, Dario Antonini, Caterina Missero, and Diego di Bernardo, Genome Research 2008',
'details': "The full gene expression data set from della Gatta et al (http://www.ncbi.nlm.nih.gov/pmc/articles/PMC2413161/) processed by RMA.",
'license':None,
'size':3729650},
'epomeo_gpx' : {'urls' : [neil_url + 'epomeo_gpx/'],
'files': [['endomondo_1.gpx', 'endomondo_2.gpx', 'garmin_watch_via_endomondo.gpx','viewranger_phone.gpx','viewranger_tablet.gpx']],
'citation' : '',
'details': "Five different GPS traces of the same run up Mount Epomeo in Ischia. The traces are from different sources. endomondo_1 and endomondo_2 are traces from the mobile phone app Endomondo, with a split in the middle. garmin_watch_via_endomondo is the trace from a Garmin watch, with a segment missing about 4 kilometers in. viewranger_phone and viewranger_tablet are traces from a phone and a tablet through the viewranger app. The viewranger_phone data comes from the same mobile phone as the Endomondo data (i.e. there are 3 GPS devices, but one device recorded two traces).",
'license':None,
'size': 2031872},
'three_phase_oil_flow': {'urls' : [neil_url + 'three_phase_oil_flow/'],
'files' : [['DataTrnLbls.txt', 'DataTrn.txt', 'DataTst.txt', 'DataTstLbls.txt', 'DataVdn.txt', 'DataVdnLbls.txt']],
'citation' : 'Bishop, C. M. and G. D. James (1993). Analysis of multiphase flows using dual-energy gamma densitometry and neural networks. Nuclear Instruments and Methods in Physics Research A327, 580-593',
'details' : """The three phase oil data used initially for demonstrating the Generative Topographic mapping.""",
'license' : None,
'size' : 712796},
'rogers_girolami_data' : {'urls' : ['https://www.dropbox.com/sh/7p6tu1t29idgliq/_XqlH_3nt9/'],
'files' : [['firstcoursemldata.tar.gz']],
'suffices' : [['?dl=1']],
'citation' : 'A First Course in Machine Learning. Simon Rogers and Mark Girolami: Chapman & Hall/CRC, ISBN-13: 978-1439824146',
'details' : """Data from the textbook 'A First Course in Machine Learning'. Available from http://www.dcs.gla.ac.uk/~srogers/firstcourseml/.""",
'license' : None,
'size' : 21949154},
'olivetti_faces' : {'urls' : [neil_url + 'olivetti_faces/', sam_url],
'files' : [['att_faces.zip'], ['olivettifaces.mat']],
'citation' : 'Ferdinando Samaria and Andy Harter, Parameterisation of a Stochastic Model for Human Face Identification. Proceedings of 2nd IEEE Workshop on Applications of Computer Vision, Sarasota FL, December 1994',
'details' : """Olivetti Research Labs Face data base, acquired between December 1992 and December 1994 in the Olivetti Research Lab, Cambridge (which later became AT&T Laboratories, Cambridge). When using these images please give credit to AT&T Laboratories, Cambridge. """,
'license': None,
'size' : 8561331},
'olympic_marathon_men' : {'urls' : [neil_url + 'olympic_marathon_men/'],
'files' : [['olympicMarathonTimes.csv']],
'citation' : None,
'details' : """Olympic mens' marathon gold medal winning times from 1896 to 2012. Time given in pace (minutes per kilometer). Data is originally downloaded and collated from Wikipedia, we are not responsible for errors in the data""",
'license': None,
'size' : 584},
'osu_run1' : {'urls': ['http://accad.osu.edu/research/mocap/data/', neil_url + 'stick/'],
'files': [['run1TXT.ZIP'],['connections.txt']],
'details' : "Motion capture data of a stick man running from the Open Motion Data Project at Ohio State University.",
'citation' : 'The Open Motion Data Project by The Ohio State University Advanced Computing Center for the Arts and Design, http://accad.osu.edu/research/mocap/mocap_data.htm.',
'license' : 'Data is licensed under a Creative Commons Attribution-NonCommercial-ShareAlike 3.0 Unported License (http://creativecommons.org/licenses/by-nc-sa/3.0/).',
'size': 338103},
'osu_accad' : {'urls': ['http://accad.osu.edu/research/mocap/data/', neil_url + 'stick/'],
'files': [['swagger1TXT.ZIP','handspring1TXT.ZIP','quickwalkTXT.ZIP','run1TXT.ZIP','sprintTXT.ZIP','dogwalkTXT.ZIP','camper_04TXT.ZIP','dance_KB3_TXT.ZIP','per20_TXT.ZIP','perTWO07_TXT.ZIP','perTWO13_TXT.ZIP','perTWO14_TXT.ZIP','perTWO15_TXT.ZIP','perTWO16_TXT.ZIP'],['connections.txt']],
'details' : "Motion capture data of different motions from the Open Motion Data Project at Ohio State University.",
'citation' : 'The Open Motion Data Project by The Ohio State University Advanced Computing Center for the Arts and Design, http://accad.osu.edu/research/mocap/mocap_data.htm.',
'license' : 'Data is licensed under a Creative Commons Attribution-NonCommercial-ShareAlike 3.0 Unported License (http://creativecommons.org/licenses/by-nc-sa/3.0/).',
'size': 15922790},
'pumadyn-32nm' : {'urls' : ['ftp://ftp.cs.toronto.edu/pub/neuron/delve/data/tarfiles/pumadyn-family/'],
'files' : [['pumadyn-32nm.tar.gz']],
'details' : """Pumadyn non linear 32 input data set with moderate noise. See http://www.cs.utoronto.ca/~delve/data/pumadyn/desc.html for details.""",
'citation' : """Created by Zoubin Ghahramani using the Matlab Robotics Toolbox of Peter Corke. Corke, P. I. (1996). A Robotics Toolbox for MATLAB. IEEE Robotics and Automation Magazine, 3 (1): 24-32.""",
'license' : """Data is made available by the Delve system at the University of Toronto""",
'size' : 5861646},
'robot_wireless' : {'urls' : [neil_url + 'robot_wireless/'],
'files' : [['uw-floor.txt']],
'citation' : """WiFi-SLAM using Gaussian Process Latent Variable Models by Brian Ferris, Dieter Fox and Neil Lawrence in IJCAI'07 Proceedings pages 2480-2485. Data used in A Unifying Probabilistic Perspective for Spectral Dimensionality Reduction: Insights and New Models by Neil D. Lawrence, JMLR 13 pg 1609--1638, 2012.""",
'details' : """Data created by Brian Ferris and Dieter Fox. Consists of WiFi access point strengths taken during a circuit of the Paul Allen building at the University of Washington.""",
'license' : None,
'size' : 284390},
'swiss_roll' : {'urls' : ['http://isomap.stanford.edu/'],
'files' : [['swiss_roll_data.mat']],
'details' : """Swiss roll data made available by Tenenbaum, de Silva and Langford to demonstrate isomap, available from http://isomap.stanford.edu/datasets.html.""",
'citation' : 'A Global Geometric Framework for Nonlinear Dimensionality Reduction, J. B. Tenenbaum, V. de Silva and J. C. Langford, Science 290 (5500): 2319-2323, 22 December 2000',
'license' : None,
'size' : 800256},
'ripley_prnn_data' : {'urls' : ['http://www.stats.ox.ac.uk/pub/PRNN/'],
'files' : [['Cushings.dat', 'README', 'crabs.dat', 'fglass.dat', 'fglass.grp', 'pima.te', 'pima.tr', 'pima.tr2', 'synth.te', 'synth.tr', 'viruses.dat', 'virus3.dat']],
'details' : """Data sets from Brian Ripley's Pattern Recognition and Neural Networks""",
'citation': """Pattern Recognition and Neural Networks by B.D. Ripley (1996) Cambridge University Press ISBN 0 521 46986 7""",
'license' : None,
'size' : 93565},
'isomap_face_data' : {'urls' : [neil_url + 'isomap_face_data/'],
'files' : [['face_data.mat']],
'details' : """Face data made available by Tenenbaum, de Silva and Langford to demonstrate isomap, available from http://isomap.stanford.edu/datasets.html.""",
'citation' : 'A Global Geometric Framework for Nonlinear Dimensionality Reduction, J. B. Tenenbaum, V. de Silva and J. C. Langford, Science 290 (5500): 2319-2323, 22 December 2000',
'license' : None,
'size' : 24229368},
'xw_pen' : {'urls' : [neil_url + 'xw_pen/'],
'files' : [['xw_pen_15.csv']],
'details' : """Accelerometer pen data used for robust regression by Tipping and Lawrence.""",
'citation' : 'Michael E. Tipping and Neil D. Lawrence. Variational inference for Student-t models: Robust Bayesian interpolation and generalised component analysis. Neurocomputing, 69:123--141, 2005',
'license' : None,
'size' : 3410},
'hapmap3' : {'urls' : ['http://hapmap.ncbi.nlm.nih.gov/downloads/genotypes/latest_phaseIII_ncbi_b36/plink_format/'],
'files' : [['hapmap3_r2_b36_fwd.consensus.qc.poly.map.bz2', 'hapmap3_r2_b36_fwd.consensus.qc.poly.ped.bz2', 'relationships_w_pops_121708.txt']],
'details' : """
HapMap Project: Single Nucleotide Polymorphisms sequenced in all human populations.
The HapMap phase three SNP dataset - 1184 samples from 11 populations.
See http://www.nature.com/nature/journal/v426/n6968/abs/nature02168.html for details.
SNP_matrix (A) encoding [see Paschou et al. 2007 (PCA-Correlated SNPs...)]:
Let (B1,B2) be the alphabetically sorted bases, which occur in the j-th SNP, then
/ 1, iff SNPij==(B1,B1)
Aij = | 0, iff SNPij==(B1,B2)
\ -1, iff SNPij==(B2,B2)
The SNP data and the meta information (such as iid, sex and phenotype) are
stored in the dataframe datadf, whose index is the Individual ID,
with the following columns for metainfo:
* family_id -> Family ID
* paternal_id -> Paternal ID
* maternal_id -> Maternal ID
* sex -> Sex (1=male; 2=female; other=unknown)
* phenotype -> Phenotype (-9, or 0 for unknown)
* population -> Population string (e.g. 'ASW' - 'YRI')
* rest are SNP rs (ids)
More information is given in infodf:
* Chromosome:
- autosomal chromosomes -> 1-22
- X X chromosome -> 23
- Y Y chromosome -> 24
- XY Pseudo-autosomal region of X -> 25
- MT Mitochondrial -> 26
* Relative Position (to Chromosome) [base pairs]
""",
'citation': """Gibbs, Richard A., et al. "The international HapMap project." Nature 426.6968 (2003): 789-796.""",
'license' : """International HapMap Project Public Access License (http://hapmap.ncbi.nlm.nih.gov/cgi-perl/registration#licence)""",
'size' : 2*1729092237 + 62265},
}
with open('data_resources.json', 'w') as f:
print "writing data_resources"
json.dump(data_resources, f)
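
As an illustration only (not part of the deleted script), here is a minimal sketch of the SNP encoding described in the 'hapmap3' details above; encode_snp and its argument names are hypothetical, and the style matches the script's Python 2:

# Hypothetical helper illustrating the hapmap3 SNP encoding: given the
# alphabetically sorted bases (b1, b2) observed for a SNP, a genotype maps
# to 1 (homozygous b1), 0 (heterozygous) or -1 (homozygous b2).
def encode_snp(genotype, b1, b2):
    if genotype == (b1, b1):
        return 1
    if genotype == (b1, b2):
        return 0
    if genotype == (b2, b2):
        return -1
    raise ValueError("unexpected genotype %s for bases (%s, %s)" % (genotype, b1, b2))

print encode_snp(('A', 'G'), 'A', 'G')  # prints 0 (heterozygous)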
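
Similarly, a minimal usage sketch, assuming the script above has already been run so that data_resources.json exists in the current directory; the lookup key and the printed fields come from the entries defined above:

import json

# Load the resource index written by the script above
# (assumes data_resources.json has been generated).
with open('data_resources.json') as f:
    data_resources = json.load(f)

# Assemble the full URL of the first file of one entry.
entry = data_resources['boston_housing']
print entry['urls'][0] + entry['files'][0][0]  # .../housing/Index
print entry['size']                            # 51276 (bytes)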

View file

@@ -1,769 +0,0 @@
6,148,72,35,0,33.6,0.627,50,1
1,85,66,29,0,26.6,0.351,31,0
8,183,64,0,0,23.3,0.672,32,1
1,89,66,23,94,28.1,0.167,21,0
0,137,40,35,168,43.1,2.288,33,1
5,116,74,0,0,25.6,0.201,30,0
3,78,50,32,88,31.0,0.248,26,1
10,115,0,0,0,35.3,0.134,29,0
2,197,70,45,543,30.5,0.158,53,1
8,125,96,0,0,0.0,0.232,54,1
4,110,92,0,0,37.6,0.191,30,0
10,168,74,0,0,38.0,0.537,34,1
10,139,80,0,0,27.1,1.441,57,0
1,189,60,23,846,30.1,0.398,59,1
5,166,72,19,175,25.8,0.587,51,1
7,100,0,0,0,30.0,0.484,32,1
0,118,84,47,230,45.8,0.551,31,1
7,107,74,0,0,29.6,0.254,31,1
1,103,30,38,83,43.3,0.183,33,0
1,115,70,30,96,34.6,0.529,32,1
3,126,88,41,235,39.3,0.704,27,0
8,99,84,0,0,35.4,0.388,50,0
7,196,90,0,0,39.8,0.451,41,1
9,119,80,35,0,29.0,0.263,29,1
11,143,94,33,146,36.6,0.254,51,1
10,125,70,26,115,31.1,0.205,41,1
7,147,76,0,0,39.4,0.257,43,1
1,97,66,15,140,23.2,0.487,22,0
13,145,82,19,110,22.2,0.245,57,0
5,117,92,0,0,34.1,0.337,38,0
5,109,75,26,0,36.0,0.546,60,0
3,158,76,36,245,31.6,0.851,28,1
3,88,58,11,54,24.8,0.267,22,0
6,92,92,0,0,19.9,0.188,28,0
10,122,78,31,0,27.6,0.512,45,0
4,103,60,33,192,24.0,0.966,33,0
11,138,76,0,0,33.2,0.420,35,0
9,102,76,37,0,32.9,0.665,46,1
2,90,68,42,0,38.2,0.503,27,1
4,111,72,47,207,37.1,1.390,56,1
3,180,64,25,70,34.0,0.271,26,0
7,133,84,0,0,40.2,0.696,37,0
7,106,92,18,0,22.7,0.235,48,0
9,171,110,24,240,45.4,0.721,54,1
7,159,64,0,0,27.4,0.294,40,0
0,180,66,39,0,42.0,1.893,25,1
1,146,56,0,0,29.7,0.564,29,0
2,71,70,27,0,28.0,0.586,22,0
7,103,66,32,0,39.1,0.344,31,1
7,105,0,0,0,0.0,0.305,24,0
1,103,80,11,82,19.4,0.491,22,0
1,101,50,15,36,24.2,0.526,26,0
5,88,66,21,23,24.4,0.342,30,0
8,176,90,34,300,33.7,0.467,58,1
7,150,66,42,342,34.7,0.718,42,0
1,73,50,10,0,23.0,0.248,21,0
7,187,68,39,304,37.7,0.254,41,1
0,100,88,60,110,46.8,0.962,31,0
0,146,82,0,0,40.5,1.781,44,0
0,105,64,41,142,41.5,0.173,22,0
2,84,0,0,0,0.0,0.304,21,0
8,133,72,0,0,32.9,0.270,39,1
5,44,62,0,0,25.0,0.587,36,0
2,141,58,34,128,25.4,0.699,24,0
7,114,66,0,0,32.8,0.258,42,1
5,99,74,27,0,29.0,0.203,32,0
0,109,88,30,0,32.5,0.855,38,1
2,109,92,0,0,42.7,0.845,54,0
1,95,66,13,38,19.6,0.334,25,0
4,146,85,27,100,28.9,0.189,27,0
2,100,66,20,90,32.9,0.867,28,1
5,139,64,35,140,28.6,0.411,26,0
13,126,90,0,0,43.4,0.583,42,1
4,129,86,20,270,35.1,0.231,23,0
1,79,75,30,0,32.0,0.396,22,0
1,0,48,20,0,24.7,0.140,22,0
7,62,78,0,0,32.6,0.391,41,0
5,95,72,33,0,37.7,0.370,27,0
0,131,0,0,0,43.2,0.270,26,1
2,112,66,22,0,25.0,0.307,24,0
3,113,44,13,0,22.4,0.140,22,0
2,74,0,0,0,0.0,0.102,22,0
7,83,78,26,71,29.3,0.767,36,0
0,101,65,28,0,24.6,0.237,22,0
5,137,108,0,0,48.8,0.227,37,1
2,110,74,29,125,32.4,0.698,27,0
13,106,72,54,0,36.6,0.178,45,0
2,100,68,25,71,38.5,0.324,26,0
15,136,70,32,110,37.1,0.153,43,1
1,107,68,19,0,26.5,0.165,24,0
1,80,55,0,0,19.1,0.258,21,0
4,123,80,15,176,32.0,0.443,34,0
7,81,78,40,48,46.7,0.261,42,0
4,134,72,0,0,23.8,0.277,60,1
2,142,82,18,64,24.7,0.761,21,0
6,144,72,27,228,33.9,0.255,40,0
2,92,62,28,0,31.6,0.130,24,0
1,71,48,18,76,20.4,0.323,22,0
6,93,50,30,64,28.7,0.356,23,0
1,122,90,51,220,49.7,0.325,31,1
1,163,72,0,0,39.0,1.222,33,1
1,151,60,0,0,26.1,0.179,22,0
0,125,96,0,0,22.5,0.262,21,0
1,81,72,18,40,26.6,0.283,24,0
2,85,65,0,0,39.6,0.930,27,0
1,126,56,29,152,28.7,0.801,21,0
1,96,122,0,0,22.4,0.207,27,0
4,144,58,28,140,29.5,0.287,37,0
3,83,58,31,18,34.3,0.336,25,0
0,95,85,25,36,37.4,0.247,24,1
3,171,72,33,135,33.3,0.199,24,1
8,155,62,26,495,34.0,0.543,46,1
1,89,76,34,37,31.2,0.192,23,0
4,76,62,0,0,34.0,0.391,25,0
7,160,54,32,175,30.5,0.588,39,1
4,146,92,0,0,31.2,0.539,61,1
5,124,74,0,0,34.0,0.220,38,1
5,78,48,0,0,33.7,0.654,25,0
4,97,60,23,0,28.2,0.443,22,0
4,99,76,15,51,23.2,0.223,21,0
0,162,76,56,100,53.2,0.759,25,1
6,111,64,39,0,34.2,0.260,24,0
2,107,74,30,100,33.6,0.404,23,0
5,132,80,0,0,26.8,0.186,69,0
0,113,76,0,0,33.3,0.278,23,1
1,88,30,42,99,55.0,0.496,26,1
3,120,70,30,135,42.9,0.452,30,0
1,118,58,36,94,33.3,0.261,23,0
1,117,88,24,145,34.5,0.403,40,1
0,105,84,0,0,27.9,0.741,62,1
4,173,70,14,168,29.7,0.361,33,1
9,122,56,0,0,33.3,1.114,33,1
3,170,64,37,225,34.5,0.356,30,1
8,84,74,31,0,38.3,0.457,39,0
2,96,68,13,49,21.1,0.647,26,0
2,125,60,20,140,33.8,0.088,31,0
0,100,70,26,50,30.8,0.597,21,0
0,93,60,25,92,28.7,0.532,22,0
0,129,80,0,0,31.2,0.703,29,0
5,105,72,29,325,36.9,0.159,28,0
3,128,78,0,0,21.1,0.268,55,0
5,106,82,30,0,39.5,0.286,38,0
2,108,52,26,63,32.5,0.318,22,0
10,108,66,0,0,32.4,0.272,42,1
4,154,62,31,284,32.8,0.237,23,0
0,102,75,23,0,0.0,0.572,21,0
9,57,80,37,0,32.8,0.096,41,0
2,106,64,35,119,30.5,1.400,34,0
5,147,78,0,0,33.7,0.218,65,0
2,90,70,17,0,27.3,0.085,22,0
1,136,74,50,204,37.4,0.399,24,0
4,114,65,0,0,21.9,0.432,37,0
9,156,86,28,155,34.3,1.189,42,1
1,153,82,42,485,40.6,0.687,23,0
8,188,78,0,0,47.9,0.137,43,1
7,152,88,44,0,50.0,0.337,36,1
2,99,52,15,94,24.6,0.637,21,0
1,109,56,21,135,25.2,0.833,23,0
2,88,74,19,53,29.0,0.229,22,0
17,163,72,41,114,40.9,0.817,47,1
4,151,90,38,0,29.7,0.294,36,0
7,102,74,40,105,37.2,0.204,45,0
0,114,80,34,285,44.2,0.167,27,0
2,100,64,23,0,29.7,0.368,21,0
0,131,88,0,0,31.6,0.743,32,1
6,104,74,18,156,29.9,0.722,41,1
3,148,66,25,0,32.5,0.256,22,0
4,120,68,0,0,29.6,0.709,34,0
4,110,66,0,0,31.9,0.471,29,0
3,111,90,12,78,28.4,0.495,29,0
6,102,82,0,0,30.8,0.180,36,1
6,134,70,23,130,35.4,0.542,29,1
2,87,0,23,0,28.9,0.773,25,0
1,79,60,42,48,43.5,0.678,23,0
2,75,64,24,55,29.7,0.370,33,0
8,179,72,42,130,32.7,0.719,36,1
6,85,78,0,0,31.2,0.382,42,0
0,129,110,46,130,67.1,0.319,26,1
5,143,78,0,0,45.0,0.190,47,0
5,130,82,0,0,39.1,0.956,37,1
6,87,80,0,0,23.2,0.084,32,0
0,119,64,18,92,34.9,0.725,23,0
1,0,74,20,23,27.7,0.299,21,0
5,73,60,0,0,26.8,0.268,27,0
4,141,74,0,0,27.6,0.244,40,0
7,194,68,28,0,35.9,0.745,41,1
8,181,68,36,495,30.1,0.615,60,1
1,128,98,41,58,32.0,1.321,33,1
8,109,76,39,114,27.9,0.640,31,1
5,139,80,35,160,31.6,0.361,25,1
3,111,62,0,0,22.6,0.142,21,0
9,123,70,44,94,33.1,0.374,40,0
7,159,66,0,0,30.4,0.383,36,1
11,135,0,0,0,52.3,0.578,40,1
8,85,55,20,0,24.4,0.136,42,0
5,158,84,41,210,39.4,0.395,29,1
1,105,58,0,0,24.3,0.187,21,0
3,107,62,13,48,22.9,0.678,23,1
4,109,64,44,99,34.8,0.905,26,1
4,148,60,27,318,30.9,0.150,29,1
0,113,80,16,0,31.0,0.874,21,0
1,138,82,0,0,40.1,0.236,28,0
0,108,68,20,0,27.3,0.787,32,0
2,99,70,16,44,20.4,0.235,27,0
6,103,72,32,190,37.7,0.324,55,0
5,111,72,28,0,23.9,0.407,27,0
8,196,76,29,280,37.5,0.605,57,1
5,162,104,0,0,37.7,0.151,52,1
1,96,64,27,87,33.2,0.289,21,0
7,184,84,33,0,35.5,0.355,41,1
2,81,60,22,0,27.7,0.290,25,0
0,147,85,54,0,42.8,0.375,24,0
7,179,95,31,0,34.2,0.164,60,0
0,140,65,26,130,42.6,0.431,24,1
9,112,82,32,175,34.2,0.260,36,1
12,151,70,40,271,41.8,0.742,38,1
5,109,62,41,129,35.8,0.514,25,1
6,125,68,30,120,30.0,0.464,32,0
5,85,74,22,0,29.0,1.224,32,1
5,112,66,0,0,37.8,0.261,41,1
0,177,60,29,478,34.6,1.072,21,1
2,158,90,0,0,31.6,0.805,66,1
7,119,0,0,0,25.2,0.209,37,0
7,142,60,33,190,28.8,0.687,61,0
1,100,66,15,56,23.6,0.666,26,0
1,87,78,27,32,34.6,0.101,22,0
0,101,76,0,0,35.7,0.198,26,0
3,162,52,38,0,37.2,0.652,24,1
4,197,70,39,744,36.7,2.329,31,0
0,117,80,31,53,45.2,0.089,24,0
4,142,86,0,0,44.0,0.645,22,1
6,134,80,37,370,46.2,0.238,46,1
1,79,80,25,37,25.4,0.583,22,0
4,122,68,0,0,35.0,0.394,29,0
3,74,68,28,45,29.7,0.293,23,0
4,171,72,0,0,43.6,0.479,26,1
7,181,84,21,192,35.9,0.586,51,1
0,179,90,27,0,44.1,0.686,23,1
9,164,84,21,0,30.8,0.831,32,1
0,104,76,0,0,18.4,0.582,27,0
1,91,64,24,0,29.2,0.192,21,0
4,91,70,32,88,33.1,0.446,22,0
3,139,54,0,0,25.6,0.402,22,1
6,119,50,22,176,27.1,1.318,33,1
2,146,76,35,194,38.2,0.329,29,0
9,184,85,15,0,30.0,1.213,49,1
10,122,68,0,0,31.2,0.258,41,0
0,165,90,33,680,52.3,0.427,23,0
9,124,70,33,402,35.4,0.282,34,0
1,111,86,19,0,30.1,0.143,23,0
9,106,52,0,0,31.2,0.380,42,0
2,129,84,0,0,28.0,0.284,27,0
2,90,80,14,55,24.4,0.249,24,0
0,86,68,32,0,35.8,0.238,25,0
12,92,62,7,258,27.6,0.926,44,1
1,113,64,35,0,33.6,0.543,21,1
3,111,56,39,0,30.1,0.557,30,0
2,114,68,22,0,28.7,0.092,25,0
1,193,50,16,375,25.9,0.655,24,0
11,155,76,28,150,33.3,1.353,51,1
3,191,68,15,130,30.9,0.299,34,0
3,141,0,0,0,30.0,0.761,27,1
4,95,70,32,0,32.1,0.612,24,0
3,142,80,15,0,32.4,0.200,63,0
4,123,62,0,0,32.0,0.226,35,1
5,96,74,18,67,33.6,0.997,43,0
0,138,0,0,0,36.3,0.933,25,1
2,128,64,42,0,40.0,1.101,24,0
0,102,52,0,0,25.1,0.078,21,0
2,146,0,0,0,27.5,0.240,28,1
10,101,86,37,0,45.6,1.136,38,1
2,108,62,32,56,25.2,0.128,21,0
3,122,78,0,0,23.0,0.254,40,0
1,71,78,50,45,33.2,0.422,21,0
13,106,70,0,0,34.2,0.251,52,0
2,100,70,52,57,40.5,0.677,25,0
7,106,60,24,0,26.5,0.296,29,1
0,104,64,23,116,27.8,0.454,23,0
5,114,74,0,0,24.9,0.744,57,0
2,108,62,10,278,25.3,0.881,22,0
0,146,70,0,0,37.9,0.334,28,1
10,129,76,28,122,35.9,0.280,39,0
7,133,88,15,155,32.4,0.262,37,0
7,161,86,0,0,30.4,0.165,47,1
2,108,80,0,0,27.0,0.259,52,1
7,136,74,26,135,26.0,0.647,51,0
5,155,84,44,545,38.7,0.619,34,0
1,119,86,39,220,45.6,0.808,29,1
4,96,56,17,49,20.8,0.340,26,0
5,108,72,43,75,36.1,0.263,33,0
0,78,88,29,40,36.9,0.434,21,0
0,107,62,30,74,36.6,0.757,25,1
2,128,78,37,182,43.3,1.224,31,1
1,128,48,45,194,40.5,0.613,24,1
0,161,50,0,0,21.9,0.254,65,0
6,151,62,31,120,35.5,0.692,28,0
2,146,70,38,360,28.0,0.337,29,1
0,126,84,29,215,30.7,0.520,24,0
14,100,78,25,184,36.6,0.412,46,1
8,112,72,0,0,23.6,0.840,58,0
0,167,0,0,0,32.3,0.839,30,1
2,144,58,33,135,31.6,0.422,25,1
5,77,82,41,42,35.8,0.156,35,0
5,115,98,0,0,52.9,0.209,28,1
3,150,76,0,0,21.0,0.207,37,0
2,120,76,37,105,39.7,0.215,29,0
10,161,68,23,132,25.5,0.326,47,1
0,137,68,14,148,24.8,0.143,21,0
0,128,68,19,180,30.5,1.391,25,1
2,124,68,28,205,32.9,0.875,30,1
6,80,66,30,0,26.2,0.313,41,0
0,106,70,37,148,39.4,0.605,22,0
2,155,74,17,96,26.6,0.433,27,1
3,113,50,10,85,29.5,0.626,25,0
7,109,80,31,0,35.9,1.127,43,1
2,112,68,22,94,34.1,0.315,26,0
3,99,80,11,64,19.3,0.284,30,0
3,182,74,0,0,30.5,0.345,29,1
3,115,66,39,140,38.1,0.150,28,0
6,194,78,0,0,23.5,0.129,59,1
4,129,60,12,231,27.5,0.527,31,0
3,112,74,30,0,31.6,0.197,25,1
0,124,70,20,0,27.4,0.254,36,1
13,152,90,33,29,26.8,0.731,43,1
2,112,75,32,0,35.7,0.148,21,0
1,157,72,21,168,25.6,0.123,24,0
1,122,64,32,156,35.1,0.692,30,1
10,179,70,0,0,35.1,0.200,37,0
2,102,86,36,120,45.5,0.127,23,1
6,105,70,32,68,30.8,0.122,37,0
8,118,72,19,0,23.1,1.476,46,0
2,87,58,16,52,32.7,0.166,25,0
1,180,0,0,0,43.3,0.282,41,1
12,106,80,0,0,23.6,0.137,44,0
1,95,60,18,58,23.9,0.260,22,0
0,165,76,43,255,47.9,0.259,26,0
0,117,0,0,0,33.8,0.932,44,0
5,115,76,0,0,31.2,0.343,44,1
9,152,78,34,171,34.2,0.893,33,1
7,178,84,0,0,39.9,0.331,41,1
1,130,70,13,105,25.9,0.472,22,0
1,95,74,21,73,25.9,0.673,36,0
1,0,68,35,0,32.0,0.389,22,0
5,122,86,0,0,34.7,0.290,33,0
8,95,72,0,0,36.8,0.485,57,0
8,126,88,36,108,38.5,0.349,49,0
1,139,46,19,83,28.7,0.654,22,0
3,116,0,0,0,23.5,0.187,23,0
3,99,62,19,74,21.8,0.279,26,0
5,0,80,32,0,41.0,0.346,37,1
4,92,80,0,0,42.2,0.237,29,0
4,137,84,0,0,31.2,0.252,30,0
3,61,82,28,0,34.4,0.243,46,0
1,90,62,12,43,27.2,0.580,24,0
3,90,78,0,0,42.7,0.559,21,0
9,165,88,0,0,30.4,0.302,49,1
1,125,50,40,167,33.3,0.962,28,1
13,129,0,30,0,39.9,0.569,44,1
12,88,74,40,54,35.3,0.378,48,0
1,196,76,36,249,36.5,0.875,29,1
5,189,64,33,325,31.2,0.583,29,1
5,158,70,0,0,29.8,0.207,63,0
5,103,108,37,0,39.2,0.305,65,0
4,146,78,0,0,38.5,0.520,67,1
4,147,74,25,293,34.9,0.385,30,0
5,99,54,28,83,34.0,0.499,30,0
6,124,72,0,0,27.6,0.368,29,1
0,101,64,17,0,21.0,0.252,21,0
3,81,86,16,66,27.5,0.306,22,0
1,133,102,28,140,32.8,0.234,45,1
3,173,82,48,465,38.4,2.137,25,1
0,118,64,23,89,0.0,1.731,21,0
0,84,64,22,66,35.8,0.545,21,0
2,105,58,40,94,34.9,0.225,25,0
2,122,52,43,158,36.2,0.816,28,0
12,140,82,43,325,39.2,0.528,58,1
0,98,82,15,84,25.2,0.299,22,0
1,87,60,37,75,37.2,0.509,22,0
4,156,75,0,0,48.3,0.238,32,1
0,93,100,39,72,43.4,1.021,35,0
1,107,72,30,82,30.8,0.821,24,0
0,105,68,22,0,20.0,0.236,22,0
1,109,60,8,182,25.4,0.947,21,0
1,90,62,18,59,25.1,1.268,25,0
1,125,70,24,110,24.3,0.221,25,0
1,119,54,13,50,22.3,0.205,24,0
5,116,74,29,0,32.3,0.660,35,1
8,105,100,36,0,43.3,0.239,45,1
5,144,82,26,285,32.0,0.452,58,1
3,100,68,23,81,31.6,0.949,28,0
1,100,66,29,196,32.0,0.444,42,0
5,166,76,0,0,45.7,0.340,27,1
1,131,64,14,415,23.7,0.389,21,0
4,116,72,12,87,22.1,0.463,37,0
4,158,78,0,0,32.9,0.803,31,1
2,127,58,24,275,27.7,1.600,25,0
3,96,56,34,115,24.7,0.944,39,0
0,131,66,40,0,34.3,0.196,22,1
3,82,70,0,0,21.1,0.389,25,0
3,193,70,31,0,34.9,0.241,25,1
4,95,64,0,0,32.0,0.161,31,1
6,137,61,0,0,24.2,0.151,55,0
5,136,84,41,88,35.0,0.286,35,1
9,72,78,25,0,31.6,0.280,38,0
5,168,64,0,0,32.9,0.135,41,1
2,123,48,32,165,42.1,0.520,26,0
4,115,72,0,0,28.9,0.376,46,1
0,101,62,0,0,21.9,0.336,25,0
8,197,74,0,0,25.9,1.191,39,1
1,172,68,49,579,42.4,0.702,28,1
6,102,90,39,0,35.7,0.674,28,0
1,112,72,30,176,34.4,0.528,25,0
1,143,84,23,310,42.4,1.076,22,0
1,143,74,22,61,26.2,0.256,21,0
0,138,60,35,167,34.6,0.534,21,1
3,173,84,33,474,35.7,0.258,22,1
1,97,68,21,0,27.2,1.095,22,0
4,144,82,32,0,38.5,0.554,37,1
1,83,68,0,0,18.2,0.624,27,0
3,129,64,29,115,26.4,0.219,28,1
1,119,88,41,170,45.3,0.507,26,0
2,94,68,18,76,26.0,0.561,21,0
0,102,64,46,78,40.6,0.496,21,0
2,115,64,22,0,30.8,0.421,21,0
8,151,78,32,210,42.9,0.516,36,1
4,184,78,39,277,37.0,0.264,31,1
0,94,0,0,0,0.0,0.256,25,0
1,181,64,30,180,34.1,0.328,38,1
0,135,94,46,145,40.6,0.284,26,0
1,95,82,25,180,35.0,0.233,43,1
2,99,0,0,0,22.2,0.108,23,0
3,89,74,16,85,30.4,0.551,38,0
1,80,74,11,60,30.0,0.527,22,0
2,139,75,0,0,25.6,0.167,29,0
1,90,68,8,0,24.5,1.138,36,0
0,141,0,0,0,42.4,0.205,29,1
12,140,85,33,0,37.4,0.244,41,0
5,147,75,0,0,29.9,0.434,28,0
1,97,70,15,0,18.2,0.147,21,0
6,107,88,0,0,36.8,0.727,31,0
0,189,104,25,0,34.3,0.435,41,1
2,83,66,23,50,32.2,0.497,22,0
4,117,64,27,120,33.2,0.230,24,0
8,108,70,0,0,30.5,0.955,33,1
4,117,62,12,0,29.7,0.380,30,1
0,180,78,63,14,59.4,2.420,25,1
1,100,72,12,70,25.3,0.658,28,0
0,95,80,45,92,36.5,0.330,26,0
0,104,64,37,64,33.6,0.510,22,1
0,120,74,18,63,30.5,0.285,26,0
1,82,64,13,95,21.2,0.415,23,0
2,134,70,0,0,28.9,0.542,23,1
0,91,68,32,210,39.9,0.381,25,0
2,119,0,0,0,19.6,0.832,72,0
2,100,54,28,105,37.8,0.498,24,0
14,175,62,30,0,33.6,0.212,38,1
1,135,54,0,0,26.7,0.687,62,0
5,86,68,28,71,30.2,0.364,24,0
10,148,84,48,237,37.6,1.001,51,1
9,134,74,33,60,25.9,0.460,81,0
9,120,72,22,56,20.8,0.733,48,0
1,71,62,0,0,21.8,0.416,26,0
8,74,70,40,49,35.3,0.705,39,0
5,88,78,30,0,27.6,0.258,37,0
10,115,98,0,0,24.0,1.022,34,0
0,124,56,13,105,21.8,0.452,21,0
0,74,52,10,36,27.8,0.269,22,0
0,97,64,36,100,36.8,0.600,25,0
8,120,0,0,0,30.0,0.183,38,1
6,154,78,41,140,46.1,0.571,27,0
1,144,82,40,0,41.3,0.607,28,0
0,137,70,38,0,33.2,0.170,22,0
0,119,66,27,0,38.8,0.259,22,0
7,136,90,0,0,29.9,0.210,50,0
4,114,64,0,0,28.9,0.126,24,0
0,137,84,27,0,27.3,0.231,59,0
2,105,80,45,191,33.7,0.711,29,1
7,114,76,17,110,23.8,0.466,31,0
8,126,74,38,75,25.9,0.162,39,0
4,132,86,31,0,28.0,0.419,63,0
3,158,70,30,328,35.5,0.344,35,1
0,123,88,37,0,35.2,0.197,29,0
4,85,58,22,49,27.8,0.306,28,0
0,84,82,31,125,38.2,0.233,23,0
0,145,0,0,0,44.2,0.630,31,1
0,135,68,42,250,42.3,0.365,24,1
1,139,62,41,480,40.7,0.536,21,0
0,173,78,32,265,46.5,1.159,58,0
4,99,72,17,0,25.6,0.294,28,0
8,194,80,0,0,26.1,0.551,67,0
2,83,65,28,66,36.8,0.629,24,0
2,89,90,30,0,33.5,0.292,42,0
4,99,68,38,0,32.8,0.145,33,0
4,125,70,18,122,28.9,1.144,45,1
3,80,0,0,0,0.0,0.174,22,0
6,166,74,0,0,26.6,0.304,66,0
5,110,68,0,0,26.0,0.292,30,0
2,81,72,15,76,30.1,0.547,25,0
7,195,70,33,145,25.1,0.163,55,1
6,154,74,32,193,29.3,0.839,39,0
2,117,90,19,71,25.2,0.313,21,0
3,84,72,32,0,37.2,0.267,28,0
6,0,68,41,0,39.0,0.727,41,1
7,94,64,25,79,33.3,0.738,41,0
3,96,78,39,0,37.3,0.238,40,0
10,75,82,0,0,33.3,0.263,38,0
0,180,90,26,90,36.5,0.314,35,1
1,130,60,23,170,28.6,0.692,21,0
2,84,50,23,76,30.4,0.968,21,0
8,120,78,0,0,25.0,0.409,64,0
12,84,72,31,0,29.7,0.297,46,1
0,139,62,17,210,22.1,0.207,21,0
9,91,68,0,0,24.2,0.200,58,0
2,91,62,0,0,27.3,0.525,22,0
3,99,54,19,86,25.6,0.154,24,0
3,163,70,18,105,31.6,0.268,28,1
9,145,88,34,165,30.3,0.771,53,1
7,125,86,0,0,37.6,0.304,51,0
13,76,60,0,0,32.8,0.180,41,0
6,129,90,7,326,19.6,0.582,60,0
2,68,70,32,66,25.0,0.187,25,0
3,124,80,33,130,33.2,0.305,26,0
6,114,0,0,0,0.0,0.189,26,0
9,130,70,0,0,34.2,0.652,45,1
3,125,58,0,0,31.6,0.151,24,0
3,87,60,18,0,21.8,0.444,21,0
1,97,64,19,82,18.2,0.299,21,0
3,116,74,15,105,26.3,0.107,24,0
0,117,66,31,188,30.8,0.493,22,0
0,111,65,0,0,24.6,0.660,31,0
2,122,60,18,106,29.8,0.717,22,0
0,107,76,0,0,45.3,0.686,24,0
1,86,66,52,65,41.3,0.917,29,0
6,91,0,0,0,29.8,0.501,31,0
1,77,56,30,56,33.3,1.251,24,0
4,132,0,0,0,32.9,0.302,23,1
0,105,90,0,0,29.6,0.197,46,0
0,57,60,0,0,21.7,0.735,67,0
0,127,80,37,210,36.3,0.804,23,0
3,129,92,49,155,36.4,0.968,32,1
8,100,74,40,215,39.4,0.661,43,1
3,128,72,25,190,32.4,0.549,27,1
10,90,85,32,0,34.9,0.825,56,1
4,84,90,23,56,39.5,0.159,25,0
1,88,78,29,76,32.0,0.365,29,0
8,186,90,35,225,34.5,0.423,37,1
5,187,76,27,207,43.6,1.034,53,1
4,131,68,21,166,33.1,0.160,28,0
1,164,82,43,67,32.8,0.341,50,0
4,189,110,31,0,28.5,0.680,37,0
1,116,70,28,0,27.4,0.204,21,0
3,84,68,30,106,31.9,0.591,25,0
6,114,88,0,0,27.8,0.247,66,0
1,88,62,24,44,29.9,0.422,23,0
1,84,64,23,115,36.9,0.471,28,0
7,124,70,33,215,25.5,0.161,37,0
1,97,70,40,0,38.1,0.218,30,0
8,110,76,0,0,27.8,0.237,58,0
11,103,68,40,0,46.2,0.126,42,0
11,85,74,0,0,30.1,0.300,35,0
6,125,76,0,0,33.8,0.121,54,1
0,198,66,32,274,41.3,0.502,28,1
1,87,68,34,77,37.6,0.401,24,0
6,99,60,19,54,26.9,0.497,32,0
0,91,80,0,0,32.4,0.601,27,0
2,95,54,14,88,26.1,0.748,22,0
1,99,72,30,18,38.6,0.412,21,0
6,92,62,32,126,32.0,0.085,46,0
4,154,72,29,126,31.3,0.338,37,0
0,121,66,30,165,34.3,0.203,33,1
3,78,70,0,0,32.5,0.270,39,0
2,130,96,0,0,22.6,0.268,21,0
3,111,58,31,44,29.5,0.430,22,0
2,98,60,17,120,34.7,0.198,22,0
1,143,86,30,330,30.1,0.892,23,0
1,119,44,47,63,35.5,0.280,25,0
6,108,44,20,130,24.0,0.813,35,0
2,118,80,0,0,42.9,0.693,21,1
10,133,68,0,0,27.0,0.245,36,0
2,197,70,99,0,34.7,0.575,62,1
0,151,90,46,0,42.1,0.371,21,1
6,109,60,27,0,25.0,0.206,27,0
12,121,78,17,0,26.5,0.259,62,0
8,100,76,0,0,38.7,0.190,42,0
8,124,76,24,600,28.7,0.687,52,1
1,93,56,11,0,22.5,0.417,22,0
8,143,66,0,0,34.9,0.129,41,1
6,103,66,0,0,24.3,0.249,29,0
3,176,86,27,156,33.3,1.154,52,1
0,73,0,0,0,21.1,0.342,25,0
11,111,84,40,0,46.8,0.925,45,1
2,112,78,50,140,39.4,0.175,24,0
3,132,80,0,0,34.4,0.402,44,1
2,82,52,22,115,28.5,1.699,25,0
6,123,72,45,230,33.6,0.733,34,0
0,188,82,14,185,32.0,0.682,22,1
0,67,76,0,0,45.3,0.194,46,0
1,89,24,19,25,27.8,0.559,21,0
1,173,74,0,0,36.8,0.088,38,1
1,109,38,18,120,23.1,0.407,26,0
1,108,88,19,0,27.1,0.400,24,0
6,96,0,0,0,23.7,0.190,28,0
1,124,74,36,0,27.8,0.100,30,0
7,150,78,29,126,35.2,0.692,54,1
4,183,0,0,0,28.4,0.212,36,1
1,124,60,32,0,35.8,0.514,21,0
1,181,78,42,293,40.0,1.258,22,1
1,92,62,25,41,19.5,0.482,25,0
0,152,82,39,272,41.5,0.270,27,0
1,111,62,13,182,24.0,0.138,23,0
3,106,54,21,158,30.9,0.292,24,0
3,174,58,22,194,32.9,0.593,36,1
7,168,88,42,321,38.2,0.787,40,1
6,105,80,28,0,32.5,0.878,26,0
11,138,74,26,144,36.1,0.557,50,1
3,106,72,0,0,25.8,0.207,27,0
6,117,96,0,0,28.7,0.157,30,0
2,68,62,13,15,20.1,0.257,23,0
9,112,82,24,0,28.2,1.282,50,1
0,119,0,0,0,32.4,0.141,24,1
2,112,86,42,160,38.4,0.246,28,0
2,92,76,20,0,24.2,1.698,28,0
6,183,94,0,0,40.8,1.461,45,0
0,94,70,27,115,43.5,0.347,21,0
2,108,64,0,0,30.8,0.158,21,0
4,90,88,47,54,37.7,0.362,29,0
0,125,68,0,0,24.7,0.206,21,0
0,132,78,0,0,32.4,0.393,21,0
5,128,80,0,0,34.6,0.144,45,0
4,94,65,22,0,24.7,0.148,21,0
7,114,64,0,0,27.4,0.732,34,1
0,102,78,40,90,34.5,0.238,24,0
2,111,60,0,0,26.2,0.343,23,0
1,128,82,17,183,27.5,0.115,22,0
10,92,62,0,0,25.9,0.167,31,0
13,104,72,0,0,31.2,0.465,38,1
5,104,74,0,0,28.8,0.153,48,0
2,94,76,18,66,31.6,0.649,23,0
7,97,76,32,91,40.9,0.871,32,1
1,100,74,12,46,19.5,0.149,28,0
0,102,86,17,105,29.3,0.695,27,0
4,128,70,0,0,34.3,0.303,24,0
6,147,80,0,0,29.5,0.178,50,1
4,90,0,0,0,28.0,0.610,31,0
3,103,72,30,152,27.6,0.730,27,0
2,157,74,35,440,39.4,0.134,30,0
1,167,74,17,144,23.4,0.447,33,1
0,179,50,36,159,37.8,0.455,22,1
11,136,84,35,130,28.3,0.260,42,1
0,107,60,25,0,26.4,0.133,23,0
1,91,54,25,100,25.2,0.234,23,0
1,117,60,23,106,33.8,0.466,27,0
5,123,74,40,77,34.1,0.269,28,0
2,120,54,0,0,26.8,0.455,27,0
1,106,70,28,135,34.2,0.142,22,0
2,155,52,27,540,38.7,0.240,25,1
2,101,58,35,90,21.8,0.155,22,0
1,120,80,48,200,38.9,1.162,41,0
11,127,106,0,0,39.0,0.190,51,0
3,80,82,31,70,34.2,1.292,27,1
10,162,84,0,0,27.7,0.182,54,0
1,199,76,43,0,42.9,1.394,22,1
8,167,106,46,231,37.6,0.165,43,1
9,145,80,46,130,37.9,0.637,40,1
6,115,60,39,0,33.7,0.245,40,1
1,112,80,45,132,34.8,0.217,24,0
4,145,82,18,0,32.5,0.235,70,1
10,111,70,27,0,27.5,0.141,40,1
6,98,58,33,190,34.0,0.430,43,0
9,154,78,30,100,30.9,0.164,45,0
6,165,68,26,168,33.6,0.631,49,0
1,99,58,10,0,25.4,0.551,21,0
10,68,106,23,49,35.5,0.285,47,0
3,123,100,35,240,57.3,0.880,22,0
8,91,82,0,0,35.6,0.587,68,0
6,195,70,0,0,30.9,0.328,31,1
9,156,86,0,0,24.8,0.230,53,1
0,93,60,0,0,35.3,0.263,25,0
3,121,52,0,0,36.0,0.127,25,1
2,101,58,17,265,24.2,0.614,23,0
2,56,56,28,45,24.2,0.332,22,0
0,162,76,36,0,49.6,0.364,26,1
0,95,64,39,105,44.6,0.366,22,0
4,125,80,0,0,32.3,0.536,27,1
5,136,82,0,0,0.0,0.640,69,0
2,129,74,26,205,33.2,0.591,25,0
3,130,64,0,0,23.1,0.314,22,0
1,107,50,19,0,28.3,0.181,29,0
1,140,74,26,180,24.1,0.828,23,0
1,144,82,46,180,46.1,0.335,46,1
8,107,80,0,0,24.6,0.856,34,0
13,158,114,0,0,42.3,0.257,44,1
2,121,70,32,95,39.1,0.886,23,0
7,129,68,49,125,38.5,0.439,43,1
2,90,60,0,0,23.5,0.191,25,0
7,142,90,24,480,30.4,0.128,43,1
3,169,74,19,125,29.9,0.268,31,1
0,99,0,0,0,25.0,0.253,22,0
4,127,88,11,155,34.5,0.598,28,0
4,118,70,0,0,44.5,0.904,26,0
2,122,76,27,200,35.9,0.483,26,0
6,125,78,31,0,27.6,0.565,49,1
1,168,88,29,0,35.0,0.905,52,1
2,129,0,0,0,38.5,0.304,41,0
4,110,76,20,100,28.4,0.118,27,0
6,80,80,36,0,39.8,0.177,28,0
10,115,0,0,0,0.0,0.261,30,1
2,127,46,21,335,34.4,0.176,22,0
9,164,78,0,0,32.8,0.148,45,1
2,93,64,32,160,38.0,0.674,23,1
3,158,64,13,387,31.2,0.295,24,0
5,126,78,27,22,29.6,0.439,40,0
10,129,62,36,0,41.2,0.441,38,1
0,134,58,20,291,26.4,0.352,21,0
3,102,74,0,0,29.5,0.121,32,0
7,187,50,33,392,33.9,0.826,34,1
3,173,78,39,185,33.8,0.970,31,1
10,94,72,18,0,23.1,0.595,56,0
1,108,60,46,178,35.5,0.415,24,0
5,97,76,27,0,35.6,0.378,52,1
4,83,86,19,0,29.3,0.317,34,0
1,114,66,36,200,38.1,0.289,21,0
1,149,68,29,127,29.3,0.349,42,1
5,117,86,30,105,39.1,0.251,42,0
1,111,94,0,0,32.8,0.265,45,0
4,112,78,40,0,39.4,0.236,38,0
1,116,78,29,180,36.1,0.496,25,0
0,141,84,26,0,32.4,0.433,22,0
2,175,88,0,0,22.9,0.326,22,0
2,92,52,0,0,30.1,0.141,22,0
3,130,78,23,79,28.4,0.323,34,1
8,120,86,0,0,28.4,0.259,22,1
2,174,88,37,120,44.5,0.646,24,1
2,106,56,27,165,29.0,0.426,22,0
2,105,75,0,0,23.3,0.560,53,0
4,95,60,32,0,35.4,0.284,28,0
0,126,86,27,120,27.4,0.515,21,0
8,65,72,23,0,32.0,0.600,42,0
2,99,60,17,160,36.6,0.453,21,0
1,102,74,0,0,39.5,0.293,42,1
11,120,80,37,150,42.3,0.785,48,1
3,102,44,20,94,30.8,0.400,26,0
1,109,58,18,116,28.5,0.219,22,0
9,140,94,0,0,32.7,0.734,45,1
13,153,88,37,140,40.6,1.174,39,0
12,100,84,33,105,30.0,0.488,46,0
1,147,94,41,0,49.3,0.358,27,1
1,81,74,41,57,46.3,1.096,32,0
3,187,70,22,200,36.4,0.408,36,1
6,162,62,0,0,24.3,0.178,50,1
4,136,70,0,0,31.2,1.182,22,1
1,121,78,39,74,39.0,0.261,28,0
3,108,62,24,0,26.0,0.223,25,0
0,181,88,44,510,43.3,0.222,26,1
8,154,78,32,0,32.4,0.443,45,1
1,128,88,39,110,36.5,1.057,37,1
7,137,90,41,0,32.0,0.391,39,0
0,123,72,0,0,36.3,0.258,52,1
1,106,76,0,0,37.5,0.197,26,0
6,190,92,0,0,35.5,0.278,66,1
2,88,58,26,16,28.4,0.766,22,0
9,170,74,31,0,44.0,0.403,43,1
9,89,62,0,0,22.5,0.142,33,0
10,101,76,48,180,32.9,0.171,63,0
2,122,70,27,0,36.8,0.340,27,0
5,121,72,23,112,26.2,0.245,30,0
1,126,60,0,0,30.1,0.349,47,1
1,93,70,31,0,30.4,0.315,23,0

View file

@@ -1,271 +0,0 @@
70.0 1.0 4.0 130.0 322.0 0.0 2.0 109.0 0.0 2.4 2.0 3.0 3.0 2
67.0 0.0 3.0 115.0 564.0 0.0 2.0 160.0 0.0 1.6 2.0 0.0 7.0 1
57.0 1.0 2.0 124.0 261.0 0.0 0.0 141.0 0.0 0.3 1.0 0.0 7.0 2
64.0 1.0 4.0 128.0 263.0 0.0 0.0 105.0 1.0 0.2 2.0 1.0 7.0 1
74.0 0.0 2.0 120.0 269.0 0.0 2.0 121.0 1.0 0.2 1.0 1.0 3.0 1
65.0 1.0 4.0 120.0 177.0 0.0 0.0 140.0 0.0 0.4 1.0 0.0 7.0 1
56.0 1.0 3.0 130.0 256.0 1.0 2.0 142.0 1.0 0.6 2.0 1.0 6.0 2
59.0 1.0 4.0 110.0 239.0 0.0 2.0 142.0 1.0 1.2 2.0 1.0 7.0 2
60.0 1.0 4.0 140.0 293.0 0.0 2.0 170.0 0.0 1.2 2.0 2.0 7.0 2
63.0 0.0 4.0 150.0 407.0 0.0 2.0 154.0 0.0 4.0 2.0 3.0 7.0 2
59.0 1.0 4.0 135.0 234.0 0.0 0.0 161.0 0.0 0.5 2.0 0.0 7.0 1
53.0 1.0 4.0 142.0 226.0 0.0 2.0 111.0 1.0 0.0 1.0 0.0 7.0 1
44.0 1.0 3.0 140.0 235.0 0.0 2.0 180.0 0.0 0.0 1.0 0.0 3.0 1
61.0 1.0 1.0 134.0 234.0 0.0 0.0 145.0 0.0 2.6 2.0 2.0 3.0 2
57.0 0.0 4.0 128.0 303.0 0.0 2.0 159.0 0.0 0.0 1.0 1.0 3.0 1
71.0 0.0 4.0 112.0 149.0 0.0 0.0 125.0 0.0 1.6 2.0 0.0 3.0 1
46.0 1.0 4.0 140.0 311.0 0.0 0.0 120.0 1.0 1.8 2.0 2.0 7.0 2
53.0 1.0 4.0 140.0 203.0 1.0 2.0 155.0 1.0 3.1 3.0 0.0 7.0 2
64.0 1.0 1.0 110.0 211.0 0.0 2.0 144.0 1.0 1.8 2.0 0.0 3.0 1
40.0 1.0 1.0 140.0 199.0 0.0 0.0 178.0 1.0 1.4 1.0 0.0 7.0 1
67.0 1.0 4.0 120.0 229.0 0.0 2.0 129.0 1.0 2.6 2.0 2.0 7.0 2
48.0 1.0 2.0 130.0 245.0 0.0 2.0 180.0 0.0 0.2 2.0 0.0 3.0 1
43.0 1.0 4.0 115.0 303.0 0.0 0.0 181.0 0.0 1.2 2.0 0.0 3.0 1
47.0 1.0 4.0 112.0 204.0 0.0 0.0 143.0 0.0 0.1 1.0 0.0 3.0 1
54.0 0.0 2.0 132.0 288.0 1.0 2.0 159.0 1.0 0.0 1.0 1.0 3.0 1
48.0 0.0 3.0 130.0 275.0 0.0 0.0 139.0 0.0 0.2 1.0 0.0 3.0 1
46.0 0.0 4.0 138.0 243.0 0.0 2.0 152.0 1.0 0.0 2.0 0.0 3.0 1
51.0 0.0 3.0 120.0 295.0 0.0 2.0 157.0 0.0 0.6 1.0 0.0 3.0 1
58.0 1.0 3.0 112.0 230.0 0.0 2.0 165.0 0.0 2.5 2.0 1.0 7.0 2
71.0 0.0 3.0 110.0 265.0 1.0 2.0 130.0 0.0 0.0 1.0 1.0 3.0 1
57.0 1.0 3.0 128.0 229.0 0.0 2.0 150.0 0.0 0.4 2.0 1.0 7.0 2
66.0 1.0 4.0 160.0 228.0 0.0 2.0 138.0 0.0 2.3 1.0 0.0 6.0 1
37.0 0.0 3.0 120.0 215.0 0.0 0.0 170.0 0.0 0.0 1.0 0.0 3.0 1
59.0 1.0 4.0 170.0 326.0 0.0 2.0 140.0 1.0 3.4 3.0 0.0 7.0 2
50.0 1.0 4.0 144.0 200.0 0.0 2.0 126.0 1.0 0.9 2.0 0.0 7.0 2
48.0 1.0 4.0 130.0 256.0 1.0 2.0 150.0 1.0 0.0 1.0 2.0 7.0 2
61.0 1.0 4.0 140.0 207.0 0.0 2.0 138.0 1.0 1.9 1.0 1.0 7.0 2
59.0 1.0 1.0 160.0 273.0 0.0 2.0 125.0 0.0 0.0 1.0 0.0 3.0 2
42.0 1.0 3.0 130.0 180.0 0.0 0.0 150.0 0.0 0.0 1.0 0.0 3.0 1
48.0 1.0 4.0 122.0 222.0 0.0 2.0 186.0 0.0 0.0 1.0 0.0 3.0 1
40.0 1.0 4.0 152.0 223.0 0.0 0.0 181.0 0.0 0.0 1.0 0.0 7.0 2
62.0 0.0 4.0 124.0 209.0 0.0 0.0 163.0 0.0 0.0 1.0 0.0 3.0 1
44.0 1.0 3.0 130.0 233.0 0.0 0.0 179.0 1.0 0.4 1.0 0.0 3.0 1
46.0 1.0 2.0 101.0 197.0 1.0 0.0 156.0 0.0 0.0 1.0 0.0 7.0 1
59.0 1.0 3.0 126.0 218.0 1.0 0.0 134.0 0.0 2.2 2.0 1.0 6.0 2
58.0 1.0 3.0 140.0 211.0 1.0 2.0 165.0 0.0 0.0 1.0 0.0 3.0 1
49.0 1.0 3.0 118.0 149.0 0.0 2.0 126.0 0.0 0.8 1.0 3.0 3.0 2
44.0 1.0 4.0 110.0 197.0 0.0 2.0 177.0 0.0 0.0 1.0 1.0 3.0 2
66.0 1.0 2.0 160.0 246.0 0.0 0.0 120.0 1.0 0.0 2.0 3.0 6.0 2
65.0 0.0 4.0 150.0 225.0 0.0 2.0 114.0 0.0 1.0 2.0 3.0 7.0 2
42.0 1.0 4.0 136.0 315.0 0.0 0.0 125.0 1.0 1.8 2.0 0.0 6.0 2
52.0 1.0 2.0 128.0 205.0 1.0 0.0 184.0 0.0 0.0 1.0 0.0 3.0 1
65.0 0.0 3.0 140.0 417.0 1.0 2.0 157.0 0.0 0.8 1.0 1.0 3.0 1
63.0 0.0 2.0 140.0 195.0 0.0 0.0 179.0 0.0 0.0 1.0 2.0 3.0 1
45.0 0.0 2.0 130.0 234.0 0.0 2.0 175.0 0.0 0.6 2.0 0.0 3.0 1
41.0 0.0 2.0 105.0 198.0 0.0 0.0 168.0 0.0 0.0 1.0 1.0 3.0 1
61.0 1.0 4.0 138.0 166.0 0.0 2.0 125.0 1.0 3.6 2.0 1.0 3.0 2
60.0 0.0 3.0 120.0 178.0 1.0 0.0 96.0 0.0 0.0 1.0 0.0 3.0 1
59.0 0.0 4.0 174.0 249.0 0.0 0.0 143.0 1.0 0.0 2.0 0.0 3.0 2
62.0 1.0 2.0 120.0 281.0 0.0 2.0 103.0 0.0 1.4 2.0 1.0 7.0 2
57.0 1.0 3.0 150.0 126.0 1.0 0.0 173.0 0.0 0.2 1.0 1.0 7.0 1
51.0 0.0 4.0 130.0 305.0 0.0 0.0 142.0 1.0 1.2 2.0 0.0 7.0 2
44.0 1.0 3.0 120.0 226.0 0.0 0.0 169.0 0.0 0.0 1.0 0.0 3.0 1
60.0 0.0 1.0 150.0 240.0 0.0 0.0 171.0 0.0 0.9 1.0 0.0 3.0 1
63.0 1.0 1.0 145.0 233.0 1.0 2.0 150.0 0.0 2.3 3.0 0.0 6.0 1
57.0 1.0 4.0 150.0 276.0 0.0 2.0 112.0 1.0 0.6 2.0 1.0 6.0 2
51.0 1.0 4.0 140.0 261.0 0.0 2.0 186.0 1.0 0.0 1.0 0.0 3.0 1
58.0 0.0 2.0 136.0 319.0 1.0 2.0 152.0 0.0 0.0 1.0 2.0 3.0 2
44.0 0.0 3.0 118.0 242.0 0.0 0.0 149.0 0.0 0.3 2.0 1.0 3.0 1
47.0 1.0 3.0 108.0 243.0 0.0 0.0 152.0 0.0 0.0 1.0 0.0 3.0 2
61.0 1.0 4.0 120.0 260.0 0.0 0.0 140.0 1.0 3.6 2.0 1.0 7.0 2
57.0 0.0 4.0 120.0 354.0 0.0 0.0 163.0 1.0 0.6 1.0 0.0 3.0 1
70.0 1.0 2.0 156.0 245.0 0.0 2.0 143.0 0.0 0.0 1.0 0.0 3.0 1
76.0 0.0 3.0 140.0 197.0 0.0 1.0 116.0 0.0 1.1 2.0 0.0 3.0 1
67.0 0.0 4.0 106.0 223.0 0.0 0.0 142.0 0.0 0.3 1.0 2.0 3.0 1
45.0 1.0 4.0 142.0 309.0 0.0 2.0 147.0 1.0 0.0 2.0 3.0 7.0 2
45.0 1.0 4.0 104.0 208.0 0.0 2.0 148.0 1.0 3.0 2.0 0.0 3.0 1
39.0 0.0 3.0 94.0 199.0 0.0 0.0 179.0 0.0 0.0 1.0 0.0 3.0 1
42.0 0.0 3.0 120.0 209.0 0.0 0.0 173.0 0.0 0.0 2.0 0.0 3.0 1
56.0 1.0 2.0 120.0 236.0 0.0 0.0 178.0 0.0 0.8 1.0 0.0 3.0 1
58.0 1.0 4.0 146.0 218.0 0.0 0.0 105.0 0.0 2.0 2.0 1.0 7.0 2
35.0 1.0 4.0 120.0 198.0 0.0 0.0 130.0 1.0 1.6 2.0 0.0 7.0 2
58.0 1.0 4.0 150.0 270.0 0.0 2.0 111.0 1.0 0.8 1.0 0.0 7.0 2
41.0 1.0 3.0 130.0 214.0 0.0 2.0 168.0 0.0 2.0 2.0 0.0 3.0 1
57.0 1.0 4.0 110.0 201.0 0.0 0.0 126.0 1.0 1.5 2.0 0.0 6.0 1
42.0 1.0 1.0 148.0 244.0 0.0 2.0 178.0 0.0 0.8 1.0 2.0 3.0 1
62.0 1.0 2.0 128.0 208.0 1.0 2.0 140.0 0.0 0.0 1.0 0.0 3.0 1
59.0 1.0 1.0 178.0 270.0 0.0 2.0 145.0 0.0 4.2 3.0 0.0 7.0 1
41.0 0.0 2.0 126.0 306.0 0.0 0.0 163.0 0.0 0.0 1.0 0.0 3.0 1
50.0 1.0 4.0 150.0 243.0 0.0 2.0 128.0 0.0 2.6 2.0 0.0 7.0 2
59.0 1.0 2.0 140.0 221.0 0.0 0.0 164.0 1.0 0.0 1.0 0.0 3.0 1
61.0 0.0 4.0 130.0 330.0 0.0 2.0 169.0 0.0 0.0 1.0 0.0 3.0 2
54.0 1.0 4.0 124.0 266.0 0.0 2.0 109.0 1.0 2.2 2.0 1.0 7.0 2
54.0 1.0 4.0 110.0 206.0 0.0 2.0 108.0 1.0 0.0 2.0 1.0 3.0 2
52.0 1.0 4.0 125.0 212.0 0.0 0.0 168.0 0.0 1.0 1.0 2.0 7.0 2
47.0 1.0 4.0 110.0 275.0 0.0 2.0 118.0 1.0 1.0 2.0 1.0 3.0 2
66.0 1.0 4.0 120.0 302.0 0.0 2.0 151.0 0.0 0.4 2.0 0.0 3.0 1
58.0 1.0 4.0 100.0 234.0 0.0 0.0 156.0 0.0 0.1 1.0 1.0 7.0 2
64.0 0.0 3.0 140.0 313.0 0.0 0.0 133.0 0.0 0.2 1.0 0.0 7.0 1
50.0 0.0 2.0 120.0 244.0 0.0 0.0 162.0 0.0 1.1 1.0 0.0 3.0 1
44.0 0.0 3.0 108.0 141.0 0.0 0.0 175.0 0.0 0.6 2.0 0.0 3.0 1
67.0 1.0 4.0 120.0 237.0 0.0 0.0 71.0 0.0 1.0 2.0 0.0 3.0 2
49.0 0.0 4.0 130.0 269.0 0.0 0.0 163.0 0.0 0.0 1.0 0.0 3.0 1
57.0 1.0 4.0 165.0 289.0 1.0 2.0 124.0 0.0 1.0 2.0 3.0 7.0 2
63.0 1.0 4.0 130.0 254.0 0.0 2.0 147.0 0.0 1.4 2.0 1.0 7.0 2
48.0 1.0 4.0 124.0 274.0 0.0 2.0 166.0 0.0 0.5 2.0 0.0 7.0 2
51.0 1.0 3.0 100.0 222.0 0.0 0.0 143.0 1.0 1.2 2.0 0.0 3.0 1
60.0 0.0 4.0 150.0 258.0 0.0 2.0 157.0 0.0 2.6 2.0 2.0 7.0 2
59.0 1.0 4.0 140.0 177.0 0.0 0.0 162.0 1.0 0.0 1.0 1.0 7.0 2
45.0 0.0 2.0 112.0 160.0 0.0 0.0 138.0 0.0 0.0 2.0 0.0 3.0 1
55.0 0.0 4.0 180.0 327.0 0.0 1.0 117.0 1.0 3.4 2.0 0.0 3.0 2
41.0 1.0 2.0 110.0 235.0 0.0 0.0 153.0 0.0 0.0 1.0 0.0 3.0 1
60.0 0.0 4.0 158.0 305.0 0.0 2.0 161.0 0.0 0.0 1.0 0.0 3.0 2
54.0 0.0 3.0 135.0 304.0 1.0 0.0 170.0 0.0 0.0 1.0 0.0 3.0 1
42.0 1.0 2.0 120.0 295.0 0.0 0.0 162.0 0.0 0.0 1.0 0.0 3.0 1
49.0 0.0 2.0 134.0 271.0 0.0 0.0 162.0 0.0 0.0 2.0 0.0 3.0 1
46.0 1.0 4.0 120.0 249.0 0.0 2.0 144.0 0.0 0.8 1.0 0.0 7.0 2
56.0 0.0 4.0 200.0 288.0 1.0 2.0 133.0 1.0 4.0 3.0 2.0 7.0 2
66.0 0.0 1.0 150.0 226.0 0.0 0.0 114.0 0.0 2.6 3.0 0.0 3.0 1
56.0 1.0 4.0 130.0 283.0 1.0 2.0 103.0 1.0 1.6 3.0 0.0 7.0 2
49.0 1.0 3.0 120.0 188.0 0.0 0.0 139.0 0.0 2.0 2.0 3.0 7.0 2
54.0 1.0 4.0 122.0 286.0 0.0 2.0 116.0 1.0 3.2 2.0 2.0 3.0 2
57.0 1.0 4.0 152.0 274.0 0.0 0.0 88.0 1.0 1.2 2.0 1.0 7.0 2
65.0 0.0 3.0 160.0 360.0 0.0 2.0 151.0 0.0 0.8 1.0 0.0 3.0 1
54.0 1.0 3.0 125.0 273.0 0.0 2.0 152.0 0.0 0.5 3.0 1.0 3.0 1
54.0 0.0 3.0 160.0 201.0 0.0 0.0 163.0 0.0 0.0 1.0 1.0 3.0 1
62.0 1.0 4.0 120.0 267.0 0.0 0.0 99.0 1.0 1.8 2.0 2.0 7.0 2
52.0 0.0 3.0 136.0 196.0 0.0 2.0 169.0 0.0 0.1 2.0 0.0 3.0 1
52.0 1.0 2.0 134.0 201.0 0.0 0.0 158.0 0.0 0.8 1.0 1.0 3.0 1
60.0 1.0 4.0 117.0 230.0 1.0 0.0 160.0 1.0 1.4 1.0 2.0 7.0 2
63.0 0.0 4.0 108.0 269.0 0.0 0.0 169.0 1.0 1.8 2.0 2.0 3.0 2
66.0 1.0 4.0 112.0 212.0 0.0 2.0 132.0 1.0 0.1 1.0 1.0 3.0 2
42.0 1.0 4.0 140.0 226.0 0.0 0.0 178.0 0.0 0.0 1.0 0.0 3.0 1
64.0 1.0 4.0 120.0 246.0 0.0 2.0 96.0 1.0 2.2 3.0 1.0 3.0 2
54.0 1.0 3.0 150.0 232.0 0.0 2.0 165.0 0.0 1.6 1.0 0.0 7.0 1
46.0 0.0 3.0 142.0 177.0 0.0 2.0 160.0 1.0 1.4 3.0 0.0 3.0 1
67.0 0.0 3.0 152.0 277.0 0.0 0.0 172.0 0.0 0.0 1.0 1.0 3.0 1
56.0 1.0 4.0 125.0 249.0 1.0 2.0 144.0 1.0 1.2 2.0 1.0 3.0 2
34.0 0.0 2.0 118.0 210.0 0.0 0.0 192.0 0.0 0.7 1.0 0.0 3.0 1
57.0 1.0 4.0 132.0 207.0 0.0 0.0 168.0 1.0 0.0 1.0 0.0 7.0 1
64.0 1.0 4.0 145.0 212.0 0.0 2.0 132.0 0.0 2.0 2.0 2.0 6.0 2
59.0 1.0 4.0 138.0 271.0 0.0 2.0 182.0 0.0 0.0 1.0 0.0 3.0 1
50.0 1.0 3.0 140.0 233.0 0.0 0.0 163.0 0.0 0.6 2.0 1.0 7.0 2
51.0 1.0 1.0 125.0 213.0 0.0 2.0 125.0 1.0 1.4 1.0 1.0 3.0 1
54.0 1.0 2.0 192.0 283.0 0.0 2.0 195.0 0.0 0.0 1.0 1.0 7.0 2
53.0 1.0 4.0 123.0 282.0 0.0 0.0 95.0 1.0 2.0 2.0 2.0 7.0 2
52.0 1.0 4.0 112.0 230.0 0.0 0.0 160.0 0.0 0.0 1.0 1.0 3.0 2
40.0 1.0 4.0 110.0 167.0 0.0 2.0 114.0 1.0 2.0 2.0 0.0 7.0 2
58.0 1.0 3.0 132.0 224.0 0.0 2.0 173.0 0.0 3.2 1.0 2.0 7.0 2
41.0 0.0 3.0 112.0 268.0 0.0 2.0 172.0 1.0 0.0 1.0 0.0 3.0 1
41.0 1.0 3.0 112.0 250.0 0.0 0.0 179.0 0.0 0.0 1.0 0.0 3.0 1
50.0 0.0 3.0 120.0 219.0 0.0 0.0 158.0 0.0 1.6 2.0 0.0 3.0 1
54.0 0.0 3.0 108.0 267.0 0.0 2.0 167.0 0.0 0.0 1.0 0.0 3.0 1
64.0 0.0 4.0 130.0 303.0 0.0 0.0 122.0 0.0 2.0 2.0 2.0 3.0 1
51.0 0.0 3.0 130.0 256.0 0.0 2.0 149.0 0.0 0.5 1.0 0.0 3.0 1
46.0 0.0 2.0 105.0 204.0 0.0 0.0 172.0 0.0 0.0 1.0 0.0 3.0 1
55.0 1.0 4.0 140.0 217.0 0.0 0.0 111.0 1.0 5.6 3.0 0.0 7.0 2
45.0 1.0 2.0 128.0 308.0 0.0 2.0 170.0 0.0 0.0 1.0 0.0 3.0 1
56.0 1.0 1.0 120.0 193.0 0.0 2.0 162.0 0.0 1.9 2.0 0.0 7.0 1
66.0 0.0 4.0 178.0 228.0 1.0 0.0 165.0 1.0 1.0 2.0 2.0 7.0 2
38.0 1.0 1.0 120.0 231.0 0.0 0.0 182.0 1.0 3.8 2.0 0.0 7.0 2
62.0 0.0 4.0 150.0 244.0 0.0 0.0 154.0 1.0 1.4 2.0 0.0 3.0 2
55.0 1.0 2.0 130.0 262.0 0.0 0.0 155.0 0.0 0.0 1.0 0.0 3.0 1
58.0 1.0 4.0 128.0 259.0 0.0 2.0 130.0 1.0 3.0 2.0 2.0 7.0 2
43.0 1.0 4.0 110.0 211.0 0.0 0.0 161.0 0.0 0.0 1.0 0.0 7.0 1
64.0 0.0 4.0 180.0 325.0 0.0 0.0 154.0 1.0 0.0 1.0 0.0 3.0 1
50.0 0.0 4.0 110.0 254.0 0.0 2.0 159.0 0.0 0.0 1.0 0.0 3.0 1
53.0 1.0 3.0 130.0 197.0 1.0 2.0 152.0 0.0 1.2 3.0 0.0 3.0 1
45.0 0.0 4.0 138.0 236.0 0.0 2.0 152.0 1.0 0.2 2.0 0.0 3.0 1
65.0 1.0 1.0 138.0 282.0 1.0 2.0 174.0 0.0 1.4 2.0 1.0 3.0 2
69.0 1.0 1.0 160.0 234.0 1.0 2.0 131.0 0.0 0.1 2.0 1.0 3.0 1
69.0 1.0 3.0 140.0 254.0 0.0 2.0 146.0 0.0 2.0 2.0 3.0 7.0 2
67.0 1.0 4.0 100.0 299.0 0.0 2.0 125.0 1.0 0.9 2.0 2.0 3.0 2
68.0 0.0 3.0 120.0 211.0 0.0 2.0 115.0 0.0 1.5 2.0 0.0 3.0 1
34.0 1.0 1.0 118.0 182.0 0.0 2.0 174.0 0.0 0.0 1.0 0.0 3.0 1
62.0 0.0 4.0 138.0 294.0 1.0 0.0 106.0 0.0 1.9 2.0 3.0 3.0 2
51.0 1.0 4.0 140.0 298.0 0.0 0.0 122.0 1.0 4.2 2.0 3.0 7.0 2
46.0 1.0 3.0 150.0 231.0 0.0 0.0 147.0 0.0 3.6 2.0 0.0 3.0 2
67.0 1.0 4.0 125.0 254.0 1.0 0.0 163.0 0.0 0.2 2.0 2.0 7.0 2
50.0 1.0 3.0 129.0 196.0 0.0 0.0 163.0 0.0 0.0 1.0 0.0 3.0 1
42.0 1.0 3.0 120.0 240.0 1.0 0.0 194.0 0.0 0.8 3.0 0.0 7.0 1
56.0 0.0 4.0 134.0 409.0 0.0 2.0 150.0 1.0 1.9 2.0 2.0 7.0 2
41.0 1.0 4.0 110.0 172.0 0.0 2.0 158.0 0.0 0.0 1.0 0.0 7.0 2
42.0 0.0 4.0 102.0 265.0 0.0 2.0 122.0 0.0 0.6 2.0 0.0 3.0 1
53.0 1.0 3.0 130.0 246.0 1.0 2.0 173.0 0.0 0.0 1.0 3.0 3.0 1
43.0 1.0 3.0 130.0 315.0 0.0 0.0 162.0 0.0 1.9 1.0 1.0 3.0 1
56.0 1.0 4.0 132.0 184.0 0.0 2.0 105.0 1.0 2.1 2.0 1.0 6.0 2
52.0 1.0 4.0 108.0 233.0 1.0 0.0 147.0 0.0 0.1 1.0 3.0 7.0 1
62.0 0.0 4.0 140.0 394.0 0.0 2.0 157.0 0.0 1.2 2.0 0.0 3.0 1
70.0 1.0 3.0 160.0 269.0 0.0 0.0 112.0 1.0 2.9 2.0 1.0 7.0 2
54.0 1.0 4.0 140.0 239.0 0.0 0.0 160.0 0.0 1.2 1.0 0.0 3.0 1
70.0 1.0 4.0 145.0 174.0 0.0 0.0 125.0 1.0 2.6 3.0 0.0 7.0 2
54.0 1.0 2.0 108.0 309.0 0.0 0.0 156.0 0.0 0.0 1.0 0.0 7.0 1
35.0 1.0 4.0 126.0 282.0 0.0 2.0 156.0 1.0 0.0 1.0 0.0 7.0 2
48.0 1.0 3.0 124.0 255.0 1.0 0.0 175.0 0.0 0.0 1.0 2.0 3.0 1
55.0 0.0 2.0 135.0 250.0 0.0 2.0 161.0 0.0 1.4 2.0 0.0 3.0 1
58.0 0.0 4.0 100.0 248.0 0.0 2.0 122.0 0.0 1.0 2.0 0.0 3.0 1
54.0 0.0 3.0 110.0 214.0 0.0 0.0 158.0 0.0 1.6 2.0 0.0 3.0 1
69.0 0.0 1.0 140.0 239.0 0.0 0.0 151.0 0.0 1.8 1.0 2.0 3.0 1
77.0 1.0 4.0 125.0 304.0 0.0 2.0 162.0 1.0 0.0 1.0 3.0 3.0 2
68.0 1.0 3.0 118.0 277.0 0.0 0.0 151.0 0.0 1.0 1.0 1.0 7.0 1
58.0 1.0 4.0 125.0 300.0 0.0 2.0 171.0 0.0 0.0 1.0 2.0 7.0 2
60.0 1.0 4.0 125.0 258.0 0.0 2.0 141.0 1.0 2.8 2.0 1.0 7.0 2
51.0 1.0 4.0 140.0 299.0 0.0 0.0 173.0 1.0 1.6 1.0 0.0 7.0 2
55.0 1.0 4.0 160.0 289.0 0.0 2.0 145.0 1.0 0.8 2.0 1.0 7.0 2
52.0 1.0 1.0 152.0 298.0 1.0 0.0 178.0 0.0 1.2 2.0 0.0 7.0 1
60.0 0.0 3.0 102.0 318.0 0.0 0.0 160.0 0.0 0.0 1.0 1.0 3.0 1
58.0 1.0 3.0 105.0 240.0 0.0 2.0 154.0 1.0 0.6 2.0 0.0 7.0 1
64.0 1.0 3.0 125.0 309.0 0.0 0.0 131.0 1.0 1.8 2.0 0.0 7.0 2
37.0 1.0 3.0 130.0 250.0 0.0 0.0 187.0 0.0 3.5 3.0 0.0 3.0 1
59.0 1.0 1.0 170.0 288.0 0.0 2.0 159.0 0.0 0.2 2.0 0.0 7.0 2
51.0 1.0 3.0 125.0 245.0 1.0 2.0 166.0 0.0 2.4 2.0 0.0 3.0 1
43.0 0.0 3.0 122.0 213.0 0.0 0.0 165.0 0.0 0.2 2.0 0.0 3.0 1
58.0 1.0 4.0 128.0 216.0 0.0 2.0 131.0 1.0 2.2 2.0 3.0 7.0 2
29.0 1.0 2.0 130.0 204.0 0.0 2.0 202.0 0.0 0.0 1.0 0.0 3.0 1
41.0 0.0 2.0 130.0 204.0 0.0 2.0 172.0 0.0 1.4 1.0 0.0 3.0 1
63.0 0.0 3.0 135.0 252.0 0.0 2.0 172.0 0.0 0.0 1.0 0.0 3.0 1
51.0 1.0 3.0 94.0 227.0 0.0 0.0 154.0 1.0 0.0 1.0 1.0 7.0 1
54.0 1.0 3.0 120.0 258.0 0.0 2.0 147.0 0.0 0.4 2.0 0.0 7.0 1
44.0 1.0 2.0 120.0 220.0 0.0 0.0 170.0 0.0 0.0 1.0 0.0 3.0 1
54.0 1.0 4.0 110.0 239.0 0.0 0.0 126.0 1.0 2.8 2.0 1.0 7.0 2
65.0 1.0 4.0 135.0 254.0 0.0 2.0 127.0 0.0 2.8 2.0 1.0 7.0 2
57.0 1.0 3.0 150.0 168.0 0.0 0.0 174.0 0.0 1.6 1.0 0.0 3.0 1
63.0 1.0 4.0 130.0 330.0 1.0 2.0 132.0 1.0 1.8 1.0 3.0 7.0 2
35.0 0.0 4.0 138.0 183.0 0.0 0.0 182.0 0.0 1.4 1.0 0.0 3.0 1
41.0 1.0 2.0 135.0 203.0 0.0 0.0 132.0 0.0 0.0 2.0 0.0 6.0 1
62.0 0.0 3.0 130.0 263.0 0.0 0.0 97.0 0.0 1.2 2.0 1.0 7.0 2
43.0 0.0 4.0 132.0 341.0 1.0 2.0 136.0 1.0 3.0 2.0 0.0 7.0 2
58.0 0.0 1.0 150.0 283.0 1.0 2.0 162.0 0.0 1.0 1.0 0.0 3.0 1
52.0 1.0 1.0 118.0 186.0 0.0 2.0 190.0 0.0 0.0 2.0 0.0 6.0 1
61.0 0.0 4.0 145.0 307.0 0.0 2.0 146.0 1.0 1.0 2.0 0.0 7.0 2
39.0 1.0 4.0 118.0 219.0 0.0 0.0 140.0 0.0 1.2 2.0 0.0 7.0 2
45.0 1.0 4.0 115.0 260.0 0.0 2.0 185.0 0.0 0.0 1.0 0.0 3.0 1
52.0 1.0 4.0 128.0 255.0 0.0 0.0 161.0 1.0 0.0 1.0 1.0 7.0 2
62.0 1.0 3.0 130.0 231.0 0.0 0.0 146.0 0.0 1.8 2.0 3.0 7.0 1
62.0 0.0 4.0 160.0 164.0 0.0 2.0 145.0 0.0 6.2 3.0 3.0 7.0 2
53.0 0.0 4.0 138.0 234.0 0.0 2.0 160.0 0.0 0.0 1.0 0.0 3.0 1
43.0 1.0 4.0 120.0 177.0 0.0 2.0 120.0 1.0 2.5 2.0 0.0 7.0 2
47.0 1.0 3.0 138.0 257.0 0.0 2.0 156.0 0.0 0.0 1.0 0.0 3.0 1
52.0 1.0 2.0 120.0 325.0 0.0 0.0 172.0 0.0 0.2 1.0 0.0 3.0 1
68.0 1.0 3.0 180.0 274.0 1.0 2.0 150.0 1.0 1.6 2.0 0.0 7.0 2
39.0 1.0 3.0 140.0 321.0 0.0 2.0 182.0 0.0 0.0 1.0 0.0 3.0 1
53.0 0.0 4.0 130.0 264.0 0.0 2.0 143.0 0.0 0.4 2.0 0.0 3.0 1
62.0 0.0 4.0 140.0 268.0 0.0 2.0 160.0 0.0 3.6 3.0 2.0 3.0 2
51.0 0.0 3.0 140.0 308.0 0.0 2.0 142.0 0.0 1.5 1.0 1.0 3.0 1
60.0 1.0 4.0 130.0 253.0 0.0 0.0 144.0 1.0 1.4 1.0 1.0 7.0 2
65.0 1.0 4.0 110.0 248.0 0.0 2.0 158.0 0.0 0.6 1.0 2.0 6.0 2
65.0 0.0 3.0 155.0 269.0 0.0 0.0 148.0 0.0 0.8 1.0 0.0 3.0 1
60.0 1.0 3.0 140.0 185.0 0.0 2.0 155.0 0.0 3.0 2.0 0.0 3.0 2
60.0 1.0 4.0 145.0 282.0 0.0 2.0 142.0 1.0 2.8 2.0 2.0 7.0 2
54.0 1.0 4.0 120.0 188.0 0.0 0.0 113.0 0.0 1.4 2.0 1.0 7.0 2
44.0 1.0 2.0 130.0 219.0 0.0 2.0 188.0 0.0 0.0 1.0 0.0 3.0 1
44.0 1.0 4.0 112.0 290.0 0.0 2.0 153.0 0.0 0.0 1.0 1.0 3.0 2
51.0 1.0 3.0 110.0 175.0 0.0 0.0 123.0 0.0 0.6 1.0 0.0 3.0 1
59.0 1.0 3.0 150.0 212.0 1.0 0.0 157.0 0.0 1.6 1.0 0.0 3.0 1
71.0 0.0 2.0 160.0 302.0 0.0 0.0 162.0 0.0 0.4 1.0 2.0 3.0 1
61.0 1.0 3.0 150.0 243.0 1.0 0.0 137.0 1.0 1.0 2.0 0.0 3.0 1
55.0 1.0 4.0 132.0 353.0 0.0 0.0 132.0 1.0 1.2 2.0 1.0 7.0 2
64.0 1.0 3.0 140.0 335.0 0.0 0.0 158.0 0.0 0.0 1.0 0.0 3.0 2
43.0 1.0 4.0 150.0 247.0 0.0 0.0 171.0 0.0 1.5 1.0 0.0 3.0 1
58.0 0.0 3.0 120.0 340.0 0.0 0.0 172.0 0.0 0.0 1.0 0.0 3.0 1
60.0 1.0 4.0 130.0 206.0 0.0 2.0 132.0 1.0 2.4 2.0 2.0 7.0 2
58.0 1.0 2.0 120.0 284.0 0.0 2.0 160.0 0.0 1.8 2.0 0.0 3.0 2
49.0 1.0 2.0 130.0 266.0 0.0 0.0 171.0 0.0 0.6 1.0 0.0 3.0 1
48.0 1.0 2.0 110.0 229.0 0.0 0.0 168.0 0.0 1.0 3.0 0.0 7.0 2
52.0 1.0 3.0 172.0 199.0 1.0 0.0 162.0 0.0 0.5 1.0 0.0 7.0 1
44.0 1.0 2.0 120.0 263.0 0.0 0.0 173.0 0.0 0.0 1.0 0.0 7.0 1
56.0 0.0 2.0 140.0 294.0 0.0 2.0 153.0 0.0 1.3 2.0 0.0 3.0 1
57.0 1.0 4.0 140.0 192.0 0.0 0.0 148.0 0.0 0.4 2.0 0.0 6.0 1
67.0 1.0 4.0 160.0 286.0 0.0 2.0 108.0 1.0 1.5 2.0 3.0 3.0 2
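The rows above form a whitespace-separated classification table: 13 numeric attributes per row plus a 1/2 class label in the last column. A minimal sketch of how such a table could be loaded with numpy; the filename heart.dat is a hypothetical stand-in for wherever the file lives in the repository:

import numpy as np

# Minimal sketch: assumes whitespace-separated rows with the class label
# (1 or 2) in the last column; 'heart.dat' is a hypothetical filename.
data = np.loadtxt('heart.dat')
X = data[:, :-1]                 # 13 numeric attributes per row
y = data[:, -1].astype(int)      # class labels, 1 or 2
print(X.shape, np.unique(y))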

File diff suppressed because it is too large

@ -1 +0,0 @@
this otherwise empty directory is for storing mocap data, which we don't distribute

@ -1,209 +0,0 @@
0.0200,0.0371,0.0428,0.0207,0.0954,0.0986,0.1539,0.1601,0.3109,0.2111,0.1609,0.1582,0.2238,0.0645,0.0660,0.2273,0.3100,0.2999,0.5078,0.4797,0.5783,0.5071,0.4328,0.5550,0.6711,0.6415,0.7104,0.8080,0.6791,0.3857,0.1307,0.2604,0.5121,0.7547,0.8537,0.8507,0.6692,0.6097,0.4943,0.2744,0.0510,0.2834,0.2825,0.4256,0.2641,0.1386,0.1051,0.1343,0.0383,0.0324,0.0232,0.0027,0.0065,0.0159,0.0072,0.0167,0.0180,0.0084,0.0090,0.0032,1
0.0453,0.0523,0.0843,0.0689,0.1183,0.2583,0.2156,0.3481,0.3337,0.2872,0.4918,0.6552,0.6919,0.7797,0.7464,0.9444,1.0000,0.8874,0.8024,0.7818,0.5212,0.4052,0.3957,0.3914,0.3250,0.3200,0.3271,0.2767,0.4423,0.2028,0.3788,0.2947,0.1984,0.2341,0.1306,0.4182,0.3835,0.1057,0.1840,0.1970,0.1674,0.0583,0.1401,0.1628,0.0621,0.0203,0.0530,0.0742,0.0409,0.0061,0.0125,0.0084,0.0089,0.0048,0.0094,0.0191,0.0140,0.0049,0.0052,0.0044,1
0.0262,0.0582,0.1099,0.1083,0.0974,0.2280,0.2431,0.3771,0.5598,0.6194,0.6333,0.7060,0.5544,0.5320,0.6479,0.6931,0.6759,0.7551,0.8929,0.8619,0.7974,0.6737,0.4293,0.3648,0.5331,0.2413,0.5070,0.8533,0.6036,0.8514,0.8512,0.5045,0.1862,0.2709,0.4232,0.3043,0.6116,0.6756,0.5375,0.4719,0.4647,0.2587,0.2129,0.2222,0.2111,0.0176,0.1348,0.0744,0.0130,0.0106,0.0033,0.0232,0.0166,0.0095,0.0180,0.0244,0.0316,0.0164,0.0095,0.0078,1
0.0100,0.0171,0.0623,0.0205,0.0205,0.0368,0.1098,0.1276,0.0598,0.1264,0.0881,0.1992,0.0184,0.2261,0.1729,0.2131,0.0693,0.2281,0.4060,0.3973,0.2741,0.3690,0.5556,0.4846,0.3140,0.5334,0.5256,0.2520,0.2090,0.3559,0.6260,0.7340,0.6120,0.3497,0.3953,0.3012,0.5408,0.8814,0.9857,0.9167,0.6121,0.5006,0.3210,0.3202,0.4295,0.3654,0.2655,0.1576,0.0681,0.0294,0.0241,0.0121,0.0036,0.0150,0.0085,0.0073,0.0050,0.0044,0.0040,0.0117,1
0.0762,0.0666,0.0481,0.0394,0.0590,0.0649,0.1209,0.2467,0.3564,0.4459,0.4152,0.3952,0.4256,0.4135,0.4528,0.5326,0.7306,0.6193,0.2032,0.4636,0.4148,0.4292,0.5730,0.5399,0.3161,0.2285,0.6995,1.0000,0.7262,0.4724,0.5103,0.5459,0.2881,0.0981,0.1951,0.4181,0.4604,0.3217,0.2828,0.2430,0.1979,0.2444,0.1847,0.0841,0.0692,0.0528,0.0357,0.0085,0.0230,0.0046,0.0156,0.0031,0.0054,0.0105,0.0110,0.0015,0.0072,0.0048,0.0107,0.0094,1
0.0286,0.0453,0.0277,0.0174,0.0384,0.0990,0.1201,0.1833,0.2105,0.3039,0.2988,0.4250,0.6343,0.8198,1.0000,0.9988,0.9508,0.9025,0.7234,0.5122,0.2074,0.3985,0.5890,0.2872,0.2043,0.5782,0.5389,0.3750,0.3411,0.5067,0.5580,0.4778,0.3299,0.2198,0.1407,0.2856,0.3807,0.4158,0.4054,0.3296,0.2707,0.2650,0.0723,0.1238,0.1192,0.1089,0.0623,0.0494,0.0264,0.0081,0.0104,0.0045,0.0014,0.0038,0.0013,0.0089,0.0057,0.0027,0.0051,0.0062,1
0.0317,0.0956,0.1321,0.1408,0.1674,0.1710,0.0731,0.1401,0.2083,0.3513,0.1786,0.0658,0.0513,0.3752,0.5419,0.5440,0.5150,0.4262,0.2024,0.4233,0.7723,0.9735,0.9390,0.5559,0.5268,0.6826,0.5713,0.5429,0.2177,0.2149,0.5811,0.6323,0.2965,0.1873,0.2969,0.5163,0.6153,0.4283,0.5479,0.6133,0.5017,0.2377,0.1957,0.1749,0.1304,0.0597,0.1124,0.1047,0.0507,0.0159,0.0195,0.0201,0.0248,0.0131,0.0070,0.0138,0.0092,0.0143,0.0036,0.0103,1
0.0519,0.0548,0.0842,0.0319,0.1158,0.0922,0.1027,0.0613,0.1465,0.2838,0.2802,0.3086,0.2657,0.3801,0.5626,0.4376,0.2617,0.1199,0.6676,0.9402,0.7832,0.5352,0.6809,0.9174,0.7613,0.8220,0.8872,0.6091,0.2967,0.1103,0.1318,0.0624,0.0990,0.4006,0.3666,0.1050,0.1915,0.3930,0.4288,0.2546,0.1151,0.2196,0.1879,0.1437,0.2146,0.2360,0.1125,0.0254,0.0285,0.0178,0.0052,0.0081,0.0120,0.0045,0.0121,0.0097,0.0085,0.0047,0.0048,0.0053,1
0.0223,0.0375,0.0484,0.0475,0.0647,0.0591,0.0753,0.0098,0.0684,0.1487,0.1156,0.1654,0.3833,0.3598,0.1713,0.1136,0.0349,0.3796,0.7401,0.9925,0.9802,0.8890,0.6712,0.4286,0.3374,0.7366,0.9611,0.7353,0.4856,0.1594,0.3007,0.4096,0.3170,0.3305,0.3408,0.2186,0.2463,0.2726,0.1680,0.2792,0.2558,0.1740,0.2121,0.1099,0.0985,0.1271,0.1459,0.1164,0.0777,0.0439,0.0061,0.0145,0.0128,0.0145,0.0058,0.0049,0.0065,0.0093,0.0059,0.0022,1
0.0164,0.0173,0.0347,0.0070,0.0187,0.0671,0.1056,0.0697,0.0962,0.0251,0.0801,0.1056,0.1266,0.0890,0.0198,0.1133,0.2826,0.3234,0.3238,0.4333,0.6068,0.7652,0.9203,0.9719,0.9207,0.7545,0.8289,0.8907,0.7309,0.6896,0.5829,0.4935,0.3101,0.0306,0.0244,0.1108,0.1594,0.1371,0.0696,0.0452,0.0620,0.1421,0.1597,0.1384,0.0372,0.0688,0.0867,0.0513,0.0092,0.0198,0.0118,0.0090,0.0223,0.0179,0.0084,0.0068,0.0032,0.0035,0.0056,0.0040,1
0.0039,0.0063,0.0152,0.0336,0.0310,0.0284,0.0396,0.0272,0.0323,0.0452,0.0492,0.0996,0.1424,0.1194,0.0628,0.0907,0.1177,0.1429,0.1223,0.1104,0.1847,0.3715,0.4382,0.5707,0.6654,0.7476,0.7654,0.8555,0.9720,0.9221,0.7502,0.7209,0.7757,0.6055,0.5021,0.4499,0.3947,0.4281,0.4427,0.3749,0.1972,0.0511,0.0793,0.1269,0.1533,0.0690,0.0402,0.0534,0.0228,0.0073,0.0062,0.0062,0.0120,0.0052,0.0056,0.0093,0.0042,0.0003,0.0053,0.0036,1
0.0123,0.0309,0.0169,0.0313,0.0358,0.0102,0.0182,0.0579,0.1122,0.0835,0.0548,0.0847,0.2026,0.2557,0.1870,0.2032,0.1463,0.2849,0.5824,0.7728,0.7852,0.8515,0.5312,0.3653,0.5973,0.8275,1.0000,0.8673,0.6301,0.4591,0.3940,0.2576,0.2817,0.2641,0.2757,0.2698,0.3994,0.4576,0.3940,0.2522,0.1782,0.1354,0.0516,0.0337,0.0894,0.0861,0.0872,0.0445,0.0134,0.0217,0.0188,0.0133,0.0265,0.0224,0.0074,0.0118,0.0026,0.0092,0.0009,0.0044,1
0.0079,0.0086,0.0055,0.0250,0.0344,0.0546,0.0528,0.0958,0.1009,0.1240,0.1097,0.1215,0.1874,0.3383,0.3227,0.2723,0.3943,0.6432,0.7271,0.8673,0.9674,0.9847,0.9480,0.8036,0.6833,0.5136,0.3090,0.0832,0.4019,0.2344,0.1905,0.1235,0.1717,0.2351,0.2489,0.3649,0.3382,0.1589,0.0989,0.1089,0.1043,0.0839,0.1391,0.0819,0.0678,0.0663,0.1202,0.0692,0.0152,0.0266,0.0174,0.0176,0.0127,0.0088,0.0098,0.0019,0.0059,0.0058,0.0059,0.0032,1
0.0090,0.0062,0.0253,0.0489,0.1197,0.1589,0.1392,0.0987,0.0955,0.1895,0.1896,0.2547,0.4073,0.2988,0.2901,0.5326,0.4022,0.1571,0.3024,0.3907,0.3542,0.4438,0.6414,0.4601,0.6009,0.8690,0.8345,0.7669,0.5081,0.4620,0.5380,0.5375,0.3844,0.3601,0.7402,0.7761,0.3858,0.0667,0.3684,0.6114,0.3510,0.2312,0.2195,0.3051,0.1937,0.1570,0.0479,0.0538,0.0146,0.0068,0.0187,0.0059,0.0095,0.0194,0.0080,0.0152,0.0158,0.0053,0.0189,0.0102,1
0.0124,0.0433,0.0604,0.0449,0.0597,0.0355,0.0531,0.0343,0.1052,0.2120,0.1640,0.1901,0.3026,0.2019,0.0592,0.2390,0.3657,0.3809,0.5929,0.6299,0.5801,0.4574,0.4449,0.3691,0.6446,0.8940,0.8978,0.4980,0.3333,0.2350,0.1553,0.3666,0.4340,0.3082,0.3024,0.4109,0.5501,0.4129,0.5499,0.5018,0.3132,0.2802,0.2351,0.2298,0.1155,0.0724,0.0621,0.0318,0.0450,0.0167,0.0078,0.0083,0.0057,0.0174,0.0188,0.0054,0.0114,0.0196,0.0147,0.0062,1
0.0298,0.0615,0.0650,0.0921,0.1615,0.2294,0.2176,0.2033,0.1459,0.0852,0.2476,0.3645,0.2777,0.2826,0.3237,0.4335,0.5638,0.4555,0.4348,0.6433,0.3932,0.1989,0.3540,0.9165,0.9371,0.4620,0.2771,0.6613,0.8028,0.4200,0.5192,0.6962,0.5792,0.8889,0.7863,0.7133,0.7615,0.4401,0.3009,0.3163,0.2809,0.2898,0.0526,0.1867,0.1553,0.1633,0.1252,0.0748,0.0452,0.0064,0.0154,0.0031,0.0153,0.0071,0.0212,0.0076,0.0152,0.0049,0.0200,0.0073,1
0.0352,0.0116,0.0191,0.0469,0.0737,0.1185,0.1683,0.1541,0.1466,0.2912,0.2328,0.2237,0.2470,0.1560,0.3491,0.3308,0.2299,0.2203,0.2493,0.4128,0.3158,0.6191,0.5854,0.3395,0.2561,0.5599,0.8145,0.6941,0.6985,0.8660,0.5930,0.3664,0.6750,0.8697,0.7837,0.7552,0.5789,0.4713,0.1252,0.6087,0.7322,0.5977,0.3431,0.1803,0.2378,0.3424,0.2303,0.0689,0.0216,0.0469,0.0426,0.0346,0.0158,0.0154,0.0109,0.0048,0.0095,0.0015,0.0073,0.0067,1
0.0192,0.0607,0.0378,0.0774,0.1388,0.0809,0.0568,0.0219,0.1037,0.1186,0.1237,0.1601,0.3520,0.4479,0.3769,0.5761,0.6426,0.6790,0.7157,0.5466,0.5399,0.6362,0.7849,0.7756,0.5780,0.4862,0.4181,0.2457,0.0716,0.0613,0.1816,0.4493,0.5976,0.3785,0.2495,0.5771,0.8852,0.8409,0.3570,0.3133,0.6096,0.6378,0.2709,0.1419,0.1260,0.1288,0.0790,0.0829,0.0520,0.0216,0.0360,0.0331,0.0131,0.0120,0.0108,0.0024,0.0045,0.0037,0.0112,0.0075,1
0.0270,0.0092,0.0145,0.0278,0.0412,0.0757,0.1026,0.1138,0.0794,0.1520,0.1675,0.1370,0.1361,0.1345,0.2144,0.5354,0.6830,0.5600,0.3093,0.3226,0.4430,0.5573,0.5782,0.6173,0.8132,0.9819,0.9823,0.9166,0.7423,0.7736,0.8473,0.7352,0.6671,0.6083,0.6239,0.5972,0.5715,0.5242,0.2924,0.1536,0.2003,0.2031,0.2207,0.1778,0.1353,0.1373,0.0749,0.0472,0.0325,0.0179,0.0045,0.0084,0.0010,0.0018,0.0068,0.0039,0.0120,0.0132,0.0070,0.0088,1
0.0126,0.0149,0.0641,0.1732,0.2565,0.2559,0.2947,0.4110,0.4983,0.5920,0.5832,0.5419,0.5472,0.5314,0.4981,0.6985,0.8292,0.7839,0.8215,0.9363,1.0000,0.9224,0.7839,0.5470,0.4562,0.5922,0.5448,0.3971,0.0882,0.2385,0.2005,0.0587,0.2544,0.2009,0.0329,0.1547,0.1212,0.2446,0.3171,0.3195,0.3051,0.0836,0.1266,0.1381,0.1136,0.0516,0.0073,0.0278,0.0372,0.0121,0.0153,0.0092,0.0035,0.0098,0.0121,0.0006,0.0181,0.0094,0.0116,0.0063,1
0.0473,0.0509,0.0819,0.1252,0.1783,0.3070,0.3008,0.2362,0.3830,0.3759,0.3021,0.2909,0.2301,0.1411,0.1582,0.2430,0.4474,0.5964,0.6744,0.7969,0.8319,0.7813,0.8626,0.7369,0.4122,0.2596,0.3392,0.3788,0.4488,0.6281,0.7449,0.7328,0.7704,0.7870,0.6048,0.5860,0.6385,0.7279,0.6286,0.5316,0.4069,0.1791,0.1625,0.2527,0.1903,0.1643,0.0604,0.0209,0.0436,0.0175,0.0107,0.0193,0.0118,0.0064,0.0042,0.0054,0.0049,0.0082,0.0028,0.0027,1
0.0664,0.0575,0.0842,0.0372,0.0458,0.0771,0.0771,0.1130,0.2353,0.1838,0.2869,0.4129,0.3647,0.1984,0.2840,0.4039,0.5837,0.6792,0.6086,0.4858,0.3246,0.2013,0.2082,0.1686,0.2484,0.2736,0.2984,0.4655,0.6990,0.7474,0.7956,0.7981,0.6715,0.6942,0.7440,0.8169,0.8912,1.0000,0.8753,0.7061,0.6803,0.5898,0.4618,0.3639,0.1492,0.1216,0.1306,0.1198,0.0578,0.0235,0.0135,0.0141,0.0190,0.0043,0.0036,0.0026,0.0024,0.0162,0.0109,0.0079,1
0.0099,0.0484,0.0299,0.0297,0.0652,0.1077,0.2363,0.2385,0.0075,0.1882,0.1456,0.1892,0.3176,0.1340,0.2169,0.2458,0.2589,0.2786,0.2298,0.0656,0.1441,0.1179,0.1668,0.1783,0.2476,0.2570,0.1036,0.5356,0.7124,0.6291,0.4756,0.6015,0.7208,0.6234,0.5725,0.7523,0.8712,0.9252,0.9709,0.9297,0.8995,0.7911,0.5600,0.2838,0.4407,0.5507,0.4331,0.2905,0.1981,0.0779,0.0396,0.0173,0.0149,0.0115,0.0202,0.0139,0.0029,0.0160,0.0106,0.0134,1
0.0115,0.0150,0.0136,0.0076,0.0211,0.1058,0.1023,0.0440,0.0931,0.0734,0.0740,0.0622,0.1055,0.1183,0.1721,0.2584,0.3232,0.3817,0.4243,0.4217,0.4449,0.4075,0.3306,0.4012,0.4466,0.5218,0.7552,0.9503,1.0000,0.9084,0.8283,0.7571,0.7262,0.6152,0.5680,0.5757,0.5324,0.3672,0.1669,0.0866,0.0646,0.1891,0.2683,0.2887,0.2341,0.1668,0.1015,0.1195,0.0704,0.0167,0.0107,0.0091,0.0016,0.0084,0.0064,0.0026,0.0029,0.0037,0.0070,0.0041,1
0.0293,0.0644,0.0390,0.0173,0.0476,0.0816,0.0993,0.0315,0.0736,0.0860,0.0414,0.0472,0.0835,0.0938,0.1466,0.0809,0.1179,0.2179,0.3326,0.3258,0.2111,0.2302,0.3361,0.4259,0.4609,0.2606,0.0874,0.2862,0.5606,0.8344,0.8096,0.7250,0.8048,0.9435,1.0000,0.8960,0.5516,0.3037,0.2338,0.2382,0.3318,0.3821,0.1575,0.2228,0.1582,0.1433,0.1634,0.1133,0.0567,0.0133,0.0170,0.0035,0.0052,0.0083,0.0078,0.0075,0.0105,0.0160,0.0095,0.0011,1
0.0201,0.0026,0.0138,0.0062,0.0133,0.0151,0.0541,0.0210,0.0505,0.1097,0.0841,0.0942,0.1204,0.0420,0.0031,0.0162,0.0624,0.2127,0.3436,0.3813,0.3825,0.4764,0.6313,0.7523,0.8675,0.8788,0.7901,0.8357,0.9631,0.9619,0.9236,0.8903,0.9708,0.9647,0.7892,0.5307,0.2718,0.1953,0.1374,0.3105,0.3790,0.4105,0.3355,0.2998,0.2748,0.2024,0.1043,0.0453,0.0337,0.0122,0.0072,0.0108,0.0070,0.0063,0.0030,0.0011,0.0007,0.0024,0.0057,0.0044,1
0.0151,0.0320,0.0599,0.1050,0.1163,0.1734,0.1679,0.1119,0.0889,0.1205,0.0847,0.1518,0.2305,0.2793,0.3404,0.4527,0.6950,0.8807,0.9154,0.7542,0.6736,0.7146,0.8335,0.7701,0.6993,0.6543,0.5040,0.4926,0.4992,0.4161,0.1631,0.0404,0.0637,0.2962,0.3609,0.1866,0.0476,0.1497,0.2405,0.1980,0.3175,0.2379,0.1716,0.1559,0.1556,0.0422,0.0493,0.0476,0.0219,0.0059,0.0086,0.0061,0.0015,0.0084,0.0128,0.0054,0.0011,0.0019,0.0023,0.0062,1
0.0177,0.0300,0.0288,0.0394,0.0630,0.0526,0.0688,0.0633,0.0624,0.0613,0.1680,0.3476,0.4561,0.5188,0.6308,0.7201,0.5153,0.3818,0.2644,0.3345,0.4865,0.6628,0.7389,0.9213,1.0000,0.7750,0.5593,0.6172,0.8635,0.6592,0.4770,0.4983,0.3330,0.3076,0.2876,0.2226,0.0794,0.0603,0.1049,0.0606,0.1530,0.0983,0.1643,0.1901,0.1107,0.1917,0.1467,0.0392,0.0356,0.0270,0.0168,0.0102,0.0122,0.0044,0.0075,0.0124,0.0099,0.0057,0.0032,0.0019,1
0.0100,0.0275,0.0190,0.0371,0.0416,0.0201,0.0314,0.0651,0.1896,0.2668,0.3376,0.3282,0.2432,0.1268,0.1278,0.4441,0.6795,0.7051,0.7966,0.9401,0.9857,0.8193,0.5789,0.6394,0.7043,0.6875,0.4081,0.1811,0.2064,0.3917,0.3791,0.2042,0.2227,0.3341,0.3984,0.5077,0.5534,0.3352,0.2723,0.2278,0.2044,0.1986,0.0835,0.0908,0.1380,0.1948,0.1211,0.0843,0.0589,0.0247,0.0118,0.0088,0.0104,0.0036,0.0088,0.0047,0.0117,0.0020,0.0091,0.0058,1
0.0189,0.0308,0.0197,0.0622,0.0080,0.0789,0.1440,0.1451,0.1789,0.2522,0.2607,0.3710,0.3906,0.2672,0.2716,0.4183,0.6988,0.5733,0.2226,0.2631,0.7473,0.7263,0.3393,0.2824,0.6053,0.5897,0.4967,0.8616,0.8339,0.4084,0.2268,0.1745,0.0507,0.1588,0.3040,0.1369,0.1605,0.2061,0.0734,0.0202,0.1638,0.1583,0.1830,0.1886,0.1008,0.0663,0.0183,0.0404,0.0108,0.0143,0.0091,0.0038,0.0096,0.0142,0.0190,0.0140,0.0099,0.0092,0.0052,0.0075,1
0.0240,0.0218,0.0324,0.0569,0.0330,0.0513,0.0897,0.0713,0.0569,0.0389,0.1934,0.2434,0.2906,0.2606,0.3811,0.4997,0.3015,0.3655,0.6791,0.7307,0.5053,0.4441,0.6987,0.8133,0.7781,0.8943,0.8929,0.8913,0.8610,0.8063,0.5540,0.2446,0.3459,0.1615,0.2467,0.5564,0.4681,0.0979,0.1582,0.0751,0.3321,0.3745,0.2666,0.1078,0.1418,0.1687,0.0738,0.0634,0.0144,0.0226,0.0061,0.0162,0.0146,0.0093,0.0112,0.0094,0.0054,0.0019,0.0066,0.0023,1
0.0084,0.0153,0.0291,0.0432,0.0951,0.0752,0.0414,0.0259,0.0692,0.1753,0.1970,0.1167,0.1683,0.0814,0.2179,0.5121,0.7231,0.7776,0.6222,0.3501,0.3733,0.2622,0.3776,0.7361,0.8673,0.8223,0.7772,0.7862,0.5652,0.3635,0.3534,0.3865,0.3370,0.1693,0.2627,0.3195,0.1388,0.1048,0.1681,0.1910,0.1174,0.0933,0.0856,0.0951,0.0986,0.0956,0.0426,0.0407,0.0106,0.0179,0.0056,0.0236,0.0114,0.0136,0.0117,0.0060,0.0058,0.0031,0.0072,0.0045,1
0.0195,0.0213,0.0058,0.0190,0.0319,0.0571,0.1004,0.0668,0.0691,0.0242,0.0728,0.0639,0.3002,0.3854,0.4767,0.4602,0.3175,0.4160,0.6428,1.0000,0.8631,0.5212,0.3156,0.5952,0.7732,0.6042,0.4375,0.5487,0.4720,0.6235,0.3851,0.1590,0.3891,0.5294,0.3504,0.4480,0.4041,0.5031,0.6475,0.5493,0.3548,0.2028,0.1882,0.0845,0.1315,0.1590,0.0562,0.0617,0.0343,0.0370,0.0261,0.0157,0.0074,0.0271,0.0203,0.0089,0.0095,0.0095,0.0021,0.0053,1
0.0442,0.0477,0.0049,0.0581,0.0278,0.0678,0.1664,0.1490,0.0974,0.1268,0.1109,0.2375,0.2007,0.2140,0.1109,0.2036,0.2468,0.6682,0.8345,0.8252,0.8017,0.8982,0.9664,0.8515,0.6626,0.3241,0.2054,0.5669,0.5726,0.4877,0.7532,0.7600,0.5185,0.4120,0.5560,0.5569,0.1336,0.3831,0.4611,0.4330,0.2556,0.1466,0.3489,0.2659,0.0944,0.1370,0.1344,0.0416,0.0719,0.0637,0.0210,0.0204,0.0216,0.0135,0.0055,0.0073,0.0080,0.0105,0.0059,0.0105,1
0.0311,0.0491,0.0692,0.0831,0.0079,0.0200,0.0981,0.1016,0.2025,0.0767,0.1767,0.2555,0.2812,0.2722,0.3227,0.3463,0.5395,0.7911,0.9064,0.8701,0.7672,0.2957,0.4148,0.6043,0.3178,0.3482,0.6158,0.8049,0.6289,0.4999,0.5830,0.6660,0.4124,0.1260,0.2487,0.4676,0.5382,0.3150,0.2139,0.1848,0.1679,0.2328,0.1015,0.0713,0.0615,0.0779,0.0761,0.0845,0.0592,0.0068,0.0089,0.0087,0.0032,0.0130,0.0188,0.0101,0.0229,0.0182,0.0046,0.0038,1
0.0206,0.0132,0.0533,0.0569,0.0647,0.1432,0.1344,0.2041,0.1571,0.1573,0.2327,0.1785,0.1507,0.1916,0.2061,0.2307,0.2360,0.1299,0.3812,0.5858,0.4497,0.4876,1.0000,0.8675,0.4718,0.5341,0.6197,0.7143,0.5605,0.3728,0.2481,0.1921,0.1386,0.3325,0.2883,0.3228,0.2607,0.2040,0.2396,0.1319,0.0683,0.0334,0.0716,0.0976,0.0787,0.0522,0.0500,0.0231,0.0221,0.0144,0.0307,0.0386,0.0147,0.0018,0.0100,0.0096,0.0077,0.0180,0.0109,0.0070,1
0.0094,0.0166,0.0398,0.0359,0.0681,0.0706,0.1020,0.0893,0.0381,0.1328,0.1303,0.0273,0.0644,0.0712,0.1204,0.0717,0.1224,0.2349,0.3684,0.3918,0.4925,0.8793,0.9606,0.8786,0.6905,0.6937,0.5674,0.6540,0.7802,0.7575,0.5836,0.6316,0.8108,0.9039,0.8647,0.6695,0.4027,0.2370,0.2685,0.3662,0.3267,0.2200,0.2996,0.2205,0.1163,0.0635,0.0465,0.0422,0.0174,0.0172,0.0134,0.0141,0.0191,0.0145,0.0065,0.0129,0.0217,0.0087,0.0077,0.0122,1
0.0333,0.0221,0.0270,0.0481,0.0679,0.0981,0.0843,0.1172,0.0759,0.0920,0.1475,0.0522,0.1119,0.0970,0.1174,0.1678,0.1642,0.1205,0.0494,0.1544,0.3485,0.6146,0.9146,0.9364,0.8677,0.8772,0.8553,0.8833,1.0000,0.8296,0.6601,0.5499,0.5716,0.6859,0.6825,0.5142,0.2750,0.1358,0.1551,0.2646,0.1994,0.1883,0.2746,0.1651,0.0575,0.0695,0.0598,0.0456,0.0021,0.0068,0.0036,0.0022,0.0032,0.0060,0.0054,0.0063,0.0143,0.0132,0.0051,0.0041,1
0.0123,0.0022,0.0196,0.0206,0.0180,0.0492,0.0033,0.0398,0.0791,0.0475,0.1152,0.0520,0.1192,0.1943,0.1840,0.2077,0.1956,0.1630,0.1218,0.1017,0.1354,0.3157,0.4645,0.5906,0.6776,0.8119,0.8594,0.9228,0.8387,0.7238,0.6292,0.5181,0.4629,0.5255,0.5147,0.3929,0.1279,0.0411,0.0859,0.1131,0.1306,0.1757,0.2648,0.1955,0.0656,0.0580,0.0319,0.0301,0.0272,0.0074,0.0149,0.0125,0.0134,0.0026,0.0038,0.0018,0.0113,0.0058,0.0047,0.0071,1
0.0091,0.0213,0.0206,0.0505,0.0657,0.0795,0.0970,0.0872,0.0743,0.0837,0.1579,0.0898,0.0309,0.1856,0.2969,0.2032,0.1264,0.1655,0.1661,0.2091,0.2310,0.4460,0.6634,0.6933,0.7663,0.8206,0.7049,0.7560,0.7466,0.6387,0.4846,0.3328,0.5356,0.8741,0.8573,0.6718,0.3446,0.3150,0.2702,0.2598,0.2742,0.3594,0.4382,0.2460,0.0758,0.0187,0.0797,0.0748,0.0367,0.0155,0.0300,0.0112,0.0112,0.0102,0.0026,0.0097,0.0098,0.0043,0.0071,0.0108,1
0.0068,0.0232,0.0513,0.0444,0.0249,0.0637,0.0422,0.1130,0.1911,0.2475,0.1606,0.0922,0.2398,0.3220,0.4295,0.2652,0.0666,0.1442,0.2373,0.2595,0.2493,0.3903,0.6384,0.8037,0.7026,0.6874,0.6997,0.8558,1.0000,0.9621,0.8996,0.7575,0.6902,0.5686,0.4396,0.4546,0.2959,0.1587,0.1681,0.0842,0.1173,0.1754,0.2728,0.1705,0.0194,0.0213,0.0354,0.0420,0.0093,0.0204,0.0199,0.0173,0.0163,0.0055,0.0045,0.0068,0.0041,0.0052,0.0194,0.0105,1
0.0093,0.0185,0.0056,0.0064,0.0260,0.0458,0.0470,0.0057,0.0425,0.0640,0.0888,0.1599,0.1541,0.2768,0.2176,0.2799,0.3491,0.2824,0.2479,0.3005,0.4300,0.4684,0.4520,0.5026,0.6217,0.6571,0.6632,0.7321,0.8534,1.0000,0.8448,0.6354,0.6308,0.6211,0.6976,0.5868,0.4889,0.3683,0.2043,0.1469,0.2220,0.1449,0.1490,0.1211,0.1144,0.0791,0.0365,0.0152,0.0085,0.0120,0.0022,0.0069,0.0064,0.0129,0.0114,0.0054,0.0089,0.0050,0.0058,0.0025,1
0.0211,0.0319,0.0415,0.0286,0.0121,0.0438,0.1299,0.1390,0.0695,0.0568,0.0869,0.1935,0.1478,0.1871,0.1994,0.3283,0.6861,0.5814,0.2500,0.1734,0.3363,0.5588,0.6592,0.7012,0.8099,0.8901,0.8745,0.7887,0.8725,0.9376,0.8920,0.7508,0.6832,0.7610,0.9017,1.0000,0.9123,0.7388,0.5915,0.4057,0.3019,0.2331,0.2931,0.2298,0.2391,0.1910,0.1096,0.0300,0.0171,0.0383,0.0053,0.0090,0.0042,0.0153,0.0106,0.0020,0.0105,0.0049,0.0070,0.0080,1
0.0093,0.0269,0.0217,0.0339,0.0305,0.1172,0.1450,0.0638,0.0740,0.1360,0.2132,0.3738,0.3738,0.2673,0.2333,0.5367,0.7312,0.7659,0.6271,0.4395,0.4330,0.4326,0.5544,0.7360,0.8589,0.8989,0.9420,0.9401,0.9379,0.8575,0.7284,0.6700,0.7547,0.8773,0.9919,0.9922,0.9419,0.8388,0.6605,0.4816,0.2917,0.1769,0.1136,0.0701,0.1578,0.1938,0.1106,0.0693,0.0176,0.0205,0.0309,0.0212,0.0091,0.0056,0.0086,0.0092,0.0070,0.0116,0.0060,0.0110,1
0.0257,0.0447,0.0388,0.0239,0.1315,0.1323,0.1608,0.2145,0.0847,0.0561,0.0891,0.0861,0.1531,0.1524,0.1849,0.2871,0.2009,0.2748,0.5017,0.2172,0.4978,0.5265,0.3647,0.5768,0.5161,0.5715,0.4006,0.3650,0.6685,0.8659,0.8052,0.4082,0.3379,0.5092,0.6776,0.7313,0.6062,0.7040,0.8849,0.8979,0.7751,0.7247,0.7733,0.7762,0.6009,0.4514,0.3096,0.1859,0.0956,0.0206,0.0206,0.0096,0.0153,0.0096,0.0131,0.0198,0.0025,0.0199,0.0255,0.0180,1
0.0408,0.0653,0.0397,0.0604,0.0496,0.1817,0.1178,0.1024,0.0583,0.2176,0.2459,0.3332,0.3087,0.2613,0.3232,0.3731,0.4203,0.5364,0.7062,0.8196,0.8835,0.8299,0.7609,0.7605,0.8367,0.8905,0.7652,0.5897,0.3037,0.0823,0.2787,0.7241,0.8032,0.8050,0.7676,0.7468,0.6253,0.1730,0.2916,0.5003,0.5220,0.4824,0.4004,0.3877,0.1651,0.0442,0.0663,0.0418,0.0475,0.0235,0.0066,0.0062,0.0129,0.0184,0.0069,0.0198,0.0199,0.0102,0.0070,0.0055,1
0.0308,0.0339,0.0202,0.0889,0.1570,0.1750,0.0920,0.1353,0.1593,0.2795,0.3336,0.2940,0.1608,0.3335,0.4985,0.7295,0.7350,0.8253,0.8793,0.9657,1.0000,0.8707,0.6471,0.5973,0.8218,0.7755,0.6111,0.4195,0.2990,0.1354,0.2438,0.5624,0.5555,0.6963,0.7298,0.7022,0.5468,0.1421,0.4738,0.6410,0.4375,0.3178,0.2377,0.2808,0.1374,0.1136,0.1034,0.0688,0.0422,0.0117,0.0070,0.0167,0.0127,0.0138,0.0090,0.0051,0.0029,0.0122,0.0056,0.0020,1
0.0373,0.0281,0.0232,0.0225,0.0179,0.0733,0.0841,0.1031,0.0993,0.0802,0.1564,0.2565,0.2624,0.1179,0.0597,0.1563,0.2241,0.3586,0.1792,0.3256,0.6079,0.6988,0.8391,0.8553,0.7710,0.6215,0.5736,0.4402,0.4056,0.4411,0.5130,0.5965,0.7272,0.6539,0.5902,0.5393,0.4897,0.4081,0.4145,0.6003,0.7196,0.6633,0.6287,0.4087,0.3212,0.2518,0.1482,0.0988,0.0317,0.0269,0.0066,0.0008,0.0045,0.0024,0.0006,0.0073,0.0096,0.0054,0.0085,0.0060,1
0.0190,0.0038,0.0642,0.0452,0.0333,0.0690,0.0901,0.1454,0.0740,0.0349,0.1459,0.3473,0.3197,0.2823,0.0166,0.0572,0.2164,0.4563,0.3819,0.5627,0.6484,0.7235,0.8242,0.8766,1.0000,0.8582,0.6563,0.5087,0.4817,0.4530,0.4521,0.4532,0.5385,0.5308,0.5356,0.5271,0.4260,0.2436,0.1205,0.3845,0.4107,0.5067,0.4216,0.2479,0.1586,0.1124,0.0651,0.0789,0.0325,0.0070,0.0026,0.0093,0.0118,0.0112,0.0094,0.0140,0.0072,0.0022,0.0055,0.0122,1
0.0119,0.0582,0.0623,0.0600,0.1397,0.1883,0.1422,0.1447,0.0487,0.0864,0.2143,0.3720,0.2665,0.2113,0.1103,0.1136,0.1934,0.4142,0.3279,0.6222,0.7468,0.7676,0.7867,0.8253,1.0000,0.9481,0.7539,0.6008,0.5437,0.5387,0.5619,0.5141,0.6084,0.5621,0.5956,0.6078,0.5025,0.2829,0.0477,0.2811,0.3422,0.5147,0.4372,0.2470,0.1708,0.1343,0.0838,0.0755,0.0304,0.0074,0.0069,0.0025,0.0103,0.0074,0.0123,0.0069,0.0076,0.0073,0.0030,0.0138,1
0.0353,0.0713,0.0326,0.0272,0.0370,0.0792,0.1083,0.0687,0.0298,0.0880,0.1078,0.0979,0.2250,0.2819,0.2099,0.1240,0.1699,0.0939,0.1091,0.1410,0.1268,0.3151,0.1430,0.2264,0.5756,0.7876,0.7158,0.5998,0.5583,0.6295,0.7659,0.8940,0.8436,0.6807,0.8380,1.0000,0.9497,0.7866,0.5647,0.3480,0.2585,0.2304,0.2948,0.3363,0.3017,0.2193,0.1316,0.1078,0.0559,0.0035,0.0098,0.0163,0.0242,0.0043,0.0202,0.0108,0.0037,0.0096,0.0093,0.0053,1
0.0131,0.0068,0.0308,0.0311,0.0085,0.0767,0.0771,0.0640,0.0726,0.0901,0.0750,0.0844,0.1226,0.1619,0.2317,0.2934,0.3526,0.3657,0.3221,0.3093,0.4084,0.4285,0.4663,0.5956,0.6948,0.8386,0.8875,0.6404,0.3308,0.3425,0.4920,0.4592,0.3034,0.4366,0.5175,0.5122,0.4746,0.4902,0.4603,0.4460,0.4196,0.2873,0.2296,0.0949,0.0095,0.0527,0.0383,0.0107,0.0108,0.0077,0.0109,0.0062,0.0028,0.0040,0.0075,0.0039,0.0053,0.0013,0.0052,0.0023,1
0.0087,0.0046,0.0081,0.0230,0.0586,0.0682,0.0993,0.0717,0.0576,0.0818,0.1315,0.1862,0.2789,0.2579,0.2240,0.2568,0.2933,0.2991,0.3924,0.4691,0.5665,0.6464,0.6774,0.7577,0.8856,0.9419,1.0000,0.8564,0.6790,0.5587,0.4147,0.2946,0.2025,0.0688,0.1171,0.2157,0.2216,0.2776,0.2309,0.1444,0.1513,0.1745,0.1756,0.1424,0.0908,0.0138,0.0469,0.0480,0.0159,0.0045,0.0015,0.0052,0.0038,0.0079,0.0114,0.0050,0.0030,0.0064,0.0058,0.0030,1
0.0293,0.0378,0.0257,0.0062,0.0130,0.0612,0.0895,0.1107,0.0973,0.0751,0.0528,0.1209,0.1763,0.2039,0.2727,0.2321,0.2676,0.2934,0.3295,0.4910,0.5402,0.6257,0.6826,0.7527,0.8504,0.8938,0.9928,0.9134,0.7080,0.6318,0.6126,0.4638,0.2797,0.1721,0.1665,0.2561,0.2735,0.3209,0.2724,0.1880,0.1552,0.2522,0.2121,0.1801,0.1473,0.0681,0.1091,0.0919,0.0397,0.0093,0.0076,0.0065,0.0072,0.0108,0.0051,0.0102,0.0041,0.0055,0.0050,0.0087,1
0.0132,0.0080,0.0188,0.0141,0.0436,0.0668,0.0609,0.0131,0.0899,0.0922,0.1445,0.1475,0.2087,0.2558,0.2603,0.1985,0.2394,0.3134,0.4077,0.4529,0.4893,0.5666,0.6234,0.6741,0.8282,0.8823,0.9196,0.8965,0.7549,0.6736,0.6463,0.5007,0.3663,0.2298,0.1362,0.2123,0.2395,0.2673,0.2865,0.2060,0.1659,0.2633,0.2552,0.1696,0.1467,0.1286,0.0926,0.0716,0.0325,0.0258,0.0136,0.0044,0.0028,0.0021,0.0022,0.0048,0.0138,0.0140,0.0028,0.0064,1
0.0201,0.0116,0.0123,0.0245,0.0547,0.0208,0.0891,0.0836,0.1335,0.1199,0.1742,0.1387,0.2042,0.2580,0.2616,0.2097,0.2532,0.3213,0.4327,0.4760,0.5328,0.6057,0.6696,0.7476,0.8930,0.9405,1.0000,0.9785,0.8473,0.7639,0.6701,0.4989,0.3718,0.2196,0.1416,0.2680,0.2630,0.3104,0.3392,0.2123,0.1170,0.2655,0.2203,0.1541,0.1464,0.1044,0.1225,0.0745,0.0490,0.0224,0.0032,0.0076,0.0045,0.0056,0.0075,0.0037,0.0045,0.0029,0.0008,0.0018,1
0.0152,0.0102,0.0113,0.0263,0.0097,0.0391,0.0857,0.0915,0.0949,0.1504,0.1911,0.2115,0.2249,0.2573,0.1701,0.2023,0.2538,0.3417,0.4026,0.4553,0.5525,0.5991,0.5854,0.7114,0.9500,0.9858,1.0000,0.9578,0.8642,0.7128,0.5893,0.4323,0.2897,0.1744,0.0770,0.2297,0.2459,0.3101,0.3312,0.2220,0.0871,0.2064,0.1808,0.1624,0.1120,0.0815,0.1117,0.0950,0.0412,0.0120,0.0048,0.0049,0.0041,0.0036,0.0013,0.0046,0.0037,0.0011,0.0034,0.0033,1
0.0216,0.0124,0.0174,0.0152,0.0608,0.1026,0.1139,0.0877,0.1160,0.0866,0.1564,0.0780,0.0997,0.0915,0.0662,0.1134,0.1740,0.2573,0.3294,0.3910,0.5438,0.6115,0.7022,0.7610,0.7973,0.9105,0.8807,0.7949,0.7990,0.7180,0.6407,0.6312,0.5929,0.6168,0.6498,0.6764,0.6253,0.5117,0.3890,0.3273,0.2509,0.1530,0.1323,0.1657,0.1215,0.0978,0.0452,0.0273,0.0179,0.0092,0.0018,0.0052,0.0049,0.0096,0.0134,0.0122,0.0047,0.0018,0.0006,0.0023,1
0.0225,0.0019,0.0075,0.0097,0.0445,0.0906,0.0889,0.0655,0.1624,0.1452,0.1442,0.0948,0.0618,0.1641,0.0708,0.0844,0.2590,0.2679,0.3094,0.4678,0.5958,0.7245,0.8773,0.9214,0.9282,0.9942,1.0000,0.9071,0.8545,0.7293,0.6499,0.6071,0.5588,0.5967,0.6275,0.5459,0.4786,0.3965,0.2087,0.1651,0.1836,0.0652,0.0758,0.0486,0.0353,0.0297,0.0241,0.0379,0.0119,0.0073,0.0051,0.0034,0.0129,0.0100,0.0044,0.0057,0.0030,0.0035,0.0021,0.0027,1
0.0125,0.0152,0.0218,0.0175,0.0362,0.0696,0.0873,0.0616,0.1252,0.1302,0.0888,0.0500,0.0628,0.1274,0.0801,0.0742,0.2048,0.2950,0.3193,0.4567,0.5959,0.7101,0.8225,0.8425,0.9065,0.9802,1.0000,0.8752,0.7583,0.6616,0.5786,0.5128,0.4776,0.4994,0.5197,0.5071,0.4577,0.3505,0.1845,0.1890,0.1967,0.1041,0.0550,0.0492,0.0622,0.0505,0.0247,0.0219,0.0102,0.0047,0.0019,0.0041,0.0074,0.0030,0.0050,0.0048,0.0017,0.0041,0.0086,0.0058,1
0.0130,0.0006,0.0088,0.0456,0.0525,0.0778,0.0931,0.0941,0.1711,0.1483,0.1532,0.1100,0.0890,0.1236,0.1197,0.1145,0.2137,0.2838,0.3640,0.5430,0.6673,0.7979,0.9273,0.9027,0.9192,1.0000,0.9821,0.9092,0.8184,0.6962,0.5900,0.5447,0.5142,0.5389,0.5531,0.5318,0.4826,0.3790,0.1831,0.1750,0.1679,0.0674,0.0609,0.0375,0.0533,0.0278,0.0179,0.0114,0.0073,0.0116,0.0092,0.0078,0.0041,0.0013,0.0011,0.0045,0.0039,0.0022,0.0023,0.0016,1
0.0135,0.0045,0.0051,0.0289,0.0561,0.0929,0.1031,0.0883,0.1596,0.1908,0.1576,0.1112,0.1197,0.1174,0.1415,0.2215,0.2658,0.2713,0.3862,0.5717,0.6797,0.8747,1.0000,0.8948,0.8420,0.9174,0.9307,0.9050,0.8228,0.6986,0.5831,0.4924,0.4563,0.5159,0.5670,0.5284,0.5144,0.3742,0.2282,0.1193,0.1088,0.0431,0.1070,0.0583,0.0046,0.0473,0.0408,0.0290,0.0192,0.0094,0.0025,0.0037,0.0084,0.0102,0.0096,0.0024,0.0037,0.0028,0.0030,0.0030,1
0.0086,0.0215,0.0242,0.0445,0.0667,0.0771,0.0499,0.0906,0.1229,0.1185,0.0775,0.1101,0.1042,0.0853,0.0456,0.1304,0.2690,0.2947,0.3669,0.4948,0.6275,0.8162,0.9237,0.8710,0.8052,0.8756,1.0000,0.9858,0.9427,0.8114,0.6987,0.6810,0.6591,0.6954,0.7290,0.6680,0.5917,0.4899,0.3439,0.2366,0.1716,0.1013,0.0766,0.0845,0.0260,0.0333,0.0205,0.0309,0.0101,0.0095,0.0047,0.0072,0.0054,0.0022,0.0016,0.0029,0.0058,0.0050,0.0024,0.0030,1
0.0067,0.0096,0.0024,0.0058,0.0197,0.0618,0.0432,0.0951,0.0836,0.1180,0.0978,0.0909,0.0656,0.0593,0.0832,0.1297,0.2038,0.3811,0.4451,0.5224,0.5911,0.6566,0.6308,0.5998,0.4958,0.5647,0.6906,0.8513,1.0000,0.9166,0.7676,0.6177,0.5468,0.5516,0.5463,0.5515,0.4561,0.3466,0.3384,0.2853,0.2502,0.1641,0.1605,0.1491,0.1326,0.0687,0.0602,0.0561,0.0306,0.0154,0.0029,0.0048,0.0023,0.0020,0.0040,0.0019,0.0034,0.0034,0.0051,0.0031,1
0.0071,0.0103,0.0135,0.0494,0.0253,0.0806,0.0701,0.0738,0.0117,0.0898,0.0289,0.1554,0.1437,0.1035,0.1424,0.1227,0.0892,0.2047,0.0827,0.1524,0.3031,0.1608,0.0667,0.1426,0.0395,0.1653,0.3399,0.4855,0.5206,0.5508,0.6102,0.5989,0.6764,0.8897,1.0000,0.9517,0.8459,0.7073,0.6697,0.6326,0.5102,0.4161,0.2816,0.1705,0.1421,0.0971,0.0879,0.0863,0.0355,0.0233,0.0252,0.0043,0.0048,0.0076,0.0124,0.0105,0.0054,0.0032,0.0073,0.0063,1
0.0176,0.0172,0.0501,0.0285,0.0262,0.0351,0.0362,0.0535,0.0258,0.0474,0.0526,0.1854,0.1040,0.0948,0.0912,0.1688,0.1568,0.0375,0.1316,0.2086,0.1976,0.0946,0.1965,0.1242,0.0616,0.2141,0.4642,0.6471,0.6340,0.6107,0.7046,0.5376,0.5934,0.8443,0.9481,0.9705,0.7766,0.6313,0.5760,0.6148,0.5450,0.4813,0.3406,0.1916,0.1134,0.0640,0.0911,0.0980,0.0563,0.0187,0.0088,0.0042,0.0175,0.0171,0.0079,0.0050,0.0112,0.0179,0.0294,0.0063,1
0.0265,0.0440,0.0137,0.0084,0.0305,0.0438,0.0341,0.0780,0.0844,0.0779,0.0327,0.2060,0.1908,0.1065,0.1457,0.2232,0.2070,0.1105,0.1078,0.1165,0.2224,0.0689,0.2060,0.2384,0.0904,0.2278,0.5872,0.8457,0.8467,0.7679,0.8055,0.6260,0.6545,0.8747,0.9885,0.9348,0.6960,0.5733,0.5872,0.6663,0.5651,0.5247,0.3684,0.1997,0.1512,0.0508,0.0931,0.0982,0.0524,0.0188,0.0100,0.0038,0.0187,0.0156,0.0068,0.0097,0.0073,0.0081,0.0086,0.0095,1
0.0368,0.0403,0.0317,0.0293,0.0820,0.1342,0.1161,0.0663,0.0155,0.0506,0.0906,0.2545,0.1464,0.1272,0.1223,0.1669,0.1424,0.1285,0.1857,0.1136,0.2069,0.0219,0.2400,0.2547,0.0240,0.1923,0.4753,0.7003,0.6825,0.6443,0.7063,0.5373,0.6601,0.8708,0.9518,0.9605,0.7712,0.6772,0.6431,0.6720,0.6035,0.5155,0.3802,0.2278,0.1522,0.0801,0.0804,0.0752,0.0566,0.0175,0.0058,0.0091,0.0160,0.0160,0.0081,0.0070,0.0135,0.0067,0.0078,0.0068,1
0.0195,0.0142,0.0181,0.0406,0.0391,0.0249,0.0892,0.0973,0.0840,0.1191,0.1522,0.1322,0.1434,0.1244,0.0653,0.0890,0.1226,0.1846,0.3880,0.3658,0.2297,0.2610,0.4193,0.5848,0.5643,0.5448,0.4772,0.6897,0.9797,1.0000,0.9546,0.8835,0.7662,0.6547,0.5447,0.4593,0.4679,0.1987,0.0699,0.1493,0.1713,0.1654,0.2600,0.3846,0.3754,0.2414,0.1077,0.0224,0.0155,0.0187,0.0125,0.0028,0.0067,0.0120,0.0012,0.0022,0.0058,0.0042,0.0067,0.0012,1
0.0216,0.0215,0.0273,0.0139,0.0357,0.0785,0.0906,0.0908,0.1151,0.0973,0.1203,0.1102,0.1192,0.1762,0.2390,0.2138,0.1929,0.1765,0.0746,0.1265,0.2005,0.1571,0.2605,0.5386,0.8440,1.0000,0.8684,0.6742,0.5537,0.4638,0.3609,0.2055,0.1620,0.2092,0.3100,0.2344,0.1058,0.0383,0.0528,0.1291,0.2241,0.1915,0.1587,0.0942,0.0840,0.0670,0.0342,0.0469,0.0357,0.0136,0.0082,0.0140,0.0044,0.0052,0.0073,0.0021,0.0047,0.0024,0.0009,0.0017,1
0.0065,0.0122,0.0068,0.0108,0.0217,0.0284,0.0527,0.0575,0.1054,0.1109,0.0937,0.0827,0.0920,0.0911,0.1487,0.1666,0.1268,0.1374,0.1095,0.1286,0.2146,0.2889,0.4238,0.6168,0.8167,0.9622,0.8280,0.5816,0.4667,0.3539,0.2727,0.1410,0.1863,0.2176,0.2360,0.1725,0.0589,0.0621,0.1847,0.2452,0.2984,0.3041,0.2275,0.1480,0.1102,0.1178,0.0608,0.0333,0.0276,0.0100,0.0023,0.0069,0.0025,0.0027,0.0052,0.0036,0.0026,0.0036,0.0006,0.0035,1
0.0036,0.0078,0.0092,0.0387,0.0530,0.1197,0.1243,0.1026,0.1239,0.0888,0.0937,0.1245,0.1599,0.1542,0.1846,0.1732,0.1477,0.1748,0.1455,0.1579,0.2257,0.1975,0.3368,0.5828,0.8505,1.0000,0.8457,0.6624,0.5564,0.3925,0.3233,0.2054,0.1920,0.2227,0.3147,0.2268,0.0795,0.0748,0.1166,0.1969,0.2619,0.2507,0.1983,0.0948,0.0931,0.0965,0.0381,0.0435,0.0336,0.0055,0.0079,0.0119,0.0055,0.0035,0.0036,0.0004,0.0018,0.0049,0.0024,0.0016,1
0.0208,0.0186,0.0131,0.0211,0.0610,0.0613,0.0612,0.0506,0.0989,0.1093,0.1063,0.1179,0.1291,0.1591,0.1680,0.1918,0.1615,0.1647,0.1397,0.1426,0.2429,0.2816,0.4290,0.6443,0.9061,1.0000,0.8087,0.6119,0.5260,0.3677,0.2746,0.1020,0.1339,0.1582,0.1952,0.1787,0.0429,0.1096,0.1762,0.2481,0.3150,0.2920,0.1902,0.0696,0.0758,0.0910,0.0441,0.0244,0.0265,0.0095,0.0140,0.0074,0.0063,0.0081,0.0087,0.0044,0.0028,0.0019,0.0049,0.0023,1
0.0139,0.0222,0.0089,0.0108,0.0215,0.0136,0.0659,0.0954,0.0786,0.1015,0.1261,0.0828,0.0493,0.0848,0.1514,0.1396,0.1066,0.1923,0.2991,0.3247,0.3797,0.5658,0.7483,0.8757,0.9048,0.7511,0.6858,0.7043,0.5864,0.3773,0.2206,0.2628,0.2672,0.2907,0.1982,0.2288,0.3186,0.2871,0.2921,0.2806,0.2682,0.2112,0.1513,0.1789,0.1850,0.1717,0.0898,0.0656,0.0445,0.0110,0.0024,0.0062,0.0072,0.0113,0.0012,0.0022,0.0025,0.0059,0.0039,0.0048,1
0.0109,0.0093,0.0121,0.0378,0.0679,0.0863,0.1004,0.0664,0.0941,0.1036,0.0972,0.0501,0.1546,0.3404,0.4804,0.6570,0.7738,0.7827,0.8152,0.8129,0.8297,0.8535,0.8870,0.8894,0.8980,0.9667,1.0000,0.9134,0.6762,0.4659,0.2895,0.2959,0.1746,0.2112,0.2569,0.2276,0.2149,0.1601,0.0371,0.0117,0.0488,0.0288,0.0597,0.0431,0.0369,0.0025,0.0327,0.0257,0.0182,0.0108,0.0124,0.0077,0.0023,0.0117,0.0053,0.0077,0.0076,0.0056,0.0055,0.0039,1
0.0202,0.0104,0.0325,0.0239,0.0807,0.1529,0.1154,0.0608,0.1317,0.1370,0.0843,0.0269,0.1254,0.3046,0.5584,0.7973,0.8341,0.8057,0.8616,0.8769,0.9413,0.9403,0.9409,1.0000,0.9725,0.9309,0.9351,0.7317,0.4421,0.3244,0.4161,0.4611,0.4031,0.3000,0.2459,0.1348,0.2541,0.2255,0.1598,0.1485,0.0845,0.0569,0.0855,0.1262,0.1153,0.0570,0.0426,0.0425,0.0235,0.0006,0.0188,0.0127,0.0081,0.0067,0.0043,0.0065,0.0049,0.0054,0.0073,0.0054,1
0.0239,0.0189,0.0466,0.0440,0.0657,0.0742,0.1380,0.1099,0.1384,0.1376,0.0938,0.0259,0.1499,0.2851,0.5743,0.8278,0.8669,0.8131,0.9045,0.9046,1.0000,0.9976,0.9872,0.9761,0.9009,0.9724,0.9675,0.7633,0.4434,0.3822,0.4727,0.4007,0.3381,0.3172,0.2222,0.0733,0.2692,0.1888,0.0712,0.1062,0.0694,0.0300,0.0893,0.1459,0.1348,0.0391,0.0546,0.0469,0.0201,0.0095,0.0155,0.0091,0.0151,0.0080,0.0018,0.0078,0.0045,0.0026,0.0036,0.0024,1
0.0336,0.0294,0.0476,0.0539,0.0794,0.0804,0.1136,0.1228,0.1235,0.0842,0.0357,0.0689,0.1705,0.3257,0.4602,0.6225,0.7327,0.7843,0.7988,0.8261,1.0000,0.9814,0.9620,0.9601,0.9118,0.9086,0.7931,0.5877,0.3474,0.4235,0.4633,0.3410,0.2849,0.2847,0.1742,0.0549,0.1192,0.1154,0.0855,0.1811,0.1264,0.0799,0.0378,0.1268,0.1125,0.0505,0.0949,0.0677,0.0259,0.0170,0.0033,0.0150,0.0111,0.0032,0.0035,0.0169,0.0137,0.0015,0.0069,0.0051,1
0.0231,0.0351,0.0030,0.0304,0.0339,0.0860,0.1738,0.1351,0.1063,0.0347,0.0575,0.1382,0.2274,0.4038,0.5223,0.6847,0.7521,0.7760,0.7708,0.8627,1.0000,0.8873,0.8057,0.8760,0.9066,0.9430,0.8846,0.6500,0.2970,0.2423,0.2992,0.2285,0.2277,0.1529,0.1037,0.0352,0.1073,0.1373,0.1331,0.1454,0.1115,0.0440,0.0762,0.1381,0.0831,0.0654,0.0844,0.0595,0.0497,0.0313,0.0154,0.0106,0.0097,0.0022,0.0052,0.0072,0.0056,0.0038,0.0043,0.0030,1
0.0108,0.0086,0.0058,0.0460,0.0752,0.0887,0.1015,0.0494,0.0472,0.0393,0.1106,0.1412,0.2202,0.2976,0.4116,0.4754,0.5390,0.6279,0.7060,0.7918,0.9493,1.0000,0.9645,0.9432,0.8658,0.7895,0.6501,0.4492,0.4739,0.6153,0.4929,0.3195,0.3735,0.3336,0.1052,0.0671,0.0379,0.0461,0.1694,0.2169,0.1677,0.0644,0.0159,0.0778,0.0653,0.0210,0.0509,0.0387,0.0262,0.0101,0.0161,0.0029,0.0078,0.0114,0.0083,0.0058,0.0003,0.0023,0.0026,0.0027,1
0.0229,0.0369,0.0040,0.0375,0.0455,0.1452,0.2211,0.1188,0.0750,0.1631,0.2709,0.3358,0.4091,0.4400,0.5485,0.7213,0.8137,0.9185,1.0000,0.9418,0.9116,0.9349,0.7484,0.5146,0.4106,0.3443,0.6981,0.8713,0.9013,0.8014,0.4380,0.1319,0.1709,0.2484,0.3044,0.2312,0.1338,0.2056,0.2474,0.2790,0.1610,0.0056,0.0351,0.1148,0.1331,0.0276,0.0763,0.0631,0.0309,0.0240,0.0115,0.0064,0.0022,0.0122,0.0151,0.0056,0.0026,0.0029,0.0104,0.0163,1
0.0100,0.0194,0.0155,0.0489,0.0839,0.1009,0.1627,0.2071,0.2696,0.2990,0.3242,0.3565,0.3951,0.5201,0.6953,0.8468,1.0000,0.9278,0.8510,0.8010,0.8142,0.8825,0.7302,0.6107,0.7159,0.8458,0.6319,0.4808,0.6291,0.7152,0.6005,0.4235,0.4106,0.3992,0.1730,0.1975,0.2370,0.1339,0.1583,0.3151,0.1968,0.2054,0.1272,0.1129,0.1946,0.2195,0.1930,0.1498,0.0773,0.0196,0.0122,0.0130,0.0073,0.0077,0.0075,0.0060,0.0080,0.0019,0.0053,0.0019,1
0.0409,0.0421,0.0573,0.0130,0.0183,0.1019,0.1054,0.1070,0.2302,0.2259,0.2373,0.3323,0.3827,0.4840,0.6812,0.7555,0.9522,0.9826,0.8871,0.8268,0.7561,0.8217,0.6967,0.6444,0.6948,0.8014,0.6053,0.6084,0.8877,0.8557,0.5563,0.2897,0.3638,0.4786,0.2908,0.0899,0.2043,0.1707,0.0407,0.1286,0.1581,0.2191,0.1701,0.0971,0.2217,0.2732,0.1874,0.1062,0.0665,0.0405,0.0113,0.0028,0.0036,0.0105,0.0120,0.0087,0.0061,0.0061,0.0030,0.0078,1
0.0217,0.0340,0.0392,0.0236,0.1081,0.1164,0.1398,0.1009,0.1147,0.1777,0.4079,0.4113,0.3973,0.5078,0.6509,0.8073,0.9819,1.0000,0.9407,0.8452,0.8106,0.8460,0.6212,0.5815,0.7745,0.8204,0.5601,0.2989,0.5009,0.6628,0.5753,0.4055,0.3746,0.3481,0.1580,0.1422,0.2130,0.1866,0.1003,0.2396,0.2241,0.2029,0.0710,0.1606,0.1669,0.1700,0.1829,0.1403,0.0506,0.0224,0.0095,0.0031,0.0103,0.0078,0.0077,0.0094,0.0031,0.0030,0.0013,0.0069,1
0.0378,0.0318,0.0423,0.0350,0.1787,0.1635,0.0887,0.0817,0.1779,0.2053,0.3135,0.3118,0.3686,0.3885,0.5850,0.7868,0.9739,1.0000,0.9843,0.8610,0.8443,0.9061,0.5847,0.4033,0.5946,0.6793,0.6389,0.5002,0.5578,0.4831,0.4729,0.3318,0.3969,0.3894,0.2314,0.1036,0.1312,0.0864,0.2569,0.3179,0.2649,0.2714,0.1713,0.0584,0.1230,0.2200,0.2198,0.1074,0.0423,0.0162,0.0093,0.0046,0.0044,0.0078,0.0102,0.0065,0.0061,0.0062,0.0043,0.0053,1
0.0365,0.1632,0.1636,0.1421,0.1130,0.1306,0.2112,0.2268,0.2992,0.3735,0.3042,0.0387,0.2679,0.5397,0.6204,0.7257,0.8350,0.6888,0.4450,0.3921,0.5605,0.7545,0.8311,1.0000,0.8762,0.7092,0.7009,0.5014,0.3942,0.4456,0.4072,0.0773,0.1423,0.0401,0.3597,0.6847,0.7076,0.3597,0.0612,0.3027,0.3966,0.3868,0.2380,0.2059,0.2288,0.1704,0.1587,0.1792,0.1022,0.0151,0.0223,0.0110,0.0071,0.0205,0.0164,0.0063,0.0078,0.0094,0.0110,0.0068,1
0.0188,0.0370,0.0953,0.0824,0.0249,0.0488,0.1424,0.1972,0.1873,0.1806,0.2139,0.1523,0.1975,0.4844,0.7298,0.7807,0.7906,0.6122,0.4200,0.2807,0.5148,0.7569,0.8596,1.0000,0.8457,0.6797,0.6971,0.5843,0.4772,0.5201,0.4241,0.1592,0.1668,0.0588,0.3967,0.7147,0.7319,0.3509,0.0589,0.2690,0.4200,0.3874,0.2440,0.2000,0.2307,0.1886,0.1960,0.1701,0.1366,0.0398,0.0143,0.0093,0.0033,0.0113,0.0030,0.0057,0.0090,0.0057,0.0068,0.0024,1
0.0856,0.0454,0.0382,0.0203,0.0385,0.0534,0.2140,0.3110,0.2837,0.2751,0.2707,0.0946,0.1020,0.4519,0.6737,0.6699,0.7066,0.5632,0.3785,0.2721,0.5297,0.7697,0.8643,0.9304,0.9372,0.6247,0.6024,0.6810,0.5047,0.5775,0.4754,0.2400,0.2779,0.1997,0.5305,0.7409,0.7775,0.4424,0.1416,0.3508,0.4482,0.4208,0.3054,0.2235,0.2611,0.2798,0.2392,0.2021,0.1326,0.0358,0.0128,0.0172,0.0138,0.0079,0.0037,0.0051,0.0258,0.0102,0.0037,0.0037,1
0.0274,0.0242,0.0621,0.0560,0.1129,0.0973,0.1823,0.1745,0.1440,0.1808,0.2366,0.0906,0.1749,0.4012,0.5187,0.7312,0.9062,0.9260,0.7434,0.4463,0.5103,0.6952,0.7755,0.8364,0.7283,0.6399,0.5759,0.4146,0.3495,0.4437,0.2665,0.2024,0.1942,0.0765,0.3725,0.5843,0.4827,0.2347,0.0999,0.3244,0.3990,0.2975,0.1684,0.1761,0.1683,0.0729,0.1190,0.1297,0.0748,0.0067,0.0255,0.0113,0.0108,0.0085,0.0047,0.0074,0.0104,0.0161,0.0220,0.0173,1
0.0235,0.0291,0.0749,0.0519,0.0227,0.0834,0.0677,0.2002,0.2876,0.3674,0.2974,0.0837,0.1912,0.5040,0.6352,0.6804,0.7505,0.6595,0.4509,0.2964,0.4019,0.6794,0.8297,1.0000,0.8240,0.7115,0.7726,0.6124,0.4936,0.5648,0.4906,0.1820,0.1811,0.1107,0.4603,0.6650,0.6423,0.2166,0.1951,0.4947,0.4925,0.4041,0.2402,0.1392,0.1779,0.1946,0.1723,0.1522,0.0929,0.0179,0.0242,0.0083,0.0037,0.0095,0.0105,0.0030,0.0132,0.0068,0.0108,0.0090,1
0.0126,0.0519,0.0621,0.0518,0.1072,0.2587,0.2304,0.2067,0.3416,0.4284,0.3015,0.1207,0.3299,0.5707,0.6962,0.9751,1.0000,0.9293,0.6210,0.4586,0.5001,0.5032,0.7082,0.8420,0.8109,0.7690,0.8105,0.6203,0.2356,0.2595,0.6299,0.6762,0.2903,0.4393,0.8529,0.7180,0.4801,0.5856,0.4993,0.2866,0.0601,0.1167,0.2737,0.2812,0.2078,0.0660,0.0491,0.0345,0.0172,0.0287,0.0027,0.0208,0.0048,0.0199,0.0126,0.0022,0.0037,0.0034,0.0114,0.0077,1
0.0253,0.0808,0.0507,0.0244,0.1724,0.3823,0.3729,0.3583,0.3429,0.2197,0.2653,0.3223,0.5582,0.6916,0.7943,0.7152,0.3512,0.2008,0.2676,0.4299,0.5280,0.3489,0.1430,0.5453,0.6338,0.7712,0.6838,0.8015,0.8073,0.8310,0.7792,0.5049,0.1413,0.2767,0.5084,0.4787,0.1356,0.2299,0.2789,0.3833,0.2933,0.1155,0.1705,0.1294,0.0909,0.0800,0.0567,0.0198,0.0114,0.0151,0.0085,0.0178,0.0073,0.0079,0.0038,0.0116,0.0033,0.0039,0.0081,0.0053,1
0.0260,0.0192,0.0254,0.0061,0.0352,0.0701,0.1263,0.1080,0.1523,0.1630,0.1030,0.2187,0.1542,0.2630,0.2940,0.2978,0.0699,0.1401,0.2990,0.3915,0.3598,0.2403,0.4208,0.5675,0.6094,0.6323,0.6549,0.7673,1.0000,0.8463,0.5509,0.4444,0.5169,0.4268,0.1802,0.0791,0.0535,0.1906,0.2561,0.2153,0.2769,0.2841,0.1733,0.0815,0.0335,0.0933,0.1018,0.0309,0.0208,0.0318,0.0132,0.0118,0.0120,0.0051,0.0070,0.0015,0.0035,0.0008,0.0044,0.0077,1
0.0459,0.0437,0.0347,0.0456,0.0067,0.0890,0.1798,0.1741,0.1598,0.1408,0.2693,0.3259,0.4545,0.5785,0.4471,0.2231,0.2164,0.3201,0.2915,0.4235,0.4460,0.2380,0.6415,0.8966,0.8918,0.7529,0.6838,0.8390,1.0000,0.8362,0.5427,0.4577,0.8067,0.6973,0.3915,0.1558,0.1598,0.2161,0.5178,0.4782,0.2344,0.3599,0.2785,0.1807,0.0352,0.0473,0.0322,0.0408,0.0163,0.0088,0.0121,0.0067,0.0032,0.0109,0.0164,0.0151,0.0070,0.0085,0.0117,0.0056,1
0.0025,0.0309,0.0171,0.0228,0.0434,0.1224,0.1947,0.1661,0.1368,0.1430,0.0994,0.2250,0.2444,0.3239,0.3039,0.2410,0.0367,0.1672,0.3038,0.4069,0.3613,0.1994,0.4611,0.6849,0.7272,0.7152,0.7102,0.8516,1.0000,0.7690,0.4841,0.3717,0.6096,0.5110,0.2586,0.0916,0.0947,0.2287,0.3480,0.2095,0.1901,0.2941,0.2211,0.1524,0.0746,0.0606,0.0692,0.0446,0.0344,0.0082,0.0108,0.0149,0.0077,0.0036,0.0114,0.0085,0.0101,0.0016,0.0028,0.0014,1
0.0291,0.0400,0.0771,0.0809,0.0521,0.1051,0.0145,0.0674,0.1294,0.1146,0.0942,0.0794,0.0252,0.1191,0.1045,0.2050,0.1556,0.2690,0.3784,0.4024,0.3470,0.1395,0.1208,0.2827,0.1500,0.2626,0.4468,0.7520,0.9036,0.7812,0.4766,0.2483,0.5372,0.6279,0.3647,0.4572,0.6359,0.6474,0.5520,0.3253,0.2292,0.0653,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0056,0.0237,0.0204,0.0050,0.0137,0.0164,0.0081,0.0139,0.0111,1
0.0181,0.0146,0.0026,0.0141,0.0421,0.0473,0.0361,0.0741,0.1398,0.1045,0.0904,0.0671,0.0997,0.1056,0.0346,0.1231,0.1626,0.3652,0.3262,0.2995,0.2109,0.2104,0.2085,0.2282,0.0747,0.1969,0.4086,0.6385,0.7970,0.7508,0.5517,0.2214,0.4672,0.4479,0.2297,0.3235,0.4480,0.5581,0.6520,0.5354,0.2478,0.2268,0.1788,0.0898,0.0536,0.0374,0.0990,0.0956,0.0317,0.0142,0.0076,0.0223,0.0255,0.0145,0.0233,0.0041,0.0018,0.0048,0.0089,0.0085,1
0.0491,0.0279,0.0592,0.1270,0.1772,0.1908,0.2217,0.0768,0.1246,0.2028,0.0947,0.2497,0.2209,0.3195,0.3340,0.3323,0.2780,0.2975,0.2948,0.1729,0.3264,0.3834,0.3523,0.5410,0.5228,0.4475,0.5340,0.5323,0.3907,0.3456,0.4091,0.4639,0.5580,0.5727,0.6355,0.7563,0.6903,0.6176,0.5379,0.5622,0.6508,0.4797,0.3736,0.2804,0.1982,0.2438,0.1789,0.1706,0.0762,0.0238,0.0268,0.0081,0.0129,0.0161,0.0063,0.0119,0.0194,0.0140,0.0332,0.0439,-1
0.1313,0.2339,0.3059,0.4264,0.4010,0.1791,0.1853,0.0055,0.1929,0.2231,0.2907,0.2259,0.3136,0.3302,0.3660,0.3956,0.4386,0.4670,0.5255,0.3735,0.2243,0.1973,0.4337,0.6532,0.5070,0.2796,0.4163,0.5950,0.5242,0.4178,0.3714,0.2375,0.0863,0.1437,0.2896,0.4577,0.3725,0.3372,0.3803,0.4181,0.3603,0.2711,0.1653,0.1951,0.2811,0.2246,0.1921,0.1500,0.0665,0.0193,0.0156,0.0362,0.0210,0.0154,0.0180,0.0013,0.0106,0.0127,0.0178,0.0231,-1
0.0201,0.0423,0.0554,0.0783,0.0620,0.0871,0.1201,0.2707,0.1206,0.0279,0.2251,0.2615,0.1770,0.3709,0.4533,0.5553,0.4616,0.3797,0.3450,0.2665,0.2395,0.1127,0.2556,0.5169,0.3779,0.4082,0.5353,0.5116,0.4544,0.4258,0.3869,0.3939,0.4661,0.3974,0.2194,0.1816,0.1023,0.2108,0.3253,0.3697,0.2912,0.3010,0.2563,0.1927,0.2062,0.1751,0.0841,0.1035,0.0641,0.0153,0.0081,0.0191,0.0182,0.0160,0.0290,0.0090,0.0242,0.0224,0.0190,0.0096,-1
0.0629,0.1065,0.1526,0.1229,0.1437,0.1190,0.0884,0.0907,0.2107,0.3597,0.5466,0.5205,0.5127,0.5395,0.6558,0.8705,0.9786,0.9335,0.7917,0.7383,0.6908,0.3850,0.0671,0.0502,0.2717,0.2839,0.2234,0.1911,0.0408,0.2531,0.1979,0.1891,0.2433,0.1956,0.2667,0.1340,0.1073,0.2023,0.1794,0.0227,0.1313,0.1775,0.1549,0.1626,0.0708,0.0129,0.0795,0.0762,0.0117,0.0061,0.0257,0.0089,0.0262,0.0108,0.0138,0.0187,0.0230,0.0057,0.0113,0.0131,-1
0.0335,0.0134,0.0696,0.1180,0.0348,0.1180,0.1948,0.1607,0.3036,0.4372,0.5533,0.5771,0.7022,0.7067,0.7367,0.7391,0.8622,0.9458,0.8782,0.7913,0.5760,0.3061,0.0563,0.0239,0.2554,0.4862,0.5027,0.4402,0.2847,0.1797,0.3560,0.3522,0.3321,0.3112,0.3638,0.0754,0.1834,0.1820,0.1815,0.1593,0.0576,0.0954,0.1086,0.0812,0.0784,0.0487,0.0439,0.0586,0.0370,0.0185,0.0302,0.0244,0.0232,0.0093,0.0159,0.0193,0.0032,0.0377,0.0126,0.0156,-1
0.0587,0.1210,0.1268,0.1498,0.1436,0.0561,0.0832,0.0672,0.1372,0.2352,0.3208,0.4257,0.5201,0.4914,0.5950,0.7221,0.9039,0.9111,0.8723,0.7686,0.7326,0.5222,0.3097,0.3172,0.2270,0.1640,0.1746,0.1835,0.2048,0.1674,0.2767,0.3104,0.3399,0.4441,0.5046,0.2814,0.1681,0.2633,0.3198,0.1933,0.0934,0.0443,0.0780,0.0722,0.0405,0.0553,0.1081,0.1139,0.0767,0.0265,0.0215,0.0331,0.0111,0.0088,0.0158,0.0122,0.0038,0.0101,0.0228,0.0124,-1
0.0162,0.0253,0.0262,0.0386,0.0645,0.0472,0.1056,0.1388,0.0598,0.1334,0.2969,0.4754,0.5677,0.5690,0.6421,0.7487,0.8999,1.0000,0.9690,0.9032,0.7685,0.6998,0.6644,0.5964,0.3711,0.0921,0.0481,0.0876,0.1040,0.1714,0.3264,0.4612,0.3939,0.5050,0.4833,0.3511,0.2319,0.4029,0.3676,0.1510,0.0745,0.1395,0.1552,0.0377,0.0636,0.0443,0.0264,0.0223,0.0187,0.0077,0.0137,0.0071,0.0082,0.0232,0.0198,0.0074,0.0035,0.0100,0.0048,0.0019,-1
0.0307,0.0523,0.0653,0.0521,0.0611,0.0577,0.0665,0.0664,0.1460,0.2792,0.3877,0.4992,0.4981,0.4972,0.5607,0.7339,0.8230,0.9173,0.9975,0.9911,0.8240,0.6498,0.5980,0.4862,0.3150,0.1543,0.0989,0.0284,0.1008,0.2636,0.2694,0.2930,0.2925,0.3998,0.3660,0.3172,0.4609,0.4374,0.1820,0.3376,0.6202,0.4448,0.1863,0.1420,0.0589,0.0576,0.0672,0.0269,0.0245,0.0190,0.0063,0.0321,0.0189,0.0137,0.0277,0.0152,0.0052,0.0121,0.0124,0.0055,-1
0.0116,0.0179,0.0449,0.1096,0.1913,0.0924,0.0761,0.1092,0.0757,0.1006,0.2500,0.3988,0.3809,0.4753,0.6165,0.6464,0.8024,0.9208,0.9832,0.9634,0.8646,0.8325,0.8276,0.8007,0.6102,0.4853,0.4355,0.4307,0.4399,0.3833,0.3032,0.3035,0.3197,0.2292,0.2131,0.2347,0.3201,0.4455,0.3655,0.2715,0.1747,0.1781,0.2199,0.1056,0.0573,0.0307,0.0237,0.0470,0.0102,0.0057,0.0031,0.0163,0.0099,0.0084,0.0270,0.0277,0.0097,0.0054,0.0148,0.0092,-1
0.0331,0.0423,0.0474,0.0818,0.0835,0.0756,0.0374,0.0961,0.0548,0.0193,0.0897,0.1734,0.1936,0.2803,0.3313,0.5020,0.6360,0.7096,0.8333,0.8730,0.8073,0.7507,0.7526,0.7298,0.6177,0.4946,0.4531,0.4099,0.4540,0.4124,0.3139,0.3194,0.3692,0.3776,0.4469,0.4777,0.4716,0.4664,0.3893,0.4255,0.4064,0.3712,0.3863,0.2802,0.1283,0.1117,0.1303,0.0787,0.0436,0.0224,0.0133,0.0078,0.0174,0.0176,0.0038,0.0129,0.0066,0.0044,0.0134,0.0092,-1
0.0428,0.0555,0.0708,0.0618,0.1215,0.1524,0.1543,0.0391,0.0610,0.0113,0.1255,0.2473,0.3011,0.3747,0.4520,0.5392,0.6588,0.7113,0.7602,0.8672,0.8416,0.7974,0.8385,0.9317,0.8555,0.6162,0.4139,0.3269,0.3108,0.2554,0.3367,0.4465,0.5000,0.5111,0.5194,0.4619,0.4234,0.4372,0.4277,0.4433,0.3700,0.3324,0.2564,0.2527,0.2137,0.1789,0.1010,0.0528,0.0453,0.0118,0.0009,0.0142,0.0179,0.0079,0.0060,0.0131,0.0089,0.0084,0.0113,0.0049,-1
0.0599,0.0474,0.0498,0.0387,0.1026,0.0773,0.0853,0.0447,0.1094,0.0351,0.1582,0.2023,0.2268,0.2829,0.3819,0.4665,0.6687,0.8647,0.9361,0.9367,0.9144,0.9162,0.9311,0.8604,0.7327,0.5763,0.4162,0.4113,0.4146,0.3149,0.2936,0.3169,0.3149,0.4132,0.3994,0.4195,0.4532,0.4419,0.4737,0.3431,0.3194,0.3370,0.2493,0.2650,0.1748,0.0932,0.0530,0.0081,0.0342,0.0137,0.0028,0.0013,0.0005,0.0227,0.0209,0.0081,0.0117,0.0114,0.0112,0.0100,-1
0.0264,0.0071,0.0342,0.0793,0.1043,0.0783,0.1417,0.1176,0.0453,0.0945,0.1132,0.0840,0.0717,0.1968,0.2633,0.4191,0.5050,0.6711,0.7922,0.8381,0.8759,0.9422,1.0000,0.9931,0.9575,0.8647,0.7215,0.5801,0.4964,0.4886,0.4079,0.2443,0.1768,0.2472,0.3518,0.3762,0.2909,0.2311,0.3168,0.3554,0.3741,0.4443,0.3261,0.1963,0.0864,0.1688,0.1991,0.1217,0.0628,0.0323,0.0253,0.0214,0.0262,0.0177,0.0037,0.0068,0.0121,0.0077,0.0078,0.0066,-1
0.0210,0.0121,0.0203,0.1036,0.1675,0.0418,0.0723,0.0828,0.0494,0.0686,0.1125,0.1741,0.2710,0.3087,0.3575,0.4998,0.6011,0.6470,0.8067,0.9008,0.8906,0.9338,1.0000,0.9102,0.8496,0.7867,0.7688,0.7718,0.6268,0.4301,0.2077,0.1198,0.1660,0.2618,0.3862,0.3958,0.3248,0.2302,0.3250,0.4022,0.4344,0.4008,0.3370,0.2518,0.2101,0.1181,0.1150,0.0550,0.0293,0.0183,0.0104,0.0117,0.0101,0.0061,0.0031,0.0099,0.0080,0.0107,0.0161,0.0133,-1
0.0530,0.0885,0.1997,0.2604,0.3225,0.2247,0.0617,0.2287,0.0950,0.0740,0.1610,0.2226,0.2703,0.3365,0.4266,0.4144,0.5655,0.6921,0.8547,0.9234,0.9171,1.0000,0.9532,0.9101,0.8337,0.7053,0.6534,0.4483,0.2460,0.2020,0.1446,0.0994,0.1510,0.2392,0.4434,0.5023,0.4441,0.4571,0.3927,0.2900,0.3408,0.4990,0.3632,0.1387,0.1800,0.1299,0.0523,0.0817,0.0469,0.0114,0.0299,0.0244,0.0199,0.0257,0.0082,0.0151,0.0171,0.0146,0.0134,0.0056,-1
0.0454,0.0472,0.0697,0.1021,0.1397,0.1493,0.1487,0.0771,0.1171,0.1675,0.2799,0.3323,0.4012,0.4296,0.5350,0.5411,0.6870,0.8045,0.9194,0.9169,1.0000,0.9972,0.9093,0.7918,0.6705,0.5324,0.3572,0.2484,0.3161,0.3775,0.3138,0.1713,0.2937,0.5234,0.5926,0.5437,0.4516,0.3379,0.3215,0.2178,0.1674,0.2634,0.2980,0.2037,0.1155,0.0919,0.0882,0.0228,0.0380,0.0142,0.0137,0.0120,0.0042,0.0238,0.0129,0.0084,0.0218,0.0321,0.0154,0.0053,-1
0.0283,0.0599,0.0656,0.0229,0.0839,0.1673,0.1154,0.1098,0.1370,0.1767,0.1995,0.2869,0.3275,0.3769,0.4169,0.5036,0.6180,0.8025,0.9333,0.9399,0.9275,0.9450,0.8328,0.7773,0.7007,0.6154,0.5810,0.4454,0.3707,0.2891,0.2185,0.1711,0.3578,0.3947,0.2867,0.2401,0.3619,0.3314,0.3763,0.4767,0.4059,0.3661,0.2320,0.1450,0.1017,0.1111,0.0655,0.0271,0.0244,0.0179,0.0109,0.0147,0.0170,0.0158,0.0046,0.0073,0.0054,0.0033,0.0045,0.0079,-1
0.0114,0.0222,0.0269,0.0384,0.1217,0.2062,0.1489,0.0929,0.1350,0.1799,0.2486,0.2973,0.3672,0.4394,0.5258,0.6755,0.7402,0.8284,0.9033,0.9584,1.0000,0.9982,0.8899,0.7493,0.6367,0.6744,0.7207,0.6821,0.5512,0.4789,0.3924,0.2533,0.1089,0.1390,0.2551,0.3301,0.2818,0.2142,0.2266,0.2142,0.2354,0.2871,0.2596,0.1925,0.1256,0.1003,0.0951,0.1210,0.0728,0.0174,0.0213,0.0269,0.0152,0.0257,0.0097,0.0041,0.0050,0.0145,0.0103,0.0025,-1
0.0414,0.0436,0.0447,0.0844,0.0419,0.1215,0.2002,0.1516,0.0818,0.1975,0.2309,0.3025,0.3938,0.5050,0.5872,0.6610,0.7417,0.8006,0.8456,0.7939,0.8804,0.8384,0.7852,0.8479,0.7434,0.6433,0.5514,0.3519,0.3168,0.3346,0.2056,0.1032,0.3168,0.4040,0.4282,0.4538,0.3704,0.3741,0.3839,0.3494,0.4380,0.4265,0.2854,0.2808,0.2395,0.0369,0.0805,0.0541,0.0177,0.0065,0.0222,0.0045,0.0136,0.0113,0.0053,0.0165,0.0141,0.0077,0.0246,0.0198,-1
0.0094,0.0333,0.0306,0.0376,0.1296,0.1795,0.1909,0.1692,0.1870,0.1725,0.2228,0.3106,0.4144,0.5157,0.5369,0.5107,0.6441,0.7326,0.8164,0.8856,0.9891,1.0000,0.8750,0.8631,0.9074,0.8674,0.7750,0.6600,0.5615,0.4016,0.2331,0.1164,0.1095,0.0431,0.0619,0.1956,0.2120,0.3242,0.4102,0.2939,0.1911,0.1702,0.1010,0.1512,0.1427,0.1097,0.1173,0.0972,0.0703,0.0281,0.0216,0.0153,0.0112,0.0241,0.0164,0.0055,0.0078,0.0055,0.0091,0.0067,-1
0.0228,0.0106,0.0130,0.0842,0.1117,0.1506,0.1776,0.0997,0.1428,0.2227,0.2621,0.3109,0.2859,0.3316,0.3755,0.4499,0.4765,0.6254,0.7304,0.8702,0.9349,0.9614,0.9126,0.9443,1.0000,0.9455,0.8815,0.7520,0.7068,0.5986,0.3857,0.2510,0.2162,0.0968,0.1323,0.1344,0.2250,0.3244,0.3939,0.3806,0.3258,0.3654,0.2983,0.1779,0.1535,0.1199,0.0959,0.0765,0.0649,0.0313,0.0185,0.0098,0.0178,0.0077,0.0074,0.0095,0.0055,0.0045,0.0063,0.0039,-1
0.0363,0.0478,0.0298,0.0210,0.1409,0.1916,0.1349,0.1613,0.1703,0.1444,0.1989,0.2154,0.2863,0.3570,0.3980,0.4359,0.5334,0.6304,0.6995,0.7435,0.8379,0.8641,0.9014,0.9432,0.9536,1.0000,0.9547,0.9745,0.8962,0.7196,0.5462,0.3156,0.2525,0.1969,0.2189,0.1533,0.0711,0.1498,0.1755,0.2276,0.1322,0.1056,0.1973,0.1692,0.1881,0.1177,0.0779,0.0495,0.0492,0.0194,0.0250,0.0115,0.0190,0.0055,0.0096,0.0050,0.0066,0.0114,0.0073,0.0033,-1
0.0261,0.0266,0.0223,0.0749,0.1364,0.1513,0.1316,0.1654,0.1864,0.2013,0.2890,0.3650,0.3510,0.3495,0.4325,0.5398,0.6237,0.6876,0.7329,0.8107,0.8396,0.8632,0.8747,0.9607,0.9716,0.9121,0.8576,0.8798,0.7720,0.5711,0.4264,0.2860,0.3114,0.2066,0.1165,0.0185,0.1302,0.2480,0.1637,0.1103,0.2144,0.2033,0.1887,0.1370,0.1376,0.0307,0.0373,0.0606,0.0399,0.0169,0.0135,0.0222,0.0175,0.0127,0.0022,0.0124,0.0054,0.0021,0.0028,0.0023,-1
0.0346,0.0509,0.0079,0.0243,0.0432,0.0735,0.0938,0.1134,0.1228,0.1508,0.1809,0.2390,0.2947,0.2866,0.4010,0.5325,0.5486,0.5823,0.6041,0.6749,0.7084,0.7890,0.9284,0.9781,0.9738,1.0000,0.9702,0.9956,0.8235,0.6020,0.5342,0.4867,0.3526,0.1566,0.0946,0.1613,0.2824,0.3390,0.3019,0.2945,0.2978,0.2676,0.2055,0.2069,0.1625,0.1216,0.1013,0.0744,0.0386,0.0050,0.0146,0.0040,0.0122,0.0107,0.0112,0.0102,0.0052,0.0024,0.0079,0.0031,-1
0.0162,0.0041,0.0239,0.0441,0.0630,0.0921,0.1368,0.1078,0.1552,0.1779,0.2164,0.2568,0.3089,0.3829,0.4393,0.5335,0.5996,0.6728,0.7309,0.8092,0.8941,0.9668,1.0000,0.9893,0.9376,0.8991,0.9184,0.9128,0.7811,0.6018,0.3765,0.3300,0.2280,0.0212,0.1117,0.1788,0.2373,0.2843,0.2241,0.2715,0.3363,0.2546,0.1867,0.2160,0.1278,0.0768,0.1070,0.0946,0.0636,0.0227,0.0128,0.0173,0.0135,0.0114,0.0062,0.0157,0.0088,0.0036,0.0053,0.0030,-1
0.0249,0.0119,0.0277,0.0760,0.1218,0.1538,0.1192,0.1229,0.2119,0.2531,0.2855,0.2961,0.3341,0.4287,0.5205,0.6087,0.7236,0.7577,0.7726,0.8098,0.8995,0.9247,0.9365,0.9853,0.9776,1.0000,0.9896,0.9076,0.7306,0.5758,0.4469,0.3719,0.2079,0.0955,0.0488,0.1406,0.2554,0.2054,0.1614,0.2232,0.1773,0.2293,0.2521,0.1464,0.0673,0.0965,0.1492,0.1128,0.0463,0.0193,0.0140,0.0027,0.0068,0.0150,0.0012,0.0133,0.0048,0.0244,0.0077,0.0074,-1
0.0270,0.0163,0.0341,0.0247,0.0822,0.1256,0.1323,0.1584,0.2017,0.2122,0.2210,0.2399,0.2964,0.4061,0.5095,0.5512,0.6613,0.6804,0.6520,0.6788,0.7811,0.8369,0.8969,0.9856,1.0000,0.9395,0.8917,0.8105,0.6828,0.5572,0.4301,0.3339,0.2035,0.0798,0.0809,0.1525,0.2626,0.2456,0.1980,0.2412,0.2409,0.1901,0.2077,0.1767,0.1119,0.0779,0.1344,0.0960,0.0598,0.0330,0.0197,0.0189,0.0204,0.0085,0.0043,0.0092,0.0138,0.0094,0.0105,0.0093,-1
0.0388,0.0324,0.0688,0.0898,0.1267,0.1515,0.2134,0.2613,0.2832,0.2718,0.3645,0.3934,0.3843,0.4677,0.5364,0.4823,0.4835,0.5862,0.7579,0.6997,0.6918,0.8633,0.9107,0.9346,0.7884,0.8585,0.9261,0.7080,0.5779,0.5215,0.4505,0.3129,0.1448,0.1046,0.1820,0.1519,0.1017,0.1438,0.1986,0.2039,0.2778,0.2879,0.1331,0.1140,0.1310,0.1433,0.0624,0.0100,0.0098,0.0131,0.0152,0.0255,0.0071,0.0263,0.0079,0.0111,0.0107,0.0068,0.0097,0.0067,-1
0.0228,0.0853,0.1000,0.0428,0.1117,0.1651,0.1597,0.2116,0.3295,0.3517,0.3330,0.3643,0.4020,0.4731,0.5196,0.6573,0.8426,0.8476,0.8344,0.8453,0.7999,0.8537,0.9642,1.0000,0.9357,0.9409,0.9070,0.7104,0.6320,0.5667,0.3501,0.2447,0.1698,0.3290,0.3674,0.2331,0.2413,0.2556,0.1892,0.1940,0.3074,0.2785,0.0308,0.1238,0.1854,0.1753,0.1079,0.0728,0.0242,0.0191,0.0159,0.0172,0.0191,0.0260,0.0140,0.0125,0.0116,0.0093,0.0012,0.0036,-1
0.0715,0.0849,0.0587,0.0218,0.0862,0.1801,0.1916,0.1896,0.2960,0.4186,0.4867,0.5249,0.5959,0.6855,0.8573,0.9718,0.8693,0.8711,0.8954,0.9922,0.8980,0.8158,0.8373,0.7541,0.5893,0.5488,0.5643,0.5406,0.4783,0.4439,0.3698,0.2574,0.1478,0.1743,0.1229,0.1588,0.1803,0.1436,0.1667,0.2630,0.2234,0.1239,0.0869,0.2092,0.1499,0.0676,0.0899,0.0927,0.0658,0.0086,0.0216,0.0153,0.0121,0.0096,0.0196,0.0042,0.0066,0.0099,0.0083,0.0124,-1
0.0209,0.0261,0.0120,0.0768,0.1064,0.1680,0.3016,0.3460,0.3314,0.4125,0.3943,0.1334,0.4622,0.9970,0.9137,0.8292,0.6994,0.7825,0.8789,0.8501,0.8920,0.9473,1.0000,0.8975,0.7806,0.8321,0.6502,0.4548,0.4732,0.3391,0.2747,0.0978,0.0477,0.1403,0.1834,0.2148,0.1271,0.1912,0.3391,0.3444,0.2369,0.1195,0.2665,0.2587,0.1393,0.1083,0.1383,0.1321,0.1069,0.0325,0.0316,0.0057,0.0159,0.0085,0.0372,0.0101,0.0127,0.0288,0.0129,0.0023,-1
0.0374,0.0586,0.0628,0.0534,0.0255,0.1422,0.2072,0.2734,0.3070,0.2597,0.3483,0.3999,0.4574,0.5950,0.7924,0.8272,0.8087,0.8977,0.9828,0.8982,0.8890,0.9367,0.9122,0.7936,0.6718,0.6318,0.4865,0.3388,0.4832,0.3822,0.3075,0.1267,0.0743,0.1510,0.1906,0.1817,0.1709,0.0946,0.2829,0.3006,0.1602,0.1483,0.2875,0.2047,0.1064,0.1395,0.1065,0.0527,0.0395,0.0183,0.0353,0.0118,0.0063,0.0237,0.0032,0.0087,0.0124,0.0113,0.0098,0.0126,-1
0.1371,0.1226,0.1385,0.1484,0.1776,0.1428,0.1773,0.2161,0.1630,0.2067,0.4257,0.5484,0.7131,0.7003,0.6777,0.7939,0.9382,0.8925,0.9146,0.7832,0.7960,0.7983,0.7716,0.6615,0.4860,0.5572,0.4697,0.5640,0.4517,0.3369,0.2684,0.2339,0.3052,0.3016,0.2753,0.1041,0.1757,0.3156,0.3603,0.2736,0.1301,0.2458,0.3404,0.1753,0.0679,0.1062,0.0643,0.0532,0.0531,0.0272,0.0171,0.0118,0.0129,0.0344,0.0065,0.0067,0.0022,0.0079,0.0146,0.0051,-1
0.0443,0.0446,0.0235,0.1008,0.2252,0.2611,0.2061,0.1668,0.1801,0.3083,0.3794,0.5364,0.6173,0.7842,0.8392,0.9016,1.0000,0.8911,0.8753,0.7886,0.7156,0.7581,0.6372,0.3210,0.2076,0.2279,0.3309,0.2847,0.1949,0.1671,0.1025,0.1362,0.2212,0.1124,0.1677,0.1039,0.2562,0.2624,0.2236,0.1180,0.1103,0.2831,0.2385,0.0255,0.1967,0.1483,0.0434,0.0627,0.0513,0.0473,0.0248,0.0274,0.0205,0.0141,0.0185,0.0055,0.0045,0.0115,0.0152,0.0100,-1
0.1150,0.1163,0.0866,0.0358,0.0232,0.1267,0.2417,0.2661,0.4346,0.5378,0.3816,0.0991,0.0616,0.1795,0.3907,0.3602,0.3041,0.2428,0.4060,0.8395,0.9777,0.4680,0.0610,0.2143,0.1348,0.2854,0.1617,0.2649,0.4565,0.6502,0.2848,0.3296,0.5370,0.6627,0.8626,0.8547,0.7848,0.9016,0.8827,0.6086,0.2810,0.0906,0.1177,0.2694,0.5214,0.4232,0.2340,0.1928,0.1092,0.0507,0.0228,0.0099,0.0065,0.0085,0.0166,0.0110,0.0190,0.0141,0.0068,0.0086,-1
0.0968,0.0821,0.0629,0.0608,0.0617,0.1207,0.0944,0.4223,0.5744,0.5025,0.3488,0.1700,0.2076,0.3087,0.4224,0.5312,0.2436,0.1884,0.1908,0.8321,1.0000,0.4076,0.0960,0.1928,0.2419,0.3790,0.2893,0.3451,0.3777,0.5213,0.2316,0.3335,0.4781,0.6116,0.6705,0.7375,0.7356,0.7792,0.6788,0.5259,0.2762,0.1545,0.2019,0.2231,0.4221,0.3067,0.1329,0.1349,0.1057,0.0499,0.0206,0.0073,0.0081,0.0303,0.0190,0.0212,0.0126,0.0201,0.0210,0.0041,-1
0.0790,0.0707,0.0352,0.1660,0.1330,0.0226,0.0771,0.2678,0.5664,0.6609,0.5002,0.2583,0.1650,0.4347,0.4515,0.4579,0.3366,0.4000,0.5325,0.9010,0.9939,0.3689,0.1012,0.0248,0.2318,0.3981,0.2259,0.5247,0.6898,0.8316,0.4326,0.3741,0.5756,0.8043,0.7963,0.7174,0.7056,0.8148,0.7601,0.6034,0.4554,0.4729,0.4478,0.3722,0.4693,0.3839,0.0768,0.1467,0.0777,0.0469,0.0193,0.0298,0.0390,0.0294,0.0175,0.0249,0.0141,0.0073,0.0025,0.0101,-1
0.1083,0.1070,0.0257,0.0837,0.0748,0.1125,0.3322,0.4590,0.5526,0.5966,0.5304,0.2251,0.2402,0.2689,0.6646,0.6632,0.1674,0.0837,0.4331,0.8718,0.7992,0.3712,0.1703,0.1611,0.2086,0.2847,0.2211,0.6134,0.5807,0.6925,0.3825,0.4303,0.7791,0.8703,1.0000,0.9212,0.9386,0.9303,0.7314,0.4791,0.2087,0.2016,0.1669,0.2872,0.4374,0.3097,0.1578,0.0553,0.0334,0.0209,0.0172,0.0180,0.0110,0.0234,0.0276,0.0032,0.0084,0.0122,0.0082,0.0143,-1
0.0094,0.0611,0.1136,0.1203,0.0403,0.1227,0.2495,0.4566,0.6587,0.5079,0.3350,0.0834,0.3004,0.3957,0.3769,0.3828,0.1247,0.1363,0.2678,0.9188,0.9779,0.3236,0.1944,0.1874,0.0885,0.3443,0.2953,0.5908,0.4564,0.7334,0.1969,0.2790,0.6212,0.8681,0.8621,0.9380,0.8327,0.9480,0.6721,0.4436,0.5163,0.3809,0.1557,0.1449,0.2662,0.1806,0.1699,0.2559,0.1129,0.0201,0.0480,0.0234,0.0175,0.0352,0.0158,0.0326,0.0201,0.0168,0.0245,0.0154,-1
0.1088,0.1278,0.0926,0.1234,0.1276,0.1731,0.1948,0.4262,0.6828,0.5761,0.4733,0.2362,0.1023,0.2904,0.4713,0.4659,0.1415,0.0849,0.3257,0.9007,0.9312,0.4856,0.1346,0.1604,0.2737,0.5609,0.3654,0.6139,0.5470,0.8474,0.5638,0.5443,0.5086,0.6253,0.8497,0.8406,0.8420,0.9136,0.7713,0.4882,0.3724,0.4469,0.4586,0.4491,0.5616,0.4305,0.0945,0.0794,0.0274,0.0154,0.0140,0.0455,0.0213,0.0082,0.0124,0.0167,0.0103,0.0205,0.0178,0.0187,-1
0.0430,0.0902,0.0833,0.0813,0.0165,0.0277,0.0569,0.2057,0.3887,0.7106,0.7342,0.5033,0.3000,0.1951,0.2767,0.3737,0.2507,0.2507,0.3292,0.4871,0.6527,0.8454,0.9739,1.0000,0.6665,0.5323,0.4024,0.3444,0.4239,0.4182,0.4393,0.1162,0.4336,0.6553,0.6172,0.4373,0.4118,0.3641,0.4572,0.4367,0.2964,0.4312,0.4155,0.1824,0.1487,0.0138,0.1164,0.2052,0.1069,0.0199,0.0208,0.0176,0.0197,0.0210,0.0141,0.0049,0.0027,0.0162,0.0059,0.0021,-1
0.0731,0.1249,0.1665,0.1496,0.1443,0.2770,0.2555,0.1712,0.0466,0.1114,0.1739,0.3160,0.3249,0.2164,0.2031,0.2580,0.1796,0.2422,0.3609,0.1810,0.2604,0.6572,0.9734,0.9757,0.8079,0.6521,0.4915,0.5363,0.7649,0.5250,0.5101,0.4219,0.4160,0.1906,0.0223,0.4219,0.5496,0.2483,0.2034,0.2729,0.2837,0.4463,0.3178,0.0807,0.1192,0.2134,0.3241,0.2945,0.1474,0.0211,0.0361,0.0444,0.0230,0.0290,0.0141,0.0161,0.0177,0.0194,0.0207,0.0057,-1
0.0164,0.0627,0.0738,0.0608,0.0233,0.1048,0.1338,0.0644,0.1522,0.0780,0.1791,0.2681,0.1788,0.1039,0.1980,0.3234,0.3748,0.2586,0.3680,0.3508,0.5606,0.5231,0.5469,0.6954,0.6352,0.6757,0.8499,0.8025,0.6563,0.8591,0.6655,0.5369,0.3118,0.3763,0.2801,0.0875,0.3319,0.4237,0.1801,0.3743,0.4627,0.1614,0.2494,0.3202,0.2265,0.1146,0.0476,0.0943,0.0824,0.0171,0.0244,0.0258,0.0143,0.0226,0.0187,0.0185,0.0110,0.0094,0.0078,0.0112,-1
0.0412,0.1135,0.0518,0.0232,0.0646,0.1124,0.1787,0.2407,0.2682,0.2058,0.1546,0.2671,0.3141,0.2904,0.3531,0.5079,0.4639,0.1859,0.4474,0.4079,0.5400,0.4786,0.4332,0.6113,0.5091,0.4606,0.7243,0.8987,0.8826,0.9201,0.8005,0.6033,0.2120,0.2866,0.4033,0.2803,0.3087,0.3550,0.2545,0.1432,0.5869,0.6431,0.5826,0.4286,0.4894,0.5777,0.4315,0.2640,0.1794,0.0772,0.0798,0.0376,0.0143,0.0272,0.0127,0.0166,0.0095,0.0225,0.0098,0.0085,-1
0.0707,0.1252,0.1447,0.1644,0.1693,0.0844,0.0715,0.0947,0.1583,0.1247,0.2340,0.1764,0.2284,0.3115,0.4725,0.5543,0.5386,0.3746,0.4583,0.5961,0.7464,0.7644,0.5711,0.6257,0.6695,0.7131,0.7567,0.8077,0.8477,0.9289,0.9513,0.7995,0.4362,0.4048,0.4952,0.1712,0.3652,0.3763,0.2841,0.0427,0.5331,0.6952,0.4288,0.3063,0.5835,0.5692,0.2630,0.1196,0.0983,0.0374,0.0291,0.0156,0.0197,0.0135,0.0127,0.0138,0.0133,0.0131,0.0154,0.0218,-1
0.0526,0.0563,0.1219,0.1206,0.0246,0.1022,0.0539,0.0439,0.2291,0.1632,0.2544,0.2807,0.3011,0.3361,0.3024,0.2285,0.2910,0.1316,0.1151,0.3404,0.5562,0.6379,0.6553,0.7384,0.6534,0.5423,0.6877,0.7325,0.7726,0.8229,0.8787,0.9108,0.6705,0.6092,0.7505,0.4775,0.1666,0.3749,0.3776,0.2106,0.5886,0.5628,0.2577,0.5245,0.6149,0.5123,0.3385,0.1499,0.0546,0.0270,0.0380,0.0339,0.0149,0.0335,0.0376,0.0174,0.0132,0.0103,0.0364,0.0208,-1
0.0516,0.0944,0.0622,0.0415,0.0995,0.2431,0.1777,0.2018,0.2611,0.1294,0.2646,0.2778,0.4432,0.3672,0.2035,0.2764,0.3252,0.1536,0.2784,0.3508,0.5187,0.7052,0.7143,0.6814,0.5100,0.5308,0.6131,0.8388,0.9031,0.8607,0.9656,0.9168,0.7132,0.6898,0.7310,0.4134,0.1580,0.1819,0.1381,0.2960,0.6935,0.8246,0.5351,0.4403,0.6448,0.6214,0.3016,0.1379,0.0364,0.0355,0.0456,0.0432,0.0274,0.0152,0.0120,0.0129,0.0020,0.0109,0.0074,0.0078,-1
0.0299,0.0688,0.0992,0.1021,0.0800,0.0629,0.0130,0.0813,0.1761,0.0998,0.0523,0.0904,0.2655,0.3099,0.3520,0.3892,0.3962,0.2449,0.2355,0.3045,0.3112,0.4698,0.5534,0.4532,0.4464,0.4670,0.4621,0.6988,0.7626,0.7025,0.7382,0.7446,0.7927,0.5227,0.3967,0.3042,0.1309,0.2408,0.1780,0.1598,0.5657,0.6443,0.4241,0.4567,0.5760,0.5293,0.3287,0.1283,0.0698,0.0334,0.0342,0.0459,0.0277,0.0172,0.0087,0.0046,0.0203,0.0130,0.0115,0.0015,-1
0.0721,0.1574,0.1112,0.1085,0.0666,0.1800,0.1108,0.2794,0.1408,0.0795,0.2534,0.3920,0.3375,0.1610,0.1889,0.3308,0.2282,0.2177,0.1853,0.5167,0.5342,0.6298,0.8437,0.6756,0.5825,0.6141,0.8809,0.8375,0.3869,0.5051,0.5455,0.4241,0.1534,0.4950,0.6983,0.7109,0.5647,0.4870,0.5515,0.4433,0.5250,0.6075,0.5251,0.1359,0.4268,0.4442,0.2193,0.0900,0.1200,0.0628,0.0234,0.0309,0.0127,0.0082,0.0281,0.0117,0.0092,0.0147,0.0157,0.0129,-1
0.1021,0.0830,0.0577,0.0627,0.0635,0.1328,0.0988,0.1787,0.1199,0.1369,0.2509,0.2631,0.2796,0.2977,0.3823,0.3129,0.3956,0.2093,0.3218,0.3345,0.3184,0.2887,0.3610,0.2566,0.4106,0.4591,0.4722,0.7278,0.7591,0.6579,0.7514,0.6666,0.4903,0.5962,0.6552,0.4014,0.1188,0.3245,0.3107,0.1354,0.5109,0.7988,0.7517,0.5508,0.5858,0.7292,0.5522,0.3339,0.1608,0.0475,0.1004,0.0709,0.0317,0.0309,0.0252,0.0087,0.0177,0.0214,0.0227,0.0106,-1
0.0654,0.0649,0.0737,0.1132,0.2482,0.1257,0.1797,0.0989,0.2460,0.3422,0.2128,0.1377,0.4032,0.5684,0.2398,0.4331,0.5954,0.5772,0.8176,0.8835,0.5248,0.6373,0.8375,0.6699,0.7756,0.8750,0.8300,0.6896,0.3372,0.6405,0.7138,0.8202,0.6657,0.5254,0.2960,0.0704,0.0970,0.3941,0.6028,0.3521,0.3924,0.4808,0.4602,0.4164,0.5438,0.5649,0.3195,0.2484,0.1299,0.0825,0.0243,0.0210,0.0361,0.0239,0.0447,0.0394,0.0355,0.0440,0.0243,0.0098,-1
0.0712,0.0901,0.1276,0.1497,0.1284,0.1165,0.1285,0.1684,0.1830,0.2127,0.2891,0.3985,0.4576,0.5821,0.5027,0.1930,0.2579,0.3177,0.2745,0.6186,0.8958,0.7442,0.5188,0.2811,0.1773,0.6607,0.7576,0.5122,0.4701,0.5479,0.4347,0.1276,0.0846,0.0927,0.0313,0.0998,0.1781,0.1586,0.3001,0.2208,0.1455,0.2895,0.3203,0.1414,0.0629,0.0734,0.0805,0.0608,0.0565,0.0286,0.0154,0.0154,0.0156,0.0054,0.0030,0.0048,0.0087,0.0101,0.0095,0.0068,-1
0.0207,0.0535,0.0334,0.0818,0.0740,0.0324,0.0918,0.1070,0.1553,0.1234,0.1796,0.1787,0.1247,0.2577,0.3370,0.3990,0.1647,0.2266,0.3219,0.5356,0.8159,1.0000,0.8701,0.6889,0.6299,0.5738,0.5707,0.5976,0.4301,0.2058,0.1000,0.2247,0.2308,0.3977,0.3317,0.1726,0.1429,0.2168,0.1967,0.2140,0.3674,0.2023,0.0778,0.0925,0.2388,0.3400,0.2594,0.1102,0.0911,0.0462,0.0171,0.0033,0.0050,0.0190,0.0103,0.0121,0.0042,0.0090,0.0070,0.0099,-1
0.0209,0.0278,0.0115,0.0445,0.0427,0.0766,0.1458,0.1430,0.1894,0.1853,0.1748,0.1556,0.1476,0.1378,0.2584,0.3827,0.4784,0.5360,0.6192,0.7912,0.9264,1.0000,0.9080,0.7435,0.5557,0.3172,0.1295,0.0598,0.2722,0.3616,0.3293,0.4855,0.3936,0.1845,0.0342,0.2489,0.3837,0.3514,0.2654,0.1760,0.1599,0.0866,0.0590,0.0813,0.0492,0.0417,0.0495,0.0367,0.0115,0.0118,0.0133,0.0096,0.0014,0.0049,0.0039,0.0029,0.0078,0.0047,0.0021,0.0011,-1
0.0231,0.0315,0.0170,0.0226,0.0410,0.0116,0.0223,0.0805,0.2365,0.2461,0.2245,0.1520,0.1732,0.3099,0.4380,0.5595,0.6820,0.6164,0.6803,0.8435,0.9921,1.0000,0.7983,0.5426,0.3952,0.5179,0.5650,0.3042,0.1881,0.3960,0.2286,0.3544,0.4187,0.2398,0.1847,0.3760,0.4331,0.3626,0.2519,0.1870,0.1046,0.2339,0.1991,0.1100,0.0684,0.0303,0.0674,0.0785,0.0455,0.0246,0.0151,0.0125,0.0036,0.0123,0.0043,0.0114,0.0052,0.0091,0.0008,0.0092,-1
0.0131,0.0201,0.0045,0.0217,0.0230,0.0481,0.0742,0.0333,0.1369,0.2079,0.2295,0.1990,0.1184,0.1891,0.2949,0.5343,0.6850,0.7923,0.8220,0.7290,0.7352,0.7918,0.8057,0.4898,0.1934,0.2924,0.6255,0.8546,0.8966,0.7821,0.5168,0.4840,0.4038,0.3411,0.2849,0.2353,0.2699,0.4442,0.4323,0.3314,0.1195,0.1669,0.3702,0.3072,0.0945,0.1545,0.1394,0.0772,0.0615,0.0230,0.0111,0.0168,0.0086,0.0045,0.0062,0.0065,0.0030,0.0066,0.0029,0.0053,-1
0.0233,0.0394,0.0416,0.0547,0.0993,0.1515,0.1674,0.1513,0.1723,0.2078,0.1239,0.0236,0.1771,0.3115,0.4990,0.6707,0.7655,0.8485,0.9805,1.0000,1.0000,0.9992,0.9067,0.6803,0.5103,0.4716,0.4980,0.6196,0.7171,0.6316,0.3554,0.2897,0.4316,0.3791,0.2421,0.0944,0.0351,0.0844,0.0436,0.1130,0.2045,0.1937,0.0834,0.1502,0.1675,0.1058,0.1111,0.0849,0.0596,0.0201,0.0071,0.0104,0.0062,0.0026,0.0025,0.0061,0.0038,0.0101,0.0078,0.0006,-1
0.0117,0.0069,0.0279,0.0583,0.0915,0.1267,0.1577,0.1927,0.2361,0.2169,0.1180,0.0754,0.2782,0.3758,0.5093,0.6592,0.7071,0.7532,0.8357,0.8593,0.9615,0.9838,0.8705,0.6403,0.5067,0.5395,0.6934,0.8487,0.8213,0.5962,0.2950,0.2758,0.2885,0.1893,0.1446,0.0955,0.0888,0.0836,0.0894,0.1547,0.2318,0.2225,0.1035,0.1721,0.2017,0.1787,0.1112,0.0398,0.0305,0.0084,0.0039,0.0053,0.0029,0.0020,0.0013,0.0029,0.0020,0.0062,0.0026,0.0052,-1
0.0211,0.0128,0.0015,0.0450,0.0711,0.1563,0.1518,0.1206,0.1666,0.1345,0.0785,0.0367,0.1227,0.2614,0.4280,0.6122,0.7435,0.8130,0.9006,0.9603,0.9162,0.9140,0.7851,0.5134,0.3439,0.3290,0.2571,0.3685,0.5765,0.6190,0.4613,0.3615,0.4434,0.3864,0.3093,0.2138,0.1112,0.1386,0.1523,0.0996,0.1644,0.1902,0.1313,0.1776,0.2000,0.0765,0.0727,0.0749,0.0449,0.0134,0.0174,0.0117,0.0023,0.0047,0.0049,0.0031,0.0024,0.0039,0.0051,0.0015,-1
0.0047,0.0059,0.0080,0.0554,0.0883,0.1278,0.1674,0.1373,0.2922,0.3469,0.3265,0.3263,0.2301,0.1253,0.2102,0.2401,0.1928,0.1673,0.1228,0.0902,0.1557,0.3291,0.5268,0.6740,0.7906,0.8938,0.9395,0.9493,0.9040,0.9151,0.8828,0.8086,0.7180,0.6720,0.6447,0.6879,0.6241,0.4936,0.4144,0.4240,0.4546,0.4392,0.4323,0.4921,0.4710,0.3196,0.2241,0.1806,0.0990,0.0251,0.0129,0.0095,0.0126,0.0069,0.0039,0.0068,0.0060,0.0045,0.0002,0.0029,-1
0.0201,0.0178,0.0274,0.0232,0.0724,0.0833,0.1232,0.1298,0.2085,0.2720,0.2188,0.3037,0.2959,0.2059,0.0906,0.1610,0.1800,0.2180,0.2026,0.1506,0.0521,0.2143,0.4333,0.5943,0.6926,0.7576,0.8787,0.9060,0.8528,0.9087,0.9657,0.9306,0.7774,0.6643,0.6604,0.6884,0.6938,0.5932,0.5774,0.6223,0.5841,0.4527,0.4911,0.5762,0.5013,0.4042,0.3123,0.2232,0.1085,0.0414,0.0253,0.0131,0.0049,0.0104,0.0102,0.0092,0.0083,0.0020,0.0048,0.0036,-1
0.0107,0.0453,0.0289,0.0713,0.1075,0.1019,0.1606,0.2119,0.3061,0.2936,0.3104,0.3431,0.2456,0.1887,0.1184,0.2080,0.2736,0.3274,0.2344,0.1260,0.0576,0.1241,0.3239,0.4357,0.5734,0.7825,0.9252,0.9349,0.9348,1.0000,0.9308,0.8478,0.7605,0.7040,0.7539,0.7990,0.7673,0.5955,0.4731,0.4840,0.4340,0.3954,0.4837,0.5379,0.4485,0.2674,0.1541,0.1359,0.0941,0.0261,0.0079,0.0164,0.0120,0.0113,0.0021,0.0097,0.0072,0.0060,0.0017,0.0036,-1
0.0235,0.0220,0.0167,0.0516,0.0746,0.1121,0.1258,0.1717,0.3074,0.3199,0.2946,0.2484,0.2510,0.1806,0.1413,0.3019,0.3635,0.3887,0.2980,0.2219,0.1624,0.1343,0.2046,0.3791,0.5771,0.7545,0.8406,0.8547,0.9036,1.0000,0.9646,0.7912,0.6412,0.5986,0.6835,0.7771,0.8084,0.7426,0.6295,0.5708,0.4433,0.3361,0.3795,0.4950,0.4373,0.2404,0.1128,0.1654,0.0933,0.0225,0.0214,0.0221,0.0152,0.0083,0.0058,0.0023,0.0057,0.0052,0.0027,0.0021,-1
0.0258,0.0433,0.0547,0.0681,0.0784,0.1250,0.1296,0.1729,0.2794,0.2954,0.2506,0.2601,0.2249,0.2115,0.1270,0.1193,0.1794,0.2185,0.1646,0.0740,0.0625,0.2381,0.4824,0.6372,0.7531,0.8959,0.9941,0.9957,0.9328,0.9344,0.8854,0.7690,0.6865,0.6390,0.6378,0.6629,0.5983,0.4565,0.3129,0.4158,0.4325,0.4031,0.4201,0.4557,0.3955,0.2966,0.2095,0.1558,0.0884,0.0265,0.0121,0.0091,0.0062,0.0019,0.0045,0.0079,0.0031,0.0063,0.0048,0.0050,-1
0.0305,0.0363,0.0214,0.0227,0.0456,0.0665,0.0939,0.0972,0.2535,0.3127,0.2192,0.2621,0.2419,0.2179,0.1159,0.1237,0.0886,0.1755,0.1758,0.1540,0.0512,0.1805,0.4039,0.5697,0.6577,0.7474,0.8543,0.9085,0.8668,0.8892,0.9065,0.8522,0.7204,0.6200,0.6253,0.6848,0.7337,0.6281,0.5725,0.6119,0.5597,0.4965,0.5027,0.5772,0.5907,0.4803,0.3877,0.2779,0.1427,0.0424,0.0271,0.0200,0.0070,0.0070,0.0086,0.0089,0.0074,0.0042,0.0055,0.0021,-1
0.0217,0.0152,0.0346,0.0346,0.0484,0.0526,0.0773,0.0862,0.1451,0.2110,0.2343,0.2087,0.1645,0.1689,0.1650,0.1967,0.2934,0.3709,0.4309,0.4161,0.5116,0.6501,0.7717,0.8491,0.9104,0.8912,0.8189,0.6779,0.5368,0.5207,0.5651,0.5749,0.5250,0.4255,0.3330,0.2331,0.1451,0.1648,0.2694,0.3730,0.4467,0.4133,0.3743,0.3021,0.2069,0.1790,0.1689,0.1341,0.0769,0.0222,0.0205,0.0123,0.0067,0.0011,0.0026,0.0049,0.0029,0.0022,0.0022,0.0032,-1
0.0072,0.0027,0.0089,0.0061,0.0420,0.0865,0.1182,0.0999,0.1976,0.2318,0.2472,0.2880,0.2126,0.0708,0.1194,0.2808,0.4221,0.5279,0.5857,0.6153,0.6753,0.7873,0.8974,0.9828,1.0000,0.8460,0.6055,0.3036,0.0144,0.2526,0.4335,0.4918,0.5409,0.5961,0.5248,0.3777,0.2369,0.1720,0.1878,0.3250,0.2575,0.2423,0.2706,0.2323,0.1724,0.1457,0.1175,0.0868,0.0392,0.0131,0.0092,0.0078,0.0071,0.0081,0.0034,0.0064,0.0037,0.0036,0.0012,0.0037,-1
0.0163,0.0198,0.0202,0.0386,0.0752,0.1444,0.1487,0.1484,0.2442,0.2822,0.3691,0.3750,0.3927,0.3308,0.1085,0.1139,0.3446,0.5441,0.6470,0.7276,0.7894,0.8264,0.8697,0.7836,0.7140,0.5698,0.2908,0.4636,0.6409,0.7405,0.8069,0.8420,1.0000,0.9536,0.6755,0.3905,0.1249,0.3629,0.6356,0.8116,0.7664,0.5417,0.2614,0.1723,0.2814,0.2764,0.1985,0.1502,0.1219,0.0493,0.0027,0.0077,0.0026,0.0031,0.0083,0.0020,0.0084,0.0108,0.0083,0.0033,-1
0.0221,0.0065,0.0164,0.0487,0.0519,0.0849,0.0812,0.1833,0.2228,0.1810,0.2549,0.2984,0.2624,0.1893,0.0668,0.2666,0.4274,0.6291,0.7782,0.7686,0.8099,0.8493,0.9440,0.9450,0.9655,0.8045,0.4969,0.3960,0.3856,0.5574,0.7309,0.8549,0.9425,0.8726,0.6673,0.4694,0.1546,0.1748,0.3607,0.5208,0.5177,0.3702,0.2240,0.0816,0.0395,0.0785,0.1052,0.1034,0.0764,0.0216,0.0167,0.0089,0.0051,0.0015,0.0075,0.0058,0.0016,0.0070,0.0074,0.0038,-1
0.0411,0.0277,0.0604,0.0525,0.0489,0.0385,0.0611,0.1117,0.1237,0.2300,0.1370,0.1335,0.2137,0.1526,0.0775,0.1196,0.0903,0.0689,0.2071,0.2975,0.2836,0.3353,0.3622,0.3202,0.3452,0.3562,0.3892,0.6622,0.9254,1.0000,0.8528,0.6297,0.5250,0.4012,0.2901,0.2007,0.3356,0.4799,0.6147,0.6246,0.4973,0.3492,0.2662,0.3137,0.4282,0.4262,0.3511,0.2458,0.1259,0.0327,0.0181,0.0217,0.0038,0.0019,0.0065,0.0132,0.0108,0.0050,0.0085,0.0044,-1
0.0137,0.0297,0.0116,0.0082,0.0241,0.0253,0.0279,0.0130,0.0489,0.0874,0.1100,0.1084,0.1094,0.1023,0.0601,0.0906,0.1313,0.2758,0.3660,0.5269,0.5810,0.6181,0.5875,0.4639,0.5424,0.7367,0.9089,1.0000,0.8247,0.5441,0.3349,0.0877,0.1600,0.4169,0.6576,0.7390,0.7963,0.7493,0.6795,0.4713,0.2355,0.1704,0.2728,0.4016,0.4125,0.3470,0.2739,0.1790,0.0922,0.0276,0.0169,0.0081,0.0040,0.0025,0.0036,0.0058,0.0067,0.0035,0.0043,0.0033,-1
0.0015,0.0186,0.0289,0.0195,0.0515,0.0817,0.1005,0.0124,0.1168,0.1476,0.2118,0.2575,0.2354,0.1334,0.0092,0.1951,0.3685,0.4646,0.5418,0.6260,0.7420,0.8257,0.8609,0.8400,0.8949,0.9945,1.0000,0.9649,0.8747,0.6257,0.2184,0.2945,0.3645,0.5012,0.7843,0.9361,0.8195,0.6207,0.4513,0.3004,0.2674,0.2241,0.3141,0.3693,0.2986,0.2226,0.0849,0.0359,0.0289,0.0122,0.0045,0.0108,0.0075,0.0089,0.0036,0.0029,0.0013,0.0010,0.0032,0.0047,-1
0.0130,0.0120,0.0436,0.0624,0.0428,0.0349,0.0384,0.0446,0.1318,0.1375,0.2026,0.2389,0.2112,0.1444,0.0742,0.1533,0.3052,0.4116,0.5466,0.5933,0.6663,0.7333,0.7136,0.7014,0.7758,0.9137,0.9964,1.0000,0.8881,0.6585,0.2707,0.1746,0.2709,0.4853,0.7184,0.8209,0.7536,0.6496,0.4708,0.3482,0.3508,0.3181,0.3524,0.3659,0.2846,0.1714,0.0694,0.0303,0.0292,0.0116,0.0024,0.0084,0.0100,0.0018,0.0035,0.0058,0.0011,0.0009,0.0033,0.0026,-1
0.0134,0.0172,0.0178,0.0363,0.0444,0.0744,0.0800,0.0456,0.0368,0.1250,0.2405,0.2325,0.2523,0.1472,0.0669,0.1100,0.2353,0.3282,0.4416,0.5167,0.6508,0.7793,0.7978,0.7786,0.8587,0.9321,0.9454,0.8645,0.7220,0.4850,0.1357,0.2951,0.4715,0.6036,0.8083,0.9870,0.8800,0.6411,0.4276,0.2702,0.2642,0.3342,0.4335,0.4542,0.3960,0.2525,0.1084,0.0372,0.0286,0.0099,0.0046,0.0094,0.0048,0.0047,0.0016,0.0008,0.0042,0.0024,0.0027,0.0041,-1
0.0179,0.0136,0.0408,0.0633,0.0596,0.0808,0.2090,0.3465,0.5276,0.5965,0.6254,0.4507,0.3693,0.2864,0.1635,0.0422,0.1785,0.4394,0.6950,0.8097,0.8550,0.8717,0.8601,0.9201,0.8729,0.8084,0.8694,0.8411,0.5793,0.3754,0.3485,0.4639,0.6495,0.6901,0.5666,0.5188,0.5060,0.3885,0.3762,0.3738,0.2605,0.1591,0.1875,0.2267,0.1577,0.1211,0.0883,0.0850,0.0355,0.0219,0.0086,0.0123,0.0060,0.0187,0.0111,0.0126,0.0081,0.0155,0.0160,0.0085,-1
0.0180,0.0444,0.0476,0.0698,0.1615,0.0887,0.0596,0.1071,0.3175,0.2918,0.3273,0.3035,0.3033,0.2587,0.1682,0.1308,0.2803,0.4519,0.6641,0.7683,0.6960,0.4393,0.2432,0.2886,0.4974,0.8172,1.0000,0.9238,0.8519,0.7722,0.5772,0.5190,0.6824,0.6220,0.5054,0.3578,0.3809,0.3813,0.3359,0.2771,0.3648,0.3834,0.3453,0.2096,0.1031,0.0798,0.0701,0.0526,0.0241,0.0117,0.0122,0.0122,0.0114,0.0098,0.0027,0.0025,0.0026,0.0050,0.0073,0.0022,-1
0.0329,0.0216,0.0386,0.0627,0.1158,0.1482,0.2054,0.1605,0.2532,0.2672,0.3056,0.3161,0.2314,0.2067,0.1804,0.2808,0.4423,0.5947,0.6601,0.5844,0.4539,0.4789,0.5646,0.5281,0.7115,1.0000,0.9564,0.6090,0.5112,0.4000,0.0482,0.1852,0.2186,0.1436,0.1757,0.1428,0.1644,0.3089,0.3648,0.4441,0.3859,0.2813,0.1238,0.0953,0.1201,0.0825,0.0618,0.0141,0.0108,0.0124,0.0104,0.0095,0.0151,0.0059,0.0015,0.0053,0.0016,0.0042,0.0053,0.0074,-1
0.0191,0.0173,0.0291,0.0301,0.0463,0.0690,0.0576,0.1103,0.2423,0.3134,0.4786,0.5239,0.4393,0.3440,0.2869,0.3889,0.4420,0.3892,0.4088,0.5006,0.7271,0.9385,1.0000,0.9831,0.9932,0.9161,0.8237,0.6957,0.4536,0.3281,0.2522,0.3964,0.4154,0.3308,0.1445,0.1923,0.3208,0.3367,0.5683,0.5505,0.3231,0.0448,0.3131,0.3387,0.4130,0.3639,0.2069,0.0859,0.0600,0.0267,0.0125,0.0040,0.0136,0.0137,0.0172,0.0132,0.0110,0.0122,0.0114,0.0068,-1
0.0294,0.0123,0.0117,0.0113,0.0497,0.0998,0.1326,0.1117,0.2984,0.3473,0.4231,0.5044,0.5237,0.4398,0.3236,0.2956,0.3286,0.3231,0.4528,0.6339,0.7044,0.8314,0.8449,0.8512,0.9138,0.9985,1.0000,0.7544,0.4661,0.3924,0.3849,0.4674,0.4245,0.3095,0.0752,0.2885,0.4072,0.3170,0.2863,0.2634,0.0541,0.1874,0.3459,0.4646,0.4366,0.2581,0.1319,0.0505,0.0112,0.0059,0.0041,0.0056,0.0104,0.0079,0.0014,0.0054,0.0015,0.0006,0.0081,0.0043,-1
0.0635,0.0709,0.0453,0.0333,0.0185,0.1260,0.1015,0.1918,0.3362,0.3900,0.4674,0.5632,0.5506,0.4343,0.3052,0.3492,0.3975,0.3875,0.5280,0.7198,0.7702,0.8562,0.8688,0.9236,1.0000,0.9662,0.9822,0.7360,0.4158,0.2918,0.3280,0.3690,0.3450,0.2863,0.0864,0.3724,0.4649,0.3488,0.1817,0.1142,0.1220,0.2621,0.4461,0.4726,0.3263,0.1423,0.0390,0.0406,0.0311,0.0086,0.0154,0.0048,0.0025,0.0087,0.0072,0.0095,0.0086,0.0085,0.0040,0.0051,-1
0.0201,0.0165,0.0344,0.0330,0.0397,0.0443,0.0684,0.0903,0.1739,0.2571,0.2931,0.3108,0.3603,0.3002,0.2718,0.2007,0.1801,0.2234,0.3568,0.5492,0.7209,0.8318,0.8864,0.9520,0.9637,1.0000,0.9673,0.8664,0.7896,0.6345,0.5351,0.4056,0.2563,0.2894,0.3588,0.4296,0.4773,0.4516,0.3765,0.3051,0.1921,0.1184,0.1984,0.1570,0.0660,0.1294,0.0797,0.0052,0.0233,0.0152,0.0125,0.0054,0.0057,0.0137,0.0109,0.0035,0.0056,0.0105,0.0082,0.0036,-1
0.0197,0.0394,0.0384,0.0076,0.0251,0.0629,0.0747,0.0578,0.1357,0.1695,0.1734,0.2470,0.3141,0.3297,0.2759,0.2056,0.1162,0.1884,0.3390,0.3926,0.4282,0.5418,0.6448,0.7223,0.7853,0.7984,0.8847,0.9582,0.8990,0.6831,0.6108,0.5480,0.5058,0.4476,0.2401,0.1405,0.1772,0.1742,0.3326,0.4021,0.3009,0.2075,0.1206,0.0255,0.0298,0.0691,0.0781,0.0777,0.0369,0.0057,0.0091,0.0134,0.0097,0.0042,0.0058,0.0072,0.0041,0.0045,0.0047,0.0054,-1
0.0394,0.0420,0.0446,0.0551,0.0597,0.1416,0.0956,0.0802,0.1618,0.2558,0.3078,0.3404,0.3400,0.3951,0.3352,0.2252,0.2086,0.2248,0.3382,0.4578,0.6474,0.6708,0.7007,0.7619,0.7745,0.6767,0.7373,0.7834,0.9619,1.0000,0.8086,0.5558,0.5409,0.4988,0.3108,0.2897,0.2244,0.0960,0.2287,0.3228,0.3454,0.3882,0.3240,0.0926,0.1173,0.0566,0.0766,0.0969,0.0588,0.0050,0.0118,0.0146,0.0040,0.0114,0.0032,0.0062,0.0101,0.0068,0.0053,0.0087,-1
0.0310,0.0221,0.0433,0.0191,0.0964,0.1827,0.1106,0.1702,0.2804,0.4432,0.5222,0.5611,0.5379,0.4048,0.2245,0.1784,0.2297,0.2720,0.5209,0.6898,0.8202,0.8780,0.7600,0.7616,0.7152,0.7288,0.8686,0.9509,0.8348,0.5730,0.4363,0.4289,0.4240,0.3156,0.1287,0.1477,0.2062,0.2400,0.5173,0.5168,0.1491,0.2407,0.3415,0.4494,0.4624,0.2001,0.0775,0.1232,0.0783,0.0089,0.0249,0.0204,0.0059,0.0053,0.0079,0.0037,0.0015,0.0056,0.0067,0.0054,-1
0.0423,0.0321,0.0709,0.0108,0.1070,0.0973,0.0961,0.1323,0.2462,0.2696,0.3412,0.4292,0.3682,0.3940,0.2965,0.3172,0.2825,0.3050,0.2408,0.5420,0.6802,0.6320,0.5824,0.6805,0.5984,0.8412,0.9911,0.9187,0.8005,0.6713,0.5632,0.7332,0.6038,0.2575,0.0349,0.1799,0.3039,0.4760,0.5756,0.4254,0.5046,0.7179,0.6163,0.5663,0.5749,0.3593,0.2526,0.2299,0.1271,0.0356,0.0367,0.0176,0.0035,0.0093,0.0121,0.0075,0.0056,0.0021,0.0043,0.0017,-1
0.0095,0.0308,0.0539,0.0411,0.0613,0.1039,0.1016,0.1394,0.2592,0.3745,0.4229,0.4499,0.5404,0.4303,0.3333,0.3496,0.3426,0.2851,0.4062,0.6833,0.7650,0.6670,0.5703,0.5995,0.6484,0.8614,0.9819,0.9380,0.8435,0.6074,0.5403,0.6890,0.5977,0.3244,0.0516,0.3157,0.3590,0.3881,0.5716,0.4314,0.3051,0.4393,0.4302,0.4831,0.5084,0.1952,0.1539,0.2037,0.1054,0.0251,0.0357,0.0181,0.0019,0.0102,0.0133,0.0040,0.0042,0.0030,0.0031,0.0033,-1
0.0096,0.0404,0.0682,0.0688,0.0887,0.0932,0.0955,0.2140,0.2546,0.2952,0.4025,0.5148,0.4901,0.4127,0.3575,0.3447,0.3068,0.2945,0.4351,0.7264,0.8147,0.8103,0.6665,0.6958,0.7748,0.8688,1.0000,0.9941,0.8793,0.6482,0.5876,0.6408,0.4972,0.2755,0.0300,0.3356,0.3167,0.4133,0.6281,0.4977,0.2613,0.4697,0.4806,0.4921,0.5294,0.2216,0.1401,0.1888,0.0947,0.0134,0.0310,0.0237,0.0078,0.0144,0.0170,0.0012,0.0109,0.0036,0.0043,0.0018,-1
0.0269,0.0383,0.0505,0.0707,0.1313,0.2103,0.2263,0.2524,0.3595,0.5915,0.6675,0.5679,0.5175,0.3334,0.2002,0.2856,0.2937,0.3424,0.5949,0.7526,0.8959,0.8147,0.7109,0.7378,0.7201,0.8254,0.8917,0.9820,0.8179,0.4848,0.3203,0.2775,0.2382,0.2911,0.1675,0.3156,0.1869,0.3391,0.5993,0.4124,0.1181,0.3651,0.4655,0.4777,0.3517,0.0920,0.1227,0.1785,0.1085,0.0300,0.0346,0.0167,0.0199,0.0145,0.0081,0.0045,0.0043,0.0027,0.0055,0.0057,-1
0.0340,0.0625,0.0381,0.0257,0.0441,0.1027,0.1287,0.1850,0.2647,0.4117,0.5245,0.5341,0.5554,0.3915,0.2950,0.3075,0.3021,0.2719,0.5443,0.7932,0.8751,0.8667,0.7107,0.6911,0.7287,0.8792,1.0000,0.9816,0.8984,0.6048,0.4934,0.5371,0.4586,0.2908,0.0774,0.2249,0.1602,0.3958,0.6117,0.5196,0.2321,0.4370,0.3797,0.4322,0.4892,0.1901,0.0940,0.1364,0.0906,0.0144,0.0329,0.0141,0.0019,0.0067,0.0099,0.0042,0.0057,0.0051,0.0033,0.0058,-1
0.0209,0.0191,0.0411,0.0321,0.0698,0.1579,0.1438,0.1402,0.3048,0.3914,0.3504,0.3669,0.3943,0.3311,0.3331,0.3002,0.2324,0.1381,0.3450,0.4428,0.4890,0.3677,0.4379,0.4864,0.6207,0.7256,0.6624,0.7689,0.7981,0.8577,0.9273,0.7009,0.4851,0.3409,0.1406,0.1147,0.1433,0.1820,0.3605,0.5529,0.5988,0.5077,0.5512,0.5027,0.7034,0.5904,0.4069,0.2761,0.1584,0.0510,0.0054,0.0078,0.0201,0.0104,0.0039,0.0031,0.0062,0.0087,0.0070,0.0042,-1
0.0368,0.0279,0.0103,0.0566,0.0759,0.0679,0.0970,0.1473,0.2164,0.2544,0.2936,0.2935,0.2657,0.3187,0.2794,0.2534,0.1980,0.1929,0.2826,0.3245,0.3504,0.3324,0.4217,0.4774,0.4808,0.6325,0.8334,0.9458,1.0000,0.8425,0.5524,0.4795,0.5200,0.3968,0.1940,0.1519,0.2010,0.1736,0.1029,0.2244,0.3717,0.4449,0.3939,0.2030,0.2010,0.2187,0.1840,0.1477,0.0971,0.0224,0.0151,0.0105,0.0024,0.0018,0.0057,0.0092,0.0009,0.0086,0.0110,0.0052,-1
0.0089,0.0274,0.0248,0.0237,0.0224,0.0845,0.1488,0.1224,0.1569,0.2119,0.3003,0.3094,0.2743,0.2547,0.1870,0.1452,0.1457,0.2429,0.3259,0.3679,0.3355,0.3100,0.3914,0.5280,0.6409,0.7707,0.8754,1.0000,0.9806,0.6969,0.4973,0.5020,0.5359,0.3842,0.1848,0.1149,0.1570,0.1311,0.1583,0.2631,0.3103,0.4512,0.3785,0.1269,0.1459,0.1092,0.1485,0.1385,0.0716,0.0176,0.0199,0.0096,0.0103,0.0093,0.0025,0.0044,0.0021,0.0069,0.0060,0.0018,-1
0.0158,0.0239,0.0150,0.0494,0.0988,0.1425,0.1463,0.1219,0.1697,0.1923,0.2361,0.2719,0.3049,0.2986,0.2226,0.1745,0.2459,0.3100,0.3572,0.4283,0.4268,0.3735,0.4585,0.6094,0.7221,0.7595,0.8706,1.0000,0.9815,0.7187,0.5848,0.4192,0.3756,0.3263,0.1944,0.1394,0.1670,0.1275,0.1666,0.2574,0.2258,0.2777,0.1613,0.1335,0.1976,0.1234,0.1554,0.1057,0.0490,0.0097,0.0223,0.0121,0.0108,0.0057,0.0028,0.0079,0.0034,0.0046,0.0022,0.0021,-1
0.0156,0.0210,0.0282,0.0596,0.0462,0.0779,0.1365,0.0780,0.1038,0.1567,0.2476,0.2783,0.2896,0.2956,0.3189,0.1892,0.1730,0.2226,0.2427,0.3149,0.4102,0.3808,0.4896,0.6292,0.7519,0.7985,0.8830,0.9915,0.9223,0.6981,0.6167,0.5069,0.3921,0.3524,0.2183,0.1245,0.1592,0.1626,0.2356,0.2483,0.2437,0.2715,0.1184,0.1157,0.1449,0.1883,0.1954,0.1492,0.0511,0.0155,0.0189,0.0150,0.0060,0.0082,0.0091,0.0038,0.0056,0.0056,0.0048,0.0024,-1
0.0315,0.0252,0.0167,0.0479,0.0902,0.1057,0.1024,0.1209,0.1241,0.1533,0.2128,0.2536,0.2686,0.2803,0.1886,0.1485,0.2160,0.2417,0.2989,0.3341,0.3786,0.3956,0.5232,0.6913,0.7868,0.8337,0.9199,1.0000,0.8990,0.6456,0.5967,0.4355,0.2997,0.2294,0.1866,0.0922,0.1829,0.1743,0.2452,0.2407,0.2518,0.3184,0.1685,0.0675,0.1186,0.1833,0.1878,0.1114,0.0310,0.0143,0.0138,0.0108,0.0062,0.0044,0.0072,0.0007,0.0054,0.0035,0.0001,0.0055,-1
0.0056,0.0267,0.0221,0.0561,0.0936,0.1146,0.0706,0.0996,0.1673,0.1859,0.2481,0.2712,0.2934,0.2637,0.1880,0.1405,0.2028,0.2613,0.2778,0.3346,0.3830,0.4003,0.5114,0.6860,0.7490,0.7843,0.9021,1.0000,0.8888,0.6511,0.6083,0.4463,0.2948,0.1729,0.1488,0.0801,0.1770,0.1382,0.2404,0.2046,0.1970,0.2778,0.1377,0.0685,0.0664,0.1665,0.1807,0.1245,0.0516,0.0044,0.0185,0.0072,0.0055,0.0074,0.0068,0.0084,0.0037,0.0024,0.0034,0.0007,-1
0.0203,0.0121,0.0380,0.0128,0.0537,0.0874,0.1021,0.0852,0.1136,0.1747,0.2198,0.2721,0.2105,0.1727,0.2040,0.1786,0.1318,0.2260,0.2358,0.3107,0.3906,0.3631,0.4809,0.6531,0.7812,0.8395,0.9180,0.9769,0.8937,0.7022,0.6500,0.5069,0.3903,0.3009,0.1565,0.0985,0.2200,0.2243,0.2736,0.2152,0.2438,0.3154,0.2112,0.0991,0.0594,0.1940,0.1937,0.1082,0.0336,0.0177,0.0209,0.0134,0.0094,0.0047,0.0045,0.0042,0.0028,0.0036,0.0013,0.0016,-1
0.0392,0.0108,0.0267,0.0257,0.0410,0.0491,0.1053,0.1690,0.2105,0.2471,0.2680,0.3049,0.2863,0.2294,0.1165,0.2127,0.2062,0.2222,0.3241,0.4330,0.5071,0.5944,0.7078,0.7641,0.8878,0.9711,0.9880,0.9812,0.9464,0.8542,0.6457,0.3397,0.3828,0.3204,0.1331,0.0440,0.1234,0.2030,0.1652,0.1043,0.1066,0.2110,0.2417,0.1631,0.0769,0.0723,0.0912,0.0812,0.0496,0.0101,0.0089,0.0083,0.0080,0.0026,0.0079,0.0042,0.0071,0.0044,0.0022,0.0014,-1
0.0129,0.0141,0.0309,0.0375,0.0767,0.0787,0.0662,0.1108,0.1777,0.2245,0.2431,0.3134,0.3206,0.2917,0.2249,0.2347,0.2143,0.2939,0.4898,0.6127,0.7531,0.7718,0.7432,0.8673,0.9308,0.9836,1.0000,0.9595,0.8722,0.6862,0.4901,0.3280,0.3115,0.1969,0.1019,0.0317,0.0756,0.0907,0.1066,0.1380,0.0665,0.1475,0.2470,0.2788,0.2709,0.2283,0.1818,0.1185,0.0546,0.0219,0.0204,0.0124,0.0093,0.0072,0.0019,0.0027,0.0054,0.0017,0.0024,0.0029,-1
0.0050,0.0017,0.0270,0.0450,0.0958,0.0830,0.0879,0.1220,0.1977,0.2282,0.2521,0.3484,0.3309,0.2614,0.1782,0.2055,0.2298,0.3545,0.6218,0.7265,0.8346,0.8268,0.8366,0.9408,0.9510,0.9801,0.9974,1.0000,0.9036,0.6409,0.3857,0.2908,0.2040,0.1653,0.1769,0.1140,0.0740,0.0941,0.0621,0.0426,0.0572,0.1068,0.1909,0.2229,0.2203,0.2265,0.1766,0.1097,0.0558,0.0142,0.0281,0.0165,0.0056,0.0010,0.0027,0.0062,0.0024,0.0063,0.0017,0.0028,-1
0.0366,0.0421,0.0504,0.0250,0.0596,0.0252,0.0958,0.0991,0.1419,0.1847,0.2222,0.2648,0.2508,0.2291,0.1555,0.1863,0.2387,0.3345,0.5233,0.6684,0.7766,0.7928,0.7940,0.9129,0.9498,0.9835,1.0000,0.9471,0.8237,0.6252,0.4181,0.3209,0.2658,0.2196,0.1588,0.0561,0.0948,0.1700,0.1215,0.1282,0.0386,0.1329,0.2331,0.2468,0.1960,0.1985,0.1570,0.0921,0.0549,0.0194,0.0166,0.0132,0.0027,0.0022,0.0059,0.0016,0.0025,0.0017,0.0027,0.0027,-1
0.0238,0.0318,0.0422,0.0399,0.0788,0.0766,0.0881,0.1143,0.1594,0.2048,0.2652,0.3100,0.2381,0.1918,0.1430,0.1735,0.1781,0.2852,0.5036,0.6166,0.7616,0.8125,0.7793,0.8788,0.8813,0.9470,1.0000,0.9739,0.8446,0.6151,0.4302,0.3165,0.2869,0.2017,0.1206,0.0271,0.0580,0.1262,0.1072,0.1082,0.0360,0.1197,0.2061,0.2054,0.1878,0.2047,0.1716,0.1069,0.0477,0.0170,0.0186,0.0096,0.0071,0.0084,0.0038,0.0026,0.0028,0.0013,0.0035,0.0060,-1
0.0116,0.0744,0.0367,0.0225,0.0076,0.0545,0.1110,0.1069,0.1708,0.2271,0.3171,0.2882,0.2657,0.2307,0.1889,0.1791,0.2298,0.3715,0.6223,0.7260,0.7934,0.8045,0.8067,0.9173,0.9327,0.9562,1.0000,0.9818,0.8684,0.6381,0.3997,0.3242,0.2835,0.2413,0.2321,0.1260,0.0693,0.0701,0.1439,0.1475,0.0438,0.0469,0.1476,0.1742,0.1555,0.1651,0.1181,0.0720,0.0321,0.0056,0.0202,0.0141,0.0103,0.0100,0.0034,0.0026,0.0037,0.0044,0.0057,0.0035,-1
0.0131,0.0387,0.0329,0.0078,0.0721,0.1341,0.1626,0.1902,0.2610,0.3193,0.3468,0.3738,0.3055,0.1926,0.1385,0.2122,0.2758,0.4576,0.6487,0.7154,0.8010,0.7924,0.8793,1.0000,0.9865,0.9474,0.9474,0.9315,0.8326,0.6213,0.3772,0.2822,0.2042,0.2190,0.2223,0.1327,0.0521,0.0618,0.1416,0.1460,0.0846,0.1055,0.1639,0.1916,0.2085,0.2335,0.1964,0.1300,0.0633,0.0183,0.0137,0.0150,0.0076,0.0032,0.0037,0.0071,0.0040,0.0009,0.0015,0.0085,-1
0.0335,0.0258,0.0398,0.0570,0.0529,0.1091,0.1709,0.1684,0.1865,0.2660,0.3188,0.3553,0.3116,0.1965,0.1780,0.2794,0.2870,0.3969,0.5599,0.6936,0.7969,0.7452,0.8203,0.9261,0.8810,0.8814,0.9301,0.9955,0.8576,0.6069,0.3934,0.2464,0.1645,0.1140,0.0956,0.0080,0.0702,0.0936,0.0894,0.1127,0.0873,0.1020,0.1964,0.2256,0.1814,0.2012,0.1688,0.1037,0.0501,0.0136,0.0130,0.0120,0.0039,0.0053,0.0062,0.0046,0.0045,0.0022,0.0005,0.0031,-1
0.0272,0.0378,0.0488,0.0848,0.1127,0.1103,0.1349,0.2337,0.3113,0.3997,0.3941,0.3309,0.2926,0.1760,0.1739,0.2043,0.2088,0.2678,0.2434,0.1839,0.2802,0.6172,0.8015,0.8313,0.8440,0.8494,0.9168,1.0000,0.7896,0.5371,0.6472,0.6505,0.4959,0.2175,0.0990,0.0434,0.1708,0.1979,0.1880,0.1108,0.1702,0.0585,0.0638,0.1391,0.0638,0.0581,0.0641,0.1044,0.0732,0.0275,0.0146,0.0091,0.0045,0.0043,0.0043,0.0098,0.0054,0.0051,0.0065,0.0103,-1
0.0187,0.0346,0.0168,0.0177,0.0393,0.1630,0.2028,0.1694,0.2328,0.2684,0.3108,0.2933,0.2275,0.0994,0.1801,0.2200,0.2732,0.2862,0.2034,0.1740,0.4130,0.6879,0.8120,0.8453,0.8919,0.9300,0.9987,1.0000,0.8104,0.6199,0.6041,0.5547,0.4160,0.1472,0.0849,0.0608,0.0969,0.1411,0.1676,0.1200,0.1201,0.1036,0.1977,0.1339,0.0902,0.1085,0.1521,0.1363,0.0858,0.0290,0.0203,0.0116,0.0098,0.0199,0.0033,0.0101,0.0065,0.0115,0.0193,0.0157,-1
0.0323,0.0101,0.0298,0.0564,0.0760,0.0958,0.0990,0.1018,0.1030,0.2154,0.3085,0.3425,0.2990,0.1402,0.1235,0.1534,0.1901,0.2429,0.2120,0.2395,0.3272,0.5949,0.8302,0.9045,0.9888,0.9912,0.9448,1.0000,0.9092,0.7412,0.7691,0.7117,0.5304,0.2131,0.0928,0.1297,0.1159,0.1226,0.1768,0.0345,0.1562,0.0824,0.1149,0.1694,0.0954,0.0080,0.0790,0.1255,0.0647,0.0179,0.0051,0.0061,0.0093,0.0135,0.0063,0.0063,0.0034,0.0032,0.0062,0.0067,-1
0.0522,0.0437,0.0180,0.0292,0.0351,0.1171,0.1257,0.1178,0.1258,0.2529,0.2716,0.2374,0.1878,0.0983,0.0683,0.1503,0.1723,0.2339,0.1962,0.1395,0.3164,0.5888,0.7631,0.8473,0.9424,0.9986,0.9699,1.0000,0.8630,0.6979,0.7717,0.7305,0.5197,0.1786,0.1098,0.1446,0.1066,0.1440,0.1929,0.0325,0.1490,0.0328,0.0537,0.1309,0.0910,0.0757,0.1059,0.1005,0.0535,0.0235,0.0155,0.0160,0.0029,0.0051,0.0062,0.0089,0.0140,0.0138,0.0077,0.0031,-1
0.0303,0.0353,0.0490,0.0608,0.0167,0.1354,0.1465,0.1123,0.1945,0.2354,0.2898,0.2812,0.1578,0.0273,0.0673,0.1444,0.2070,0.2645,0.2828,0.4293,0.5685,0.6990,0.7246,0.7622,0.9242,1.0000,0.9979,0.8297,0.7032,0.7141,0.6893,0.4961,0.2584,0.0969,0.0776,0.0364,0.1572,0.1823,0.1349,0.0849,0.0492,0.1367,0.1552,0.1548,0.1319,0.0985,0.1258,0.0954,0.0489,0.0241,0.0042,0.0086,0.0046,0.0126,0.0036,0.0035,0.0034,0.0079,0.0036,0.0048,-1
0.0260,0.0363,0.0136,0.0272,0.0214,0.0338,0.0655,0.1400,0.1843,0.2354,0.2720,0.2442,0.1665,0.0336,0.1302,0.1708,0.2177,0.3175,0.3714,0.4552,0.5700,0.7397,0.8062,0.8837,0.9432,1.0000,0.9375,0.7603,0.7123,0.8358,0.7622,0.4567,0.1715,0.1549,0.1641,0.1869,0.2655,0.1713,0.0959,0.0768,0.0847,0.2076,0.2505,0.1862,0.1439,0.1470,0.0991,0.0041,0.0154,0.0116,0.0181,0.0146,0.0129,0.0047,0.0039,0.0061,0.0040,0.0036,0.0061,0.0115,-1

File diff suppressed because one or more lines are too long

View file

@@ -1,216 +0,0 @@
1,107,10.1,2.2,0.9,2.7
1,113,9.9,3.1,2.0,5.9
1,127,12.9,2.4,1.4,0.6
1,109,5.3,1.6,1.4,1.5
1,105,7.3,1.5,1.5,-0.1
1,105,6.1,2.1,1.4,7.0
1,110,10.4,1.6,1.6,2.7
1,114,9.9,2.4,1.5,5.7
1,106,9.4,2.2,1.5,0.0
1,107,13.0,1.1,0.9,3.1
1,106,4.2,1.2,1.6,1.4
1,110,11.3,2.3,0.9,3.3
1,116,9.2,2.7,1.0,4.2
1,112,8.1,1.9,3.7,2.0
1,122,9.7,1.6,0.9,2.2
1,109,8.4,2.1,1.1,3.6
1,111,8.4,1.5,0.8,1.2
1,114,6.7,1.5,1.0,3.5
1,119,10.6,2.1,1.3,1.1
1,115,7.1,1.3,1.3,2.0
1,101,7.8,1.2,1.0,1.7
1,103,10.1,1.3,0.7,0.1
1,109,10.4,1.9,0.4,-0.1
1,102,7.6,1.8,2.0,2.5
1,121,10.1,1.7,1.3,0.1
1,100,6.1,2.4,1.8,3.8
1,106,9.6,2.4,1.0,1.3
1,116,10.1,2.2,1.6,0.8
1,105,11.1,2.0,1.0,1.0
1,110,10.4,1.8,1.0,2.3
1,120,8.4,1.1,1.4,1.4
1,116,11.1,2.0,1.2,2.3
1,110,7.8,1.9,2.1,6.4
1,90,8.1,1.6,1.4,1.1
1,117,12.2,1.9,1.2,3.9
1,117,11.0,1.4,1.5,2.1
1,113,9.0,2.0,1.8,1.6
1,106,9.4,1.5,0.8,0.5
1,130,9.5,1.7,0.4,3.2
1,100,10.5,2.4,0.9,1.9
1,121,10.1,2.4,0.8,3.0
1,110,9.2,1.6,1.5,0.3
1,129,11.9,2.7,1.2,3.5
1,121,13.5,1.5,1.6,0.5
1,123,8.1,2.3,1.0,5.1
1,107,8.4,1.8,1.5,0.8
1,109,10.0,1.3,1.8,4.3
1,120,6.8,1.9,1.3,1.9
1,100,9.5,2.5,1.3,-0.2
1,118,8.1,1.9,1.5,13.7
1,100,11.3,2.5,0.7,-0.3
1,103,12.2,1.2,1.3,2.7
1,115,8.1,1.7,0.6,2.2
1,119,8.0,2.0,0.6,3.2
1,106,9.4,1.7,0.9,3.1
1,114,10.9,2.1,0.3,1.4
1,93,8.9,1.5,0.8,2.7
1,120,10.4,2.1,1.1,1.8
1,106,11.3,1.8,0.9,1.0
1,110,8.7,1.9,1.6,4.4
1,103,8.1,1.4,0.5,3.8
1,101,7.1,2.2,0.8,2.2
1,115,10.4,1.8,1.6,2.0
1,116,10.0,1.7,1.5,4.3
1,117,9.2,1.9,1.5,6.8
1,106,6.7,1.5,1.2,3.9
1,118,10.5,2.1,0.7,3.5
1,97,7.8,1.3,1.2,0.9
1,113,11.1,1.7,0.8,2.3
1,104,6.3,2.0,1.2,4.0
1,96,9.4,1.5,1.0,3.1
1,120,12.4,2.4,0.8,1.9
1,133,9.7,2.9,0.8,1.9
1,126,9.4,2.3,1.0,4.0
1,113,8.5,1.8,0.8,0.5
1,109,9.7,1.4,1.1,2.1
1,119,12.9,1.5,1.3,3.6
1,101,7.1,1.6,1.5,1.6
1,108,10.4,2.1,1.3,2.4
1,117,6.7,2.2,1.8,6.7
1,115,15.3,2.3,2.0,2.0
1,91,8.0,1.7,2.1,4.6
1,103,8.5,1.8,1.9,1.1
1,98,9.1,1.4,1.9,-0.3
1,111,7.8,2.0,1.8,4.1
1,107,13.0,1.5,2.8,1.7
1,119,11.4,2.3,2.2,1.6
1,122,11.8,2.7,1.7,2.3
1,105,8.1,2.0,1.9,-0.5
1,109,7.6,1.3,2.2,1.9
1,105,9.5,1.8,1.6,3.6
1,112,5.9,1.7,2.0,1.3
1,112,9.5,2.0,1.2,0.7
1,98,8.6,1.6,1.6,6.0
1,109,12.4,2.3,1.7,0.8
1,114,9.1,2.6,1.5,1.5
1,114,11.1,2.4,2.0,-0.3
1,110,8.4,1.4,1.0,1.9
1,120,7.1,1.2,1.5,4.3
1,108,10.9,1.2,1.9,1.0
1,108,8.7,1.2,2.2,2.5
1,116,11.9,1.8,1.9,1.5
1,113,11.5,1.5,1.9,2.9
1,105,7.0,1.5,2.7,4.3
1,114,8.4,1.6,1.6,-0.2
1,114,8.1,1.6,1.6,0.5
1,105,11.1,1.1,0.8,1.2
1,107,13.8,1.5,1.0,1.9
1,116,11.5,1.8,1.4,5.4
1,102,9.5,1.4,1.1,1.6
1,116,16.1,0.9,1.3,1.5
1,118,10.6,1.8,1.4,3.0
1,109,8.9,1.7,1.0,0.9
1,110,7.0,1.0,1.6,4.3
1,104,9.6,1.1,1.3,0.8
1,105,8.7,1.5,1.1,1.5
1,102,8.5,1.2,1.3,1.4
1,112,6.8,1.7,1.4,3.3
1,111,8.5,1.6,1.1,3.9
1,111,8.5,1.6,1.2,7.7
1,103,7.3,1.0,0.7,0.5
1,98,10.4,1.6,2.3,-0.7
1,117,7.8,2.0,1.0,3.9
1,111,9.1,1.7,1.2,4.1
1,101,6.3,1.5,0.9,2.9
1,106,8.9,0.7,1.0,2.3
1,102,8.4,1.5,0.8,2.4
1,115,10.6,0.8,2.1,4.6
1,130,10.0,1.6,0.9,4.6
1,101,6.7,1.3,1.0,5.7
1,110,6.3,1.0,0.8,1.0
1,103,9.5,2.9,1.4,-0.1
1,113,7.8,2.0,1.1,3.0
1,112,10.6,1.6,0.9,-0.1
1,118,6.5,1.2,1.2,1.7
1,109,9.2,1.8,1.1,4.4
1,116,7.8,1.4,1.1,3.7
1,127,7.7,1.8,1.9,6.4
1,108,6.5,1.0,0.9,1.5
1,108,7.1,1.3,1.6,2.2
1,105,5.7,1.0,0.9,0.9
1,98,5.7,0.4,1.3,2.8
1,112,6.5,1.2,1.2,2.0
1,118,12.2,1.5,1.0,2.3
1,94,7.5,1.2,1.3,4.4
1,126,10.4,1.7,1.2,3.5
1,114,7.5,1.1,1.6,4.4
1,111,11.9,2.3,0.9,3.8
1,104,6.1,1.8,0.5,0.8
1,102,6.6,1.2,1.4,1.3
2,139,16.4,3.8,1.1,-0.2
2,111,16.0,2.1,0.9,-0.1
2,113,17.2,1.8,1.0,0.0
2,65,25.3,5.8,1.3,0.2
2,88,24.1,5.5,0.8,0.1
2,65,18.2,10.0,1.3,0.1
2,134,16.4,4.8,0.6,0.1
2,110,20.3,3.7,0.6,0.2
2,67,23.3,7.4,1.8,-0.6
2,95,11.1,2.7,1.6,-0.3
2,89,14.3,4.1,0.5,0.2
2,89,23.8,5.4,0.5,0.1
2,88,12.9,2.7,0.1,0.2
2,105,17.4,1.6,0.3,0.4
2,89,20.1,7.3,1.1,-0.2
2,99,13.0,3.6,0.7,-0.1
2,80,23.0,10.0,0.9,-0.1
2,89,21.8,7.1,0.7,-0.1
2,99,13.0,3.1,0.5,-0.1
2,68,14.7,7.8,0.6,-0.2
2,97,14.2,3.6,1.5,0.3
2,84,21.5,2.7,1.1,-0.6
2,84,18.5,4.4,1.1,-0.3
2,98,16.7,4.3,1.7,0.2
2,94,20.5,1.8,1.4,-0.5
2,99,17.5,1.9,1.4,0.3
2,76,25.3,4.5,1.2,-0.1
2,110,15.2,1.9,0.7,-0.2
2,144,22.3,3.3,1.3,0.6
2,105,12.0,3.3,1.1,0.0
2,88,16.5,4.9,0.8,0.1
2,97,15.1,1.8,1.2,-0.2
2,106,13.4,3.0,1.1,0.0
2,79,19.0,5.5,0.9,0.3
2,92,11.1,2.0,0.7,-0.2
3,125,2.3,0.9,16.5,9.5
3,120,6.8,2.1,10.4,38.6
3,108,3.5,0.6,1.7,1.4
3,120,3.0,2.5,1.2,4.5
3,119,3.8,1.1,23.0,5.7
3,141,5.6,1.8,9.2,14.4
3,129,1.5,0.6,12.5,2.9
3,118,3.6,1.5,11.6,48.8
3,120,1.9,0.7,18.5,24.0
3,119,0.8,0.7,56.4,21.6
3,123,5.6,1.1,13.7,56.3
3,115,6.3,1.2,4.7,14.4
3,126,0.5,0.2,12.2,8.8
3,121,4.7,1.8,11.2,53.0
3,131,2.7,0.8,9.9,4.7
3,134,2.0,0.5,12.2,2.2
3,141,2.5,1.3,8.5,7.5
3,113,5.1,0.7,5.8,19.6
3,136,1.4,0.3,32.6,8.4
3,120,3.4,1.8,7.5,21.5
3,125,3.7,1.1,8.5,25.9
3,123,1.9,0.3,22.8,22.2
3,112,2.6,0.7,41.0,19.0
3,134,1.9,0.6,18.4,8.2
3,119,5.1,1.1,7.0,40.8
3,118,6.5,1.3,1.7,11.5
3,139,4.2,0.7,4.3,6.3
3,103,5.1,1.4,1.2,5.0
3,97,4.7,1.1,2.1,12.6
3,102,5.3,1.4,1.3,6.7

View file

@@ -13,7 +13,11 @@ def initialize_latent(init, input_dim, Y):
         p = pca(Y)
         PC = p.project(Y, min(input_dim, Y.shape[1]))
         Xr[:PC.shape[0], :PC.shape[1]] = PC
+        var = p.fracs[:input_dim]
     else:
         var = Xr.var(0)
+    Xr -= Xr.mean(0)
+    Xr /= Xr.var(0)
     return Xr, var/var.max()
-    return Xr, p.fracs[:input_dim]
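For orientation, here is a minimal, self-contained sketch of what initialize_latent computes after this change. GPy's pca helper is replaced by a small numpy SVD stand-in, and the random fallback and the init == 'PCA' condition are assumptions rather than verbatim code from the file:

# Hedged sketch, not the file's verbatim contents: the pca helper is
# replaced by a numpy SVD stand-in; the random fallback and the
# init == 'PCA' condition are assumptions.
import numpy as np

def initialize_latent_sketch(init, input_dim, Y):
    Xr = np.random.randn(Y.shape[0], input_dim)   # assumed random fallback
    if init == 'PCA':
        Yc = Y - Y.mean(0)
        U, s, Vt = np.linalg.svd(Yc, full_matrices=False)
        q = min(input_dim, Y.shape[1])
        Xr[:, :q] = U[:, :q] * s[:q]              # projection onto the first q PCs
        var = (s**2 / (s**2).sum())[:input_dim]   # variance fractions, playing the role of p.fracs
    else:
        var = Xr.var(0)
    Xr -= Xr.mean(0)                              # the newly added centring ...
    Xr /= Xr.var(0)                               # ... and scaling step
    return Xr, var / var.max()                    # sensitivities, normalised to max 1

X, sens = initialize_latent_sketch('PCA', 2, np.random.randn(50, 5))

Note that the diff divides by Xr.var(0) rather than Xr.std(0), so the rescaled latent points do not have unit variance; whether that is intentional cannot be told from the diff alone.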

View file

@@ -130,14 +130,14 @@ def fast_array_equal(A, B):
     """ % pragma_string
     if config.getboolean('parallel', 'openmp'):
-        pragma_string = '#include <omp.h>'
+        header_string = '#include <omp.h>'
     else:
-        pragma_string = ''
+        header_string = ''
     support_code = """
     %s
     #include <math.h>
-    """ % pragma_string
+    """ % header_string
 
 weave_options_openmp = {'headers' : ['<omp.h>'],
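The rename is more than cosmetic: pragma_string is already interpolated into the generated C source just above (""" % pragma_string), so reassigning the same name to hold the #include line would clobber the pragma text for any later use. A hedged sketch of the two now-separate roles, where the pragma body is hypothetical and only the header logic mirrors the diff:

# Hedged sketch; 'openmp' stands in for config.getboolean('parallel', 'openmp').
openmp = True

pragma_string = '#pragma omp parallel for' if openmp else ''  # hypothetical: emitted inside the C loop
header_string = '#include <omp.h>' if openmp else ''          # emitted at the top of support_code

support_code = """
%s
#include <math.h>
""" % header_string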

View file

@@ -10,6 +10,16 @@ A Gaussian processes framework in Python.
 Continuous integration status: ![CI status](https://travis-ci.org/SheffieldML/GPy.png)
 
+Citation
+========
+
+    @Misc{gpy2014,
+      author = {The GPy authors},
+      title = {{GPy}: A Gaussian process framework in python},
+      howpublished = {\url{http://github.com/SheffieldML/GPy}},
+      year = {2012--2014}
+    }
+
 Getting started
 ===============
 
 Installing with pip