[paramz] fully integrated all tests running

This commit is contained in:
mzwiessele 2015-10-15 14:59:57 +01:00
parent e49c75ce2e
commit dce82847a7
78 changed files with 1581 additions and 1222 deletions

View file

@@ -4,7 +4,7 @@
from .kern import Kern
from .independent_outputs import index_to_slices
from ...core.parameterization import Param
from ...core.parameterization.transformations import Logexp
from paramz.transformations import Logexp
import numpy as np
class ODE_UY(Kern):

View file

@@ -3,7 +3,7 @@
from .kern import Kern
from ...core.parameterization import Param
from ...core.parameterization.transformations import Logexp
from paramz.transformations import Logexp
import numpy as np
from .independent_outputs import index_to_slices

View file

@@ -2,7 +2,7 @@
# Licensed under the BSD 3-clause license (see LICENSE.txt)
from .kern import Kern
from ...core.parameterization import Param
from ...core.parameterization.transformations import Logexp
from paramz.transformations import Logexp
import numpy as np
from .independent_outputs import index_to_slices

View file

@@ -1,6 +1,6 @@
from .kern import Kern
from ...core.parameterization import Param
from ...core.parameterization.transformations import Logexp
from paramz.transformations import Logexp
import numpy as np
from .independent_outputs import index_to_slices

View file

@@ -3,7 +3,7 @@
import numpy as np
import itertools
from ...util.caching import Cache_this
from paramz.caching import Cache_this
from .kern import CombinationKernel, Kern
from functools import reduce

View file

@@ -3,8 +3,8 @@
import numpy as np
from .kern import Kern
from ...core.parameterization.param import Param
from ...core.parameterization.transformations import Logexp
from ...util.caching import Cache_this
from paramz.transformations import Logexp
from paramz.caching import Cache_this
from ...util.linalg import tdot, mdot
class BasisFuncKernel(Kern):

View file

@@ -3,7 +3,7 @@
from .kern import Kern
from ...core.parameterization import Param
from ...core.parameterization.transformations import Logexp
from paramz.transformations import Logexp
import numpy as np
class Brownian(Kern):

View file

@@ -4,7 +4,7 @@
from .kern import Kern
import numpy as np
from ...core.parameterization import Param
from ...core.parameterization.transformations import Logexp
from paramz.transformations import Logexp
from ...util.config import config # for assesing whether to use cython
try:
from . import coregionalize_cython

View file

@@ -5,8 +5,8 @@ import numpy as np
from scipy.special import wofz
from .kern import Kern
from ...core.parameterization import Param
from ...core.parameterization.transformations import Logexp
from ...util.caching import Cache_this
from paramz.transformations import Logexp
from paramz.caching import Cache_this
class EQ_ODE2(Kern):
"""

View file

@@ -3,8 +3,8 @@
import sys
import numpy as np
from ...core.parameterization.parameterized import Parameterized
from ...core.parameterization.observable_array import ObsAr
from ...util.caching import Cache_this
from paramz.core.observable_array import ObsAr
from paramz.caching import Cache_this
from .kernel_slice_operations import KernCallsViaSlicerMeta
from functools import reduce
import six
@@ -30,18 +30,16 @@ class Kern(Parameterized):
tight dimensionality of inputs.
You most likely want this to be the integer telling the number of
input dimensions of the kernel.
If this is not an integer (!) we will work on the whole input matrix X,
and not check whether dimensions match or not (!).
_all_dims_active:
active_dims:
is the active_dimensions of inputs X we will work on.
All kernels will get sliced Xes as inputs, if _all_dims_active is not None
Only positive integers are allowed in _all_dims_active!
if _all_dims_active is None, slicing is switched off and all X will be passed through as given.
Only positive integers are allowed in active_dims!
if active_dims is None, slicing is switched off and all X will be passed through as given.
:param int input_dim: the number of input dimensions to the function
:param array-like|None _all_dims_active: list of indices on which dimensions this kernel works on, or none if no slicing
:param array-like|None active_dims: list of indices on which dimensions this kernel works on, or none if no slicing
Do not instantiate.
"""

View file

@@ -7,9 +7,9 @@ This module provides a meta class for the kernels. The meta class is for
slicing the inputs (X, X2) for the kernels, before K (or any other method involving X)
gets calls. The `_all_dims_active` of a kernel decide which dimensions the kernel works on.
'''
from ...core.parameterization.parameterized import ParametersChangedMeta
import numpy as np
from functools import wraps
from paramz.parameterized import ParametersChangedMeta
def put_clean(dct, name, func):
if name in dct:

View file

@@ -6,8 +6,8 @@ import numpy as np
from .kern import Kern
from ...util.linalg import tdot
from ...core.parameterization import Param
from ...core.parameterization.transformations import Logexp
from ...util.caching import Cache_this
from paramz.transformations import Logexp
from paramz.caching import Cache_this
from .psi_comp import PSICOMP_Linear
class Linear(Kern):

View file

@@ -3,9 +3,9 @@
from .kern import Kern
from ...core.parameterization import Param
from ...core.parameterization.transformations import Logexp
from paramz.transformations import Logexp
import numpy as np
from ...util.caching import Cache_this
from paramz.caching import Cache_this
four_over_tau = 2./np.pi
class MLP(Kern):

View file

@@ -7,7 +7,7 @@ from .kern import Kern
from ...util.linalg import mdot
from ...util.decorators import silence_errors
from ...core.parameterization.param import Param
from ...core.parameterization.transformations import Logexp
from paramz.transformations import Logexp
class Periodic(Kern):
def __init__(self, input_dim, variance, lengthscale, period, n_freq, lower, upper, active_dims, name):

View file

@@ -4,7 +4,8 @@
import numpy as np
from .kern import Kern
from ...core.parameterization import Param
from ...core.parameterization.transformations import Logexp
from paramz.transformations import Logexp
class Poly(Kern):
"""
Polynomial kernel

View file

@@ -3,7 +3,7 @@
import numpy as np
from .kern import CombinationKernel
from ...util.caching import Cache_this
from paramz.caching import Cache_this
import itertools
from functools import reduce

View file

@@ -1,9 +1,9 @@
# Copyright (c) 2012, GPy authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt)
from ....core.parameterization.parameter_core import Pickleable
from ....util.caching import Cache_this
from ....core.parameterization import variational
from paramz.core.pickleable import Pickleable
from paramz.caching import Cache_this
from ....core import variational
#from linear_psi_comp import LINEAr
class PSICOMP(Pickleable):

View file

@@ -8,7 +8,7 @@ An approximated psi-statistics implementation based on Gauss-Hermite Quadrature
import numpy as np
from ....core.parameterization import Param
from ....util.caching import Cache_this
from paramz.caching import Cache_this
from ....util.linalg import tdot
from . import PSICOMP
@@ -30,7 +30,7 @@ class PSICOMP_GH(PSICOMP):
@Cache_this(limit=10, ignore_args=(0,))
def comp_K(self, Z, qX):
if self.Xs is None or self.Xs.shape != qX.mean.shape:
from ....core.parameterization import ObsAr
from paramz import ObsAr
self.Xs = ObsAr(np.empty((self.degree,)+qX.mean.shape))
mu, S = qX.mean.values, qX.variance.values
S_sq = np.sqrt(S)

View file

@@ -3,7 +3,7 @@ The module for psi-statistics for RBF kernel
"""
import numpy as np
from GPy.util.caching import Cacher
from paramz.caching import Cacher
def psicomputations(variance, lengthscale, Z, variational_posterior, return_psi2_n=False):
# here are the "statistics" for psi0, psi1 and psi2

View file

@@ -3,7 +3,7 @@ The module for psi-statistics for RBF kernel
"""
import numpy as np
from ....util.caching import Cache_this
from paramz.caching import Cache_this
from . import PSICOMP_RBF
from ....util import gpu_init

View file

@@ -4,7 +4,7 @@ The module for psi-statistics for RBF kernel for Spike-and-Slab GPLVM
"""
import numpy as np
from ....util.caching import Cache_this
from paramz.caching import Cache_this
from . import PSICOMP_RBF

View file

@@ -6,7 +6,7 @@ import numpy as np
from .stationary import Stationary
from .psi_comp import PSICOMP_RBF, PSICOMP_RBF_GPU
from ...core import Param
from ...core.parameterization.transformations import Logexp
from paramz.transformations import Logexp
class RBF(Stationary):
"""

View file

@@ -4,7 +4,7 @@
import numpy as np
from .kern import Kern
from ...core.parameterization import Param
from ...core.parameterization.transformations import Logexp
from paramz.transformations import Logexp
class Spline(Kern):
"""

View file

@@ -15,7 +15,7 @@ Neural Networks and Machine Learning, pages 133-165. Springer, 1998.
from .kern import Kern
from ...core.parameterization import Param
from ...core.parameterization.transformations import Logexp
from paramz.transformations import Logexp
import numpy as np

View file

@@ -5,7 +5,7 @@
from .kern import Kern
import numpy as np
from ...core.parameterization import Param
from ...core.parameterization.transformations import Logexp
from paramz.transformations import Logexp
class Static(Kern):
def __init__(self, input_dim, variance, active_dims, name):

View file

@@ -2,15 +2,15 @@
# Licensed under the BSD 3-clause license (see LICENSE.txt)
from .kern import Kern
from ...core.parameterization import Param
from ...core.parameterization.transformations import Logexp
from ...util.linalg import tdot
from ... import util
import numpy as np
from scipy import integrate
from .kern import Kern
from ...core.parameterization import Param
from ...util.linalg import tdot
from ... import util
from ...util.config import config # for assesing whether to use cython
from ...util.caching import Cache_this
from paramz.caching import Cache_this
from paramz.transformations import Logexp
try:
from . import stationary_cython

View file

@@ -5,8 +5,8 @@
import numpy as np
from .kern import Kern
from ...core.parameterization import Param
from ...core.parameterization.transformations import Logexp
from ...util.caching import Cache_this
from paramz.transformations import Logexp
from paramz.caching import Cache_this
class TruncLinear(Kern):
"""