[paramz] fully integrated all tests running

This commit is contained in:
mzwiessele 2015-10-15 14:59:57 +01:00
parent e49c75ce2e
commit dce82847a7
78 changed files with 1581 additions and 1222 deletions

View file

@ -5,7 +5,7 @@ import numpy as np
from .. import kern
from ..core.sparse_gp_mpi import SparseGP_MPI
from ..likelihoods import Gaussian
from ..core.parameterization.variational import NormalPosterior, NormalPrior
from ..core.variational import NormalPosterior, NormalPrior
from ..inference.latent_function_inference.var_dtc_parallel import VarDTC_minibatch
import logging

View file

@ -2,14 +2,12 @@
# Licensed under the BSD 3-clause license (see LICENSE.txt)
import numpy as np
import logging
from .. import kern
from ..likelihoods import Gaussian
from ..core.parameterization.variational import NormalPosterior, NormalPrior
from ..inference.latent_function_inference.var_dtc_parallel import VarDTC_minibatch
import logging
from GPy.models.sparse_gp_minibatch import SparseGPMiniBatch
from GPy.core.parameterization.param import Param
from GPy.core.parameterization.observable_array import ObsAr
from ..core.variational import NormalPosterior, NormalPrior
from .sparse_gp_minibatch import SparseGPMiniBatch
from ..core.parameterization.param import Param
class BayesianGPLVMMiniBatch(SparseGPMiniBatch):
"""

View file

@ -1,10 +1,7 @@
# Copyright (c) 2015 the GPy Authors (see AUTHORS.txt)
# Licensed under the BSD 3-clause license (see LICENSE.txt)
import numpy as np
from .. import kern
from .bayesian_gplvm import BayesianGPLVM
from ..core.parameterization.variational import NormalPosterior, NormalPrior
class DPBayesianGPLVM(BayesianGPLVM):
"""

View file

@ -2,11 +2,11 @@
# Licensed under the BSD 3-clause license (see LICENSE.txt)
import numpy as np
from GPy.core.probabilistic_model import Model
from ..core.parameterization import ObsAr
from ..core import ProbabilisticModel
from paramz import ObsAr
from .. import likelihoods
class GPKroneckerGaussianRegression(Model):
class GPKroneckerGaussianRegression(ProbabilisticModel):
"""
Kronecker GP regression
@ -29,7 +29,7 @@ class GPKroneckerGaussianRegression(Model):
"""
def __init__(self, X1, X2, Y, kern1, kern2, noise_var=1., name='KGPR'):
Model.__init__(self, name=name)
ProbabilisticModel.__init__(self, name=name)
# accept the construction arguments
self.X1 = ObsAr(X1)
self.X2 = ObsAr(X2)

View file

@ -3,8 +3,6 @@
import numpy as np
from ..core import GP
from ..core.parameterization import ObsAr
from .. import kern
from ..core.parameterization.param import Param
from ..inference.latent_function_inference import VarGauss

View file

@ -1,11 +1,11 @@
# ## Copyright (c) 2012, GPy authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt)
from GPy.core.probabilistic_model import Model
import itertools
import numpy
from ..core.parameterization import Param
np = numpy
from ..core.parameterization import Param
from ..core.probabilistic_model import ProbabilisticModel
from ..util.block_matrices import get_blocks, get_block_shapes, unblock, get_blocks_3d, get_block_shapes_3d
def get_shape(x):
@ -21,7 +21,7 @@ def at_least_one_element(x):
def flatten_if_needed(x):
return numpy.atleast_1d(x).flatten()
class GradientChecker(Model):
class GradientChecker(ProbabilisticModel):
def __init__(self, f, df, x0, names=None, *args, **kwargs):
"""
@ -62,7 +62,7 @@ class GradientChecker(Model):
grad.randomize()
grad.checkgrad(verbose=1)
"""
Model.__init__(self, 'GradientChecker')
super(GradientChecker, self).__init__(name='GradientChecker')
if isinstance(x0, (list, tuple)) and names is None:
self.shapes = [get_shape(xi) for xi in x0]
self.names = ['X{i}'.format(i=i) for i in range(len(x0))]

View file

@ -5,18 +5,14 @@ import numpy as np
import itertools, logging
from ..kern import Kern
from ..core.parameterization.variational import NormalPosterior, NormalPrior
from ..core.parameterization import Param, Parameterized
from ..core.parameterization.observable_array import ObsAr
from ..core.variational import NormalPrior
from ..core.parameterization import Param
from paramz import ObsAr
from ..inference.latent_function_inference.var_dtc import VarDTC
from ..inference.latent_function_inference import InferenceMethodList
from ..likelihoods import Gaussian
from ..util.initialization import initialize_latent
from ..core.sparse_gp import SparseGP, GP
from GPy.core.parameterization.variational import VariationalPosterior
from GPy.models.bayesian_gplvm_minibatch import BayesianGPLVMMiniBatch
from GPy.models.bayesian_gplvm import BayesianGPLVM
from GPy.models.sparse_gp_minibatch import SparseGPMiniBatch
class MRD(BayesianGPLVMMiniBatch):
"""

View file

@ -1,7 +1,6 @@
# Copyright (c) 2013, the GPy Authors (see AUTHORS.txt)
# Licensed under the BSD 3-clause license (see LICENSE.txt)
from ..core import GP
from . import SparseGPClassification
from .. import likelihoods
from .. import kern

View file

@ -62,7 +62,7 @@ class SparseGPClassificationUncertainInput(SparseGP):
.. Note:: Multiple independent outputs are allowed using columns of Y
"""
def __init__(self, X, X_variance, Y, kernel=None, Z=None, num_inducing=10, Y_metadata=None, normalizer=None):
from ..core.parameterization.variational import NormalPosterior
from ..core.variational import NormalPosterior
if kernel is None:
kernel = kern.RBF(X.shape[1])

View file

@ -4,7 +4,6 @@
import numpy as np
from ..core import SparseGP
from ..inference.latent_function_inference import VarDTC
from .. import likelihoods
from .. import kern
from .. import util

View file

@ -4,18 +4,15 @@
from __future__ import print_function
import numpy as np
from ..core.parameterization.param import Param
from ..core.variational import VariationalPosterior
from ..core.sparse_gp import SparseGP
from ..core.gp import GP
from ..inference.latent_function_inference import var_dtc
from .. import likelihoods
from ..core.parameterization.variational import VariationalPosterior
import logging
from GPy.inference.latent_function_inference.posterior import Posterior
from GPy.inference.optimization.stochastics import SparseGPStochastics,\
SparseGPMissing
#no stochastics.py file added! from GPy.inference.optimization.stochastics import SparseGPStochastics,\
#SparseGPMissing
from ..inference.latent_function_inference.posterior import Posterior
from ..inference.optimization.stochastics import SparseGPStochastics, SparseGPMissing
logger = logging.getLogger("sparse gp")
class SparseGPMiniBatch(SparseGP):

View file

@ -3,12 +3,11 @@
import numpy as np
from ..core import SparseGP
from ..core.sparse_gp_mpi import SparseGP_MPI
from .. import likelihoods
from .. import kern
from ..inference.latent_function_inference import VarDTC
from ..core.parameterization.variational import NormalPosterior
from ..core.variational import NormalPosterior
class SparseGPRegression(SparseGP_MPI):
"""

View file

@ -2,9 +2,8 @@
# Licensed under the BSD 3-clause license (see LICENSE.txt)
import numpy as np
import sys
from GPy.models.sparse_gp_regression import SparseGPRegression
from .sparse_gp_regression import SparseGPRegression
class SparseGPLVM(SparseGPRegression):
"""

View file

@ -7,7 +7,7 @@ from ..core.sparse_gp_mpi import SparseGP_MPI
from .. import kern
from ..core.parameterization import Param
from ..likelihoods import Gaussian
from ..core.parameterization.variational import SpikeAndSlabPrior, SpikeAndSlabPosterior,VariationalPrior
from ..core.variational import SpikeAndSlabPrior, SpikeAndSlabPosterior,VariationalPrior
from ..inference.latent_function_inference.var_dtc_parallel import update_gradients, VarDTC_minibatch
from ..kern.src.psi_comp.ssrbf_psi_gpucomp import PSICOMP_SSRBF_GPU
@ -19,7 +19,7 @@ class IBPPosterior(SpikeAndSlabPosterior):
"""
binary_prob : the probability of the distribution on the slab part.
"""
from ..core.parameterization.transformations import Logexp
from paramz.transformations import Logexp
super(IBPPosterior, self).__init__(means, variances, binary_prob, group_spike=True, name=name)
self.sharedX = sharedX
if sharedX:
@ -60,7 +60,7 @@ class IBPPosterior(SpikeAndSlabPosterior):
class IBPPrior(VariationalPrior):
def __init__(self, input_dim, alpha =2., name='IBPPrior', **kw):
super(IBPPrior, self).__init__(name=name, **kw)
from ..core.parameterization.transformations import Logexp, __fixed__
from paramz.transformations import Logexp, __fixed__
self.input_dim = input_dim
self.variance = 1.
self.alpha = Param('alpha', alpha, __fixed__)

View file

@ -3,15 +3,15 @@ The Manifold Relevance Determination model with the spike-and-slab prior
"""
import numpy as np
from ..core import Model
from ..core import ProbabilisticModel
from .ss_gplvm import SSGPLVM
from ..core.parameterization.variational import SpikeAndSlabPrior,NormalPosterior,VariationalPrior
from ..core.variational import SpikeAndSlabPrior,NormalPosterior,VariationalPrior
from ..util.misc import param_to_array
from ..kern import RBF
from ..core import Param
from numpy.linalg.linalg import LinAlgError
class SSMRD(Model):
class SSMRD(ProbabilisticModel):
def __init__(self, Ylist, input_dim, X=None, X_variance=None, Gammas=None, initx = 'PCA_concat', initz = 'permute',
num_inducing=10, Zs=None, kernels=None, inference_methods=None, likelihoods=None, group_spike=True,
@ -117,13 +117,13 @@ class SSMRD(Model):
Gammas.append(gamma)
return X, X_variance, Gammas, fracs
@Model.optimizer_array.setter
@ProbabilisticModel.optimizer_array.setter
def optimizer_array(self, p):
if self.mpi_comm != None:
if self._IN_OPTIMIZATION_ and self.mpi_comm.rank==0:
self.mpi_comm.Bcast(np.int32(1),root=0)
self.mpi_comm.Bcast(p, root=0)
Model.optimizer_array.fset(self,p)
ProbabilisticModel.optimizer_array.fset(self,p)
def optimize(self, optimizer=None, start=None, **kwargs):
self._IN_OPTIMIZATION_ = True
@ -214,7 +214,7 @@ class SpikeAndSlabPrior_SSMRD(SpikeAndSlabPrior):
class IBPPrior_SSMRD(VariationalPrior):
def __init__(self, nModels, input_dim, alpha =2., tau=None, name='IBPPrior', **kw):
super(IBPPrior_SSMRD, self).__init__(name=name, **kw)
from ..core.parameterization.transformations import Logexp, __fixed__
from paramz.transformations import Logexp, __fixed__
self.nModels = nModels
self._b_prob_all = 0.5
self.input_dim = input_dim