merge with current GPy devel

Zhenwen Dai 2015-12-09 17:27:06 +00:00
commit c0e6978054
164 changed files with 733 additions and 5931 deletions

View file

@@ -5,7 +5,7 @@ import numpy as np
from .. import kern
from ..core.sparse_gp_mpi import SparseGP_MPI
from ..likelihoods import Gaussian
from ..core.parameterization.variational import NormalPosterior, NormalPrior
from GPy.core.parameterization.variational import NormalPosterior, NormalPrior
from ..inference.latent_function_inference.var_dtc_parallel import VarDTC_minibatch
import logging
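
For reference, a minimal sketch of how the variational objects imported above are typically constructed (the array shapes are illustrative, not taken from this commit):

    import numpy as np
    from GPy.core.parameterization.variational import NormalPosterior, NormalPrior

    # q(X): factorized Gaussian over the latent points, one mean/variance per entry
    X_mean = np.random.randn(100, 2)
    X_var = 0.5 * np.ones((100, 2))
    q_X = NormalPosterior(means=X_mean, variances=X_var)

    # p(X): standard normal prior on the latent space
    p_X = NormalPrior()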

View file

@@ -2,14 +2,12 @@
# Licensed under the BSD 3-clause license (see LICENSE.txt)
import numpy as np
import logging
from .. import kern
from ..likelihoods import Gaussian
from ..core.parameterization.variational import NormalPosterior, NormalPrior
from ..inference.latent_function_inference.var_dtc_parallel import VarDTC_minibatch
import logging
from GPy.models.sparse_gp_minibatch import SparseGPMiniBatch
from GPy.core.parameterization.param import Param
from GPy.core.parameterization.observable_array import ObsAr
from GPy.core.parameterization.variational import NormalPosterior, NormalPrior
from .sparse_gp_minibatch import SparseGPMiniBatch
from ..core.parameterization.param import Param
class BayesianGPLVMMiniBatch(SparseGPMiniBatch):
"""

View file

@@ -1,10 +1,7 @@
# Copyright (c) 2015 the GPy Authors (see AUTHORS.txt)
# Licensed under the BSD 3-clause license (see LICENSE.txt)
import numpy as np
from .. import kern
from .bayesian_gplvm import BayesianGPLVM
from ..core.parameterization.variational import NormalPosterior, NormalPrior
class DPBayesianGPLVM(BayesianGPLVM):
"""

View file

@@ -36,7 +36,9 @@ class GPCoregionalizedRegression(GP):
#Kernel
if kernel is None:
kernel = util.multioutput.ICM(input_dim=X.shape[1]-1, num_outputs=Ny, kernel=kern.RBF(X.shape[1]-1), W_rank=1,name=kernel_name)
kernel = kern.RBF(X.shape[1]-1)
kernel = util.multioutput.ICM(input_dim=X.shape[1]-1, num_outputs=Ny, kernel=kernel, W_rank=1,name=kernel_name)
#Likelihood
likelihood = util.multioutput.build_likelihood(Y_list,self.output_index,likelihoods_list)
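
The change above only splits the default-kernel construction into two statements; a hedged sketch of the equivalent standalone ICM usage (toy dimensions assumed):

    import GPy
    from GPy import kern, util

    input_dim = 1   # columns of X minus the output-index column
    Ny = 2          # number of output tasks
    k_base = kern.RBF(input_dim)
    k_icm = util.multioutput.ICM(input_dim=input_dim, num_outputs=Ny,
                                 kernel=k_base, W_rank=1, name='ICM')

Behaviour is unchanged; the intermediate variable just makes the default base kernel explicit.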

View file

@@ -2,8 +2,8 @@
# Licensed under the BSD 3-clause license (see LICENSE.txt)
import numpy as np
from ..core.model import Model
from ..core.parameterization import ObsAr
from ..core import Model
from paramz import ObsAr
from .. import likelihoods
class GPKroneckerGaussianRegression(Model):
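
ObsAr now comes from the external paramz package rather than GPy's own parameterization module. A minimal sketch of what it provides (usage assumed from paramz, not shown in this diff):

    import numpy as np
    from paramz import ObsAr

    # ObsAr is an ndarray subclass that the parameterization framework
    # can observe for changes; ordinary numpy operations still work.
    Y = ObsAr(np.random.randn(10, 1))
    print(Y.mean())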

View file

@@ -3,8 +3,6 @@
import numpy as np
from ..core import GP
from ..core.parameterization import ObsAr
from .. import kern
from ..core.parameterization.param import Param
from ..inference.latent_function_inference import VarGauss

View file

@@ -1,11 +1,11 @@
# ## Copyright (c) 2012, GPy authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt)
from ..core.model import Model
import itertools
import numpy
from ..core.parameterization import Param
np = numpy
from ..core.parameterization import Param
from GPy.core.model import Model
from ..util.block_matrices import get_blocks, get_block_shapes, unblock, get_blocks_3d, get_block_shapes_3d
def get_shape(x):
@@ -62,7 +62,7 @@ class GradientChecker(Model):
grad.randomize()
grad.checkgrad(verbose=1)
"""
Model.__init__(self, 'GradientChecker')
super(GradientChecker, self).__init__(name='GradientChecker')
if isinstance(x0, (list, tuple)) and names is None:
self.shapes = [get_shape(xi) for xi in x0]
self.names = ['X{i}'.format(i=i) for i in range(len(x0))]
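
A usage sketch for GradientChecker, following the pattern from its docstring (the sin/cos pair is a toy objective and its analytic gradient):

    import numpy as np
    from GPy.models import GradientChecker

    f = lambda x: np.sin(x).sum()   # objective, must return a scalar
    df = lambda x: np.cos(x)        # analytic gradient to verify
    x0 = np.random.randn(5, 3)

    grad = GradientChecker(f, df, x0, 'x')
    grad.randomize()
    grad.checkgrad(verbose=1)       # compares df against finite differences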

View file

@@ -5,18 +5,14 @@ import numpy as np
import itertools, logging
from ..kern import Kern
from ..core.parameterization.variational import NormalPosterior, NormalPrior
from ..core.parameterization import Param, Parameterized
from ..core.parameterization.observable_array import ObsAr
from GPy.core.parameterization.variational import NormalPrior
from ..core.parameterization import Param
from paramz import ObsAr
from ..inference.latent_function_inference.var_dtc import VarDTC
from ..inference.latent_function_inference import InferenceMethodList
from ..likelihoods import Gaussian
from ..util.initialization import initialize_latent
from ..core.sparse_gp import SparseGP, GP
from GPy.core.parameterization.variational import VariationalPosterior
from GPy.models.bayesian_gplvm_minibatch import BayesianGPLVMMiniBatch
from GPy.models.bayesian_gplvm import BayesianGPLVM
from GPy.models.sparse_gp_minibatch import SparseGPMiniBatch
class MRD(BayesianGPLVMMiniBatch):
"""

View file

@@ -1,7 +1,6 @@
# Copyright (c) 2013, the GPy Authors (see AUTHORS.txt)
# Licensed under the BSD 3-clause license (see LICENSE.txt)
from ..core import GP
from . import SparseGPClassification
from .. import likelihoods
from .. import kern

View file

@@ -62,7 +62,7 @@ class SparseGPClassificationUncertainInput(SparseGP):
.. Note:: Multiple independent outputs are allowed using columns of Y
"""
def __init__(self, X, X_variance, Y, kernel=None, Z=None, num_inducing=10, Y_metadata=None, normalizer=None):
from ..core.parameterization.variational import NormalPosterior
from GPy.core.parameterization.variational import NormalPosterior
if kernel is None:
kernel = kern.RBF(X.shape[1])
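
The constructor above takes a per-point input variance alongside X; a hedged construction sketch (synthetic data):

    import numpy as np
    import GPy

    X = np.random.rand(50, 1)
    X_variance = np.full(X.shape, 0.01)      # uncertainty on each input
    Y = (np.sin(6 * X) > 0).astype(float)    # binary labels in {0, 1}
    m = GPy.models.SparseGPClassificationUncertainInput(
        X, X_variance, Y, num_inducing=10)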

View file

@@ -4,7 +4,6 @@
import numpy as np
from ..core import SparseGP
from ..inference.latent_function_inference import VarDTC
from .. import likelihoods
from .. import kern
from .. import util
@@ -43,7 +42,9 @@ class SparseGPCoregionalizedRegression(SparseGP):
#Kernel
if kernel is None:
kernel = util.multioutput.ICM(input_dim=X.shape[1]-1, num_outputs=Ny, kernel=kern.RBF(X.shape[1]-1), W_rank=1,name=kernel_name)
kernel = kern.RBF(X.shape[1]-1)
kernel = util.multioutput.ICM(input_dim=X.shape[1]-1, num_outputs=Ny, kernel=kernel, W_rank=1,name=kernel_name)
#Likelihood
likelihood = util.multioutput.build_likelihood(Y_list,self.output_index,likelihoods_list)

View file

@@ -4,18 +4,15 @@
from __future__ import print_function
import numpy as np
from ..core.parameterization.param import Param
from GPy.core.parameterization.variational import VariationalPosterior
from ..core.sparse_gp import SparseGP
from ..core.gp import GP
from ..inference.latent_function_inference import var_dtc
from .. import likelihoods
from ..core.parameterization.variational import VariationalPosterior
import logging
from GPy.inference.latent_function_inference.posterior import Posterior
from GPy.inference.optimization.stochastics import SparseGPStochastics,\
SparseGPMissing
#no stochastics.py file added! from GPy.inference.optimization.stochastics import SparseGPStochastics,\
#SparseGPMissing
from ..inference.latent_function_inference.posterior import Posterior
from ..inference.optimization.stochastics import SparseGPStochastics, SparseGPMissing
logger = logging.getLogger("sparse gp")
class SparseGPMiniBatch(SparseGP):

View file

@@ -3,12 +3,11 @@
import numpy as np
from ..core import SparseGP
from ..core.sparse_gp_mpi import SparseGP_MPI
from .. import likelihoods
from .. import kern
from ..inference.latent_function_inference import VarDTC
from ..core.parameterization.variational import NormalPosterior
from GPy.core.parameterization.variational import NormalPosterior
class SparseGPRegression(SparseGP_MPI):
"""

View file

@@ -2,9 +2,8 @@
# Licensed under the BSD 3-clause license (see LICENSE.txt)
import numpy as np
import sys
from GPy.models.sparse_gp_regression import SparseGPRegression
from .sparse_gp_regression import SparseGPRegression
class SparseGPLVM(SparseGPRegression):
"""

View file

@@ -7,7 +7,7 @@ from ..core.sparse_gp_mpi import SparseGP_MPI
from .. import kern
from ..core.parameterization import Param
from ..likelihoods import Gaussian
from ..core.parameterization.variational import SpikeAndSlabPrior, SpikeAndSlabPosterior,VariationalPrior
from GPy.core.parameterization.variational import SpikeAndSlabPrior, SpikeAndSlabPosterior,VariationalPrior
from ..inference.latent_function_inference.var_dtc_parallel import update_gradients, VarDTC_minibatch
from ..kern.src.psi_comp.ssrbf_psi_gpucomp import PSICOMP_SSRBF_GPU
@@ -19,7 +19,7 @@ class IBPPosterior(SpikeAndSlabPosterior):
"""
binary_prob : the probability of the distribution on the slab part.
"""
from ..core.parameterization.transformations import Logexp
from paramz.transformations import Logexp
super(IBPPosterior, self).__init__(means, variances, binary_prob, group_spike=True, name=name)
self.sharedX = sharedX
if sharedX:
@@ -60,7 +60,7 @@ class IBPPosterior(SpikeAndSlabPosterior):
class IBPPrior(VariationalPrior):
def __init__(self, input_dim, alpha =2., name='IBPPrior', **kw):
super(IBPPrior, self).__init__(name=name, **kw)
from ..core.parameterization.transformations import Logexp, __fixed__
from paramz.transformations import Logexp, __fixed__
self.input_dim = input_dim
self.variance = 1.
self.alpha = Param('alpha', alpha, __fixed__)
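
The transformations now also come from paramz; a brief sketch of what they do when attached to a Param (values illustrative):

    from paramz.transformations import Logexp, __fixed__
    from GPy.core.parameterization.param import Param

    variance = Param('variance', 1.0, Logexp())  # constrained positive via log-exp
    alpha = Param('alpha', 2.0, __fixed__)       # fixed, excluded from optimization
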
@@ -224,4 +224,4 @@ class SSGPLVM(SparseGP_MPI):

View file

@@ -5,7 +5,7 @@ The Manifold Relevance Determination model with the spike-and-slab prior
import numpy as np
from ..core import Model
from .ss_gplvm import SSGPLVM
from ..core.parameterization.variational import SpikeAndSlabPrior,NormalPosterior,VariationalPrior
from GPy.core.parameterization.variational import SpikeAndSlabPrior,NormalPosterior,VariationalPrior
from ..util.misc import param_to_array
from ..kern import RBF
from ..core import Param
@@ -214,7 +214,7 @@ class SpikeAndSlabPrior_SSMRD(SpikeAndSlabPrior):
class IBPPrior_SSMRD(VariationalPrior):
def __init__(self, nModels, input_dim, alpha =2., tau=None, name='IBPPrior', **kw):
super(IBPPrior_SSMRD, self).__init__(name=name, **kw)
from ..core.parameterization.transformations import Logexp, __fixed__
from paramz.transformations import Logexp, __fixed__
self.nModels = nModels
self._b_prob_all = 0.5
self.input_dim = input_dim