diff --git a/GPy/core/parameterization/priors.py b/GPy/core/parameterization/priors.py
index c21f6bc5..71b9833e 100644
--- a/GPy/core/parameterization/priors.py
+++ b/GPy/core/parameterization/priors.py
@@ -1309,3 +1309,52 @@ class Exponential(Prior):
 
     def rvs(self, n):
         return np.random.exponential(scale=self.l, size=n)
+
+class StudentT(Prior):
+    """
+    Implementation of the Student's t probability density function, coupled with random variables.
+
+    :param mu: mean
+    :param sigma: standard deviation
+    :param nu: degrees of freedom
+
+    .. Note:: Bishop 2006 notation is used throughout the code
+
+    """
+    domain = _REAL
+    _instances = []
+
+    def __new__(cls, mu=0, sigma=1, nu=4):  # Singleton:
+        if cls._instances:
+            cls._instances[:] = [instance for instance in cls._instances if instance()]
+            for instance in cls._instances:
+                if instance().mu == mu and instance().sigma == sigma and instance().nu == nu:
+                    return instance()
+        newfunc = super(Prior, cls).__new__
+        if newfunc is object.__new__:
+            o = newfunc(cls)
+        else:
+            o = newfunc(cls, mu, sigma, nu)
+        cls._instances.append(weakref.ref(o))
+        return cls._instances[-1]()
+
+    def __init__(self, mu, sigma, nu):
+        self.mu = float(mu)
+        self.sigma = float(sigma)
+        self.sigma2 = np.square(self.sigma)
+        self.nu = float(nu)
+
+    def __str__(self):
+        return "St({:.2g}, {:.2g}, {:.2g})".format(self.mu, self.sigma, self.nu)
+
+    def lnpdf(self, x):
+        from scipy.stats import t
+        return t.logpdf(x, self.nu, self.mu, self.sigma)
+
+    def lnpdf_grad(self, x):
+        return -(self.nu + 1.) * (x - self.mu) / (self.nu * self.sigma2 + np.square(x - self.mu))
+
+    def rvs(self, n):
+        from scipy.stats import t
+        ret = t.rvs(self.nu, loc=self.mu, scale=self.sigma, size=n)
+        return ret
diff --git a/GPy/kern/src/basis_funcs.py b/GPy/kern/src/basis_funcs.py
index 7a5f84dd..81b308b0 100644
--- a/GPy/kern/src/basis_funcs.py
+++ b/GPy/kern/src/basis_funcs.py
@@ -142,9 +142,9 @@ class LogisticBasisFuncKernel(BasisFuncKernel):
         self.centers = np.atleast_2d(centers)
         self.ARD_slope = ARD_slope
         if self.ARD_slope:
-            self.slope = Param('slope', slope * np.ones(self.centers.size), Logexp())
+            self.slope = Param('slope', slope * np.ones(self.centers.size))
         else:
-            self.slope = Param('slope', slope, Logexp())
+            self.slope = Param('slope', slope)
         super(LogisticBasisFuncKernel, self).__init__(input_dim, variance, active_dims, ARD, name)
         self.link_parameter(self.slope)
 
diff --git a/GPy/testing/prior_tests.py b/GPy/testing/prior_tests.py
index ca03ad93..23822a5a 100644
--- a/GPy/testing/prior_tests.py
+++ b/GPy/testing/prior_tests.py
@@ -6,6 +6,29 @@ import numpy as np
 import GPy
 
 class PriorTests(unittest.TestCase):
+    def test_studentT(self):
+        xmin, xmax = 1, 2.5*np.pi
+        b, C, SNR = 1, 0, 0.1
+        X = np.linspace(xmin, xmax, 500)
+        y = b*X + C + 1*np.sin(X)
+        y += 0.05*np.random.randn(len(X))
+        X, y = X[:, None], y[:, None]
+        studentT = GPy.priors.StudentT(1, 2, 4)
+
+        m = GPy.models.SparseGPRegression(X, y)
+        m.Z.set_prior(studentT)
+
+        # setting a StudentT prior on non-negative parameters
+        # should raise an AssertionError.
+        self.assertRaises(AssertionError, m.rbf.set_prior, studentT)
+
+        # The gradients need to be checked
+        self.assertTrue(m.checkgrad())
+
+        # Check the singleton pattern:
+        self.assertIs(studentT, GPy.priors.StudentT(1, 2, 4))
+        self.assertIsNot(studentT, GPy.priors.StudentT(2, 2, 4))
+
     def test_lognormal(self):
         xmin, xmax = 1, 2.5*np.pi
         b, C, SNR = 1, 0, 0.1
@@ -74,7 +97,7 @@ class PriorTests(unittest.TestCase):
         # setting a Gaussian prior on non-negative parameters
         # should raise an assertionerror.
         #self.assertRaises(AssertionError, m.Z.set_prior, gaussian)
-
+
         self.assertTrue(m.checkgrad())
 
     def test_fixed_domain_check(self):
@@ -107,8 +130,6 @@ class PriorTests(unittest.TestCase):
         # should raise an assertionerror.
         self.assertRaises(AssertionError, m.rbf.set_prior, gaussian)
 
-
-
 if __name__ == "__main__":
     print("Running unit tests, please be (very) patient...")
     unittest.main()
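Usage note (not part of the patch): the sketch below shows how the new StudentT prior could be exercised once this diff is applied. The data setup mirrors test_studentT above; the finite-difference comparison of lnpdf_grad against lnpdf is an illustration of why the analytic gradient is expected to pass checkgrad, not part of the test suite.

    import numpy as np
    import GPy

    # Small regression problem, as in test_studentT above.
    X = np.linspace(1, 2.5 * np.pi, 500)[:, None]
    y = X + np.sin(X) + 0.05 * np.random.randn(500, 1)

    # StudentT(mu, sigma, nu); repeated calls with the same arguments
    # return the same singleton instance.
    studentT = GPy.priors.StudentT(1, 2, 4)
    assert studentT is GPy.priors.StudentT(1, 2, 4)

    # The prior's domain is the real line, so it can only be placed on
    # unconstrained parameters such as the inducing inputs Z; putting it
    # on the non-negative rbf parameters raises an AssertionError.
    m = GPy.models.SparseGPRegression(X, y)
    m.Z.set_prior(studentT)

    # lnpdf_grad should agree with a central finite difference of lnpdf.
    x = np.linspace(-5.0, 5.0, 11)
    eps = 1e-6
    numeric = (studentT.lnpdf(x + eps) - studentT.lnpdf(x - eps)) / (2 * eps)
    assert np.allclose(studentT.lnpdf_grad(x), numeric, atol=1e-5)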