mirror of
https://github.com/SheffieldML/GPy.git
synced 2026-05-02 08:12:39 +02:00
Merge pull request #432 from SheffieldML/mathDR-studentTprior
@Mathdr student t prior
This commit is contained in:
commit
81033bbaeb
3 changed files with 75 additions and 5 deletions
|
|
@ -1309,3 +1309,52 @@ class Exponential(Prior):
|
||||||
|
|
||||||
def rvs(self, n):
    """Draw *n* random samples from this Exponential prior.

    :param n: number of samples to draw
    :returns: array of *n* draws; numpy's ``scale`` parameter is the
        mean of the distribution, i.e. ``self.l`` here.
    """
    # NOTE(review): assumes self.l is the scale (mean), not the rate — matches
    # numpy.random.exponential's parameterization; confirm against the class ctor.
    scale = self.l
    return np.random.exponential(scale=scale, size=n)
|
||||||
|
|
||||||
|
class StudentT(Prior):
    """
    Implementation of the student t probability function, coupled with random variables.

    :param mu: mean (location)
    :param sigma: standard deviation (scale; presumably the Bishop "sigma",
        not the distribution's true std-dev — verify against callers)
    :param nu: degrees of freedom

    .. Note:: Bishop 2006 notation is used throughout the code
    """
    domain = _REAL
    # Weak references to live instances, used to enforce the singleton pattern
    # (one shared object per distinct (mu, sigma, nu) triple).
    _instances = []

    def __new__(cls, mu=0, sigma=1, nu=4):  # Singleton:
        if cls._instances:
            # Drop references whose target has been garbage-collected.
            cls._instances[:] = [ref for ref in cls._instances if ref()]
            # Reuse an existing instance with identical parameters, if any.
            for ref in cls._instances:
                obj = ref()
                if obj.mu == mu and obj.sigma == sigma and obj.nu == nu:
                    return obj
        # Skip Prior in the MRO deliberately when allocating the new object.
        base_new = super(Prior, cls).__new__
        if base_new is object.__new__:
            fresh = base_new(cls)
        else:
            fresh = base_new(cls, mu, sigma, nu)
        cls._instances.append(weakref.ref(fresh))
        return cls._instances[-1]()

    def __init__(self, mu, sigma, nu):
        # Store everything as floats so arithmetic in lnpdf_grad is stable.
        self.mu = float(mu)
        self.sigma = float(sigma)
        self.sigma2 = np.square(self.sigma)  # cached sigma**2 for the gradient
        self.nu = float(nu)

    def __str__(self):
        return "St({:.2g}, {:.2g}, {:.2g})".format(self.mu, self.sigma, self.nu)

    def lnpdf(self, x):
        """Log of the Student-t density at *x* (scipy parameterization:
        df=nu, loc=mu, scale=sigma)."""
        from scipy.stats import t
        return t.logpdf(x, self.nu, self.mu, self.sigma)

    def lnpdf_grad(self, x):
        """Derivative of the log-density w.r.t. *x*:
        d/dx log St(x) = -(nu+1)(x-mu) / (nu*sigma^2 + (x-mu)^2)."""
        diff = x - self.mu
        return -(self.nu + 1.) * diff / (self.nu * self.sigma2 + diff * diff)

    def rvs(self, n):
        """Draw *n* random samples from St(mu, sigma, nu)."""
        from scipy.stats import t
        return t.rvs(self.nu, loc=self.mu, scale=self.sigma, size=n)
|
||||||
|
|
|
||||||
|
|
@ -142,9 +142,9 @@ class LogisticBasisFuncKernel(BasisFuncKernel):
|
||||||
self.centers = np.atleast_2d(centers)
|
self.centers = np.atleast_2d(centers)
|
||||||
self.ARD_slope = ARD_slope
|
self.ARD_slope = ARD_slope
|
||||||
if self.ARD_slope:
|
if self.ARD_slope:
|
||||||
self.slope = Param('slope', slope * np.ones(self.centers.size), Logexp())
|
self.slope = Param('slope', slope * np.ones(self.centers.size))
|
||||||
else:
|
else:
|
||||||
self.slope = Param('slope', slope, Logexp())
|
self.slope = Param('slope', slope)
|
||||||
super(LogisticBasisFuncKernel, self).__init__(input_dim, variance, active_dims, ARD, name)
|
super(LogisticBasisFuncKernel, self).__init__(input_dim, variance, active_dims, ARD, name)
|
||||||
self.link_parameter(self.slope)
|
self.link_parameter(self.slope)
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -6,6 +6,29 @@ import numpy as np
|
||||||
import GPy
|
import GPy
|
||||||
|
|
||||||
class PriorTests(unittest.TestCase):
|
class PriorTests(unittest.TestCase):
|
||||||
|
def test_studentT(self):
    """StudentT prior: domain check, gradients, and the singleton pattern."""
    # Build a small noisy regression problem.
    xmin, xmax = 1, 2.5 * np.pi
    b, C, SNR = 1, 0, 0.1
    X = np.linspace(xmin, xmax, 500)
    y = b * X + C + 1 * np.sin(X)
    y += 0.05 * np.random.randn(len(X))
    X, y = X[:, None], y[:, None]
    studentT = GPy.priors.StudentT(1, 2, 4)

    m = GPy.models.SparseGPRegression(X, y)
    m.Z.set_prior(studentT)

    # A StudentT prior has real domain, so attaching it to the
    # non-negative rbf parameters must raise an AssertionError.
    self.assertRaises(AssertionError, m.rbf.set_prior, studentT)

    # Prior gradients must be consistent with the log-likelihood.
    self.assertTrue(m.checkgrad())

    # Singleton pattern: equal parameters share one instance,
    # different parameters do not.
    self.assertIs(studentT, GPy.priors.StudentT(1, 2, 4))
    self.assertIsNot(studentT, GPy.priors.StudentT(2, 2, 4))
|
||||||
|
|
||||||
def test_lognormal(self):
|
def test_lognormal(self):
|
||||||
xmin, xmax = 1, 2.5*np.pi
|
xmin, xmax = 1, 2.5*np.pi
|
||||||
b, C, SNR = 1, 0, 0.1
|
b, C, SNR = 1, 0, 0.1
|
||||||
|
|
@ -74,7 +97,7 @@ class PriorTests(unittest.TestCase):
|
||||||
# setting a Gaussian prior on non-negative parameters
|
# setting a Gaussian prior on non-negative parameters
|
||||||
# should raise an assertionerror.
|
# should raise an assertionerror.
|
||||||
#self.assertRaises(AssertionError, m.Z.set_prior, gaussian)
|
#self.assertRaises(AssertionError, m.Z.set_prior, gaussian)
|
||||||
|
self.assertTrue(m.checkgrad())
|
||||||
|
|
||||||
|
|
||||||
def test_fixed_domain_check(self):
|
def test_fixed_domain_check(self):
|
||||||
|
|
@ -107,8 +130,6 @@ class PriorTests(unittest.TestCase):
|
||||||
# should raise an assertionerror.
|
# should raise an assertionerror.
|
||||||
self.assertRaises(AssertionError, m.rbf.set_prior, gaussian)
|
self.assertRaises(AssertionError, m.rbf.set_prior, gaussian)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
    # Script entry point: run every test in this module.
    print("Running unit tests, please be (very) patient...")
    unittest.main()
|
||||||
|
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue