Mirror of https://github.com/SheffieldML/GPy.git, synced 2026-05-07 02:52:40 +02:00
Merge branch 'devel' of github.com:SheffieldML/GPy into devel
commit 71b435b2ab
71 changed files with 1949 additions and 733 deletions
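Every kernel hunk below makes the same substitution: the parameter-registration API add_parameter / add_parameters / remove_parameter is renamed to link_parameter / link_parameters / unlink_parameter. A minimal, hedged sketch of what a custom kernel's __init__ looks like after this change; the import paths and the MyKern class are assumptions for illustration, and only the Kern / Param / Logexp identifiers and the link_parameter call are taken from the hunks below.

    # Hedged sketch, not part of this commit. Import paths are assumed.
    import numpy as np
    from GPy.kern import Kern
    from GPy.core.parameterization import Param
    from GPy.core.parameterization.transformations import Logexp

    class MyKern(Kern):
        """Constant-covariance toy kernel showing the new registration call."""
        def __init__(self, input_dim, variance=1., active_dims=None, name='mykern'):
            super(MyKern, self).__init__(input_dim, active_dims, name)
            self.variance = Param('variance', variance, Logexp())
            # previously: self.add_parameter(self.variance)
            self.link_parameter(self.variance)

        def K(self, X, X2=None):
            if X2 is None:
                X2 = X
            return np.full((X.shape[0], X2.shape[0]), float(self.variance))

        def Kdiag(self, X):
            return np.full(X.shape[0], float(self.variance))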
@@ -17,7 +17,7 @@ class ODE_UY(Kern):
         self.lengthscale_Y = Param('lengthscale_Y', lengthscale_Y, Logexp())
         self.lengthscale_U = Param('lengthscale_U', lengthscale_Y, Logexp())
 
-        self.add_parameters(self.variance_Y, self.variance_U, self.lengthscale_Y, self.lengthscale_U)
+        self.link_parameters(self.variance_Y, self.variance_U, self.lengthscale_Y, self.lengthscale_U)
 
     def K(self, X, X2=None):
         # model : a * dy/dt + b * y = U

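The comment kept in the ODE_UY hunk states the generative model the kernel encodes. Restated (a hedged reading, using only the symbols from that comment):

    a * dy/dt + b * y(t) = U(t)

i.e. the output process y solves a first-order linear ODE driven by a latent process U, which is why the kernel carries separate variance and lengthscale parameters for Y and for U.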
@@ -18,7 +18,7 @@ class Add(CombinationKernel):
             if isinstance(kern, Add):
                 del subkerns[i]
                 for part in kern.parts[::-1]:
-                    kern.remove_parameter(part)
+                    kern.unlink_parameter(part)
                     subkerns.insert(i, part)
 
         super(Add, self).__init__(subkerns, name)

@@ -171,10 +171,10 @@ class Add(CombinationKernel):
         if isinstance(other, Add):
             other_params = other.parameters[:]
             for p in other_params:
-                other.remove_parameter(p)
-            self.add_parameters(*other_params)
+                other.unlink_parameter(p)
+            self.link_parameters(*other_params)
         else:
-            self.add_parameter(other)
+            self.link_parameter(other)
         self.input_dim, self.active_dims = self.get_input_dim_active_dims(self.parts)
         return self
 
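Both Add hunks switch the re-attachment of sub-kernel parameters from remove_parameter / add_parameters to unlink_parameter / link_parameters when kernels are summed or flattened. A hedged usage sketch of the behaviour this supports; the kernel choices and input dimension are illustrative only:

    import GPy

    k = GPy.kern.RBF(1) + GPy.kern.Bias(1)   # '+' builds an Add kernel
    # the sub-kernels' parameters are unlinked from their owners and
    # re-linked to the Add object, giving one joint parameter hierarchy
    print(k)
    print(k.parameter_names())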
@@ -22,7 +22,7 @@ class Brownian(Kern):
         super(Brownian, self).__init__(input_dim, active_dims, name)
 
         self.variance = Param('variance', variance, Logexp())
-        self.add_parameters(self.variance)
+        self.link_parameters(self.variance)
 
     def K(self,X,X2=None):
         if X2 is None:

@@ -50,7 +50,7 @@ class Coregionalize(Kern):
         else:
             assert kappa.shape==(self.output_dim, )
         self.kappa = Param('kappa', kappa, Logexp())
-        self.add_parameters(self.W, self.kappa)
+        self.link_parameters(self.W, self.kappa)
 
     def parameters_changed(self):
         self.B = np.dot(self.W, self.W.T) + np.diag(self.kappa)

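The Coregionalize hunk keeps the parameters_changed hook that rebuilds the coregionalization matrix B = W W^T + diag(kappa) whenever W or kappa move. A hedged numeric sketch of exactly that computation, with made-up shapes:

    import numpy as np

    output_dim, rank = 3, 2
    W = np.random.randn(output_dim, rank)    # low-rank mixing weights
    kappa = np.ones(output_dim)              # per-output variance, kept positive by Logexp
    B = np.dot(W, W.T) + np.diag(kappa)      # what parameters_changed recomputes
    assert B.shape == (output_dim, output_dim)
    assert np.allclose(B, B.T)               # symmetric (and positive definite for kappa > 0)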
@@ -221,7 +221,7 @@ class CombinationKernel(Kern):
         # initialize the kernel with the full input_dim
         super(CombinationKernel, self).__init__(input_dim, active_dims, name)
         self.extra_dims = extra_dims
-        self.add_parameters(*kernels)
+        self.link_parameters(*kernels)
 
     @property
     def parts(self):

@@ -49,7 +49,7 @@ class Linear(Kern):
             variances = np.ones(self.input_dim)
 
         self.variances = Param('variances', variances, Logexp())
-        self.add_parameter(self.variances)
+        self.link_parameter(self.variances)
         self.psicomp = PSICOMP_Linear()
 
     @Cache_this(limit=2)

@@ -103,7 +103,7 @@ class Linear(Kern):
     def gradients_X_diag(self, dL_dKdiag, X):
         return 2.*self.variances*dL_dKdiag[:,None]*X
 
-    def input_sensitivity(self):
+    def input_sensitivity(self, summarize=True):
         return np.ones(self.input_dim) * self.variances
 
     #---------------------------------------#

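The second Linear hunk widens input_sensitivity to accept a summarize keyword while still returning one relevance value per input dimension. A hedged sketch of how that is typically read off a fitted model; the regression setup is illustrative and not part of this commit:

    import numpy as np
    import GPy

    X = np.random.randn(50, 3)
    y = X[:, :1] + 0.1 * np.random.randn(50, 1)
    m = GPy.models.GPRegression(X, y, kernel=GPy.kern.Linear(3, ARD=True))
    m.optimize()
    # one value per input dimension; larger means that dimension matters more
    print(m.kern.input_sensitivity(summarize=True))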
@@ -144,7 +144,7 @@ class LinearFull(Kern):
 
         self.W = Param('W', W)
         self.kappa = Param('kappa', kappa, Logexp())
-        self.add_parameters(self.W, self.kappa)
+        self.link_parameters(self.W, self.kappa)
 
     def K(self, X, X2=None):
         P = np.dot(self.W, self.W.T) + np.diag(self.kappa)

@@ -36,7 +36,7 @@ class MLP(Kern):
         self.variance = Param('variance', variance, Logexp())
         self.weight_variance = Param('weight_variance', weight_variance, Logexp())
         self.bias_variance = Param('bias_variance', bias_variance, Logexp())
-        self.add_parameters(self.variance, self.weight_variance, self.bias_variance)
+        self.link_parameters(self.variance, self.weight_variance, self.bias_variance)
 
 
     def K(self, X, X2=None):

@@ -33,7 +33,7 @@ class Periodic(Kern):
         self.variance = Param('variance', np.float64(variance), Logexp())
         self.lengthscale = Param('lengthscale', np.float64(lengthscale), Logexp())
         self.period = Param('period', np.float64(period), Logexp())
-        self.add_parameters(self.variance, self.lengthscale, self.period)
+        self.link_parameters(self.variance, self.lengthscale, self.period)
 
     def _cos(self, alpha, omega, phase):
         def f(x):

@@ -14,7 +14,7 @@ class Poly(Kern):
     def __init__(self, input_dim, variance=1., order=3., active_dims=None, name='poly'):
         super(Poly, self).__init__(input_dim, active_dims, name)
         self.variance = Param('variance', variance, Logexp())
-        self.add_parameter(self.variance)
+        self.link_parameter(self.variance)
         self.order=order
 
     def K(self, X, X2=None):

@@ -29,6 +29,9 @@ class PSICOMP_RBF(Pickleable):
         else:
             raise ValueError, "unknown distriubtion received for psi-statistics"
 
+    def _setup_observers(self):
+        pass
+
 class PSICOMP_Linear(Pickleable):
 
     @Cache_this(limit=2, ignore_args=(0,))

@@ -11,7 +11,7 @@ class Static(Kern):
     def __init__(self, input_dim, variance, active_dims, name):
         super(Static, self).__init__(input_dim, active_dims, name)
         self.variance = Param('variance', variance, Logexp())
-        self.add_parameters(self.variance)
+        self.link_parameters(self.variance)
 
     def Kdiag(self, X):
         ret = np.empty((X.shape[0],), dtype=np.float64)

@@ -61,7 +61,7 @@ class Stationary(Kern):
         self.lengthscale = Param('lengthscale', lengthscale, Logexp())
         self.variance = Param('variance', variance, Logexp())
         assert self.variance.size==1
-        self.add_parameters(self.variance, self.lengthscale)
+        self.link_parameters(self.variance, self.lengthscale)
 
     def K_of_r(self, r):
         raise NotImplementedError, "implement the covariance function as a fn of r to use this class"

@@ -343,7 +343,7 @@ class RatQuad(Stationary):
     def __init__(self, input_dim, variance=1., lengthscale=None, power=2., ARD=False, active_dims=None, name='RatQuad'):
         super(RatQuad, self).__init__(input_dim, variance, lengthscale, ARD, active_dims, name)
         self.power = Param('power', power, Logexp())
-        self.add_parameters(self.power)
+        self.link_parameters(self.power)
 
     def K_of_r(self, r):
         r2 = np.power(r, 2.)

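The Stationary hunk spells out the subclassing contract (the NotImplementedError asks subclasses to define the covariance as a function of the scaled distance r), and the RatQuad hunk directly below it is one such subclass. A hedged sketch of a minimal subclass; the import path and the dK_dr hook are assumptions based on how RatQuad and the other stationary kernels are structured:

    import numpy as np
    from GPy.kern._src.stationary import Stationary   # assumed path for this era of GPy

    class MyExponential(Stationary):
        """Minimal stationary kernel: k(r) = variance * exp(-r)."""
        def __init__(self, input_dim, variance=1., lengthscale=None,
                     ARD=False, active_dims=None, name='my_exponential'):
            super(MyExponential, self).__init__(input_dim, variance, lengthscale,
                                                ARD, active_dims, name)

        def K_of_r(self, r):
            return self.variance * np.exp(-r)

        def dK_dr(self, r):
            return -self.K_of_r(r)

GPy already ships an exponential kernel of its own; the class above only illustrates the two hooks a subclass fills in.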