mirror of
https://github.com/SheffieldML/GPy.git
synced 2026-05-10 20:42:39 +02:00
merged variational posterior changes
commit d29fa56af2
43 changed files with 1424 additions and 1936 deletions
@@ -26,41 +26,48 @@ class Kern(Parameterized):
         raise NotImplementedError

     def Kdiag(self, Xa):
         raise NotImplementedError

-    def psi0(self,Z,posterior_variational):
+    def psi0(self, Z, variational_posterior):
         raise NotImplementedError

-    def psi1(self,Z,posterior_variational):
+    def psi1(self, Z, variational_posterior):
         raise NotImplementedError

-    def psi2(self,Z,posterior_variational):
+    def psi2(self, Z, variational_posterior):
         raise NotImplementedError

     def gradients_X(self, dL_dK, X, X2):
         raise NotImplementedError

     def gradients_X_diag(self, dL_dK, X):
         raise NotImplementedError

-    def update_gradients_full(self, dL_dK, X):
+    def update_gradients_full(self, dL_dK, X, X2):
         """Set the gradients of all parameters when doing full (N) inference."""
         raise NotImplementedError

     def update_gradients_sparse(self, dL_dKmm, dL_dKnm, dL_dKdiag, X, Z):
         target = np.zeros(self.size)
         self.update_gradients_diag(dL_dKdiag, X)
         self._collect_gradient(target)
         self.update_gradients_full(dL_dKnm, X, Z)
         self._collect_gradient(target)
         self.update_gradients_full(dL_dKmm, Z, None)
         self._collect_gradient(target)
         self._set_gradient(target)
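A note on update_gradients_sparse above: it accumulates three chain-rule contributions (the Kdiag term, the Knm cross-covariance term, and the Kmm inducing-point term) into one flat gradient vector. A minimal self-contained sketch of the same pattern, using a toy linear kernel and assumed semantics for the _collect_gradient/_set_gradient internals, which this diff does not show:

    import numpy as np

    class TinyKern(object):
        """Toy kernel k(x, x') = variance * x.x' -- illustration only, not GPy."""
        def __init__(self, variance=2.0):
            self.size = 1                      # one parameter: the variance
            self.variance = variance
            self._grad = np.zeros(self.size)

        def update_gradients_full(self, dL_dK, X, X2=None):
            X2 = X if X2 is None else X2
            self._grad[0] = (dL_dK * (X @ X2.T)).sum()   # dK/dvariance = X X2^T

        def update_gradients_diag(self, dL_dKdiag, X):
            self._grad[0] = (dL_dKdiag * (X * X).sum(-1)).sum()

        def _collect_gradient(self, target):
            target += self._grad               # accumulate into the running total

        def update_gradients_sparse(self, dL_dKmm, dL_dKnm, dL_dKdiag, X, Z):
            target = np.zeros(self.size)
            self.update_gradients_diag(dL_dKdiag, X)
            self._collect_gradient(target)
            self.update_gradients_full(dL_dKnm, X, Z)
            self._collect_gradient(target)
            self.update_gradients_full(dL_dKmm, Z, None)
            self._collect_gradient(target)
            return target                      # GPy would store this via _set_gradient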
-    def update_gradients_variational(self, dL_dKmm, dL_dpsi0, dL_dpsi1, dL_dpsi2, Z, posterior_variational):
-        """Set the gradients of all parameters when doing variational (M) inference with uncertain inputs."""
+    def update_gradients_expectations(self, dL_dpsi0, dL_dpsi1, dL_dpsi2, Z, variational_posterior):
+        """
+        Set the gradients of all parameters when doing inference with
+        uncertain inputs, using expectations of the kernel.
+
+        The essential maths is
+
+        dL_d{theta_i} = dL_dpsi0 * dpsi0_d{theta_i} +
+                        dL_dpsi1 * dpsi1_d{theta_i} +
+                        dL_dpsi2 * dpsi2_d{theta_i}
+        """
         raise NotImplementedError
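The chain rule in this docstring is easy to verify numerically. A self-contained sketch with toy psi functions of a single parameter theta (the arrays and functional forms are arbitrary stand-ins, not GPy's real psi statistics):

    import numpy as np

    rng = np.random.default_rng(0)
    N, M = 5, 3
    dL_dpsi0 = rng.standard_normal(N)
    dL_dpsi1 = rng.standard_normal((N, M))
    dL_dpsi2 = rng.standard_normal((M, M))
    A0 = rng.standard_normal(N)
    A1 = rng.standard_normal((N, M))
    A2 = rng.standard_normal((M, M))

    def objective(theta):
        # toy expectations: psi0 linear, psi1 quadratic, psi2 sinusoidal in theta
        psi0, psi1, psi2 = A0 * theta, A1 * theta ** 2, A2 * np.sin(theta)
        return (dL_dpsi0 * psi0).sum() + (dL_dpsi1 * psi1).sum() + (dL_dpsi2 * psi2).sum()

    theta = 0.7
    # the docstring's chain rule: sum the three psi contributions
    grad = ((dL_dpsi0 * A0).sum()
            + (dL_dpsi1 * 2.0 * theta * A1).sum()
            + (dL_dpsi2 * np.cos(theta) * A2).sum())
    eps = 1e-6
    fd = (objective(theta + eps) - objective(theta - eps)) / (2 * eps)
    assert np.allclose(grad, fd, rtol=1e-5)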
     def gradients_Z_sparse(self, dL_dKmm, dL_dKnm, dL_dKdiag, X, Z):
         grad = self.gradients_X(dL_dKmm, Z)
         grad += self.gradients_X(dL_dKnm.T, Z, X)
         return grad
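Why gradients_Z_sparse transposes dL_dKnm: Z enters the cross-covariance Knm through the second argument of K, so the (N, M) sensitivity matrix has to be flipped to (M, N) before gradients_X can be reused with Z in the first slot. A self-contained finite-difference check using a toy linear kernel K(A, B) = A B^T, chosen only because its gradients_X has an obvious closed form (this is not a GPy kernel):

    import numpy as np

    def gradients_X(dL_dK, X, X2=None):
        # d/dX of sum(dL_dK * (X X2^T)); with X2=None, X appears on both sides
        if X2 is None:
            return (dL_dK + dL_dK.T) @ X
        return dL_dK @ X2

    rng = np.random.default_rng(1)
    N, M, Q = 6, 4, 2
    X = rng.standard_normal((N, Q))
    Z = rng.standard_normal((M, Q))
    dL_dKmm = rng.standard_normal((M, M))
    dL_dKnm = rng.standard_normal((N, M))

    def objective(Z):
        return (dL_dKmm * (Z @ Z.T)).sum() + (dL_dKnm * (X @ Z.T)).sum()

    # the accumulation from the diff: symmetric Kmm term plus transposed Knm term
    grad = gradients_X(dL_dKmm, Z) + gradients_X(dL_dKnm.T, Z, X)

    eps = 1e-6
    fd = np.zeros_like(Z)
    for i in range(M):
        for j in range(Q):
            Zp, Zm = Z.copy(), Z.copy()
            Zp[i, j] += eps
            Zm[i, j] -= eps
            fd[i, j] = (objective(Zp) - objective(Zm)) / (2 * eps)
    assert np.allclose(grad, fd, rtol=1e-5, atol=1e-8)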
-    def gradients_Z_variational(self, dL_dKmm, dL_dpsi0, dL_dpsi1, dL_dpsi2, Z, posterior_variational):
+    def gradients_Z_expectations(self, dL_dpsi1, dL_dpsi2, Z, variational_posterior):
         """
         Returns the derivative of the objective wrt Z, using the chain rule
         through the expectation variables.
         """
         raise NotImplementedError

-    def gradients_q_variational(self, dL_dKmm, dL_dpsi0, dL_dpsi1, dL_dpsi2, Z, posterior_variational):
+    def gradients_qX_expectations(self, dL_dpsi0, dL_dpsi1, dL_dpsi2, Z, variational_posterior):
         """
         Compute the gradients wrt the parameters of the variational
         distribution q(X), chain-ruling via the expectations of the kernel
         """
         raise NotImplementedError

     def plot_ARD(self, *args, **kw):
         """
         See :class:`~GPy.plotting.matplot_dep.kernel_plots`
@@ -69,13 +76,13 @@ class Kern(Parameterized):
         assert "matplotlib" in sys.modules, "matplotlib package has not been imported."
         from ...plotting.matplot_dep import kernel_plots
         return kernel_plots.plot_ARD(self,*args,**kw)

     def input_sensitivity(self):
         """
         Returns the sensitivity for each dimension of this kernel.
         """
         return np.zeros(self.input_dim)

     def __add__(self, other):
         """ Overloading of the '+' operator. For more control, see self.add """
         return self.add(other)
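A usage sketch for the '+' overload: the sum of two kernels is itself a kernel, built through self.add. The kernel class names here (RBF, Bias) are illustrative choices from GPy.kern of this era:

    import GPy

    k1 = GPy.kern.RBF(1)    # one-dimensional RBF kernel
    k2 = GPy.kern.Bias(1)   # Bias kernel, chosen for illustration
    k = k1 + k2             # invokes Kern.__add__, i.e. k1.add(k2)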
@@ -114,7 +121,8 @@ class Kern(Parameterized):

     def prod(self, other, tensor=False):
         """
-        Multiply two kernels (either on the same space, or on the tensor product of the input space).
+        Multiply two kernels (either on the same space, or on the tensor
+        product of the input space).

         :param other: the other kernel to be multiplied
         :type other: GPy.kern
@@ -125,209 +133,3 @@ class Kern(Parameterized):
         assert isinstance(other, Kern), "only kernels can be added to kernels..."
         from prod import Prod
         return Prod(self, other, tensor)
-
-
-from GPy.core.model import Model
-
-class Kern_check_model(Model):
-    """This is a dummy model class used as a base class for checking that the gradients of a given kernel are implemented correctly. It enables checkgradient() to be called independently on a kernel."""
-    def __init__(self, kernel=None, dL_dK=None, X=None, X2=None):
-        from GPy.kern import RBF
-        Model.__init__(self, 'kernel_test_model')
-        num_samples = 20
-        num_samples2 = 10
-        if kernel==None:
-            kernel = RBF(1)
-        if X==None:
-            X = np.random.randn(num_samples, kernel.input_dim)
-        if dL_dK==None:
-            if X2==None:
-                dL_dK = np.ones((X.shape[0], X.shape[0]))
-            else:
-                dL_dK = np.ones((X.shape[0], X2.shape[0]))
-
-        self.kernel=kernel
-        self.add_parameter(kernel)
-        self.X = X
-        self.X2 = X2
-        self.dL_dK = dL_dK
-
-    def is_positive_definite(self):
-        v = np.linalg.eig(self.kernel.K(self.X))[0]
-        if any(v<-10*sys.float_info.epsilon):
-            return False
-        else:
-            return True
-
-    def log_likelihood(self):
-        return (self.dL_dK*self.kernel.K(self.X, self.X2)).sum()
-
-    def _log_likelihood_gradients(self):
-        raise NotImplementedError, "This needs to be implemented to use the kern_check_model class."
-
-class Kern_check_dK_dtheta(Kern_check_model):
-    """This class allows gradient checks for the gradient of a kernel with respect to parameters. """
-    def __init__(self, kernel=None, dL_dK=None, X=None, X2=None):
-        Kern_check_model.__init__(self,kernel=kernel,dL_dK=dL_dK, X=X, X2=X2)
-
-    def _log_likelihood_gradients(self):
-        return self.kernel._param_grad_helper(self.dL_dK, self.X, self.X2)
-
-class Kern_check_dKdiag_dtheta(Kern_check_model):
-    """This class allows gradient checks of the gradient of the diagonal of a kernel with respect to the parameters."""
-    def __init__(self, kernel=None, dL_dK=None, X=None):
-        Kern_check_model.__init__(self,kernel=kernel,dL_dK=dL_dK, X=X, X2=None)
-        if dL_dK==None:
-            self.dL_dK = np.ones((self.X.shape[0]))
-
-    def parameters_changed(self):
-        self.kernel.update_gradients_full(self.dL_dK, self.X)
-
-    def log_likelihood(self):
-        return (self.dL_dK*self.kernel.Kdiag(self.X)).sum()
-
-    def _log_likelihood_gradients(self):
-        return self.kernel.dKdiag_dtheta(self.dL_dK, self.X)
-
-class Kern_check_dK_dX(Kern_check_model):
-    """This class allows gradient checks for the gradient of a kernel with respect to X. """
-    def __init__(self, kernel=None, dL_dK=None, X=None, X2=None):
-        Kern_check_model.__init__(self,kernel=kernel,dL_dK=dL_dK, X=X, X2=X2)
-        self.remove_parameter(kernel)
-        self.X = Param('X', self.X)
-        self.add_parameter(self.X)
-
-    def _log_likelihood_gradients(self):
-        return self.kernel.gradients_X(self.dL_dK, self.X, self.X2).flatten()
-
-class Kern_check_dKdiag_dX(Kern_check_dK_dX):
-    """This class allows gradient checks for the gradient of a kernel diagonal with respect to X. """
-    def __init__(self, kernel=None, dL_dK=None, X=None, X2=None):
-        Kern_check_dK_dX.__init__(self,kernel=kernel,dL_dK=dL_dK, X=X, X2=None)
-        if dL_dK==None:
-            self.dL_dK = np.ones((self.X.shape[0]))
-
-    def log_likelihood(self):
-        return (self.dL_dK*self.kernel.Kdiag(self.X)).sum()
-
-    def _log_likelihood_gradients(self):
-        return self.kernel.dKdiag_dX(self.dL_dK, self.X).flatten()
-
-def kern_test(kern, X=None, X2=None, output_ind=None, verbose=False):
-    """
-    This function runs on kernels to check the correctness of their
-    implementation. It checks that the covariance function is positive definite
-    for a randomly generated data set.
-
-    :param kern: the kernel to be tested.
-    :type kern: GPy.kern.Kernpart
-    :param X: X input values to test the covariance function.
-    :type X: ndarray
-    :param X2: X2 input values to test the covariance function.
-    :type X2: ndarray
-
-    """
-    pass_checks = True
-    if X==None:
-        X = np.random.randn(10, kern.input_dim)
-        if output_ind is not None:
-            X[:, output_ind] = np.random.randint(kern.output_dim, X.shape[0])
-    if X2==None:
-        X2 = np.random.randn(20, kern.input_dim)
-        if output_ind is not None:
-            X2[:, output_ind] = np.random.randint(kern.output_dim, X2.shape[0])
-
-    if verbose:
-        print("Checking covariance function is positive definite.")
-    result = Kern_check_model(kern, X=X).is_positive_definite()
-    if result and verbose:
-        print("Check passed.")
-    if not result:
-        print("Positive definite check failed for " + kern.name + " covariance function.")
-        pass_checks = False
-        return False
-
-    if verbose:
-        print("Checking gradients of K(X, X) wrt theta.")
-    result = Kern_check_dK_dtheta(kern, X=X, X2=None).checkgrad(verbose=verbose)
-    if result and verbose:
-        print("Check passed.")
-    if not result:
-        print("Gradient of K(X, X) wrt theta failed for " + kern.name + " covariance function. Gradient values as follows:")
-        Kern_check_dK_dtheta(kern, X=X, X2=None).checkgrad(verbose=True)
-        pass_checks = False
-        return False
-
-    if verbose:
-        print("Checking gradients of K(X, X2) wrt theta.")
-    result = Kern_check_dK_dtheta(kern, X=X, X2=X2).checkgrad(verbose=verbose)
-    if result and verbose:
-        print("Check passed.")
-    if not result:
-        print("Gradient of K(X, X) wrt theta failed for " + kern.name + " covariance function. Gradient values as follows:")
-        Kern_check_dK_dtheta(kern, X=X, X2=X2).checkgrad(verbose=True)
-        pass_checks = False
-        return False
-
-    if verbose:
-        print("Checking gradients of Kdiag(X) wrt theta.")
-    result = Kern_check_dKdiag_dtheta(kern, X=X).checkgrad(verbose=verbose)
-    if result and verbose:
-        print("Check passed.")
-    if not result:
-        print("Gradient of Kdiag(X) wrt theta failed for " + kern.name + " covariance function. Gradient values as follows:")
-        Kern_check_dKdiag_dtheta(kern, X=X).checkgrad(verbose=True)
-        pass_checks = False
-        return False
-
-    if verbose:
-        print("Checking gradients of K(X, X) wrt X.")
-    try:
-        result = Kern_check_dK_dX(kern, X=X, X2=None).checkgrad(verbose=verbose)
-    except NotImplementedError:
-        result=True
-        if verbose:
-            print("gradients_X not implemented for " + kern.name)
-    if result and verbose:
-        print("Check passed.")
-    if not result:
-        print("Gradient of K(X, X) wrt X failed for " + kern.name + " covariance function. Gradient values as follows:")
-        Kern_check_dK_dX(kern, X=X, X2=None).checkgrad(verbose=True)
-        pass_checks = False
-        return False
-
-    if verbose:
-        print("Checking gradients of K(X, X2) wrt X.")
-    try:
-        result = Kern_check_dK_dX(kern, X=X, X2=X2).checkgrad(verbose=verbose)
-    except NotImplementedError:
-        result=True
-        if verbose:
-            print("gradients_X not implemented for " + kern.name)
-    if result and verbose:
-        print("Check passed.")
-    if not result:
-        print("Gradient of K(X, X) wrt X failed for " + kern.name + " covariance function. Gradient values as follows:")
-        Kern_check_dK_dX(kern, X=X, X2=X2).checkgrad(verbose=True)
-        pass_checks = False
-        return False
-
-    if verbose:
-        print("Checking gradients of Kdiag(X) wrt X.")
-    try:
-        result = Kern_check_dKdiag_dX(kern, X=X).checkgrad(verbose=verbose)
-    except NotImplementedError:
-        result=True
-        if verbose:
-            print("gradients_X not implemented for " + kern.name)
-    if result and verbose:
-        print("Check passed.")
-    if not result:
-        print("Gradient of Kdiag(X) wrt X failed for " + kern.name + " covariance function. Gradient values as follows:")
-        Kern_check_dKdiag_dX(kern, X=X).checkgrad(verbose=True)
-        pass_checks = False
-        return False
-
-    return pass_checks
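The roughly two hundred deleted lines above were this module's old self-test harness: a positive-definiteness check plus checkgrad-based gradient checks, removed from the module in this commit. For reference, the core positive-definiteness test reduces to an eigenvalue bound; a standalone sketch of the same idea, assuming only numpy:

    import numpy as np

    def is_positive_semidefinite(K, tol=None):
        """Check that a symmetric kernel matrix has no significantly negative eigenvalues."""
        v = np.linalg.eigvalsh(K)              # eigvalsh: K is symmetric by construction
        tol = 10 * np.finfo(float).eps if tol is None else tol
        return bool(v.min() >= -tol * max(1.0, abs(v.max())))

    # Example: the Gram matrix of a valid kernel passes.
    X = np.random.randn(20, 1)
    K = np.exp(-0.5 * (X - X.T) ** 2)          # RBF Gram matrix, unit hyperparameters
    assert is_positive_semidefinite(K)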