diff --git a/GPy/inference/latent_function_inference/dtc.py b/GPy/inference/latent_function_inference/dtc.py index 5590a079..a12726e2 100644 --- a/GPy/inference/latent_function_inference/dtc.py +++ b/GPy/inference/latent_function_inference/dtc.py @@ -20,7 +20,7 @@ class DTC(LatentFunctionInference): def __init__(self): self.const_jitter = 1e-6 - def inference(self, kern, X, Z, likelihood, Y, Y_metadata=None): + def inference(self, kern, X, Z, likelihood, Y, mean_function=None, Y_metadata=None): + assert mean_function is None, "inference with a mean function not implemented" - assert X_variance is None, "cannot use X_variance with DTC. Try varDTC." num_inducing, _ = Z.shape @@ -88,7 +88,7 @@ class vDTC(object): def __init__(self): self.const_jitter = 1e-6 - def inference(self, kern, X, X_variance, Z, likelihood, Y, Y_metadata): + def inference(self, kern, X, Z, likelihood, Y, mean_function=None, Y_metadata=None): + assert mean_function is None, "inference with a mean function not implemented" - assert X_variance is None, "cannot use X_variance with DTC. Try varDTC." num_inducing, _ = Z.shape diff --git a/GPy/inference/latent_function_inference/exact_gaussian_inference.py b/GPy/inference/latent_function_inference/exact_gaussian_inference.py index 1312d36a..312855f7 100644 --- a/GPy/inference/latent_function_inference/exact_gaussian_inference.py +++ b/GPy/inference/latent_function_inference/exact_gaussian_inference.py @@ -36,10 +36,12 @@ class ExactGaussianInference(LatentFunctionInference): #print "WARNING: N>D of Y, we need caching of L, such that L*L^T = Y, returning Y still!" 
return Y - def inference(self, kern, X, likelihood, Y, Y_metadata=None): + def inference(self, kern, X, likelihood, Y, mean_function=None, Y_metadata=None): """ Returns a Posterior class containing essential quantities of the posterior """ + assert mean_function is None, "inference with a mean function not implemented" + YYT_factor = self.get_YYTfactor(Y) K = kern.K(X) diff --git a/GPy/inference/latent_function_inference/expectation_propagation.py b/GPy/inference/latent_function_inference/expectation_propagation.py index 26144974..ba920569 100644 --- a/GPy/inference/latent_function_inference/expectation_propagation.py +++ b/GPy/inference/latent_function_inference/expectation_propagation.py @@ -33,7 +33,8 @@ class EP(LatentFunctionInference): # TODO: update approximation in the end as well? Maybe even with a switch? pass - def inference(self, kern, X, likelihood, Y, Y_metadata=None, Z=None): + def inference(self, kern, X, likelihood, Y, mean_function=None, Y_metadata=None, Z=None): + assert mean_function is None, "inference with a mean function not implemented" num_data, output_dim = Y.shape assert output_dim ==1, "ep in 1D only (for now!)" diff --git a/GPy/inference/latent_function_inference/expectation_propagation_dtc.py b/GPy/inference/latent_function_inference/expectation_propagation_dtc.py index 35b1b7dc..466cbbb2 100644 --- a/GPy/inference/latent_function_inference/expectation_propagation_dtc.py +++ b/GPy/inference/latent_function_inference/expectation_propagation_dtc.py @@ -64,7 +64,8 @@ class EPDTC(LatentFunctionInference): self.old_mutilde, self.old_vtilde = None, None self._ep_approximation = None - def inference(self, kern, X, Z, likelihood, Y, Y_metadata=None): + def inference(self, kern, X, Z, likelihood, Y, mean_function=None, Y_metadata=None): + assert mean_function is None, "inference with a mean function not implemented" num_data, output_dim = Y.shape assert output_dim ==1, "ep in 1D only (for now!)" diff --git 
a/GPy/inference/latent_function_inference/fitc.py b/GPy/inference/latent_function_inference/fitc.py index a184c6c4..f99b35ff 100644 --- a/GPy/inference/latent_function_inference/fitc.py +++ b/GPy/inference/latent_function_inference/fitc.py @@ -18,7 +18,8 @@ class FITC(LatentFunctionInference): """ const_jitter = 1e-6 - def inference(self, kern, X, Z, likelihood, Y, Y_metadata=None): + def inference(self, kern, X, Z, likelihood, Y, mean_function=None, Y_metadata=None): + assert mean_function is None, "inference with a mean function not implemented" num_inducing, _ = Z.shape num_data, output_dim = Y.shape diff --git a/GPy/inference/latent_function_inference/laplace.py b/GPy/inference/latent_function_inference/laplace.py index 05711b0b..4e6ece11 100644 --- a/GPy/inference/latent_function_inference/laplace.py +++ b/GPy/inference/latent_function_inference/laplace.py @@ -39,10 +39,11 @@ class Laplace(LatentFunctionInference): self.first_run = True self._previous_Ki_fhat = None - def inference(self, kern, X, likelihood, Y, Y_metadata=None): + def inference(self, kern, X, likelihood, Y, mean_function=None, Y_metadata=None): """ Returns a Posterior class containing essential quantities of the posterior """ + assert mean_function is None, "inference with a mean function not implemented" # Compute K K = kern.K(X) diff --git a/GPy/inference/latent_function_inference/svgp.py b/GPy/inference/latent_function_inference/svgp.py index 1974991b..23d36a14 100644 --- a/GPy/inference/latent_function_inference/svgp.py +++ b/GPy/inference/latent_function_inference/svgp.py @@ -6,7 +6,8 @@ from posterior import Posterior class SVGP(LatentFunctionInference): - def inference(self, q_u_mean, q_u_chol, kern, X, Z, likelihood, Y, Y_metadata=None, KL_scale=1.0, batch_scale=1.0): + def inference(self, q_u_mean, q_u_chol, kern, X, Z, likelihood, Y, mean_function=None, Y_metadata=None, KL_scale=1.0, batch_scale=1.0): + assert mean_function is None, "inference with a mean function not implemented" 
num_inducing = Z.shape[0] num_data, num_outputs = Y.shape diff --git a/GPy/mappings/linear.py b/GPy/mappings/linear.py index e172b4e2..6fc91944 100644 --- a/GPy/mappings/linear.py +++ b/GPy/mappings/linear.py @@ -33,7 +33,7 @@ class Linear(Mapping): return np.dot(X, self.A) def update_gradients(self, dL_dF, X): - self.A.gradient = np.dot( X.T dL_dF) + self.A.gradient = np.dot( X.T, dL_dF) def gradients_X(self, dL_dF, X): return np.dot(dL_dF, self.A.T)