Added constant jitter to Kmm; deleted some white kernels in models and examples

This commit is contained in:
Max Zwiessele 2013-08-02 16:36:51 +01:00
parent 1cc8f95717
commit 5570e82943
5 changed files with 165 additions and 161 deletions

View file

@ -44,7 +44,7 @@ class BayesianGPLVM(SparseGP, GPLVM):
assert Z.shape[1] == X.shape[1]
if kernel is None:
kernel = kern.rbf(input_dim) + kern.white(input_dim)
kernel = kern.rbf(input_dim) # + kern.white(input_dim)
SparseGP.__init__(self, X, likelihood, kernel, Z=Z, X_variance=X_variance, **kwargs)
self.ensure_default_constraints()
@ -175,7 +175,7 @@ class BayesianGPLVM(SparseGP, GPLVM):
X = np.zeros((resolution ** 2, self.input_dim))
indices = np.r_[:X.shape[0]]
if labels is None:
labels = range(self.input_dim)
labels = range(self.output_dim)
def plot_function(x):
X[:, significant_dims] = x

View file

@ -29,7 +29,7 @@ class SparseGPRegression(SparseGP):
def __init__(self, X, Y, kernel=None, normalize_X=False, normalize_Y=False, Z=None, num_inducing=10, X_variance=None):
# kern defaults to rbf (plus white for stability)
if kernel is None:
kernel = kern.rbf(X.shape[1]) + kern.white(X.shape[1], 1e-3)
kernel = kern.rbf(X.shape[1]) # + kern.white(X.shape[1], 1e-3)
# Z defaults to a subset of the data
if Z is None: