[plotting&kern] bugfixes in plotting and kernel size

This commit is contained in:
Max Zwiessele 2016-01-22 11:26:29 +00:00
parent 1bda209469
commit 7b5422b694
5 changed files with 21 additions and 17 deletions

View file

@@ -365,13 +365,14 @@ class GP(Model):
mean_jac[:,:,i] = kern.gradients_X(self.posterior.woodbury_vector[:,i:i+1].T, Xnew, self._predictive_variable) mean_jac[:,:,i] = kern.gradients_X(self.posterior.woodbury_vector[:,i:i+1].T, Xnew, self._predictive_variable)
dK_dXnew_full = np.empty((self._predictive_variable.shape[0], Xnew.shape[0], Xnew.shape[1])) dK_dXnew_full = np.empty((self._predictive_variable.shape[0], Xnew.shape[0], Xnew.shape[1]))
one = np.ones((1,1))
for i in range(self._predictive_variable.shape[0]): for i in range(self._predictive_variable.shape[0]):
dK_dXnew_full[i] = kern.gradients_X([[1.]], Xnew, self._predictive_variable[[i]]) dK_dXnew_full[i] = kern.gradients_X(one, Xnew, self._predictive_variable[[i]])
if full_cov: if full_cov:
dK2_dXdX = kern.gradients_XX([[1.]], Xnew) dK2_dXdX = kern.gradients_XX(one, Xnew)
else: else:
dK2_dXdX = kern.gradients_XX_diag([[1.]], Xnew) dK2_dXdX = kern.gradients_XX_diag(one, Xnew)
def compute_cov_inner(wi): def compute_cov_inner(wi):
if full_cov: if full_cov:

View file

@@ -61,12 +61,12 @@ class Kern(Parameterized):
self.psicomp = PSICOMP_GH() self.psicomp = PSICOMP_GH()
def __setstate__(self, state): def __setstate__(self, state):
self._all_dims_active = range(0, max(state['active_dims'])+1) self._all_dims_active = np.arange(0, max(state['active_dims'])+1)
super(Kern, self).__setstate__(state) super(Kern, self).__setstate__(state)
@property @property
def _effective_input_dim(self): def _effective_input_dim(self):
return self._all_dims_active.size return np.size(self._all_dims_active)
@Cache_this(limit=20) @Cache_this(limit=20)
def _slice_X(self, X): def _slice_X(self, X):

View file

@@ -97,7 +97,7 @@ class Stationary(Kern):
r = self._scaled_dist(X, X2) r = self._scaled_dist(X, X2)
return self.K_of_r(r) return self.K_of_r(r)
@Cache_this(limit=20, ignore_args=()) @Cache_this(limit=3, ignore_args=())
def dK_dr_via_X(self, X, X2): def dK_dr_via_X(self, X, X2):
#a convenience function, so we can cache dK_dr #a convenience function, so we can cache dK_dr
return self.dK_dr(self._scaled_dist(X, X2)) return self.dK_dr(self._scaled_dist(X, X2))
@@ -127,7 +127,7 @@ class Stationary(Kern):
r2 = np.clip(r2, 0, np.inf) r2 = np.clip(r2, 0, np.inf)
return np.sqrt(r2) return np.sqrt(r2)
@Cache_this(limit=20, ignore_args=()) @Cache_this(limit=3, ignore_args=())
def _scaled_dist(self, X, X2=None): def _scaled_dist(self, X, X2=None):
""" """
Efficiently compute the scaled distance, r. Efficiently compute the scaled distance, r.

View file

@@ -330,6 +330,8 @@ def plot(self, plot_limits=None, fixed_inputs=None,
# It does not make sense to plot the data (which lives not in the latent function space) into latent function space. # It does not make sense to plot the data (which lives not in the latent function space) into latent function space.
plot_data = False plot_data = False
plots = {} plots = {}
if hasattr(self, 'Z') and plot_inducing:
plots.update(_plot_inducing(self, canvas, visible_dims, projection, 'Inducing'))
if plot_data: if plot_data:
plots.update(_plot_data(self, canvas, which_data_rows, which_data_ycols, visible_dims, projection, "Data")) plots.update(_plot_data(self, canvas, which_data_rows, which_data_ycols, visible_dims, projection, "Data"))
plots.update(_plot_data_error(self, canvas, which_data_rows, which_data_ycols, visible_dims, projection, "Data Error")) plots.update(_plot_data_error(self, canvas, which_data_rows, which_data_ycols, visible_dims, projection, "Data Error"))
@@ -340,8 +342,6 @@ def plot(self, plot_limits=None, fixed_inputs=None,
get_which_data_ycols(self, which_data_ycols), get_which_data_ycols(self, which_data_ycols),
predict_kw, samples_likelihood) predict_kw, samples_likelihood)
plots.update(_plot_samples(canvas, helper_data, helper_prediction, projection, "Lik Samples")) plots.update(_plot_samples(canvas, helper_data, helper_prediction, projection, "Lik Samples"))
if hasattr(self, 'Z') and plot_inducing:
plots.update(_plot_inducing(self, canvas, visible_dims, projection, 'Inducing'))
return pl().add_to_canvas(canvas, plots, legend=legend) return pl().add_to_canvas(canvas, plots, legend=legend)

View file

@@ -285,7 +285,10 @@ def get_x_y_var(model):
X = model.X.mean.values X = model.X.mean.values
X_variance = model.X.variance.values X_variance = model.X.variance.values
else: else:
try:
X = model.X.values X = model.X.values
except AttributeError:
X = model.X
X_variance = None X_variance = None
try: try:
Y = model.Y.values Y = model.Y.values