Mirror of https://github.com/SheffieldML/GPy.git, synced 2026-05-08 03:22:38 +02:00
[plotting&kern] bugfixes in plotting and kernel size
parent 1bda209469
commit 7b5422b694
5 changed files with 21 additions and 17 deletions
@@ -365,13 +365,14 @@ class GP(Model):
             mean_jac[:,:,i] = kern.gradients_X(self.posterior.woodbury_vector[:,i:i+1].T, Xnew, self._predictive_variable)
 
         dK_dXnew_full = np.empty((self._predictive_variable.shape[0], Xnew.shape[0], Xnew.shape[1]))
+        one = np.ones((1,1))
         for i in range(self._predictive_variable.shape[0]):
-            dK_dXnew_full[i] = kern.gradients_X([[1.]], Xnew, self._predictive_variable[[i]])
+            dK_dXnew_full[i] = kern.gradients_X(one, Xnew, self._predictive_variable[[i]])
 
         if full_cov:
-            dK2_dXdX = kern.gradients_XX([[1.]], Xnew)
+            dK2_dXdX = kern.gradients_XX(one, Xnew)
         else:
-            dK2_dXdX = kern.gradients_XX_diag([[1.]], Xnew)
+            dK2_dXdX = kern.gradients_XX_diag(one, Xnew)
 
         def compute_cov_inner(wi):
             if full_cov:
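Note on the hunk above: [[1.]] is a nested Python list, while np.ones((1,1)) is a proper ndarray, and hoisting it out of the loop also avoids rebuilding the constant weight on every iteration. A standalone illustration (not GPy code) of why the ndarray is the safer argument to pass into the kernel gradient routines:

    import numpy as np

    # Illustrative only: a nested list has no ndarray interface, so kernel
    # code touching the weight's .shape or .T would fail or be forced to
    # convert the list on every call.
    one = np.ones((1, 1))
    assert one.shape == (1, 1)
    try:
        [[1.]].shape
    except AttributeError:
        print("plain lists lack .shape; np.ones((1, 1)) does not")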
@@ -458,7 +459,7 @@ class GP(Model):
         m, v = self._raw_predict(X, full_cov=full_cov, **predict_kwargs)
         if self.normalizer is not None:
             m, v = self.normalizer.inverse_mean(m), self.normalizer.inverse_variance(v)
-
+
         def sim_one_dim(m, v):
             if not full_cov:
                 return np.random.multivariate_normal(m.flatten(), np.diag(v.flatten()), size).T
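Note on the context above: when full_cov is False, v holds one predictive variance per input point, so sim_one_dim promotes it to a diagonal covariance with np.diag before sampling. A minimal standalone sketch of that sampling step, with made-up numbers:

    import numpy as np

    m = np.array([0.0, 1.0, 2.0])    # predictive means, one per input point
    v = np.array([0.1, 0.2, 0.3])    # predictive variances (diagonal only)
    size = 5                         # number of sample paths to draw
    samples = np.random.multivariate_normal(m, np.diag(v), size).T
    assert samples.shape == (3, 5)   # (n_points, n_samples), matching the .T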
@@ -61,12 +61,12 @@ class Kern(Parameterized):
         self.psicomp = PSICOMP_GH()
 
     def __setstate__(self, state):
-        self._all_dims_active = range(0, max(state['active_dims'])+1)
+        self._all_dims_active = np.arange(0, max(state['active_dims'])+1)
         super(Kern, self).__setstate__(state)
 
     @property
     def _effective_input_dim(self):
-        return self._all_dims_active.size
+        return np.size(self._all_dims_active)
 
     @Cache_this(limit=20)
     def _slice_X(self, X):
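Note on the __setstate__ fix: in Python 3, range() is a lazy object with no .size attribute, so the old code left _all_dims_active in a state where _effective_input_dim raised after unpickling a kernel; np.arange returns an ndarray, and the companion change to np.size works for either. A standalone demonstration (not GPy code):

    import numpy as np

    dims_old = range(0, 3)         # what the old __setstate__ stored
    dims_new = np.arange(0, 3)     # what the fixed version stores
    assert not hasattr(dims_old, 'size')       # .size would raise here
    assert dims_new.size == 3
    assert np.size(dims_old) == np.size(dims_new) == 3   # np.size handles both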
@@ -97,7 +97,7 @@ class Stationary(Kern):
         r = self._scaled_dist(X, X2)
         return self.K_of_r(r)
 
-    @Cache_this(limit=20, ignore_args=())
+    @Cache_this(limit=3, ignore_args=())
     def dK_dr_via_X(self, X, X2):
         #a convenience function, so we can cache dK_dr
         return self.dK_dr(self._scaled_dist(X, X2))
@@ -127,7 +127,7 @@ class Stationary(Kern):
         r2 = np.clip(r2, 0, np.inf)
         return np.sqrt(r2)
 
-    @Cache_this(limit=20, ignore_args=())
+    @Cache_this(limit=3, ignore_args=())
     def _scaled_dist(self, X, X2=None):
         """
         Efficiently compute the scaled distance, r.
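Note on the two stationary-kernel hunks: Cache_this memoizes a method's results, and each cached entry here is a full pairwise-distance computation that scales as N x N in the number of inputs, so shrinking the cache from 20 entries to 3 bounds how many large matrices stay pinned in memory (plausibly the "kernel size" part of the commit message). A rough analogy using functools.lru_cache in place of GPy's own Cache_this decorator:

    from functools import lru_cache
    import numpy as np

    @lru_cache(maxsize=3)              # keep only the 3 most recent results
    def scaled_dist(seed):
        X = np.random.default_rng(seed).normal(size=(1000, 2))
        d = X[:, None, :] - X[None, :, :]
        return np.sqrt((d ** 2).sum(-1))    # a full (1000, 1000) matrix

    for seed in range(10):
        scaled_dist(seed)              # entries older than 3 calls are evicted
    print(scaled_dist.cache_info())    # currsize stays at 3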
@@ -46,7 +46,7 @@ def plot_mean(self, plot_limits=None, fixed_inputs=None,
     """
     Plot the mean of the GP.
 
-    You can deactivate the legend for this one plot by supplying None to label.
+    You can deactivate the legend for this one plot by supplying None to label.
 
     Give the Y_metadata in the predict_kw if you need it.
 
@@ -116,7 +116,7 @@ def plot_confidence(self, lower=2.5, upper=97.5, plot_limits=None, fixed_inputs=
     E.g. the 95% confidence interval is $2.5, 97.5$.
     Note: Only implemented for one dimension!
 
-    You can deactivate the legend for this one plot by supplying None to label.
+    You can deactivate the legend for this one plot by supplying None to label.
 
     Give the Y_metadata in the predict_kw if you need it.
 
@@ -170,7 +170,7 @@ def plot_samples(self, plot_limits=None, fixed_inputs=None,
     """
     Plot the mean of the GP.
 
-    You can deactivate the legend for this one plot by supplying None to label.
+    You can deactivate the legend for this one plot by supplying None to label.
 
     Give the Y_metadata in the predict_kw if you need it.
 
@@ -231,7 +231,7 @@ def plot_density(self, plot_limits=None, fixed_inputs=None,
     E.g. the 95% confidence interval is $2.5, 97.5$.
     Note: Only implemented for one dimension!
 
-    You can deactivate the legend for this one plot by supplying None to label.
+    You can deactivate the legend for this one plot by supplying None to label.
 
     Give the Y_metadata in the predict_kw if you need it.
 
@@ -288,7 +288,7 @@ def plot(self, plot_limits=None, fixed_inputs=None,
     """
     Convenience function for plotting the fit of a GP.
 
-    You can deactivate the legend for this one plot by supplying None to label.
+    You can deactivate the legend for this one plot by supplying None to label.
 
     Give the Y_metadata in the predict_kw if you need it.
 
@@ -330,6 +330,8 @@ def plot(self, plot_limits=None, fixed_inputs=None,
         # It does not make sense to plot the data (which lives not in the latent function space) into latent function space.
         plot_data = False
     plots = {}
+    if hasattr(self, 'Z') and plot_inducing:
+        plots.update(_plot_inducing(self, canvas, visible_dims, projection, 'Inducing'))
     if plot_data:
         plots.update(_plot_data(self, canvas, which_data_rows, which_data_ycols, visible_dims, projection, "Data"))
         plots.update(_plot_data_error(self, canvas, which_data_rows, which_data_ycols, visible_dims, projection, "Data Error"))
@@ -340,8 +342,6 @@ def plot(self, plot_limits=None, fixed_inputs=None,
                                 get_which_data_ycols(self, which_data_ycols),
                                 predict_kw, samples_likelihood)
         plots.update(_plot_samples(canvas, helper_data, helper_prediction, projection, "Lik Samples"))
-    if hasattr(self, 'Z') and plot_inducing:
-        plots.update(_plot_inducing(self, canvas, visible_dims, projection, 'Inducing'))
     return pl().add_to_canvas(canvas, plots, legend=legend)
 
 
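Note on the two hunks above: together they move the _plot_inducing call from the end of plot() to just after plots = {}, so inducing inputs are added to the canvas before the data and sample layers and are drawn underneath them. A hedged usage sketch (SparseGPRegression and plot_inducing are existing GPy names; the toy data is made up):

    import numpy as np
    import GPy

    X = np.random.uniform(0, 10, (50, 1))
    Y = np.sin(X) + np.random.normal(0, 0.1, (50, 1))
    m = GPy.models.SparseGPRegression(X, Y, num_inducing=6)
    m.optimize()
    m.plot(plot_inducing=True)     # inducing markers now sit under the data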
@@ -362,7 +362,7 @@ def plot_f(self, plot_limits=None, fixed_inputs=None,
 
     If you want fine graned control use the specific plotting functions supplied in the model.
 
-    You can deactivate the legend for this one plot by supplying None to label.
+    You can deactivate the legend for this one plot by supplying None to label.
 
     Give the Y_metadata in the predict_kw if you need it.
 
@@ -285,7 +285,10 @@ def get_x_y_var(model):
         X = model.X.mean.values
         X_variance = model.X.variance.values
     else:
-        X = model.X.values
+        try:
+            X = model.X.values
+        except AttributeError:
+            X = model.X
         X_variance = None
     try:
         Y = model.Y.values
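Note on the get_x_y_var change: it is a duck-typing fix, since model.X may be a GPy parameter array exposing .values or already a plain ndarray, and the new try/except covers both. A standalone sketch with a hypothetical DummyModel standing in for either case:

    import numpy as np

    class DummyModel:
        X = np.zeros((4, 1))       # a plain array: no .values attribute

    model = DummyModel()
    try:
        X = model.X.values         # works for GPy's parameter arrays
    except AttributeError:
        X = model.X                # falls back for raw ndarrays
    assert X.shape == (4, 1)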