Mirror of https://github.com/SheffieldML/GPy.git

Commit 8a83845937: merge changes
65 changed files with 1197 additions and 600 deletions
@@ -75,15 +75,19 @@ class BayesianGPLVM(SparseGP):
         # update for the KL divergence
         self.variational_prior.update_gradients_KL(self.X)

-    def plot_latent(self, plot_inducing=True, *args, **kwargs):
-        """
-        See GPy.plotting.matplot_dep.dim_reduction_plots.plot_latent
-        """
+    def plot_latent(self, labels=None, which_indices=None,
+                    resolution=50, ax=None, marker='o', s=40,
+                    fignum=None, plot_inducing=True, legend=True,
+                    plot_limits=None,
+                    aspect='auto', updates=False, **kwargs):
         import sys
         assert "matplotlib" in sys.modules, "matplotlib package has not been imported."
         from ..plotting.matplot_dep import dim_reduction_plots

-        return dim_reduction_plots.plot_latent(self, plot_inducing=plot_inducing, *args, **kwargs)
+        return dim_reduction_plots.plot_latent(self, labels, which_indices,
+                                               resolution, ax, marker, s,
+                                               fignum, plot_inducing, legend,
+                                               plot_limits, aspect, updates, **kwargs)

     def do_test_latents(self, Y):
         """
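With this hunk, BayesianGPLVM.plot_latent exposes explicit keyword arguments instead of forwarding *args/**kwargs. A minimal sketch of a call against the new signature; the toy data, labels, and constructor arguments below are illustrative assumptions, not part of this commit:

```python
import numpy as np
import GPy

# Toy 5-D observations and hypothetical class labels for colouring the latent plot.
Y = np.random.randn(40, 5)
labels = np.array([0] * 20 + [1] * 20)

m = GPy.models.BayesianGPLVM(Y, input_dim=2, num_inducing=10)
m.optimize(max_iters=50)

# The new explicit keyword interface replaces the old *args/**kwargs pass-through.
m.plot_latent(labels=labels, which_indices=(0, 1),
              resolution=50, legend=True, plot_inducing=True)
```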
@@ -36,7 +36,7 @@ class GPCoregionalizedRegression(GP):

         #Kernel
         if kernel is None:
-            kernel = util.multioutput.ICM(input_dim=X.shape[1]-1, num_outputs=Ny, kernel=GPy.kern.rbf(X.shape[1]-1), W_rank=1,name=kernel_name)
+            kernel = util.multioutput.ICM(input_dim=X.shape[1]-1, num_outputs=Ny, kernel=kern.RBF(X.shape[1]-1), W_rank=1,name=kernel_name)

         #Likelihood
         likelihood = util.multioutput.build_likelihood(Y_list,self.output_index,likelihoods_list)
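The default kernel now uses the renamed kern.RBF class rather than the old lowercase GPy.kern.rbf. A rough sketch of building the equivalent kernel explicitly, mirroring the ICM call in the hunk above; the data and two-output setup are made-up illustrations:

```python
import numpy as np
import GPy

# Two toy outputs observed at different input locations.
X1 = np.random.rand(30, 1); Y1 = np.sin(6 * X1) + 0.05 * np.random.randn(30, 1)
X2 = np.random.rand(25, 1); Y2 = np.cos(6 * X2) + 0.05 * np.random.randn(25, 1)

# Equivalent of the new default: an ICM kernel built on kern.RBF
# (the old spelling GPy.kern.rbf is gone after the kernel refactor).
icm = GPy.util.multioutput.ICM(input_dim=1, num_outputs=2,
                               kernel=GPy.kern.RBF(1), W_rank=1)

m = GPy.models.GPCoregionalizedRegression([X1, X2], [Y1, Y2], kernel=icm)
m.optimize()
```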
@@ -20,14 +20,14 @@ class GPRegression(GP):
     """

-    def __init__(self, X, Y, kernel=None):
+    def __init__(self, X, Y, kernel=None, Y_metadata=None):

         if kernel is None:
             kernel = kern.RBF(X.shape[1])

         likelihood = likelihoods.Gaussian()

-        super(GPRegression, self).__init__(X, Y, kernel, likelihood, name='GP regression')
+        super(GPRegression, self).__init__(X, Y, kernel, likelihood, name='GP regression', Y_metadata=Y_metadata)

     def _getstate(self):
         return GP._getstate(self)
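GPRegression now accepts a Y_metadata argument and simply forwards it to the base GP class. A short sketch of both constructor forms; the data and the metadata dictionary key are hypothetical examples, not taken from this commit:

```python
import numpy as np
import GPy

X = np.random.rand(50, 1)
Y = np.sin(10 * X) + 0.1 * np.random.randn(50, 1)

# As before, a default RBF kernel is created when none is passed.
m = GPy.models.GPRegression(X, Y)
m.optimize()

# The new keyword forwards metadata to the base GP class; the
# 'output_index' entry here is only an illustrative example of such metadata.
m2 = GPy.models.GPRegression(X, Y, kernel=GPy.kern.RBF(1),
                             Y_metadata={'output_index': np.zeros((50, 1), dtype=int)})
```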
@@ -67,12 +67,22 @@ class GPLVM(GP):
         assert self.likelihood.Y.shape[1] == 2
         pb.scatter(self.likelihood.Y[:, 0], self.likelihood.Y[:, 1], 40, self.X[:, 0].copy(), linewidth=0, cmap=pb.cm.jet) # @UndefinedVariable
         Xnew = np.linspace(self.X.min(), self.X.max(), 200)[:, None]
-        mu, var, upper, lower = self.predict(Xnew)
+        mu, _ = self.predict(Xnew)
         pb.plot(mu[:, 0], mu[:, 1], 'k', linewidth=1.5)

-    def plot_latent(self, *args, **kwargs):
-        return dim_reduction_plots.plot_latent(self, *args, **kwargs)
+    def plot_latent(self, labels=None, which_indices=None,
+                    resolution=50, ax=None, marker='o', s=40,
+                    fignum=None, legend=True,
+                    plot_limits=None,
+                    aspect='auto', updates=False, **kwargs):
+        import sys
+        assert "matplotlib" in sys.modules, "matplotlib package has not been imported."
+        from ..plotting.matplot_dep import dim_reduction_plots
+        return dim_reduction_plots.plot_latent(self, labels, which_indices,
+                                               resolution, ax, marker, s,
+                                               fignum, False, legend,
+                                               plot_limits, aspect, updates, **kwargs)

     def plot_magnification(self, *args, **kwargs):
         return util.plot_latent.plot_magnification(self, *args, **kwargs)
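Two things change here: the internal predict call now unpacks only a mean and variance pair, and GPLVM.plot_latent gets the same explicit keyword signature as the Bayesian variant (with plot_inducing hard-coded to False in the delegation, since there are no inducing points). A sketch under the assumption that GPLVM takes (Y, input_dim); the query points and labels are invented for illustration:

```python
import numpy as np
import GPy

Y = np.random.randn(60, 8)
labels = np.array([0] * 30 + [1] * 30)  # hypothetical labels

m = GPy.models.GPLVM(Y, input_dim=2)
m.optimize(max_iters=100)

# predict returns (mean, variance); the old 4-tuple unpacking no longer applies.
Xnew = np.random.randn(10, 2)  # hypothetical query points in the 2-D latent space
mu, var = m.predict(Xnew)

# plot_latent with the new explicit keywords.
m.plot_latent(labels=labels, which_indices=(0, 1), resolution=50)
```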
@@ -43,14 +43,14 @@ class SparseGPCoregionalizedRegression(SparseGP):

         #Kernel
         if kernel is None:
-            kernel = util.multioutput.ICM(input_dim=X.shape[1]-1, num_outputs=Ny, kernel=GPy.kern.rbf(X.shape[1]-1), W_rank=1,name=kernel_name)
+            kernel = util.multioutput.ICM(input_dim=X.shape[1]-1, num_outputs=Ny, kernel=kern.RBF(X.shape[1]-1), W_rank=1,name=kernel_name)

         #Likelihood
         likelihood = util.multioutput.build_likelihood(Y_list,self.output_index,likelihoods_list)

         #Inducing inputs list
         if len(Z_list):
-            assert len(Z_list) == self.output_dim, 'Number of outputs do not match length of inducing inputs list.'
+            assert len(Z_list) == Ny, 'Number of outputs do not match length of inducing inputs list.'
         else:
             if isinstance(num_inducing,np.int):
                 num_inducing = [num_inducing] * Ny
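Besides the same kern.RBF rename, the inducing-input check now compares an explicit Z_list against Ny (the number of output sets), and a plain integer num_inducing is broadcast to one count per output. A minimal sketch of the two-output case; the data and the choice of ten inducing points are illustrative assumptions:

```python
import numpy as np
import GPy

# Two toy output sets with different numbers of observations.
X1 = np.random.rand(40, 1); Y1 = np.sin(6 * X1) + 0.05 * np.random.randn(40, 1)
X2 = np.random.rand(35, 1); Y2 = np.cos(6 * X2) + 0.05 * np.random.randn(35, 1)

# A single integer is expanded to one inducing-point count per output;
# an explicit Z_list would need exactly len(Y_list) entries.
m = GPy.models.SparseGPCoregionalizedRegression([X1, X2], [Y1, Y2], num_inducing=10)
m.optimize()
```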