Mirror of https://github.com/SheffieldML/GPy.git
2d plotting

commit 8dbb65ab50
parent c2750ae719

4 changed files with 50 additions and 49 deletions
@@ -77,13 +77,11 @@ class SparseGP(GP):
             mu = np.dot(Kx.T, self.posterior.woodbury_vector)
             if full_cov:
                 Kxx = self.kern.K(Xnew)
-                var = Kxx - mdot(Kx.T, self.posterior.woodbury_inv, Kx)
+                #var = Kxx - mdot(Kx.T, self.posterior.woodbury_inv, Kx)
+                var = Kxx - np.tensordot(np.dot(np.atleast_3d(self.posterior.woodbury_inv).T, Kx).T, Kx, [1,0]).swapaxes(1,2)
             else:
                 Kxx = self.kern.Kdiag(Xnew)
-                WKx_old = np.dot(np.atleast_3d(self.posterior.woodbury_inv)[:,:,0], Kx)
-                WKx = np.tensordot(np.atleast_3d(self.posterior.woodbury_inv), Kx, [0,0])
-                import ipdb;ipdb.set_trace()
-                var = Kxx - np.sum(Kx * WKx, 0)
+                var = (Kxx - np.sum(np.dot(np.atleast_3d(self.posterior.woodbury_inv).T, Kx) * Kx[None,:,:], 1)).T
         else:
             Kx = self.kern.psi1(self.Z, Xnew, X_variance_new)
             mu = np.dot(Kx, self.Cpsi1V)
@@ -93,7 +91,7 @@ class SparseGP(GP):
             Kxx = self.kern.psi0(self.Z, Xnew, X_variance_new)
             psi2 = self.kern.psi2(self.Z, Xnew, X_variance_new)
             var = Kxx - np.sum(np.sum(psi2 * Kmmi_LmiBLmi[None, :, :], 1), 1)
-        return mu, var[:,None]
+        return mu, var


     def _getstate(self):
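Note on the variance lines above: the new full-covariance expression batches the quadratic form Kx.T * woodbury_inv * Kx over a possible third (per-output) axis of woodbury_inv, instead of computing one mdot per output. A minimal NumPy sketch of that idea, with purely illustrative shapes and names (not GPy's actual code):

import numpy as np

# Illustrative sizes: M inducing points, N test points, P outputs.
M, N, P = 5, 7, 3
rng = np.random.RandomState(0)
Kx = rng.randn(M, N)        # K(Z, Xnew)
W = rng.randn(M, M, P)      # stacked "woodbury_inv": one (M, M) slice per output
Kxx = rng.randn(N, N)       # K(Xnew, Xnew)

# Old style: one mdot-like quadratic form per output slice.
var_loop = np.dstack([Kxx - Kx.T.dot(W[:, :, p]).dot(Kx) for p in range(P)])

# Batched style: contract the inducing dimension for all P slices at once.
var_batched = Kxx[:, :, None] - np.einsum('ij,jkp,kl->ilp', Kx.T, W, Kx)

print(np.allclose(var_loop, var_batched))   # True

The diagonal branch (full_cov=False) does the analogous contraction with a sum over the inducing axis instead of forming the full N x N matrices.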
@@ -89,7 +89,7 @@ def sparse_gplvm_oil(optimize=True, verbose=0, plot=True, N=100, Q=6, num_induci
     Y = Y - Y.mean(0)
     Y /= Y.std(0)
     # Create the model
-    kernel = GPy.kern.RBF(Q, ARD=True) + GPy.kern.bias(Q)
+    kernel = GPy.kern.RBF(Q, ARD=True) + GPy.kern.Bias(Q)
     m = GPy.models.SparseGPLVM(Y, Q, kernel=kernel, num_inducing=num_inducing)
     m.data_labels = data['Y'][:N].argmax(axis=1)

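The bias -> Bias change here (and the white -> White, linear -> Linear changes in later hunks) tracks GPy's renaming of kernel constructors from lowercase factory functions to capitalised classes. A minimal sketch of the post-rename usage, assuming a reasonably recent GPy:

import GPy

Q = 6  # latent dimensionality, as in the examples in this file
# Kernels are classes now; '+' still builds a composite (sum) kernel.
kernel = GPy.kern.RBF(Q, ARD=True) + GPy.kern.Bias(Q) + GPy.kern.White(Q)
print(kernel)  # parameter table of the summed kernel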
@@ -139,7 +139,7 @@ def swiss_roll(optimize=True, verbose=1, plot=True, N=1000, num_inducing=15, Q=4
                        (1 - var))) + .001
     Z = _np.random.permutation(X)[:num_inducing]

-    kernel = GPy.kern.RBF(Q, ARD=True) + GPy.kern.bias(Q, _np.exp(-2)) + GPy.kern.white(Q, _np.exp(-2))
+    kernel = GPy.kern.RBF(Q, ARD=True) + GPy.kern.Bias(Q, _np.exp(-2)) + GPy.kern.White(Q, _np.exp(-2))

     m = BayesianGPLVM(Y, Q, X=X, X_variance=S, num_inducing=num_inducing, Z=Z, kernel=kernel)
     m.data_colors = c
@@ -159,28 +159,26 @@ def swiss_roll(optimize=True, verbose=1, plot=True, N=1000, num_inducing=15, Q=4

 def bgplvm_oil(optimize=True, verbose=1, plot=True, N=200, Q=7, num_inducing=40, max_iters=1000, **k):
     import GPy
-    from GPy.likelihoods import Gaussian
     from matplotlib import pyplot as plt

     _np.random.seed(0)
     data = GPy.util.datasets.oil()

-    kernel = GPy.kern.RBF_inv(Q, 1., [.1] * Q, ARD=True) + GPy.kern.bias(Q, _np.exp(-2))
+    kernel = GPy.kern.RBF(Q, 1., [.1] * Q, ARD=True)# + GPy.kern.Bias(Q, _np.exp(-2))
     Y = data['X'][:N]
-    Yn = Gaussian(Y, normalize=True)
-    m = GPy.models.BayesianGPLVM(Yn, Q, kernel=kernel, num_inducing=num_inducing, **k)
+    m = GPy.models.BayesianGPLVM(Y, Q, kernel=kernel, num_inducing=num_inducing, **k)
     m.data_labels = data['Y'][:N].argmax(axis=1)
-    m['noise'] = Yn.Y.var() / 100.
+    m['.*noise.var'] = Y.var() / 100.

     if optimize:
         m.optimize('scg', messages=verbose, max_iters=max_iters, gtol=.05)

     if plot:
-        y = m.likelihood.Y[0, :]
+        y = m.Y[0, :]
         fig, (latent_axes, sense_axes) = plt.subplots(1, 2)
         m.plot_latent(ax=latent_axes)
-        data_show = GPy.util.visualize.vector_show(y)
-        lvm_visualizer = GPy.util.visualize.lvm_dimselect(m.X[0, :], # @UnusedVariable
+        data_show = GPy.plotting.matplot_dep.visualize.vector_show(y)
+        lvm_visualizer = GPy.plotting.matplot_dep.visualize.lvm_dimselect(m.X[0, :], # @UnusedVariable
                                                            m, data_show, latent_axes=latent_axes, sense_axes=sense_axes)
         raw_input('Press enter to finish')
         plt.close(fig)
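The m['.*noise.var'] = Y.var() / 100. line above relies on GPy's regexp-style parameter indexing: every parameter whose dotted name matches the pattern is selected and can be read or assigned in one statement. A small sketch, assuming a recent GPy and an illustrative GPRegression model in place of the BayesianGPLVM above:

import numpy as np
import GPy

X = np.linspace(0, 1, 20)[:, None]
Y = np.sin(6 * X) + 0.1 * np.random.randn(20, 1)
m = GPy.models.GPRegression(X, Y, GPy.kern.RBF(1))

# Assign every parameter whose name matches the regular expression,
# here the Gaussian noise variance.
m['.*noise.var'] = Y.var() / 100.
print(m['.*var'])  # values of all parameters with 'var' in their name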
@@ -190,8 +188,8 @@ def _simulate_sincos(D1, D2, D3, N, num_inducing, Q, plot_sim=False):
     _np.random.seed(1234)

     x = _np.linspace(0, 4 * _np.pi, N)[:, None]
-    s1 = _np.vectorize(lambda x: -_np.sin(x))
-    s2 = _np.vectorize(lambda x: _np.cos(x))
+    s1 = _np.vectorize(lambda x: -_np.sin(_np.exp(x)))
+    s2 = _np.vectorize(lambda x: _np.cos(x)**2)
     s3 = _np.vectorize(lambda x:-_np.exp(-_np.cos(2 * x)))
     sS = _np.vectorize(lambda x: x*_np.sin(x))

@@ -328,7 +326,7 @@ def mrd_simulation(optimize=True, verbose=True, plot=True, plot_sim=True, **kw):
     _, _, Ylist = _simulate_sincos(D1, D2, D3, N, num_inducing, Q, plot_sim)
     likelihood_list = [Gaussian(x, normalize=True) for x in Ylist]

-    k = kern.linear(Q, ARD=True) + kern.bias(Q, _np.exp(-2)) + kern.white(Q, _np.exp(-2))
+    k = kern.Linear(Q, ARD=True) + kern.Bias(Q, _np.exp(-2)) + kern.White(Q, _np.exp(-2))
     m = MRD(likelihood_list, input_dim=Q, num_inducing=num_inducing, kernels=k, initx="", initz='permute', **kw)
     m.ensure_default_constraints()

@@ -355,15 +353,15 @@ def brendan_faces(optimize=True, verbose=True, plot=True):
     m = GPy.models.GPLVM(Yn, Q)

     # optimize
-    m.constrain('rbf|noise|white', GPy.core.transformations.logexp_clipped())
+    m.constrain('rbf|noise|white', GPy.transformations.LogexpClipped())

     if optimize: m.optimize('scg', messages=verbose, max_iters=1000)

     if plot:
         ax = m.plot_latent(which_indices=(0, 1))
         y = m.likelihood.Y[0, :]
-        data_show = GPy.util.visualize.image_show(y[None, :], dimensions=(20, 28), transpose=True, order='F', invert=False, scale=False)
-        GPy.util.visualize.lvm(m.X[0, :].copy(), m, data_show, ax)
+        data_show = GPy.plotting.matplot_dep.visualize.image_show(y[None, :], dimensions=(20, 28), transpose=True, order='F', invert=False, scale=False)
+        GPy.plotting.matplot_dep.visualize.lvm(m.X[0, :].copy(), m, data_show, ax)
         raw_input('Press enter to finish')

     return m
@@ -382,8 +380,8 @@ def olivetti_faces(optimize=True, verbose=True, plot=True):
     if plot:
         ax = m.plot_latent(which_indices=(0, 1))
         y = m.likelihood.Y[0, :]
-        data_show = GPy.util.visualize.image_show(y[None, :], dimensions=(112, 92), transpose=False, invert=False, scale=False)
-        GPy.util.visualize.lvm(m.X[0, :].copy(), m, data_show, ax)
+        data_show = GPy.plotting.matplot_dep.visualize.image_show(y[None, :], dimensions=(112, 92), transpose=False, invert=False, scale=False)
+        GPy.plotting.matplot_dep.visualize.lvm(m.X[0, :].copy(), m, data_show, ax)
         raw_input('Press enter to finish')

     return m
@@ -398,8 +396,8 @@ def stick_play(range=None, frame_rate=15, optimize=False, verbose=True, plot=Tru
     Y = data['Y'][range[0]:range[1], :].copy()
     if plot:
         y = Y[0, :]
-        data_show = GPy.util.visualize.stick_show(y[None, :], connect=data['connect'])
-        GPy.util.visualize.data_play(Y, data_show, frame_rate)
+        data_show = GPy.plotting.matplot_dep.visualize.stick_show(y[None, :], connect=data['connect'])
+        GPy.plotting.matplot_dep.visualize.data_play(Y, data_show, frame_rate)
     return Y

 def stick(kernel=None, optimize=True, verbose=True, plot=True):
@@ -410,12 +408,12 @@ def stick(kernel=None, optimize=True, verbose=True, plot=True):
     # optimize
     m = GPy.models.GPLVM(data['Y'], 2, kernel=kernel)
     if optimize: m.optimize(messages=verbose, max_f_eval=10000)
-    if plot and GPy.util.visualize.visual_available:
+    if plot and GPy.plotting.matplot_dep.visualize.visual_available:
         plt.clf
         ax = m.plot_latent()
         y = m.likelihood.Y[0, :]
-        data_show = GPy.util.visualize.stick_show(y[None, :], connect=data['connect'])
-        GPy.util.visualize.lvm(m.X[0, :].copy(), m, data_show, ax)
+        data_show = GPy.plotting.matplot_dep.visualize.stick_show(y[None, :], connect=data['connect'])
+        GPy.plotting.matplot_dep.visualize.lvm(m.X[0, :].copy(), m, data_show, ax)
         raw_input('Press enter to finish')

     return m
@@ -429,12 +427,12 @@ def bcgplvm_linear_stick(kernel=None, optimize=True, verbose=True, plot=True):
     mapping = GPy.mappings.Linear(data['Y'].shape[1], 2)
     m = GPy.models.BCGPLVM(data['Y'], 2, kernel=kernel, mapping=mapping)
     if optimize: m.optimize(messages=verbose, max_f_eval=10000)
-    if plot and GPy.util.visualize.visual_available:
+    if plot and GPy.plotting.matplot_dep.visualize.visual_available:
         plt.clf
         ax = m.plot_latent()
         y = m.likelihood.Y[0, :]
-        data_show = GPy.util.visualize.stick_show(y[None, :], connect=data['connect'])
-        GPy.util.visualize.lvm(m.X[0, :].copy(), m, data_show, ax)
+        data_show = GPy.plotting.matplot_dep.visualize.stick_show(y[None, :], connect=data['connect'])
+        GPy.plotting.matplot_dep.visualize.lvm(m.X[0, :].copy(), m, data_show, ax)
         raw_input('Press enter to finish')

     return m
@@ -449,12 +447,12 @@ def bcgplvm_stick(kernel=None, optimize=True, verbose=True, plot=True):
     mapping = GPy.mappings.Kernel(X=data['Y'], output_dim=2, kernel=back_kernel)
     m = GPy.models.BCGPLVM(data['Y'], 2, kernel=kernel, mapping=mapping)
     if optimize: m.optimize(messages=verbose, max_f_eval=10000)
-    if plot and GPy.util.visualize.visual_available:
+    if plot and GPy.plotting.matplot_dep.visualize.visual_available:
         plt.clf
         ax = m.plot_latent()
         y = m.likelihood.Y[0, :]
-        data_show = GPy.util.visualize.stick_show(y[None, :], connect=data['connect'])
-        GPy.util.visualize.lvm(m.X[0, :].copy(), m, data_show, ax)
+        data_show = GPy.plotting.matplot_dep.visualize.stick_show(y[None, :], connect=data['connect'])
+        GPy.plotting.matplot_dep.visualize.lvm(m.X[0, :].copy(), m, data_show, ax)
         raw_input('Press enter to finish')

     return m
@@ -480,7 +478,7 @@ def stick_bgplvm(model=None, optimize=True, verbose=True, plot=True):

     data = GPy.util.datasets.osu_run1()
     Q = 6
-    kernel = GPy.kern.RBF(Q, ARD=True) + GPy.kern.bias(Q, _np.exp(-2)) + GPy.kern.white(Q, _np.exp(-2))
+    kernel = GPy.kern.RBF(Q, ARD=True) + GPy.kern.Bias(Q, _np.exp(-2)) + GPy.kern.White(Q, _np.exp(-2))
     m = BayesianGPLVM(data['Y'], Q, init="PCA", num_inducing=20, kernel=kernel)
     # optimize
     m.ensure_default_constraints()
@@ -491,8 +489,8 @@ def stick_bgplvm(model=None, optimize=True, verbose=True, plot=True):
         plt.sca(latent_axes)
         m.plot_latent()
         y = m.likelihood.Y[0, :].copy()
-        data_show = GPy.util.visualize.stick_show(y[None, :], connect=data['connect'])
-        GPy.util.visualize.lvm_dimselect(m.X[0, :].copy(), m, data_show, latent_axes=latent_axes, sense_axes=sense_axes)
+        data_show = GPy.plotting.matplot_dep.visualize.stick_show(y[None, :], connect=data['connect'])
+        GPy.plotting.matplot_dep.visualize.lvm_dimselect(m.X[0, :].copy(), m, data_show, latent_axes=latent_axes, sense_axes=sense_axes)
         raw_input('Press enter to finish')

     return m
@@ -511,8 +509,8 @@ def cmu_mocap(subject='35', motion=['01'], in_place=True, optimize=True, verbose
     if plot:
         ax = m.plot_latent()
         y = m.likelihood.Y[0, :]
-        data_show = GPy.util.visualize.skeleton_show(y[None, :], data['skel'])
-        lvm_visualizer = GPy.util.visualize.lvm(m.X[0, :].copy(), m, data_show, ax)
+        data_show = GPy.plotting.matplot_dep.visualize.skeleton_show(y[None, :], data['skel'])
+        lvm_visualizer = GPy.plotting.matplot_dep.visualize.lvm(m.X[0, :].copy(), m, data_show, ax)
         raw_input('Press enter to finish')
         lvm_visualizer.close()

@@ -57,8 +57,8 @@ def plot_fit(model, plot_limits=None, which_data_rows='all',
         fig = pb.figure(num=fignum)
         ax = fig.add_subplot(111)

-    X, Y = param_to_array(model.X, model.Y)
-    if model.has_uncertain_inputs(): X_variance = model.X_variance
+    X, Y, Z = param_to_array(model.X, model.Y, model.Z)
+    if model.has_uncertain_inputs(): X_variance = param_to_array(model.q.variance)

     #work out what the inputs are for plotting (1D or 2D)
     fixed_dims = np.array([i for i,v in fixed_inputs])
@@ -97,10 +97,10 @@ def plot_fit(model, plot_limits=None, which_data_rows='all',


         #add error bars for uncertain (if input uncertainty is being modelled)
-        if hasattr(model,"has_uncertain_inputs") and model.has_uncertain_inputs():
-            ax.errorbar(X[which_data_rows, free_dims].flatten(), Y[which_data_rows, which_data_ycols].flatten(),
-                    xerr=2 * np.sqrt(X_variance[which_data_rows, free_dims].flatten()),
-                    ecolor='k', fmt=None, elinewidth=.5, alpha=.5)
+        #if hasattr(model,"has_uncertain_inputs") and model.has_uncertain_inputs():
+        #    ax.errorbar(X[which_data_rows, free_dims].flatten(), Y[which_data_rows, which_data_ycols].flatten(),
+        #            xerr=2 * np.sqrt(X_variance[which_data_rows, free_dims].flatten()),
+        #            ecolor='k', fmt=None, elinewidth=.5, alpha=.5)


         #set the limits of the plot to some sensible values
@@ -112,7 +112,7 @@ def plot_fit(model, plot_limits=None, which_data_rows='all',
         #add inducing inputs (if a sparse model is used)
         if hasattr(model,"Z"):
             #Zu = model.Z[:,free_dims] * model._Xscale[:,free_dims] + model._Xoffset[:,free_dims]
-            Zu = param_to_array(model.Z[:,free_dims])
+            Zu = Z[:,free_dims]
             z_height = ax.get_ylim()[0]
             ax.plot(Zu, np.zeros_like(Zu) + z_height, 'r|', mew=1.5, markersize=12)

@@ -136,7 +136,7 @@ def plot_fit(model, plot_limits=None, which_data_rows='all',
             Y = Y
         else:
             m, _, _, _ = model.predict(Xgrid)
-            Y = model.data
+            Y = Y
         for d in which_data_ycols:
             m_d = m[:,d].reshape(resolution, resolution).T
             ax.contour(x, y, m_d, levels, vmin=m.min(), vmax=m.max(), cmap=pb.cm.jet)
@@ -152,7 +152,7 @@ def plot_fit(model, plot_limits=None, which_data_rows='all',
         #add inducing inputs (if a sparse model is used)
         if hasattr(model,"Z"):
             #Zu = model.Z[:,free_dims] * model._Xscale[:,free_dims] + model._Xoffset[:,free_dims]
-            Zu = model.Z[:,free_dims]
+            Zu = Z[:,free_dims]
             ax.plot(Zu[:,free_dims[0]], Zu[:,free_dims[1]], 'wo')

         else:
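For the 2-D inputs that give this commit its name, plot_fit evaluates the predictive mean on a grid and reshapes it back before calling ax.contour (the m_d = m[:,d].reshape(resolution, resolution).T line above; the transpose is tied to how the grid points are ordered there). A self-contained sketch of that pattern, with a toy predictor standing in for model.predict and illustrative limits:

import numpy as np
import matplotlib.pyplot as plt

resolution = 50
x, y = np.meshgrid(np.linspace(-3, 3, resolution), np.linspace(-3, 3, resolution))
Xgrid = np.column_stack([x.ravel(), y.ravel()])    # (resolution**2, 2) test inputs

def predict(X):
    # toy stand-in for model.predict(Xgrid); returns one output column
    return np.sin(X[:, :1]) * np.cos(X[:, 1:2])

m = predict(Xgrid)
m_d = m[:, 0].reshape(resolution, resolution)      # back onto the plotting grid
fig, ax = plt.subplots()
ax.contour(x, y, m_d, 20, vmin=m.min(), vmax=m.max())
plt.show()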
@@ -30,6 +30,11 @@ class Test(unittest.TestCase):
         self.assertListEqual(self.param_index[two].tolist(), [0,3])
         self.assertListEqual(self.param_index[one].tolist(), [1])

+    def test_shift_right(self):
+        self.param_index.shift_right(5, 2)
+        self.assertListEqual(self.param_index[three].tolist(), [2,4,9])
+        self.assertListEqual(self.param_index[two].tolist(), [0,7])
+        self.assertListEqual(self.param_index[one].tolist(), [3])
+
     def test_index_view(self):
         #=======================================================================