added debug plot

Commit 04261f045b, parent 5f76e7cd12 (mirror of https://github.com/SheffieldML/GPy.git)
3 changed files with 104 additions and 21 deletions
@@ -100,7 +100,7 @@ def oil_100():
    # m.plot_latent(labels=data['Y'].argmax(axis=1))
    return m

def mrd_simulation():
def mrd_simulation(plot_sim=False):
    # num = 2
    ard1 = np.array([1., 1, 0, 0], dtype=float)
    ard2 = np.array([0., 1, 1, 0], dtype=float)
@@ -119,8 +119,8 @@ def mrd_simulation():
    # Y2 -= Y2.mean(0)
    # make_params = lambda ard: np.hstack([[1], ard, [1, .3]])

    D1, D2, N, M, Q = 50, 100, 150, 15, 4
    x = np.linspace(0, 2 * np.pi, N)[:, None]
    D1, D2, N, M, Q = 5, 10, 150, 15, 3
    x = np.linspace(0, 4 * np.pi, N)[:, None]

    s1 = np.vectorize(lambda x: np.sin(x))
    s2 = np.vectorize(lambda x: np.cos(x))
@@ -132,9 +132,30 @@ def mrd_simulation():
    Y1 = S1.dot(np.random.randn(S1.shape[1], D1))
    Y2 = S2.dot(np.random.randn(S2.shape[1], D2))

    Y1 -= Y1.mean(0)
    Y2 -= Y2.mean(0)

    if plot_sim:
        import pylab
        fig = pylab.figure("MRD Simulation")
        ax = fig.add_subplot(2, 2, 1)
        ax.imshow(S1)
        ax.set_title("S1")
        ax = fig.add_subplot(2, 2, 2)
        ax.imshow(S2)
        ax.set_title("S2")
        ax = fig.add_subplot(2, 2, 3)
        ax.imshow(Y1)
        ax.set_title("Y1")
        ax = fig.add_subplot(2, 2, 4)
        ax.imshow(Y2)
        ax.set_title("Y2")
        pylab.draw()
        pylab.tight_layout()

    k = GPy.kern.rbf(Q, ARD=True) + GPy.kern.bias(Q) + GPy.kern.white(Q)

    m = MRD(Y1, Y2, Q=Q, M=M, kernel=k, init="PCA", _debug=False)
    m = MRD(Y1, Y2, Q=Q, M=M, kernel=k, init="concat", _debug=True)
    m.ensure_default_constraints()

    # fig = pyplot.figure("expected", figsize=(8, 3))
@@ -145,6 +166,10 @@ def mrd_simulation():

    return m

def mrd_silhouette():

    pass

def brendan_faces():
    data = GPy.util.datasets.brendan_faces()
    Y = data['Y'][0:-1:10, :]
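A minimal sketch of how the updated demo might be driven interactively, assuming the file above is GPy's dimensionality-reduction examples module (the module path and the call sequence here are illustrative, not part of the commit):

# Illustrative driver for the demo above -- not part of the commit.
import numpy as np
np.random.seed(0)  # make the random projections in mrd_simulation repeatable

from GPy.examples import dimensionality_reduction as demo  # assumed module path

# plot_sim=True shows the S1/S2/Y1/Y2 panels before the model is built.
m = demo.mrd_simulation(plot_sim=True)

# The MRD debug helpers added later in this commit can then step the
# optimiser in short bursts and refresh the diagnostic figures:
m._debug_optimize(opt='scg', maxiters=100, itersteps=10)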
@@ -104,7 +104,8 @@ def SCG(f, gradf, x, optargs=(), maxiters=500, max_f_eval=500, display=True, xto

        iteration += 1
        if display:
            print 'Iteration: {0:<5g} Objective:{1:< 12g} Scale:{2:< 12g}\r'.format(iteration, fnow, beta),
            print '\r',
            print 'Iteration: {0:<5g} Objective:{1:< 12g} Scale:{2:< 12g}'.format(iteration, fnow, beta),
            # print 'Iteration:', iteration, ' Objective:', fnow, ' Scale:', beta, '\r',
            sys.stdout.flush()
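The change above moves the carriage return out of the formatted message and prints it first, so each SCG iteration overwrites the previous status line in place. A self-contained sketch of that idiom, written with Python 3 syntax rather than the Python 2 print statements used in the diff:

# Illustrative status-line sketch -- not part of the commit.
import sys
import time

for iteration in range(1, 6):
    fnow = 1.0 / iteration
    sys.stdout.write('\r')  # return to the start of the line first
    sys.stdout.write('Iteration: {0:<5g} Objective:{1:< 12g}'.format(iteration, fnow))
    sys.stdout.flush()      # force the partial line onto the terminal
    time.sleep(0.2)
print()  # finish with a newline so the last status line is kept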
@@ -10,7 +10,7 @@ from GPy.models.sparse_GP import sparse_GP
import itertools
from matplotlib import pyplot
import pylab

from GPy.util.linalg import PCA

class MRD(model):
    """
@@ -22,7 +22,7 @@ class MRD(model):
    :type Ylist: [np.ndarray]
    :param names: names for different gplvm models
    :type names: [str]
    :param Q: latent dimensionality
    :param Q: latent dimensionality (will raise
    :type Q: int
    :param init: initialisation method for the latent space
    :type init: 'PCA'|'random'
@@ -40,10 +40,11 @@ class MRD(model):
        kernel to use
    """
    def __init__(self, *Ylist, **kwargs):
        self._debug = False
        if kwargs.has_key("_debug"):
            self._debug = kwargs['_debug']
            del kwargs['_debug']
        else:
            self._debug = False
        if kwargs.has_key("names"):
            self.names = kwargs['names']
            del kwargs['names']
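The constructor peels optional keywords such as `_debug` and `names` out of `**kwargs` before the remaining arguments are forwarded to the per-view models. A small sketch of the same pattern using `dict.pop` with defaults (the helper name is invented for illustration):

# Illustrative kwargs handling -- not part of the commit.
def _split_mrd_kwargs(**kwargs):
    debug = kwargs.pop('_debug', False)   # equivalent of the has_key/del dance, with a default
    names = kwargs.pop('names', None)
    return debug, names, kwargs           # the rest would go to the per-view models

print(_split_mrd_kwargs(_debug=True, Q=3, M=10))
# (True, None, {'Q': 3, 'M': 10})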
@@ -55,14 +56,30 @@ class MRD(model):
            del kwargs['kernel']
        else:
            k = lambda: None
        self.bgplvms = [Bayesian_GPLVM(Y, kernel=k(), **kwargs) for Y in Ylist]
        if kwargs.has_key('init'):
            init = kwargs['init']
            del kwargs['init']
        try:
            self.Q = kwargs["Q"]
        except KeyError:
            raise ValueError("Need Q for MRD")
        try:
            self.M = kwargs["M"]
        except KeyError:
            self.M = 10

        X = self._init_X(Ylist, init)
        Z = numpy.random.permutation(X.copy())[:self.M]

        self.bgplvms = [Bayesian_GPLVM(Y, kernel=k(), X=X, Z=Z, **kwargs) for Y in Ylist]

        self.gref = self.bgplvms[0]
        nparams = numpy.array([0] + [sparse_GP._get_params(g).size - g.Z.size for g in self.bgplvms])
        self.nparams = nparams.cumsum()
        self.Q = self.gref.Q

        self.N = self.gref.N
        self.NQ = self.N * self.Q
        self.M = self.gref.M
        self.MQ = self.M * self.Q

        model.__init__(self) # @UndefinedVariable
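Worth noting from the constructor above: the shared latent means X are initialised from the data, and the inducing inputs Z are then drawn by permuting the rows of X and keeping the first M, so every view starts from the same X and Z. A toy illustration of that one line (shapes taken from the demo, everything else invented):

# Illustrative inducing-point initialisation -- not part of the commit.
import numpy as np

N, M, Q = 150, 15, 3
X = np.random.randn(N, Q)              # stands in for _init_X(Ylist, init)

# permutation() on a 2-D array shuffles whole rows, so the slice picks
# M distinct latent points as initial inducing inputs.
Z = np.random.permutation(X.copy())[:M]
print(Z.shape)                          # (15, 3)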
@@ -87,9 +104,16 @@ class MRD(model):
        | mu | S | Z || theta1 | theta2 | .. | thetaN |
        =================================================================
        """
        X = self.gref.X.flatten()
        X_var = self.gref.X_variance.flatten()
        Z = self.gref.Z.flatten()
        X = self.gref.X.ravel()
        X_var = self.gref.X_variance.ravel()
        Z = self.gref.Z.ravel()

        if self._debug:
            for g in self.bgplvms:
                assert numpy.allclose(g.X.ravel(), X)
                assert numpy.allclose(g.X_variance.ravel(), X_var)
                assert numpy.allclose(g.Z.ravel(), Z)

        thetas = [sparse_GP._get_params(g)[g.Z.size:] for g in self.bgplvms]
        params = numpy.hstack([X, X_var, Z, numpy.hstack(thetas)])
        return params
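The docstring table above spells out the flat parameter layout: the shared variational means mu, variances S and inducing inputs Z come first, followed by one theta block per view. A toy example of assembling such a vector (shapes and block sizes invented for illustration):

# Illustrative parameter packing -- not part of the commit.
import numpy as np

N, M, Q = 4, 2, 3
mu = np.random.randn(N, Q)             # variational means (gref.X)
S = np.random.rand(N, Q)               # variational variances (gref.X_variance)
Z = np.random.randn(M, Q)              # inducing inputs shared by all views
thetas = [np.ones(3), np.ones(4)]      # kernel/likelihood params, one block per view

params = np.hstack([mu.ravel(), S.ravel(), Z.ravel(), np.hstack(thetas)])
assert params.size == 2 * N * Q + M * Q + 3 + 4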
@@ -105,14 +129,20 @@ class MRD(model):

    def _set_params(self, x):
        start = 0; end = self.NQ
        X = x[start:end].reshape(self.N, self.Q).copy()
        X = x[start:end].reshape(self.N, self.Q)
        start = end; end += start
        X_var = x[start:end].reshape(self.N, self.Q).copy()
        X_var = x[start:end].reshape(self.N, self.Q)
        start = end; end += self.MQ
        Z = x[start:end].reshape(self.M, self.Q).copy()
        Z = x[start:end].reshape(self.M, self.Q)
        thetas = x[end:]

        # set params for all others:
        if self._debug:
            for g in self.bgplvms:
                assert numpy.allclose(g.X, self.gref.X)
                assert numpy.allclose(g.X_variance, self.gref.X_variance)
                assert numpy.allclose(g.Z, self.gref.Z)

        # set params for all:
        for g, s, e in itertools.izip(self.bgplvms, self.nparams, self.nparams[1:]):
            self._set_var_params(g, X, X_var, Z)
            self._set_kern_params(g, thetas[s:e].copy())
@@ -121,10 +151,10 @@ class MRD(model):


    def log_likelihood(self):
        ll = +self.gref.KL_divergence()
        ll = -self.gref.KL_divergence()
        for g in self.bgplvms:
            ll -= sparse_GP.log_likelihood(g)
        return -ll
            ll += sparse_GP.log_likelihood(g)
        return ll

    def _log_likelihood_gradients(self):
        dLdmu, dLdS = reduce(lambda a, b: [a[0] + b[0], a[1] + b[1]], (g.dL_dmuS() for g in self.bgplvms))
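In its rewritten form, log_likelihood above amounts to summing each view's sparse GP bound and subtracting the KL term once, since q(X) is shared by all views. Restated as a tiny standalone function for orientation (names and numbers invented):

# Illustrative restatement of the MRD objective -- not part of the commit.
def mrd_bound(view_bounds, kl_shared):
    # view_bounds: per-view sparse GP lower bounds, as from sparse_GP.log_likelihood
    # kl_shared:   KL[q(X) || p(X)] for the shared latent posterior, counted once
    return sum(view_bounds) - kl_shared

print(mrd_bound([-120.5, -310.2], 15.7))   # roughly -446.4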
@@ -141,6 +171,17 @@ class MRD(model):
                              partial=g.partial_for_likelihood)]) \
            for g in self.bgplvms])))

    def _init_X(self, Ylist, init='PCA_concat'):
        if init in "PCA_concat":
            X = PCA(numpy.hstack(Ylist), self.Q)[0]
        elif init in "PCA_single":
            X = numpy.zeros((Ylist[0].shape[0], self.Q))
            for qs, Y in itertools.izip(numpy.array_split(numpy.arange(self.Q), len(Ylist)), Ylist):
                X[:, qs] = PCA(Y, len(qs))[0]
        else: # init == 'random':
            X = numpy.random.randn(Ylist[0].shape[0], self.Q)
        return X

    def plot_X(self):
        fig = pylab.figure("MRD X", figsize=(4 * len(self.bgplvms), 3))
        fig.clf()
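One detail of _init_X above: `init in "PCA_concat"` is a substring test, not an equality check, which is presumably why the demo can pass init="concat" and still get PCA on the concatenated views. A quick illustration of how the dispatch behaves (input strings invented):

# Illustrative dispatch check -- not part of the commit.
for init in ("PCA", "concat", "PCA_concat", "single", "random"):
    if init in "PCA_concat":
        branch = "PCA on the concatenated views"
    elif init in "PCA_single":
        branch = "per-view PCA on a slice of the latent dimensions"
    else:
        branch = "random normal initialisation"
    print(init, "->", branch)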
@@ -180,3 +221,19 @@ class MRD(model):
        pylab.draw()
        fig.tight_layout()
        return fig

    def _debug_plot(self):
        self.plot_X()
        self.plot_latent()
        self.plot_scales()

    def _debug_optimize(self, opt='scg', maxiters=500, itersteps=10):
        iters = 0
        optstep = lambda: self.optimize(opt, messages=1, max_iters=itersteps)
        self._debug_plot()
        raw_input("enter to start debug")
        while iters < maxiters:
            optstep()
            self._debug_plot()
            iters += itersteps