fix ss_mrd and fix white and bias kernel

Zhenwen Dai 2014-07-01 13:16:57 +01:00
parent 2c88528ebd
commit 129985998c
4 changed files with 21 additions and 17 deletions

View file

@@ -520,7 +520,7 @@ class Indexable(Nameable, Observable):
         self.constraints.add(t, self._raveled_index())
         t.add_tied_parameter(self)
         self._highest_parent_._connect_fixes()

     def constrain(self, transform, warning=True, trigger_parent=True):
         """

View file

@@ -88,17 +88,18 @@ class Add(CombinationKernel):
             # rbf X bias
             #elif isinstance(p1, (Bias, Fixed)) and isinstance(p2, (RBF, RBFInv)):
             elif isinstance(p1, Bias) and isinstance(p2, (RBF, Linear)):
-                tmp = p2.psi1(Z, variational_posterior)
-                psi2 += p1.variance * (tmp[:, :, None] + tmp[:, None, :])
+                tmp = p2.psi1(Z, variational_posterior).sum(axis=0)
+                psi2 += p1.variance * (tmp[:,None]+tmp[None,:]) #(tmp[:, :, None] + tmp[:, None, :])
             #elif isinstance(p2, (Bias, Fixed)) and isinstance(p1, (RBF, RBFInv)):
             elif isinstance(p2, Bias) and isinstance(p1, (RBF, Linear)):
-                tmp = p1.psi1(Z, variational_posterior)
-                psi2 += p2.variance * (tmp[:, :, None] + tmp[:, None, :])
+                tmp = p1.psi1(Z, variational_posterior).sum(axis=0)
+                psi2 += p2.variance * (tmp[:,None]+tmp[None,:]) #(tmp[:, :, None] + tmp[:, None, :])
             elif isinstance(p2, (RBF, Linear)) and isinstance(p1, (RBF, Linear)):
                 assert np.intersect1d(p1.active_dims, p2.active_dims).size == 0, "only non overlapping kernel dimensions allowed so far"
                 tmp1 = p1.psi1(Z, variational_posterior)
                 tmp2 = p2.psi1(Z, variational_posterior)
-                psi2 += (tmp1[:, :, None] * tmp2[:, None, :]) + (tmp2[:, :, None] * tmp1[:, None, :])
+                psi2 += np.einsum('nm,no->mo',tmp1,tmp2)+np.einsum('nm,no->mo',tmp2,tmp1)
+                #(tmp1[:, :, None] * tmp2[:, None, :]) + (tmp2[:, :, None] * tmp1[:, None, :])
             else:
                 raise NotImplementedError, "psi2 cannot be computed for this kernel"
         return psi2

View file

@@ -39,7 +39,7 @@ class Static(Kern):

     def psi2(self, Z, variational_posterior):
         K = self.K(variational_posterior.mean, Z)
-        return K[:,:,None]*K[:,None,:] # NB. more efficient implementations on inherriting classes
+        return np.einsum('ij,ik->jk',K,K) #K[:,:,None]*K[:,None,:] # NB. more efficient implementations on inherriting classes

 class White(Static):
@@ -53,7 +53,7 @@ class White(Static):
         return np.zeros((X.shape[0], X2.shape[0]))

     def psi2(self, Z, variational_posterior):
-        return np.zeros((variational_posterior.shape[0], Z.shape[0], Z.shape[0]), dtype=np.float64)
+        return np.zeros((Z.shape[0], Z.shape[0]), dtype=np.float64)

     def update_gradients_full(self, dL_dK, X, X2=None):
         self.variance.gradient = np.trace(dL_dK)
@@ -82,12 +82,12 @@ class Bias(Static):
         self.variance.gradient = dL_dKdiag.sum()

     def psi2(self, Z, variational_posterior):
-        ret = np.empty((variational_posterior.shape[0], Z.shape[0], Z.shape[0]), dtype=np.float64)
-        ret[:] = self.variance**2
+        ret = np.empty((Z.shape[0], Z.shape[0]), dtype=np.float64)
+        ret[:] = self.variance*self.variance*variational_posterior.shape[0]
         return ret

     def update_gradients_expectations(self, dL_dpsi0, dL_dpsi1, dL_dpsi2, Z, variational_posterior):
-        self.variance.gradient = dL_dpsi0.sum() + dL_dpsi1.sum() + 2.*self.variance*dL_dpsi2.sum()
+        self.variance.gradient = dL_dpsi0.sum() + dL_dpsi1.sum() + 2.*self.variance*dL_dpsi2.sum()*variational_posterior.shape[0]

 class Fixed(Static):
     def __init__(self, input_dim, covariance_matrix, variance=1., active_dims=None, name='fixed'):
@@ -97,7 +97,7 @@ class Fixed(Static):
         :param variance: the variance of the kernel
         :type variance: float
         """
-        super(Bias, self).__init__(input_dim, variance, active_dims, name)
+        super(Fixed, self).__init__(input_dim, variance, active_dims, name)
         self.fixed_K = covariance_matrix

     def K(self, X, X2):
         return self.variance * self.fixed_K
@@ -112,7 +112,7 @@ class Fixed(Static):
         self.variance.gradient = np.einsum('i,i', dL_dKdiag, self.fixed_K)

     def psi2(self, Z, variational_posterior):
-        return np.zeros((variational_posterior.shape[0], Z.shape[0], Z.shape[0]), dtype=np.float64)
+        return np.zeros((Z.shape[0], Z.shape[0]), dtype=np.float64)

     def update_gradients_expectations(self, dL_dpsi0, dL_dpsi1, dL_dpsi2, Z, variational_posterior):
         self.variance.gradient = dL_dpsi0.sum()
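Note on the static-kernel changes: White, Bias and Fixed now follow the same summed-psi2 convention, returning an M x M matrix rather than an N x M x M tensor. For Bias the constant entries pick up a factor of N, and so does the psi2 term of the variance gradient. A small numpy sketch of that relationship (N, M and variance are illustrative values):

    import numpy as np

    N, M, variance = 4, 3, 0.7

    # old Bias.psi2: (N, M, M) tensor filled with variance**2
    old = np.empty((N, M, M)); old[:] = variance**2
    # new Bias.psi2: (M, M) matrix filled with variance**2 * N
    new = np.empty((M, M)); new[:] = variance * variance * N
    assert np.allclose(old.sum(axis=0), new)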

View file

@@ -15,14 +15,17 @@ class SSMRD(Model):
         self.updates = False
         self.models = [SSGPLVM(y, input_dim, X=X, X_variance=X_variance, num_inducing=num_inducing,Z=Z,init=initx,
-                               kernel=kernel if kernel else None,inference_method=inference_method,likelihood=likelihoods,
+                               kernel=kernel.copy() if kernel else None,inference_method=inference_method,likelihood=likelihoods,
                                name='model_'+str(i)) for i,y in enumerate(Ylist)]
         self.add_parameters(*(self.models))
+        [[[self.models[m].X.mean[i,j:j+1].tie('mean_'+str(i)+'_'+str(j)) for m in xrange(len(self.models))] for j in xrange(self.models[0].X.mean.shape[1])]
+            for i in xrange(self.models[0].X.mean.shape[0])]
+        [[[self.models[m].X.variance[i,j:j+1].tie('var_'+str(i)+'_'+str(j)) for m in xrange(len(self.models))] for j in xrange(self.models[0].X.variance.shape[1])]
+            for i in xrange(self.models[0].X.variance.shape[0])]
         self.updates = True
-        [[self.models[j].X.mean.flat[i:i+1].tie('mean_'+str(i)) for j in xrange(len(self.models))] for i in xrange(self.models[0].X.mean.size)]
-        [[self.models[j].X.variance.flat[i:i+1].tie('var_'+str(i)) for j in xrange(len(self.models))] for i in xrange(self.models[0].X.variance.size)]

     def parameters_changed(self):
         super(SSMRD, self).parameters_changed()
         self._log_marginal_likelihood = sum([m._log_marginal_likelihood for m in self.models])
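Note on the SSMRD change: kernel.copy() gives each SSGPLVM its own kernel instance, so the sub-models no longer share one set of kernel parameters. The latent mean/variance entries are now tied across sub-models by explicit [i, j:j+1] slicing with a per-entry name instead of slicing through .flat. Assuming GPy's Param slicing follows numpy view semantics, a plausible motivation is that slicing a flatiter yields a detached copy rather than a view, so operations through it would not propagate back to the parameter; a standalone numpy illustration:

    import numpy as np

    A = np.zeros((2, 3))

    v = A[0, 1:2]    # ordinary slice: a view into A
    v[:] = 5.0
    assert A[0, 1] == 5.0

    f = A.flat[2:3]  # slicing a flatiter: a detached copy
    f[:] = 7.0
    assert A.flat[2] == 0.0  # A is unchanged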