Merge branch 'params' of github.com:SheffieldML/GPy into params

James Hensman 2014-02-17 12:05:31 +00:00
commit 65a2c2a320
9 changed files with 112 additions and 14 deletions

View file

@@ -54,19 +54,21 @@ class SparseGP(GP):
         self.add_parameter(self.Z, index=0)
         self.parameters_changed()

-    def parameters_changed(self):
-        self.posterior, self._log_marginal_likelihood, self.grad_dict = self.inference_method.inference(self.kern, self.X, self.X_variance, self.Z, self.likelihood, self.Y)
+    def _update_gradients_Z(self, add=False):
         #The derivative of the bound wrt the inducing inputs Z (unless they're all fixed)
         if not self.Z.is_fixed:
-            self.Z.gradient = self.kern.gradients_X(self.grad_dict['dL_dKmm'], self.Z)
+            if add: self.Z.gradient += self.kern.gradients_X(self.grad_dict['dL_dKmm'], self.Z)
+            else: self.Z.gradient = self.kern.gradients_X(self.grad_dict['dL_dKmm'], self.Z)
             if self.X_variance is None:
                 self.Z.gradient += self.kern.gradients_X(self.grad_dict['dL_dKnm'].T, self.Z, self.X)
             else:
                 self.Z.gradient += self.kern.dpsi1_dZ(self.grad_dict['dL_dpsi1'], self.Z, self.X, self.X_variance)
                 self.Z.gradient += self.kern.dpsi2_dZ(self.grad_dict['dL_dpsi2'], self.Z, self.X, self.X_variance)

+    def parameters_changed(self):
+        self.posterior, self._log_marginal_likelihood, self.grad_dict = self.inference_method.inference(self.kern, self.X, self.X_variance, self.Z, self.likelihood, self.Y)
+        self._update_gradients_Z(add=False)
+
     def _raw_predict(self, Xnew, X_variance_new=None, which_parts='all', full_cov=False):
         """
         Make a prediction for the latent function values
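
This refactor pulls the Z-gradient computation out of parameters_changed into its own _update_gradients_Z method, so a caller can either overwrite or accumulate the gradient. A minimal sketch of that overwrite-vs-accumulate pattern, using made-up class and attribute names rather than GPy's:

    import numpy as np

    class Model(object):
        # made-up stand-in, not GPy's SparseGP
        def __init__(self):
            self.Z_gradient = np.zeros(3)

        def update_gradients_Z(self, term, add=False):
            if add:
                self.Z_gradient += term        # accumulate onto an existing gradient
            else:
                self.Z_gradient = term.copy()  # start fresh

    m = Model()
    m.update_gradients_Z(np.ones(3))             # overwrite  -> [1. 1. 1.]
    m.update_gradients_Z(np.ones(3), add=True)   # accumulate -> [2. 2. 2.]
    print(m.Z_gradient)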

View file

@@ -59,8 +59,9 @@ class Optimizer():
         """
         See GPy.plotting.matplot_dep.inference_plots
         """
+        import sys
         assert "matplotlib" in sys.modules, "matplotlib package has not been imported."
-        from ..plotting.matplot_dep import inference_plots
+        from ...plotting.matplot_dep import inference_plots
         inference_plots.plot_optimizer(self)
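
The added import sys fixes a NameError (the assert referenced sys.modules without importing sys), and the deeper relative import reflects the module now sitting one package level lower. A small sketch of the guard pattern, assuming the caller imports matplotlib first; note it checks that matplotlib has already been imported, not that it is installed:

    import sys

    def plot_if_available():
        # fail with a clear message instead of an import error deep in matplotlib
        assert "matplotlib" in sys.modules, "matplotlib package has not been imported."
        import matplotlib.pyplot as plt  # safe: already imported by the caller
        plt.plot([0, 1], [0, 1])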

View file

@@ -14,7 +14,8 @@ class Bias(Kernpart):
         :type variance: float
         """
         super(Bias, self).__init__(input_dim, name)
-        self.variance = Param("variance", variance)
+        from ...core.parameterization.transformations import Logexp
+        self.variance = Param("variance", variance, Logexp())
         self.add_parameter(self.variance)

     def K(self,X,X2,target):
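
The Logexp transformation constrains the bias variance to stay positive during optimization. A minimal sketch of what a Logexp-style (softplus) reparameterisation does, independent of GPy's actual Transformation API:

    import numpy as np

    def logexp(theta):
        # unconstrained optimizer value -> strictly positive model value
        return np.log(1. + np.exp(theta))

    def logexp_inverse(value):
        # strictly positive model value -> unconstrained optimizer value
        return np.log(np.exp(value) - 1.)

    theta = -3.0
    print(logexp(theta))                   # ~0.0486, small but positive
    print(logexp_inverse(logexp(theta)))   # -3.0, round-trips exactly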

View file

@@ -61,7 +61,7 @@ class Linear(Kernpart):
     def update_gradients_full(self, dL_dK, X):
         #self.variances.gradient[:] = 0
-        self._param_grad_helper(dL_dK, X, self.variances.gradient)
+        self._param_grad_helper(dL_dK, X, None, self.variances.gradient)

     def update_gradients_sparse(self, dL_dKmm, dL_dKnm, dL_dKdiag, X, Z):
         tmp = dL_dKdiag[:, None] * X ** 2
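
The change threads an extra argument through _param_grad_helper. A plausible reading (an assumption, since the helper's signature is not shown here) is that the new argument is an optional second input X2, with None meaning the symmetric K(X, X) case. A hypothetical helper with that convention:

    import numpy as np

    def param_grad_helper(dL_dK, X, X2, grad_out):
        # hypothetical signature: X2 = None means the symmetric K(X, X) case
        if X2 is None:
            X2 = X
        # ARD linear kernel k(x, x') = sum_k variance_k * x_k * x'_k, so
        # dL/dvariance_k = sum_ij dL_dK[i, j] * X[i, k] * X2[j, k]
        grad_out[:] = np.einsum('ij,ik,jk->k', dL_dK, X, X2)

    n, d = 4, 2
    X = np.random.randn(n, d)
    grad = np.zeros(d)
    param_grad_helper(np.eye(n), X, None, grad)   # gradient for K(X, X)
    print(grad)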

View file

@@ -265,7 +265,7 @@ class RBF(Kernpart):
                 }
             }
             """
             num_data, num_inducing, input_dim = X.shape[0], X.shape[0], self.input_dim
-            X, dvardLdK = param_to_array(X, dvardLdK)
+            X, dvardLdK, var_len3 = param_to_array(X, dvardLdK, var_len3)
             weave.inline(code, arg_names=['num_data', 'num_inducing', 'input_dim', 'X', 'target', 'dvardLdK', 'var_len3'], type_converters=weave.converters.blitz, **self.weave_options)
         else:
             code = """
@@ -282,7 +282,7 @@ class RBF(Kernpart):
                 }
             }
             """
             num_data, num_inducing, input_dim = X.shape[0], X2.shape[0], self.input_dim
-            X, X2, dvardLdK = param_to_array(X, X2, dvardLdK)
+            X, X2, dvardLdK, var_len3 = param_to_array(X, X2, dvardLdK, var_len3)
             weave.inline(code, arg_names=['num_data', 'num_inducing', 'input_dim', 'X', 'X2', 'target', 'dvardLdK', 'var_len3'], type_converters=weave.converters.blitz, **self.weave_options)
     return target
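
Both call sites now unwrap var_len3 as well, since weave's blitz converters want plain numpy arrays rather than Param wrappers. A rough sketch of what a param_to_array-style unwrap might look like (GPy's own implementation may differ):

    import numpy as np

    def param_to_array(*args):
        # view each (possibly Param-like) argument back down to a base ndarray
        arrays = tuple(np.asarray(a).view(np.ndarray) for a in args)
        return arrays[0] if len(arrays) == 1 else arrays

    class FancyArray(np.ndarray):
        pass  # stand-in for a Param-like ndarray subclass

    X = np.zeros((5, 2)).view(FancyArray)
    X_plain = param_to_array(X)
    print(type(X_plain))   # plain numpy.ndarray, safe to hand to weave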

View file

@@ -72,9 +72,10 @@ class BayesianGPLVM(SparseGP, GPLVM):
         return 0.5 * (var_mean + var_S) - 0.5 * self.input_dim * self.num_data

     def parameters_changed(self):
-        super(BayesianGPLVM, self).parameters_changed()
+        self.posterior, self._log_marginal_likelihood, self.grad_dict = self.inference_method.inference(self.kern, self.X, self.X_variance, self.Z, self.likelihood, self.Y)
+        self._update_gradients_Z(add=False)
         self._log_marginal_likelihood -= self.KL_divergence()

         dL_dmu, dL_dS = self.dL_dmuS()
         # dL:
@@ -161,6 +162,38 @@ class BayesianGPLVM(SparseGP, GPLVM):
         return dim_reduction_plots.plot_steepest_gradient_map(self,*args,**kwargs)

+class BayesianGPLVMWithMissingData(BayesianGPLVM):
+    def __init__(self, Y, input_dim, X=None, X_variance=None, init='PCA', num_inducing=10,
+                 Z=None, kernel=None, inference_method=None, likelihood=None, name='bayesian gplvm', **kwargs):
+        from ..util.subarray_and_sorting import common_subarrays
+        self.subarrays = common_subarrays(Y)
+        import ipdb;ipdb.set_trace()
+        BayesianGPLVM.__init__(self, Y, input_dim, X=X, X_variance=X_variance, init=init, num_inducing=num_inducing, Z=Z, kernel=kernel, inference_method=inference_method, likelihood=likelihood, name=name, **kwargs)
+
+    def parameters_changed(self):
+        super(BayesianGPLVM, self).parameters_changed()
+        self._log_marginal_likelihood -= self.KL_divergence()
+
+        dL_dmu, dL_dS = self.dL_dmuS()
+        # dL:
+        self.q.mean.gradient = dL_dmu
+        self.q.variance.gradient = dL_dS
+        # dKL:
+        self.q.mean.gradient -= self.X
+        self.q.variance.gradient -= (1. - (1. / (self.X_variance))) * 0.5
+
+if __name__ == '__main__':
+    import numpy as np
+    X = np.random.randn(20,2)
+    W = np.linspace(0,1,10)[None,:]
+    Y = (X*W).sum(1)
+    missing = np.random.binomial(1,.1,size=Y.shape)
+    pass
+
 def latent_cost_and_grad(mu_S, kern, Z, dL_dpsi0, dL_dpsi1, dL_dpsi2):
     """
     objective function for fitting the latent variables for test points
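
The new BayesianGPLVMWithMissingData class (clearly work in progress: note the committed ipdb.set_trace() and the placeholder __main__ block) groups observations by their missing-data pattern via common_subarrays. A hypothetical re-implementation of that grouping idea with numpy, not GPy's actual function:

    import numpy as np
    from collections import defaultdict

    def group_by_missing_pattern(Y):
        # key: a row's boolean missingness pattern; value: indices of such rows
        groups = defaultdict(list)
        for i, row in enumerate(np.isnan(Y)):
            groups[tuple(row)].append(i)
        return groups

    Y = np.random.randn(6, 3)
    Y[np.random.rand(6, 3) < .3] = np.nan
    for pattern, rows in group_by_missing_pattern(Y).items():
        print(pattern, rows)   # rows sharing one pattern can be handled together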

View file

@@ -0,0 +1,60 @@
+'''
+Created on 12 Feb 2014
+
+@author: maxz
+'''
+import unittest
+import numpy as np
+from GPy.core.parameterization.index_operations import ParameterIndexOperations,\
+    ParameterIndexOperationsView
+
+one, two, three = 'one', 'two', 'three'
+
+class Test(unittest.TestCase):
+
+    def setUp(self):
+        self.param_index = ParameterIndexOperations()
+        self.param_index.add(one, [3])
+        self.param_index.add(two, [0,5])
+        self.param_index.add(three, [2,4,7])
+
+    def test_remove(self):
+        self.param_index.remove(three, np.r_[3:10])
+        self.assertListEqual(self.param_index[three].tolist(), [2])
+        self.param_index.remove(one, [1])
+        self.assertListEqual(self.param_index[one].tolist(), [3])
+
+    def test_index_view(self):
+        #=======================================================================
+        # index:  0      1      2      3      4      5      6      7      8      9
+        # one:                         one
+        # two:    two                                two
+        # three:                three         three                three
+        # view:                [0      1      2      3      4      5]
+        #=======================================================================
+        view = ParameterIndexOperationsView(self.param_index, 2, 6)
+        self.assertSetEqual(set(view.properties()), set([one, two, three]))
+        for v,p in zip(view.properties_for(np.r_[:6]), self.param_index.properties_for(np.r_[2:2+6])):
+            self.assertEqual(v, p)
+        self.assertSetEqual(set(view[two]), set([3]))
+        self.assertSetEqual(set(self.param_index[two]), set([0, 5]))
+        view.add(two, np.array([0]))
+        self.assertSetEqual(set(view[two]), set([0,3]))
+        self.assertSetEqual(set(self.param_index[two]), set([0, 2, 5]))
+        view.clear()
+        for v,p in zip(view.properties_for(np.r_[:6]), self.param_index.properties_for(np.r_[2:2+6])):
+            self.assertEqual(v, p)
+            self.assertEqual(v, [])
+        param_index = ParameterIndexOperations()
+        param_index.add(one, [3])
+        param_index.add(two, [0,5])
+        param_index.add(three, [2,4,7])
+        view2 = ParameterIndexOperationsView(param_index, 2, 6)
+        view.update(view2)
+        for [i,v],[i2,v2] in zip(sorted(param_index.items()), sorted(self.param_index.items())):
+            self.assertEqual(i, i2)
+            self.assertTrue(np.all(v == v2))
+
+if __name__ == "__main__":
+    #import sys;sys.argv = ['', 'Test.test_index_view']
+    unittest.main()
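
The new test pins down the index arithmetic of ParameterIndexOperationsView: a view over [offset, offset + size) shifts indices, so backing index 5 appears as index 3 in a view starting at 2. A toy sketch of that translation (not GPy's implementation):

    class ToyIndexView(object):
        def __init__(self, backing, offset, size):
            self.backing, self.offset, self.size = backing, offset, size

        def __getitem__(self, prop):
            # translate backing indices into view coordinates, keep in-range ones
            return [i - self.offset for i in self.backing.get(prop, [])
                    if 0 <= i - self.offset < self.size]

    backing = {'two': [0, 5]}
    view = ToyIndexView(backing, 2, 6)
    print(view['two'])   # [3]: backing index 5 lands at view index 3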

View file

@@ -10,6 +10,7 @@ import datasets
 import mocap
 import decorators
 import classification
+import subarray_and_sorting
 import caching

 try:

View file

@@ -29,7 +29,7 @@ class Cacher(object):
         return self.cached_outputs[-1]

     def on_cache_changed(self, X):
-        print id(X)
+        #print id(X)
         i = self.cached_inputs.index(X)
         self.inputs_changed[i] = True
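
Commenting out the print statement silences debug output in the cache-invalidation path. A minimal, hypothetical stand-in for the scheme on_cache_changed implements: observed inputs notify the cache when they mutate, and the matching entry is only flagged stale, to be recomputed lazily on the next call.

    class ToyCacher(object):
        def __init__(self, operation):
            self.operation = operation
            self.cached_inputs, self.cached_outputs, self.inputs_changed = [], [], []

        def __call__(self, X):
            if X in self.cached_inputs:
                i = self.cached_inputs.index(X)
                if self.inputs_changed[i]:
                    # stale entry: recompute once, then serve from cache again
                    self.cached_outputs[i] = self.operation(X)
                    self.inputs_changed[i] = False
                return self.cached_outputs[i]
            self.cached_inputs.append(X)
            self.cached_outputs.append(self.operation(X))
            self.inputs_changed.append(False)
            return self.cached_outputs[-1]

        def on_cache_changed(self, X):
            # called by an observed input when it mutates: flag, don't recompute
            self.inputs_changed[self.cached_inputs.index(X)] = True

    square = ToyCacher(lambda x: x * x)
    print(square(3.0))           # computed
    print(square(3.0))           # served from cache
    square.on_cache_changed(3.0)
    print(square(3.0))           # recomputed after invalidation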