Merge branch 'devel' of github.com:SheffieldML/GPy into devel

This commit is contained in:
Zhenwen Dai 2015-09-04 17:27:34 +01:00
commit 7335d4e62e
6 changed files with 26 additions and 40 deletions

View file

@@ -114,7 +114,7 @@ class GP(Model):
self._predictive_variable = self.X
def set_XY(self, X=None, Y=None, trigger_update=True):
def set_XY(self, X=None, Y=None):
"""
Set the input / output data of the model
This is useful if we wish to change our existing data but maintain the same model
@@ -124,7 +124,7 @@ class GP(Model):
:param Y: output observations
:type Y: np.ndarray
"""
if trigger_update: self.update_model(False)
self.update_model(False)
if Y is not None:
if self.normalizer is not None:
self.normalizer.scale_by(Y)
@@ -140,34 +140,33 @@ class GP(Model):
assert isinstance(X, type(self.X)), "The given X must have the same type as the X in the model!"
self.unlink_parameter(self.X)
self.X = X
self.link_parameters(self.X)
self.link_parameter(self.X)
else:
self.unlink_parameter(self.X)
from ..core import Param
self.X = Param('latent mean',X)
self.link_parameters(self.X)
self.link_parameter(self.X)
else:
self.X = ObsAr(X)
if trigger_update: self.update_model(True)
if trigger_update: self._trigger_params_changed()
self.update_model(True)
def set_X(self,X, trigger_update=True):
def set_X(self,X):
"""
Set the input data of the model
:param X: input observations
:type X: np.ndarray
"""
self.set_XY(X=X, trigger_update=trigger_update)
self.set_XY(X=X)
def set_Y(self,Y, trigger_update=True):
def set_Y(self,Y):
"""
Set the output data of the model
:param X: output observations
:type X: np.ndarray
"""
self.set_XY(Y=Y, trigger_update=trigger_update)
self.set_XY(Y=Y)
def parameters_changed(self):
"""

View file

@@ -180,6 +180,7 @@ class Param(Parameterizable, ObsAr):
import copy
Pickleable.__setstate__(s, copy.deepcopy(self.__getstate__(), memo))
return s
def _setup_observers(self):
"""
Setup the default observers

View file

@@ -107,8 +107,7 @@ class BayesianGPLVMMiniBatch(SparseGPMiniBatch):
self.X.gradient += self.kern.gradients_X_diag(full_values['dL_dKdiag'], self.X)
def _outer_init_full_values(self):
full_values = super(BayesianGPLVMMiniBatch, self)._outer_init_full_values()
return full_values
return super(BayesianGPLVMMiniBatch, self)._outer_init_full_values()
def parameters_changed(self):
super(BayesianGPLVMMiniBatch,self).parameters_changed()
@@ -134,6 +133,8 @@ class BayesianGPLVMMiniBatch(SparseGPMiniBatch):
d = self.output_dim
self._log_marginal_likelihood -= kl_fctr*self.variational_prior.KL_divergence(self.X)*self.stochastics.batchsize/d
self._Xgrad = self.X.gradient.copy()
def plot_latent(self, labels=None, which_indices=None,
resolution=50, ax=None, marker='o', s=40,
fignum=None, plot_inducing=True, legend=True,

View file

@@ -174,16 +174,15 @@ class MRD(BayesianGPLVMMiniBatch):
#grad_dict = b.full_values
if self.has_uncertain_inputs():
self.X.mean.gradient += b.X.mean.gradient
self.X.variance.gradient += b.X.variance.gradient
self.X.gradient += b._Xgrad
else:
self.X.gradient += b.X.gradient
self.X.gradient += b._Xgrad
if self.has_uncertain_inputs():
# update for the KL divergence
self.variational_prior.update_gradients_KL(self.X)
self._log_marginal_likelihood -= self.variational_prior.KL_divergence(self.X)
pass
#if self.has_uncertain_inputs():
# # update for the KL divergence
# self.variational_prior.update_gradients_KL(self.X)
# self._log_marginal_likelihood -= self.variational_prior.KL_divergence(self.X)
# pass
def log_likelihood(self):
return self._log_marginal_likelihood

View file

@@ -20,6 +20,8 @@ from GPy.examples.dimensionality_reduction import mrd_simulation
from GPy.core.parameterization.variational import NormalPosterior
from GPy.models.gp_regression import GPRegression
from functools import reduce
from GPy.util.caching import Cacher
from pickle import PicklingError
def toy_model():
X = np.linspace(0,1,50)[:, None]
@@ -205,23 +207,6 @@ class Test(ListDictTestCase):
def _callback(self, what, which):
what.count += 1
@unittest.skip
def test_add_observer(self):
par = toy_model()
par.name = "original"
par.count = 0
par.add_observer(self, self._callback, 1)
pcopy = GPRegression(par.X.copy(), par.Y.copy(), kernel=par.kern.copy())
self.assertNotIn(par.observers[0], pcopy.observers)
pcopy = par.copy()
pcopy.name = "copy"
self.assertTrue(par.checkgrad())
self.assertTrue(pcopy.checkgrad())
self.assertTrue(pcopy.kern.checkgrad())
import ipdb;ipdb.set_trace()
self.assertIn(par.observers[0], pcopy.observers)
self.assertEqual(par.count, 3)
self.assertEqual(pcopy.count, 6) # 3 of each call to checkgrad
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.test_parameter_index_operations']

View file

@@ -3,6 +3,7 @@
from ..core.parameterization.observable import Observable
import collections, weakref
from functools import reduce
from pickle import PickleError
class Cacher(object):
def __init__(self, operation, limit=5, ignore_args=(), force_kwargs=()):
@@ -149,10 +150,10 @@ class Cacher(object):
return Cacher(self.operation, self.limit, self.ignore_args, self.force_kwargs)
def __getstate__(self, memo=None):
raise NotImplementedError("Trying to pickle Cacher object with function {}, pickling functions not possible.".format(str(self.operation)))
raise PickleError("Trying to pickle Cacher object with function {}, pickling functions not possible.".format(str(self.operation)))
def __setstate__(self, memo=None):
raise NotImplementedError("Trying to pickle Cacher object with function {}, pickling functions not possible.".format(str(self.operation)))
raise PickleError("Trying to pickle Cacher object with function {}, pickling functions not possible.".format(str(self.operation)))
@property
def __name__(self):