diff --git a/GPy/core/gp.py b/GPy/core/gp.py
index 8ce3482c..7b010e6c 100644
--- a/GPy/core/gp.py
+++ b/GPy/core/gp.py
@@ -90,8 +90,8 @@ class GP(Model):
         self.inference_method = inference_method
 
         logger.info("adding kernel and likelihood as parameters")
-        self.add_parameter(self.kern)
-        self.add_parameter(self.likelihood)
+        self.link_parameter(self.kern)
+        self.link_parameter(self.likelihood)
 
     def parameters_changed(self):
         self.posterior, self._log_marginal_likelihood, self.grad_dict = self.inference_method.inference(self.kern, self.X, self.likelihood, self.Y_normalized, self.Y_metadata)
diff --git a/GPy/core/model.py b/GPy/core/model.py
index c4fc7fd5..8c556da2 100644
--- a/GPy/core/model.py
+++ b/GPy/core/model.py
@@ -23,7 +23,7 @@ class Model(Parameterized):
         self.preferred_optimizer = 'bfgs'
         from .parameterization.ties_and_remappings import Tie
         self.tie = Tie()
-        self.add_parameter(self.tie, -1)
+        self.link_parameter(self.tie, -1)
         self.add_observer(self.tie, self.tie._parameters_changed_notification, priority=-500)
 
     def log_likelihood(self):
diff --git a/GPy/core/parameterization/parameterized.py b/GPy/core/parameterization/parameterized.py
index ba5cdf1a..7b5911a5 100644
--- a/GPy/core/parameterization/parameterized.py
+++ b/GPy/core/parameterization/parameterized.py
@@ -82,7 +82,7 @@ class Parameterized(Parameterizable):
         self._fixes_ = None
         self._param_slices_ = []
         #self._connect_parameters()
-        self.add_parameters(*parameters)
+        self.link_parameters(*parameters)
 
     def build_pydot(self, G=None):
         import pydot # @UnresolvedImport
@@ -110,7 +110,7 @@ class Parameterized(Parameterizable):
     #===========================================================================
     # Add remove parameters:
     #===========================================================================
-    def add_parameter(self, param, index=None, _ignore_added_names=False):
+    def link_parameter(self, param, index=None, _ignore_added_names=False):
         """
         :param parameters: the parameters to add
         :type parameters: list of or one :py:class:`GPy.core.param.Param`
@@ -122,8 +122,8 @@ class Parameterized(Parameterizable):
         at any given index using the :func:`list.insert` syntax
         """
         if param in self.parameters and index is not None:
-            self.remove_parameter(param)
-            self.add_parameter(param, index)
+            self.unlink_parameter(param)
+            self.link_parameter(param, index)
         # elif param.has_parent():
         #     raise HierarchyError, "parameter {} already in another model ({}), create new object (or copy) for adding".format(param._short(), param._highest_parent_._short())
         elif param not in self.parameters:
@@ -132,7 +132,7 @@
                 if parent is self:
                     raise HierarchyError, "You cannot add a parameter twice into the hierarchy"
             param.traverse_parents(visit, self)
-            param._parent_.remove_parameter(param)
+            param._parent_.unlink_parameter(param)
             # make sure the size is set
             if index is None:
                 start = sum(p.size for p in self.parameters)
@@ -168,14 +168,14 @@
             raise HierarchyError, """Parameter exists already, try making a copy"""
 
-    def add_parameters(self, *parameters):
+    def link_parameters(self, *parameters):
         """
         convenience method for adding several parameters without gradient specification
         """
-        [self.add_parameter(p) for p in parameters]
+        [self.link_parameter(p) for p in parameters]
 
-    def remove_parameter(self, param):
+    def unlink_parameter(self, param):
         """
         :param param: param object to remove from being a parameter of this parameterized object.
""" @@ -206,6 +206,11 @@ class Parameterized(Parameterizable): self._highest_parent_._connect_fixes() self._highest_parent_._notify_parent_change() + def add_parameter(self, *args, **kwargs): + raise DeprecationWarning, "add_parameter was renamed to link_parameter to avoid confusion of setting variables" + def remove_parameter(self, *args, **kwargs): + raise DeprecationWarning, "remove_parameter was renamed to link_parameter to avoid confusion of setting variables" + def _connect_parameters(self, ignore_added_names=False): # connect parameterlist to this parameterized object # This just sets up the right connection for the params objects @@ -294,7 +299,9 @@ class Parameterized(Parameterizable): if hasattr(self, "parameters"): try: pnames = self.parameter_names(False, adjust_for_printing=True, recursive=False) - if name in pnames: self.parameters[pnames.index(name)][:] = val; return + if name in pnames: + param = self.parameters[pnames.index(name)] + param[:] = val; return except AttributeError: pass object.__setattr__(self, name, val); diff --git a/GPy/core/parameterization/variational.py b/GPy/core/parameterization/variational.py index 2afeafbb..251ec7db 100644 --- a/GPy/core/parameterization/variational.py +++ b/GPy/core/parameterization/variational.py @@ -42,7 +42,7 @@ class SpikeAndSlabPrior(VariationalPrior): self.pi = Param('Pi', pi, Logistic(1e-10,1.-1e-10)) else: self.pi = Param('Pi', pi, __fixed__) - self.add_parameter(self.pi) + self.link_parameter(self.pi) def KL_divergence(self, variational_posterior): @@ -89,7 +89,7 @@ class VariationalPosterior(Parameterized): self.ndim = self.mean.ndim self.shape = self.mean.shape self.num_data, self.input_dim = self.mean.shape - self.add_parameters(self.mean, self.variance) + self.link_parameters(self.mean, self.variance) self.num_data, self.input_dim = self.mean.shape if self.has_uncertain_inputs(): assert self.variance.shape == self.mean.shape, "need one variance per sample and dimenion" @@ -156,7 +156,7 @@ class SpikeAndSlabPosterior(VariationalPosterior): """ super(SpikeAndSlabPosterior, self).__init__(means, variances, name) self.gamma = Param("binary_prob",binary_prob, Logistic(1e-10,1.-1e-10)) - self.add_parameter(self.gamma) + self.link_parameter(self.gamma) def __getitem__(self, s): if isinstance(s, (int, slice, tuple, list, np.ndarray)): diff --git a/GPy/core/sparse_gp.py b/GPy/core/sparse_gp.py index 358db125..6b923609 100644 --- a/GPy/core/sparse_gp.py +++ b/GPy/core/sparse_gp.py @@ -50,7 +50,7 @@ class SparseGP(GP): GP.__init__(self, X, Y, kernel, likelihood, inference_method=inference_method, name=name, Y_metadata=Y_metadata, normalizer=normalizer) logger.info("Adding Z as parameter") - self.add_parameter(self.Z, index=0) + self.link_parameter(self.Z, index=0) def has_uncertain_inputs(self): return isinstance(self.X, VariationalPosterior) diff --git a/GPy/core/symbolic.py b/GPy/core/symbolic.py index a2d61911..c3e1a52c 100644 --- a/GPy/core/symbolic.py +++ b/GPy/core/symbolic.py @@ -127,7 +127,7 @@ class Symbolic_core(): val = parameters[theta.name] # Add parameter. 
-            self.add_parameters(Param(theta.name, val, None))
+            self.link_parameters(Param(theta.name, val, None))
             #self._set_attribute(theta.name, )
 
     def eval_parameters_changed(self):
diff --git a/GPy/kern/_src/ODE_UY.py b/GPy/kern/_src/ODE_UY.py
index 510b4f7c..b4a2b42d 100644
--- a/GPy/kern/_src/ODE_UY.py
+++ b/GPy/kern/_src/ODE_UY.py
@@ -17,7 +17,7 @@ class ODE_UY(Kern):
         self.lengthscale_Y = Param('lengthscale_Y', lengthscale_Y, Logexp())
         self.lengthscale_U = Param('lengthscale_U', lengthscale_Y, Logexp())
 
-        self.add_parameters(self.variance_Y, self.variance_U, self.lengthscale_Y, self.lengthscale_U)
+        self.link_parameters(self.variance_Y, self.variance_U, self.lengthscale_Y, self.lengthscale_U)
 
     def K(self, X, X2=None):
         # model : a * dy/dt + b * y = U
diff --git a/GPy/kern/_src/add.py b/GPy/kern/_src/add.py
index 27f8ebd1..4c72a254 100644
--- a/GPy/kern/_src/add.py
+++ b/GPy/kern/_src/add.py
@@ -18,7 +18,7 @@ class Add(CombinationKernel):
             if isinstance(kern, Add):
                 del subkerns[i]
                 for part in kern.parts[::-1]:
-                    kern.remove_parameter(part)
+                    kern.unlink_parameter(part)
                     subkerns.insert(i, part)
         super(Add, self).__init__(subkerns, name)
@@ -171,10 +171,10 @@ class Add(CombinationKernel):
         if isinstance(other, Add):
             other_params = other.parameters[:]
             for p in other_params:
-                other.remove_parameter(p)
-            self.add_parameters(*other_params)
+                other.unlink_parameter(p)
+            self.link_parameters(*other_params)
         else:
-            self.add_parameter(other)
+            self.link_parameter(other)
         self.input_dim, self.active_dims = self.get_input_dim_active_dims(self.parts)
         return self
diff --git a/GPy/kern/_src/brownian.py b/GPy/kern/_src/brownian.py
index aeb11fa3..fd79973c 100644
--- a/GPy/kern/_src/brownian.py
+++ b/GPy/kern/_src/brownian.py
@@ -22,7 +22,7 @@ class Brownian(Kern):
         super(Brownian, self).__init__(input_dim, active_dims, name)
 
         self.variance = Param('variance', variance, Logexp())
-        self.add_parameters(self.variance)
+        self.link_parameters(self.variance)
 
     def K(self,X,X2=None):
         if X2 is None:
diff --git a/GPy/kern/_src/coregionalize.py b/GPy/kern/_src/coregionalize.py
index 7eccff3d..fc4a2f33 100644
--- a/GPy/kern/_src/coregionalize.py
+++ b/GPy/kern/_src/coregionalize.py
@@ -50,7 +50,7 @@ class Coregionalize(Kern):
         else:
             assert kappa.shape==(self.output_dim, )
         self.kappa = Param('kappa', kappa, Logexp())
-        self.add_parameters(self.W, self.kappa)
+        self.link_parameters(self.W, self.kappa)
 
     def parameters_changed(self):
         self.B = np.dot(self.W, self.W.T) + np.diag(self.kappa)
diff --git a/GPy/kern/_src/kern.py b/GPy/kern/_src/kern.py
index d8377ffc..4fcbf31f 100644
--- a/GPy/kern/_src/kern.py
+++ b/GPy/kern/_src/kern.py
@@ -221,7 +221,7 @@ class CombinationKernel(Kern):
         # initialize the kernel with the full input_dim
         super(CombinationKernel, self).__init__(input_dim, active_dims, name)
         self.extra_dims = extra_dims
-        self.add_parameters(*kernels)
+        self.link_parameters(*kernels)
 
     @property
     def parts(self):
diff --git a/GPy/kern/_src/linear.py b/GPy/kern/_src/linear.py
index c30e344e..9d1a956b 100644
--- a/GPy/kern/_src/linear.py
+++ b/GPy/kern/_src/linear.py
@@ -49,7 +49,7 @@ class Linear(Kern):
             variances = np.ones(self.input_dim)
 
         self.variances = Param('variances', variances, Logexp())
-        self.add_parameter(self.variances)
+        self.link_parameter(self.variances)
         self.psicomp = PSICOMP_Linear()
 
     @Cache_this(limit=2)
@@ -144,7 +144,7 @@ class LinearFull(Kern):
         self.W = Param('W', W)
         self.kappa = Param('kappa', kappa, Logexp())
-        self.add_parameters(self.W, self.kappa)
+        self.link_parameters(self.W, self.kappa)
 
     def K(self, X, X2=None):
         P = np.dot(self.W, self.W.T) + np.diag(self.kappa)
diff --git a/GPy/kern/_src/mlp.py b/GPy/kern/_src/mlp.py
index 0b561d4b..badbd60d 100644
--- a/GPy/kern/_src/mlp.py
+++ b/GPy/kern/_src/mlp.py
@@ -36,7 +36,7 @@ class MLP(Kern):
         self.variance = Param('variance', variance, Logexp())
         self.weight_variance = Param('weight_variance', weight_variance, Logexp())
         self.bias_variance = Param('bias_variance', bias_variance, Logexp())
-        self.add_parameters(self.variance, self.weight_variance, self.bias_variance)
+        self.link_parameters(self.variance, self.weight_variance, self.bias_variance)
 
     def K(self, X, X2=None):
diff --git a/GPy/kern/_src/periodic.py b/GPy/kern/_src/periodic.py
index 9f232ab0..e8e16506 100644
--- a/GPy/kern/_src/periodic.py
+++ b/GPy/kern/_src/periodic.py
@@ -33,7 +33,7 @@ class Periodic(Kern):
         self.variance = Param('variance', np.float64(variance), Logexp())
         self.lengthscale = Param('lengthscale', np.float64(lengthscale), Logexp())
         self.period = Param('period', np.float64(period), Logexp())
-        self.add_parameters(self.variance, self.lengthscale, self.period)
+        self.link_parameters(self.variance, self.lengthscale, self.period)
 
     def _cos(self, alpha, omega, phase):
         def f(x):
diff --git a/GPy/kern/_src/poly.py b/GPy/kern/_src/poly.py
index d40f805c..4c5f0e93 100644
--- a/GPy/kern/_src/poly.py
+++ b/GPy/kern/_src/poly.py
@@ -14,7 +14,7 @@ class Poly(Kern):
     def __init__(self, input_dim, variance=1., order=3., active_dims=None, name='poly'):
         super(Poly, self).__init__(input_dim, active_dims, name)
         self.variance = Param('variance', variance, Logexp())
-        self.add_parameter(self.variance)
+        self.link_parameter(self.variance)
         self.order=order
 
     def K(self, X, X2=None):
diff --git a/GPy/kern/_src/static.py b/GPy/kern/_src/static.py
index 7820c634..f4223bf4 100644
--- a/GPy/kern/_src/static.py
+++ b/GPy/kern/_src/static.py
@@ -11,7 +11,7 @@ class Static(Kern):
    def __init__(self, input_dim, variance, active_dims, name):
        super(Static, self).__init__(input_dim, active_dims, name)
        self.variance = Param('variance', variance, Logexp())
-       self.add_parameters(self.variance)
+       self.link_parameters(self.variance)
 
    def Kdiag(self, X):
        ret = np.empty((X.shape[0],), dtype=np.float64)
diff --git a/GPy/kern/_src/stationary.py b/GPy/kern/_src/stationary.py
index f7993e82..04427c2c 100644
--- a/GPy/kern/_src/stationary.py
+++ b/GPy/kern/_src/stationary.py
@@ -61,7 +61,7 @@ class Stationary(Kern):
         self.lengthscale = Param('lengthscale', lengthscale, Logexp())
         self.variance = Param('variance', variance, Logexp())
         assert self.variance.size==1
-        self.add_parameters(self.variance, self.lengthscale)
+        self.link_parameters(self.variance, self.lengthscale)
 
     def K_of_r(self, r):
         raise NotImplementedError, "implement the covariance function as a fn of r to use this class"
@@ -343,7 +343,7 @@ class RatQuad(Stationary):
     def __init__(self, input_dim, variance=1., lengthscale=None, power=2., ARD=False, active_dims=None, name='RatQuad'):
         super(RatQuad, self).__init__(input_dim, variance, lengthscale, ARD, active_dims, name)
         self.power = Param('power', power, Logexp())
-        self.add_parameters(self.power)
+        self.link_parameters(self.power)
 
     def K_of_r(self, r):
         r2 = np.power(r, 2.)
diff --git a/GPy/likelihoods/gamma.py b/GPy/likelihoods/gamma.py
index a6436616..ae85c113 100644
--- a/GPy/likelihoods/gamma.py
+++ b/GPy/likelihoods/gamma.py
@@ -25,7 +25,7 @@ class Gamma(Likelihood):
         super(Gamma, self).__init__(gp_link, 'Gamma')
 
         self.beta = Param('beta', beta)
-        self.add_parameter(self.beta)
+        self.link_parameter(self.beta)
         self.beta.fix()#TODO: gradients!
 
     def pdf_link(self, link_f, y, Y_metadata=None):
diff --git a/GPy/likelihoods/gaussian.py b/GPy/likelihoods/gaussian.py
index 6f08b4b4..4e10d3ef 100644
--- a/GPy/likelihoods/gaussian.py
+++ b/GPy/likelihoods/gaussian.py
@@ -40,7 +40,7 @@ class Gaussian(Likelihood):
         super(Gaussian, self).__init__(gp_link, name=name)
 
         self.variance = Param('variance', variance, Logexp())
-        self.add_parameter(self.variance)
+        self.link_parameter(self.variance)
 
         if isinstance(gp_link, link_functions.Identity):
             self.log_concave = True
diff --git a/GPy/likelihoods/mixed_noise.py b/GPy/likelihoods/mixed_noise.py
index c2435508..613f069d 100644
--- a/GPy/likelihoods/mixed_noise.py
+++ b/GPy/likelihoods/mixed_noise.py
@@ -14,7 +14,7 @@ class MixedNoise(Likelihood):
         #NOTE at the moment this likelihood only works for using a list of gaussians
         super(Likelihood, self).__init__(name=name)
 
-        self.add_parameters(*likelihoods_list)
+        self.link_parameters(*likelihoods_list)
         self.likelihoods_list = likelihoods_list
         self.log_concave = False
diff --git a/GPy/likelihoods/student_t.py b/GPy/likelihoods/student_t.py
index c057e789..3aeb43e0 100644
--- a/GPy/likelihoods/student_t.py
+++ b/GPy/likelihoods/student_t.py
@@ -29,8 +29,8 @@ class StudentT(Likelihood):
         # sigma2 is not a noise parameter, it is a squared scale.
         self.sigma2 = Param('t_scale2', float(sigma2), Logexp())
         self.v = Param('deg_free', float(deg_free))
-        self.add_parameter(self.sigma2)
-        self.add_parameter(self.v)
+        self.link_parameter(self.sigma2)
+        self.link_parameter(self.v)
         self.v.constrain_fixed()
 
         self.log_concave = False
diff --git a/GPy/mappings/linear.py b/GPy/mappings/linear.py
index 24a45511..315dfc0e 100644
--- a/GPy/mappings/linear.py
+++ b/GPy/mappings/linear.py
@@ -24,7 +24,7 @@ class Linear(Bijective_mapping):
         Bijective_mapping.__init__(self, input_dim=input_dim, output_dim=output_dim, name=name)
         self.W = Param('W',np.array((self.input_dim, self.output_dim)))
         self.bias = Param('bias',np.array(self.output_dim))
-        self.add_parameters(self.W, self.bias)
+        self.link_parameters(self.W, self.bias)
 
     def f(self, X):
         return np.dot(X,self.W) + self.bias
diff --git a/GPy/models/bayesian_gplvm.py b/GPy/models/bayesian_gplvm.py
index c9d1c68a..a4227119 100644
--- a/GPy/models/bayesian_gplvm.py
+++ b/GPy/models/bayesian_gplvm.py
@@ -78,7 +78,7 @@ class BayesianGPLVM(SparseGP):
         SparseGP.__init__(self, X, Y, Z, kernel, likelihood, inference_method, name, normalizer=normalizer)
 
         self.logger.info("Adding X as parameter")
-        self.add_parameter(self.X, index=0)
+        self.link_parameter(self.X, index=0)
 
         if mpi_comm != None:
             from ..util.mpi import divide_data
diff --git a/GPy/models/gp_kronecker_gaussian_regression.py b/GPy/models/gp_kronecker_gaussian_regression.py
index 0e8dab81..434661d2 100644
--- a/GPy/models/gp_kronecker_gaussian_regression.py
+++ b/GPy/models/gp_kronecker_gaussian_regression.py
@@ -35,12 +35,12 @@ class GPKroneckerGaussianRegression(Model):
         self.X2 = ObsAr(X2)
         self.Y = Y
         self.kern1, self.kern2 = kern1, kern2
-        self.add_parameter(self.kern1)
-        self.add_parameter(self.kern2)
+        self.link_parameter(self.kern1)
+        self.link_parameter(self.kern2)
 
         self.likelihood = likelihoods.Gaussian()
         self.likelihood.variance = noise_var
-        self.add_parameter(self.likelihood)
+        self.link_parameter(self.likelihood)
 
         self.num_data1, self.input_dim1 = self.X1.shape
         self.num_data2, self.input_dim2 = self.X2.shape
diff --git a/GPy/models/gp_var_gauss.py b/GPy/models/gp_var_gauss.py
index 68b62443..cd688360 100644
--- a/GPy/models/gp_var_gauss.py
+++ b/GPy/models/gp_var_gauss.py
@@ -32,13 +32,13 @@ class GPVariationalGaussianApproximation(Model):
         if kernel is None:
             kernel = kern.RBF(X.shape[1]) + kern.White(X.shape[1], 0.01)
         self.kern = kernel
-        self.add_parameter(self.kern)
+        self.link_parameter(self.kern)
 
         self.num_data, self.input_dim = self.X.shape
         self.alpha = Param('alpha', np.zeros(self.num_data))
         self.beta = Param('beta', np.ones(self.num_data))
-        self.add_parameter(self.alpha)
-        self.add_parameter(self.beta)
+        self.link_parameter(self.alpha)
+        self.link_parameter(self.beta)
 
         self.gh_x, self.gh_w = np.polynomial.hermite.hermgauss(20)
         self.Ysign = np.where(Y==1, 1, -1).flatten()
diff --git a/GPy/models/gplvm.py b/GPy/models/gplvm.py
index 8f5432ba..79128270 100644
--- a/GPy/models/gplvm.py
+++ b/GPy/models/gplvm.py
@@ -38,7 +38,7 @@ class GPLVM(GP):
         super(GPLVM, self).__init__(X, Y, kernel, likelihood, name='GPLVM')
 
         self.X = Param('latent_mean', X)
-        self.add_parameter(self.X, index=0)
+        self.link_parameter(self.X, index=0)
 
     def parameters_changed(self):
         super(GPLVM, self).parameters_changed()
diff --git a/GPy/models/gradient_checker.py b/GPy/models/gradient_checker.py
index b7c78449..74026f8e 100644
--- a/GPy/models/gradient_checker.py
+++ b/GPy/models/gradient_checker.py
@@ -76,7 +76,7 @@ class GradientChecker(Model):
         for name, xi in zip(self.names, at_least_one_element(x0)):
             self.__setattr__(name, Param(name, xi))
-            self.add_parameter(self.__getattribute__(name))
+            self.link_parameter(self.__getattribute__(name))
 
 #         self._param_names = []
 #         for name, shape in zip(self.names, self.shapes):
 #             self._param_names.extend(map(lambda nameshape: ('_'.join(nameshape)).strip('_'), itertools.izip(itertools.repeat(name), itertools.imap(lambda t: '_'.join(map(str, t)), itertools.product(*map(lambda xi: range(xi), shape))))))
diff --git a/GPy/models/mrd.py b/GPy/models/mrd.py
index 3acc7c6e..015df7bd 100644
--- a/GPy/models/mrd.py
+++ b/GPy/models/mrd.py
@@ -129,7 +129,7 @@ class MRD(SparseGP):
         else:
             likelihoods = likelihoods
 
         self.logger.info("adding X and Z")
-        self.add_parameters(self.X, self.Z)
+        self.link_parameters(self.X, self.Z)
 
         self.bgplvms = []
         self.num_data = Ylist[0].shape[0]
@@ -137,11 +137,11 @@
         for i, n, k, l, Y in itertools.izip(itertools.count(), Ynames, kernels, likelihoods, Ylist):
             assert Y.shape[0] == self.num_data, "All datasets need to share the number of datapoints, and those have to correspond to one another"
             p = Parameterized(name=n)
-            p.add_parameter(k)
+            p.link_parameter(k)
             p.kern = k
-            p.add_parameter(l)
+            p.link_parameter(l)
             p.likelihood = l
-            self.add_parameter(p)
+            self.link_parameter(p)
             self.bgplvms.append(p)
 
         self.posterior = None
diff --git a/GPy/testing/kernel_tests.py b/GPy/testing/kernel_tests.py
index a942dc49..83e1085c 100644
--- a/GPy/testing/kernel_tests.py
+++ b/GPy/testing/kernel_tests.py
@@ -51,7 +51,7 @@ class Kern_check_dK_dtheta(Kern_check_model):
     """
     def __init__(self, kernel=None, dL_dK=None, X=None, X2=None):
         Kern_check_model.__init__(self,kernel=kernel,dL_dK=dL_dK, X=X, X2=X2)
-        self.add_parameter(self.kernel)
+        self.link_parameter(self.kernel)
 
     def parameters_changed(self):
         return self.kernel.update_gradients_full(self.dL_dK, self.X, self.X2)
@@ -64,7 +64,7 @@ class Kern_check_dKdiag_dtheta(Kern_check_model):
     """
    def __init__(self, kernel=None, dL_dK=None, X=None):
        Kern_check_model.__init__(self,kernel=kernel,dL_dK=dL_dK, X=X, X2=None)
-       self.add_parameter(self.kernel)
+       self.link_parameter(self.kernel)
 
    def log_likelihood(self):
        return (np.diag(self.dL_dK)*self.kernel.Kdiag(self.X)).sum()
@@ -77,7 +77,7 @@ class Kern_check_dK_dX(Kern_check_model):
     def __init__(self, kernel=None, dL_dK=None, X=None, X2=None):
         Kern_check_model.__init__(self,kernel=kernel,dL_dK=dL_dK, X=X, X2=X2)
         self.X = Param('X',X)
-        self.add_parameter(self.X)
+        self.link_parameter(self.X)
 
     def parameters_changed(self):
         self.X.gradient[:] = self.kernel.gradients_X(self.dL_dK, self.X, self.X2)
diff --git a/GPy/testing/model_tests.py b/GPy/testing/model_tests.py
index af4b12e2..42f82121 100644
--- a/GPy/testing/model_tests.py
+++ b/GPy/testing/model_tests.py
@@ -65,28 +65,28 @@ class MiscTests(unittest.TestCase):
         np.testing.assert_equal(m.log_likelihood(), m2.log_likelihood())
         m.randomize()
         m2[:] = m[''].values()
-        np.testing.assert_equal(m.log_likelihood(), m2.log_likelihood())
+        np.testing.assert_almost_equal(m.log_likelihood(), m2.log_likelihood())
         m.randomize()
         m2[''] = m[:]
-        np.testing.assert_equal(m.log_likelihood(), m2.log_likelihood())
+        np.testing.assert_almost_equal(m.log_likelihood(), m2.log_likelihood())
         m.randomize()
         m2[:] = m[:]
-        np.testing.assert_equal(m.log_likelihood(), m2.log_likelihood())
+        np.testing.assert_almost_equal(m.log_likelihood(), m2.log_likelihood())
         m.randomize()
         m2[''] = m['']
-        np.testing.assert_equal(m.log_likelihood(), m2.log_likelihood())
+        np.testing.assert_almost_equal(m.log_likelihood(), m2.log_likelihood())
         m.kern.lengthscale.randomize()
         m2[:] = m[:]
-        np.testing.assert_equal(m.log_likelihood(), m2.log_likelihood())
+        np.testing.assert_almost_equal(m.log_likelihood(), m2.log_likelihood())
         m.Gaussian_noise.randomize()
         m2[:] = m[:]
-        np.testing.assert_equal(m.log_likelihood(), m2.log_likelihood())
+        np.testing.assert_almost_equal(m.log_likelihood(), m2.log_likelihood())
         m['.*var'] = 2
         m2['.*var'] = m['.*var']
-        np.testing.assert_equal(m.log_likelihood(), m2.log_likelihood())
+        np.testing.assert_almost_equal(m.log_likelihood(), m2.log_likelihood())
 
     def test_likelihood_set(self):
diff --git a/GPy/testing/observable_tests.py b/GPy/testing/observable_tests.py
index 05794dc3..fb9112f8 100644
--- a/GPy/testing/observable_tests.py
+++ b/GPy/testing/observable_tests.py
@@ -30,15 +30,15 @@ class Test(unittest.TestCase):
         self.par2 = ParameterizedTest('test model 2')
         self.p = Param('test parameter', numpy.random.normal(1,2,(10,3)))
 
-        self.par.add_parameter(self.p)
-        self.par.add_parameter(Param('test1', numpy.random.normal(0,1,(1,))))
-        self.par.add_parameter(Param('test2', numpy.random.normal(0,1,(1,))))
+        self.par.link_parameter(self.p)
+        self.par.link_parameter(Param('test1', numpy.random.normal(0,1,(1,))))
+        self.par.link_parameter(Param('test2', numpy.random.normal(0,1,(1,))))
 
-        self.par2.add_parameter(Param('par2 test1', numpy.random.normal(0,1,(1,))))
-        self.par2.add_parameter(Param('par2 test2', numpy.random.normal(0,1,(1,))))
+        self.par2.link_parameter(Param('par2 test1', numpy.random.normal(0,1,(1,))))
+        self.par2.link_parameter(Param('par2 test2', numpy.random.normal(0,1,(1,))))
 
-        self.parent.add_parameter(self.par)
-        self.parent.add_parameter(self.par2)
+        self.parent.link_parameter(self.par)
+        self.parent.link_parameter(self.par2)
 
         self._observer_triggered = None
         self._trigger_count = 0
diff --git a/GPy/testing/parameterized_tests.py b/GPy/testing/parameterized_tests.py
index f8895b14..a51d9e09 100644
--- a/GPy/testing/parameterized_tests.py
+++ b/GPy/testing/parameterized_tests.py
@@ -37,8 +37,8 @@ class ParameterizedTest(unittest.TestCase):
         self.test1 = GPy.core.Parameterized("test model")
         self.test1.param = self.param
         self.test1.kern = self.rbf+self.white
-        self.test1.add_parameter(self.test1.kern)
-        self.test1.add_parameter(self.param, 0)
+        self.test1.link_parameter(self.test1.kern)
+        self.test1.link_parameter(self.param, 0)
 
         # print self.test1:
         #=============================================================================
         # test_model. | Value | Constraint | Prior | Tied to
@@ -67,11 +67,11 @@ class ParameterizedTest(unittest.TestCase):
     def test_fixes(self):
         self.white.fix(warning=False)
-        self.test1.remove_parameter(self.param)
+        self.test1.unlink_parameter(self.param)
         self.assertTrue(self.test1._has_fixes())
         from GPy.core.parameterization.transformations import FIXED, UNFIXED
         self.assertListEqual(self.test1._fixes_.tolist(),[UNFIXED,UNFIXED,FIXED])
-        self.test1.kern.add_parameter(self.white, 0)
+        self.test1.kern.link_parameter(self.white, 0)
         self.assertListEqual(self.test1._fixes_.tolist(),[FIXED,UNFIXED,UNFIXED])
         self.test1.kern.rbf.fix()
         self.assertListEqual(self.test1._fixes_.tolist(),[FIXED]*3)
@@ -82,7 +82,7 @@ class ParameterizedTest(unittest.TestCase):
     def test_remove_parameter(self):
         from GPy.core.parameterization.transformations import FIXED, UNFIXED, __fixed__, Logexp
         self.white.fix()
-        self.test1.kern.remove_parameter(self.white)
+        self.test1.kern.unlink_parameter(self.white)
         self.assertIs(self.test1._fixes_,None)
         self.assertListEqual(self.white._fixes_.tolist(), [FIXED])
 
@@ -90,7 +90,7 @@ class ParameterizedTest(unittest.TestCase):
         self.assertIs(self.test1.constraints, self.rbf.constraints._param_index_ops)
         self.assertIs(self.test1.constraints, self.param.constraints._param_index_ops)
 
-        self.test1.add_parameter(self.white, 0)
+        self.test1.link_parameter(self.white, 0)
         self.assertIs(self.test1.constraints, self.white.constraints._param_index_ops)
         self.assertIs(self.test1.constraints, self.rbf.constraints._param_index_ops)
         self.assertIs(self.test1.constraints, self.param.constraints._param_index_ops)
@@ -98,7 +98,7 @@ class ParameterizedTest(unittest.TestCase):
         self.assertIs(self.white._fixes_,None)
         self.assertListEqual(self.test1._fixes_.tolist(),[FIXED] + [UNFIXED] * 52)
 
-        self.test1.remove_parameter(self.white)
+        self.test1.unlink_parameter(self.white)
         self.assertIs(self.test1._fixes_,None)
         self.assertListEqual(self.white._fixes_.tolist(), [FIXED])
         self.assertIs(self.test1.constraints, self.rbf.constraints._param_index_ops)
@@ -107,11 +107,11 @@ class ParameterizedTest(unittest.TestCase):
     def test_remove_parameter_param_array_grad_array(self):
         val = self.test1.kern.param_array.copy()
-        self.test1.kern.remove_parameter(self.white)
+        self.test1.kern.unlink_parameter(self.white)
         self.assertListEqual(self.test1.kern.param_array.tolist(), val[:2].tolist())
 
     def test_add_parameter_already_in_hirarchy(self):
-        self.assertRaises(HierarchyError, self.test1.add_parameter, self.white.parameters[0])
+        self.assertRaises(HierarchyError, self.test1.link_parameter, self.white.parameters[0])
 
     def test_default_constraints(self):
         self.assertIs(self.rbf.variance.constraints._param_index_ops, self.rbf.constraints._param_index_ops)
@@ -119,7 +119,7 @@ class ParameterizedTest(unittest.TestCase):
         self.assertListEqual(self.rbf.constraints.indices()[0].tolist(), range(2))
         from GPy.core.parameterization.transformations import Logexp
         kern = self.test1.kern
-        self.test1.remove_parameter(kern)
+        self.test1.unlink_parameter(kern)
         self.assertListEqual(kern.constraints[Logexp()].tolist(), range(3))
 
     def test_constraints(self):
@@ -127,7 +127,7 @@ class ParameterizedTest(unittest.TestCase):
         self.assertListEqual(self.test1.constraints[GPy.transformations.Square()].tolist(), range(self.param.size, self.param.size+self.rbf.size))
         self.assertListEqual(self.test1.constraints[GPy.transformations.Logexp()].tolist(), [self.param.size+self.rbf.size])
 
-        self.test1.kern.remove_parameter(self.rbf)
+        self.test1.kern.unlink_parameter(self.rbf)
         self.assertListEqual(self.test1.constraints[GPy.transformations.Square()].tolist(), [])
 
     def test_constraints_views(self):
@@ -166,7 +166,7 @@ class ParameterizedTest(unittest.TestCase):
     def test_add_parameter_in_hierarchy(self):
         from GPy.core import Param
-        self.test1.kern.rbf.add_parameter(Param("NEW", np.random.rand(2), NegativeLogexp()), 1)
+        self.test1.kern.rbf.link_parameter(Param("NEW", np.random.rand(2), NegativeLogexp()), 1)
         self.assertListEqual(self.test1.constraints[NegativeLogexp()].tolist(), range(self.param.size+1, self.param.size+1 + 2))
         self.assertListEqual(self.test1.constraints[GPy.transformations.Logistic(0,1)].tolist(), range(self.param.size))
         self.assertListEqual(self.test1.constraints[GPy.transformations.Logexp(0,1)].tolist(), np.r_[50, 53:55].tolist())
diff --git a/GPy/testing/pickle_tests.py b/GPy/testing/pickle_tests.py
index d51352fe..dfabe54e 100644
--- a/GPy/testing/pickle_tests.py
+++ b/GPy/testing/pickle_tests.py
@@ -108,7 +108,7 @@ class Test(ListDictTestCase):
         par = toy_rbf_1d_50(optimize=0, plot=0)
         pcopy = par.copy()
         self.assertListEqual(par.param_array.tolist(), pcopy.param_array.tolist())
-        self.assertListEqual(par.gradient_full.tolist(), pcopy.gradient_full.tolist())
+        np.testing.assert_allclose(par.gradient_full, pcopy.gradient_full)
         self.assertSequenceEqual(str(par), str(pcopy))
         self.assertIsNot(par.param_array, pcopy.param_array)
         self.assertIsNot(par.gradient_full, pcopy.gradient_full)
@@ -141,7 +141,7 @@ class Test(ListDictTestCase):
             f.seek(0)
             pcopy = pickle.load(f)
         np.testing.assert_allclose(par.param_array, pcopy.param_array)
-        np.testing.assert_allclose(par.gradient_full, pcopy.gradient_full)
+        np.testing.assert_allclose(par.gradient_full, pcopy.gradient_full, atol=1e-6)
         self.assertSequenceEqual(str(par), str(pcopy))
         self.assert_(pcopy.checkgrad())
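
Note (not part of the diff): a minimal usage sketch of the renamed API, assuming this revision of GPy on Python 2. It only uses names introduced or exercised by the changes above (GPy.core.Parameterized, GPy.core.Param, link_parameter, unlink_parameter, and the deprecation shims in parameterized.py).

    import numpy as np
    import GPy
    from GPy.core import Param

    # Build a parameterized container and attach a parameter to it.
    p = GPy.core.Parameterized('demo')
    v = Param('variance', np.ones(1))
    p.link_parameter(v)    # was: p.add_parameter(v)
    p.unlink_parameter(v)  # was: p.remove_parameter(v)

    # The old names are kept as shims that raise; DeprecationWarning is an
    # Exception subclass, so the `raise DeprecationWarning, "..."` form used
    # in the diff makes the call fail loudly rather than warn silently.
    try:
        p.add_parameter(v)
    except DeprecationWarning:
        pass  # message points callers at link_parameter

The shim raising (rather than delegating with a warning) is a deliberate hard break: any code still calling add_parameter/remove_parameter fails immediately with a message naming the new method.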