diff --git a/.travis.yml b/.travis.yml
index 6d188401..fb8ddb2c 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -10,6 +10,11 @@ virtualenv:
 before_install:
   - sudo apt-get install -qq python-scipy python-pip
   - sudo apt-get install -qq python-matplotlib
+  # Workaround for a permissions issue with Travis virtual machine images
+  # that breaks Python's multiprocessing:
+  # https://github.com/travis-ci/travis-cookbooks/issues/155
+  - sudo rm -rf /dev/shm
+  - sudo ln -s /run/shm /dev/shm
 
 install:
   - pip install --upgrade numpy==1.7.1
diff --git a/GPy/core/model.py b/GPy/core/model.py
index 5dc6b254..35b4b2dc 100644
--- a/GPy/core/model.py
+++ b/GPy/core/model.py
@@ -212,7 +212,7 @@ class model(parameterised):
         currently_constrained = self.all_constrained_indices()
         to_make_positive = []
         for s in positive_strings:
-            for i in self.grep_param_names(s):
+            for i in self.grep_param_names(".*"+s):
                 if not (i in currently_constrained):
                     #to_make_positive.append(re.escape(param_names[i]))
                     to_make_positive.append(i)
diff --git a/GPy/core/parameterised.py b/GPy/core/parameterised.py
index 7409402e..d1abb9c3 100644
--- a/GPy/core/parameterised.py
+++ b/GPy/core/parameterised.py
@@ -64,21 +64,21 @@ class parameterised(object):
         m['var'] = 2. # > sets all parameters matching 'var' to 2.
         m['var'] = # > sets parameters matching 'var' to
         """
-    def get(self, name):
+    def get(self, regexp):
         warnings.warn(self._get_set_deprecation, FutureWarning, stacklevel=2)
-        return self[name]
+        return self[regexp]
 
-    def set(self, name, val):
+    def set(self, regexp, val):
         warnings.warn(self._get_set_deprecation, FutureWarning, stacklevel=2)
-        self[name] = val
+        self[regexp] = val
 
-    def __getitem__(self, name, return_names=False):
+    def __getitem__(self, regexp, return_names=False):
         """
         Get a model parameter by name. The name is applied as a regular
         expression and all parameters that match that regular expression
         are returned.
         """
-        matches = self.grep_param_names(name)
+        matches = self.grep_param_names(regexp)
         if len(matches):
             if return_names:
                 return self._get_params()[matches], np.asarray(self._get_param_names())[matches].tolist()
@@ -103,8 +103,8 @@ class parameterised(object):
         else:
-            raise AttributeError, "no parameter matches %s" % name
+            raise AttributeError, "no parameter matches %s" % regexp
 
-    def tie_params(self, which):
-        matches = self.grep_param_names(which)
+    def tie_params(self, regexp):
+        matches = self.grep_param_names(regexp)
         assert matches.size > 0, "need at least something to tie together"
         if len(self.tied_indices):
             assert not np.any(matches[:, None] == np.hstack(self.tied_indices)), "Some indices are already tied!"
@@ -119,28 +119,23 @@ class parameterised(object):
         """Unties all parameters by setting tied_indices to an empty list."""
         self.tied_indices = []
 
-    def grep_param_names(self, expr):
+    def grep_param_names(self, regexp):
         """
-        Arguments
-        ---------
-        expr -- can be a regular expression object or a string to be turned into regular expression object.
+        :param regexp: regular expression to select parameter names
+        :type regexp: re | str | int
+        :rtype: the indices of self._get_param_names which match the regular expression.
 
-        Returns
-        -------
-        the indices of self._get_param_names which match the regular expression.
-
-        Notes
-        -----
-        Other objects are passed through - i.e. integers which weren't meant for grepping
+        Note:-
+            Other objects are passed through - i.e. integers which weren't meant for grepping
 
         """
-        if type(expr) in [str, np.string_, np.str]:
-            expr = re.compile(expr)
-            return np.nonzero([expr.search(name) for name in self._get_param_names()])[0]
-        elif type(expr) is re._pattern_type:
-            return np.nonzero([expr.search(name) for name in self._get_param_names()])[0]
+        if type(regexp) in [str, np.string_, np.str]:
+            regexp = re.compile(regexp)
+            return np.nonzero([regexp.match(name) for name in self._get_param_names()])[0]
+        elif type(regexp) is re._pattern_type:
+            return np.nonzero([regexp.match(name) for name in self._get_param_names()])[0]
         else:
-            return expr
+            return regexp
 
     def Nparam_transformed(self):
         removed = 0
@@ -152,9 +147,9 @@ class parameterised(object):
         return len(self._get_params()) - removed
 
-    def unconstrain(self, which):
+    def unconstrain(self, regexp):
         """Unconstrain matching parameters.  does not untie parameters"""
-        matches = self.grep_param_names(which)
+        matches = self.grep_param_names(regexp)
 
         #tranformed contraints:
         for match in matches:
 
@@ -178,17 +173,17 @@ class parameterised(object):
         else:
             self.fixed_indices, self.fixed_values = [], []
-    def constrain_negative(self, which):
+    def constrain_negative(self, regexp):
         """
         Set negative constraints.
         """
-        self.constrain(which, transformations.negative_exponent())
+        self.constrain(regexp, transformations.negative_exponent())
-    def constrain_positive(self, which):
+    def constrain_positive(self, regexp):
         """
         Set positive constraints.
         """
-        self.constrain(which, transformations.logexp())
+        self.constrain(regexp, transformations.logexp())
-    def constrain_bounded(self, which,lower, upper):
+    def constrain_bounded(self, regexp,lower, upper):
         """
         Set bounded constraints.
         """
-        self.constrain(which, transformations.logistic(lower, upper))
+        self.constrain(regexp, transformations.logistic(lower, upper))
@@ -196,10 +191,10 @@ class parameterised(object):
         else:
             return np.empty(shape=(0,))
 
-    def constrain(self,which,transform):
+    def constrain(self,regexp,transform):
         assert isinstance(transform,transformations.transformation)
 
-        matches = self.grep_param_names(which)
+        matches = self.grep_param_names(regexp)
         overlap = set(matches).intersection(set(self.all_constrained_indices()))
         if overlap:
             self.unconstrain(np.asarray(list(overlap)))
@@ -214,11 +209,11 @@ class parameterised(object):
             x[matches] = transform.initialize(x[matches])
         self._set_params(x)
 
-    def constrain_fixed(self, which, value=None):
+    def constrain_fixed(self, regexp, value=None):
         """
         Arguments
         ---------
-        :param which: np.array(dtype=int), or regular expression object or string
+        :param regexp: np.array(dtype=int), or regular expression object or string
         :param value: a float to fix the matched values to.
             If the value is not specified, the parameter is fixed to the current value
 
@@ -227,7 +222,7 @@ class parameterised(object):
         Fixing a parameter which is tied to another, or constrained in some way will result in an error.
         To fix multiple parameters to the same value, simply pass a regular expression which matches both parameter names, or pass both of the indexes
         """
-        matches = self.grep_param_names(which)
+        matches = self.grep_param_names(regexp)
         assert not np.any(matches[:, None] == self.all_constrained_indices()), "Some indices are already constrained"
         self.fixed_indices.append(matches)
         if value != None:
diff --git a/GPy/likelihoods/Gaussian.py b/GPy/likelihoods/Gaussian.py
index e08fee90..d87b1b98 100644
--- a/GPy/likelihoods/Gaussian.py
+++ b/GPy/likelihoods/Gaussian.py
@@ -51,11 +51,15 @@ class Gaussian(likelihood):
         return ["noise_variance"]
 
     def _set_params(self, x):
-        x = float(x)
+        x = np.float64(x)
         if self._variance != x:
-            self.precision = 1. / x
+            if x == 0.:
+                self.precision = None
+                self.V = None
+            else:
+                self.precision = 1. / x
+                self.V = (self.precision) * self.Y
             self.covariance_matrix = np.eye(self.N) * x
-            self.V = (self.precision) * self.Y
             self._variance = x
 
     def predictive_values(self, mu, var, full_cov):
diff --git a/GPy/testing/kernel_tests.py b/GPy/testing/kernel_tests.py
index b27eee07..b48bc813 100644
--- a/GPy/testing/kernel_tests.py
+++ b/GPy/testing/kernel_tests.py
@@ -8,7 +8,7 @@ import GPy
 class KernelTests(unittest.TestCase):
     def test_kerneltie(self):
         K = GPy.kern.rbf(5, ARD=True)
-        K.tie_params('[01]')
+        K.tie_params('.*[01]')
         K.constrain_fixed('2')
         X = np.random.rand(5,5)
         Y = np.ones((5,1))
diff --git a/GPy/testing/mrd_tests.py b/GPy/testing/mrd_tests.py
index 17e43a7e..903cddfc 100644
--- a/GPy/testing/mrd_tests.py
+++ b/GPy/testing/mrd_tests.py
@@ -23,7 +23,7 @@ class MRDTests(unittest.TestCase):
         Ylist = [np.random.multivariate_normal(np.zeros(N), K, D).T for _ in range(num_m)]
         likelihood_list = [GPy.likelihoods.Gaussian(Y) for Y in Ylist]
 
-        m = GPy.models.MRD(*likelihood_list, Q=Q, kernel=k, M=M)
+        m = GPy.models.MRD(likelihood_list, Q=Q, kernels=k, M=M)
         m.ensure_default_constraints()
 
         self.assertTrue(m.checkgrad())
diff --git a/GPy/testing/unit_tests.py b/GPy/testing/unit_tests.py
index ee8368ac..86d13c8c 100644
--- a/GPy/testing/unit_tests.py
+++ b/GPy/testing/unit_tests.py
@@ -22,7 +22,7 @@ class GradientTests(unittest.TestCase):
         self.X2D = np.random.uniform(-3.,3.,(40,2))
         self.Y2D = np.sin(self.X2D[:,0:1]) * np.sin(self.X2D[:,1:2])+np.random.randn(40,1)*0.05
 
-    def check_model_with_white(self, kern, model_type='GP_regression', dimension=1, constraint=''):
+    def check_model_with_white(self, kern, model_type='GP_regression', dimension=1):
         #Get the correct gradients
         if dimension == 1:
             X = self.X1D
@@ -37,7 +37,7 @@ class GradientTests(unittest.TestCase):
         noise = GPy.kern.white(dimension)
         kern = kern + noise
         m = model_fit(X, Y, kernel=kern)
-        m.constrain_positive(constraint)
+        m.ensure_default_constraints()
         m.randomize()
         # contrain all parameters to be positive
         self.assertTrue(m.checkgrad())
@@ -135,12 +135,12 @@ class GradientTests(unittest.TestCase):
     def test_sparse_GP_regression_rbf_white_kern_1d(self):
         ''' Testing the sparse GP regression with rbf kernel with white kernel on 1d data '''
         rbf = GPy.kern.rbf(1)
-        self.check_model_with_white(rbf, model_type='sparse_GP_regression', dimension=1, constraint='(variance|lengthscale|precision)')
+        self.check_model_with_white(rbf, model_type='sparse_GP_regression', dimension=1)
 
     def test_sparse_GP_regression_rbf_white_kern_2D(self):
         ''' Testing the sparse GP regression with rbf and white kernel on 2d data '''
         rbf = GPy.kern.rbf(2)
-        self.check_model_with_white(rbf, model_type='sparse_GP_regression', dimension=2, constraint='(variance|lengthscale|precision)')
+        self.check_model_with_white(rbf, model_type='sparse_GP_regression', dimension=2)
 
     def test_GPLVM_rbf_bias_white_kern_2D(self):
         """ Testing GPLVM with rbf + bias and white kernel """
@@ -150,7 +150,7 @@ class GradientTests(unittest.TestCase):
         K = k.K(X)
         Y = np.random.multivariate_normal(np.zeros(N),K,D).T
         m = GPy.models.GPLVM(Y, Q, kernel = k)
-        m.constrain_positive('(rbf|bias|white)')
+        m.ensure_default_constraints()
         self.assertTrue(m.checkgrad())
 
     def test_GPLVM_rbf_linear_white_kern_2D(self):
@@ -161,7 +161,7 @@ class GradientTests(unittest.TestCase):
         K = k.K(X)
         Y = np.random.multivariate_normal(np.zeros(N),K,D).T
         m = GPy.models.GPLVM(Y, Q, init = 'PCA', kernel = k)
-        m.constrain_positive('(linear|bias|white)')
+        m.ensure_default_constraints()
         self.assertTrue(m.checkgrad())
 
     def test_GP_EP_probit(self):
diff --git a/GPy/util/plot.py b/GPy/util/plot.py
index 295047b1..309c440e 100644
--- a/GPy/util/plot.py
+++ b/GPy/util/plot.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2012, GPy authors (see AUTHORS.txt).
+# #Copyright (c) 2012, GPy authors (see AUTHORS.txt).
 # Licensed under the BSD 3-clause license (see LICENSE.txt)
 
 
diff --git a/MANIFEST.in b/MANIFEST.in
new file mode 100644
index 00000000..fe342b5e
--- /dev/null
+++ b/MANIFEST.in
@@ -0,0 +1,4 @@
+include *.txt
+recursive-include docs *.txt
+include *.md
+recursive-include docs *.md
diff --git a/setup.py b/setup.py
index 2f7c9af8..7f367942 100644
--- a/setup.py
+++ b/setup.py
@@ -23,14 +23,12 @@ setup(name = 'GPy',
       package_data = {'GPy': ['GPy/examples']},
      py_modules = ['GPy.__init__'],
      long_description=read('README.md'),
-      #ext_modules = [Extension(name = 'GPy.kern.lfmUpsilonf2py',
-      #                         sources = ['GPy/kern/src/lfmUpsilonf2py.f90'])],
      install_requires=['sympy', 'numpy>=1.6', 'scipy>=0.9','matplotlib>=1.1', 'nose'],
      extras_require = {
        'docs':['Sphinx', 'ipython'],
        },
-      #setup_requires=['sphinx'],
-      #cmdclass = {'build_sphinx': BuildDoc},
      classifiers=[
      "License :: OSI Approved :: BSD License"],
+      #ext_modules = [Extension(name = 'GPy.kern.lfmUpsilonf2py',
+      #                         sources = ['GPy/kern/src/lfmUpsilonf2py.f90'])],
      )