diff --git a/GPy/core/gp.py b/GPy/core/gp.py
index 4b6231af..c2d2010c 100644
--- a/GPy/core/gp.py
+++ b/GPy/core/gp.py
@@ -229,13 +229,14 @@ class GP(Model):
         :param Y_metadata: metadata about the predicting point to pass to the likelihood
         :param kern: The kernel to use for prediction (defaults to the model kern). this is useful for examining e.g. subprocesses.
-        :returns: (mean, var, lower_upper):
+        :returns: (mean, var):
            mean: posterior mean, a Numpy array, Nnew x self.input_dim
            var: posterior variance, a Numpy array, Nnew x 1 if full_cov=False, Nnew x Nnew otherwise
-           lower_upper: lower and upper boundaries of the 95% confidence intervals, Numpy arrays, Nnew x self.input_dim
 
           If full_cov and self.input_dim > 1, the return shape of var is Nnew x Nnew x self.input_dim. If self.input_dim == 1, the return shape is Nnew x Nnew. This is to allow for different normalizations of the output dimensions.
+
+        Note: If you want the predictive quantiles (e.g. 95% confidence interval) use :py:func:`~GPy.core.gp.GP.predict_quantiles`.
         """
         #predict the latent function values
         mu, var = self._raw_predict(Xnew, full_cov=full_cov, kern=kern)
@@ -255,7 +256,7 @@ class GP(Model):
         :param quantiles: tuple of quantiles, default is (2.5, 97.5) which is the 95% interval
         :type quantiles: tuple
         :returns: list of quantiles for each X and predictive quantiles for interval combination
-        :rtype: [np.ndarray (Xnew x self.input_dim), np.ndarray (Xnew x self.input_dim)]
+        :rtype: [np.ndarray (Xnew x self.output_dim), np.ndarray (Xnew x self.output_dim)]
         """
         m, v = self._raw_predict(X, full_cov=False)
         if self.normalizer is not None:
diff --git a/GPy/core/model.py b/GPy/core/model.py
index 937d30e5..c3ad5cbe 100644
--- a/GPy/core/model.py
+++ b/GPy/core/model.py
@@ -76,7 +76,7 @@ class Model(Parameterized):
             jobs = []
             pool = mp.Pool(processes=num_processes)
             for i in range(num_restarts):
-                self.randomize()
+                if i>0: self.randomize()
                 job = pool.apply_async(opt_wrapper, args=(self,), kwds=kwargs)
                 jobs.append(job)
 
@@ -90,7 +90,7 @@ class Model(Parameterized):
         for i in range(num_restarts):
             try:
                 if not parallel:
-                    self.randomize()
+                    if i>0: self.randomize()
                     self.optimize(**kwargs)
                 else:
                     self.optimization_runs.append(jobs[i].get())
diff --git a/GPy/core/verbose_optimization.py b/GPy/core/verbose_optimization.py
index f882f228..08c5e2dd 100644
--- a/GPy/core/verbose_optimization.py
+++ b/GPy/core/verbose_optimization.py
@@ -5,9 +5,10 @@ from __future__ import print_function
 import numpy as np
 import sys
 import time
+import datetime
 
 def exponents(fnow, current_grad):
-    exps = [np.abs(np.float(fnow)), current_grad]
+    exps = [np.abs(np.float(fnow)), 1 if current_grad is np.nan else current_grad]
     return np.sign(exps) * np.log10(exps).astype(int)
 
 class VerboseOptimization(object):
@@ -23,6 +24,7 @@ class VerboseOptimization(object):
             self.model.add_observer(self, self.print_status)
             self.status = 'running'
             self.clear = clear_after_finish
+            self.deltat = .2
 
             self.update()
 
@@ -44,25 +46,25 @@ class VerboseOptimization(object):
                     self.hor_align = FlexBox(children = [left_col, right_col], width='100%', orientation='horizontal')
 
                     display(self.hor_align)
-
+
                     try:
                         self.text.set_css('width', '100%')
                         left_col.set_css({
                             'padding': '2px',
                             'width': "100%",
                         })
-
+
                         right_col.set_css({
                             'padding': '2px',
                         })
-
+
                         self.hor_align.set_css({
                             'width': "100%",
                         })
 
                         self.hor_align.remove_class('vbox')
                         self.hor_align.add_class('hbox')
-
+
                         left_col.add_class("box-flex1")
                         right_col.add_class('box-flex0')
@@ -74,16 +76,31 @@ class VerboseOptimization(object):
             else:
                 self.exps = exponents(self.fnow, self.current_gradient)
             print('Running {} Code:'.format(self.opt_name))
-            print(' {3:7s} {0:{mi}s} {1:11s} {2:11s}'.format("i", "f", "|g|", "secs", mi=self.len_maxiters))
+            print(' {3:7s} {0:{mi}s} {1:11s} {2:11s}'.format("i", "f", "|g|", "runtime", mi=self.len_maxiters))
 
     def __enter__(self):
         self.start = time.time()
         return self
 
-    def print_out(self):
+    def print_out(self, seconds):
+        if seconds<60:
+            ms = (seconds%1)*100
+            self.timestring = "{s:0>2d}s{ms:0>2d}".format(s=int(seconds), ms=int(ms))
+        else:
+            m, s = divmod(seconds, 60)
+            if m>59:
+                h, m = divmod(m, 60)
+                if h>23:
+                    d, h = divmod(h, 24)
+                    self.timestring = '{d:0>2d}d{h:0>2d}h{m:0>2d}'.format(m=int(m), h=int(h), d=int(d))
+                else:
+                    self.timestring = '{h:0>2d}h{m:0>2d}m{s:0>2d}'.format(m=int(m), s=int(s), h=int(h))
+            else:
+                ms = (seconds%1)*100
+                self.timestring = '{m:0>2d}m{s:0>2d}s{ms:0>2d}'.format(m=int(m), s=int(s), ms=int(ms))
         if self.ipython_notebook:
             names_vals = [['optimizer', "{:s}".format(self.opt_name)],
-                          ['runtime [s]', "{:> g}".format(time.time()-self.start)],
+                          ['runtime', "{:>s}".format(self.timestring)],
                           ['evaluation', "{:>0{l}}".format(self.iteration, l=self.len_maxiters)],
                           ['objective', "{: > 12.3E}".format(self.fnow)],
                           ['||gradient||', "{: >+12.3E}".format(float(self.current_gradient))],
@@ -120,14 +137,18 @@ class VerboseOptimization(object):
                 if b:
                     self.exps = n_exps
             print('\r', end=' ')
-            print('{3:> 7.2g} {0:>0{mi}g} {1:> 12e} {2:> 12e}'.format(self.iteration, float(self.fnow), float(self.current_gradient), time.time()-self.start, mi=self.len_maxiters), end=' ') # print 'Iteration:', iteration, ' Objective:', fnow, ' Scale:', beta, '\r',
+            print('{3:} {0:>0{mi}g} {1:> 12e} {2:> 12e}'.format(self.iteration, float(self.fnow), float(self.current_gradient), "{:>8s}".format(self.timestring), mi=self.len_maxiters), end=' ') # print 'Iteration:', iteration, ' Objective:', fnow, ' Scale:', beta, '\r',
             sys.stdout.flush()
 
     def print_status(self, me, which=None):
         self.update()
 
+        seconds = time.time()-self.start
         #sys.stdout.write(" "*len(self.message))
-        self.print_out()
+        self.deltat += seconds
+        if self.deltat > .2:
+            self.print_out(seconds)
+            self.deltat = 0
 
         self.iteration += 1
 
@@ -153,12 +174,12 @@ class VerboseOptimization(object):
         if self.verbose:
             self.stop = time.time()
             self.model.remove_observer(self)
-            self.print_out()
+            self.print_out(self.stop - self.start)
 
             if not self.ipython_notebook:
                 print()
-                print('Optimization finished in {0:.5g} Seconds'.format(self.stop-self.start))
-                print('Optimization status: {0}'.format(self.status))
+                print('Runtime: {}'.format("{:>9s}".format(self.timestring)))
+                print('Optimization status: {0}'.format(self.status))
                 print()
             elif self.clear:
                 self.hor_align.close()
diff --git a/GPy/kern/_src/stationary.py b/GPy/kern/_src/stationary.py
index b5e425e6..df064de7 100644
--- a/GPy/kern/_src/stationary.py
+++ b/GPy/kern/_src/stationary.py
@@ -15,7 +15,7 @@ from ...util.caching import Cache_this
 
 try:
     import stationary_cython
 except ImportError:
-    print('warning: failed to import cython module: falling back to numpy')
+    print('warning in stationary: failed to import cython module: falling back to numpy')
     config.set('cython', 'working', 'false')
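
The gp.py hunks above remove the confidence-interval bounds from predict()'s return value and point callers at predict_quantiles() instead. A minimal usage sketch of the two calls (the toy data and model below are illustrative, not part of the patch):

    import numpy as np
    import GPy

    # toy 1-D regression problem
    X = np.random.uniform(-3., 3., (20, 1))
    Y = np.sin(X) + np.random.randn(20, 1) * 0.05
    m = GPy.models.GPRegression(X, Y)
    m.optimize()

    Xnew = np.linspace(-3., 3., 50)[:, None]
    mean, var = m.predict(Xnew)               # posterior mean and variance only
    lower, upper = m.predict_quantiles(Xnew)  # quantiles=(2.5, 97.5) by default, i.e. the 95% interval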
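
The model.py hunks make optimize_restarts() keep the model's current parameters for the first run and only call randomize() on runs 2..num_restarts, so a hand-tuned initialisation is no longer thrown away. Continuing the sketch above:

    m = GPy.models.GPRegression(X, Y)    # carefully initialised parameters
    m.optimize_restarts(num_restarts=3)
    # run 1 optimizes from the current parameters (no randomize()),
    # runs 2-3 randomize first; the best run is kept as before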
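
The new print_out(seconds) in verbose_optimization.py renders elapsed time as a compact fixed-width string, peeling off minutes, hours and days with divmod. A standalone sketch of the same formatting logic (format_runtime is a hypothetical helper name, not part of the patch):

    def format_runtime(seconds):
        """Render elapsed seconds as 00s00, 00m00s00, 00h00m00 or 00d00h00."""
        if seconds < 60:
            ms = (seconds % 1) * 100
            return '{s:0>2d}s{ms:0>2d}'.format(s=int(seconds), ms=int(ms))
        m, s = divmod(seconds, 60)
        if m > 59:
            h, m = divmod(m, 60)
            if h > 23:
                d, h = divmod(h, 24)
                return '{d:0>2d}d{h:0>2d}h{m:0>2d}'.format(d=int(d), h=int(h), m=int(m))
            return '{h:0>2d}h{m:0>2d}m{s:0>2d}'.format(h=int(h), m=int(m), s=int(s))
        ms = (seconds % 1) * 100
        return '{m:0>2d}m{s:0>2d}s{ms:0>2d}'.format(m=int(m), s=int(s), ms=int(ms))

    print(format_runtime(5.25))    # 05s25
    print(format_runtime(125.5))   # 02m05s50
    print(format_runtime(3725.0))  # 01h02m05
    print(format_runtime(90000.0)) # 01d01h00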