more Python 3 compatibility fixes

Aki Vehtari 2016-06-09 15:02:08 +03:00
parent c3963928f1
commit 4b2299def8


@@ -73,7 +73,7 @@ def prompt_user(prompt):
     try:
         print(prompt)
-        choice = raw_input().lower()
+        choice = input().lower()
         # would like to test for exception here, but not sure if we can do that without importing IPython
     except:
         print('Stdin is not implemented.')
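
raw_input() was removed in Python 3; the builtin input() returns the entered line as a string, which is what this prompt code expects. If both interpreter versions had to be supported, one common pattern is to pick the builtin once at import time. A minimal sketch, not taken from the patched file (the names read_line and ask_yes_no are illustrative):

    # Minimal sketch of a Python 2/3-compatible prompt; `read_line` is an illustrative name.
    try:
        read_line = raw_input   # Python 2: returns the raw string without evaluating it
    except NameError:
        read_line = input       # Python 3: input() is the equivalent builtin

    def ask_yes_no(prompt):
        """Ask a yes/no question and return True for an affirmative answer."""
        print(prompt)
        return read_line().lower() in ('y', 'yes')
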
@@ -96,16 +96,16 @@ def prompt_user(prompt):
 def data_available(dataset_name=None):
     """Check if the data set is available on the local machine already."""
     try:
-        from itertools import izip_longest
+        from itertools import zip_longest
     except ImportError:
-        from itertools import zip_longest as izip_longest
+        from itertools import zip_longest as zip_longest
     dr = data_resources[dataset_name]
     zip_urls = (dr['files'], )
     if 'save_names' in dr: zip_urls += (dr['save_names'], )
     else: zip_urls += ([],)
-    for file_list, save_list in izip_longest(*zip_urls, fillvalue=[]):
-        for f, s in izip_longest(file_list, save_list, fillvalue=None):
+    for file_list, save_list in zip_longest(*zip_urls, fillvalue=[]):
+        for f, s in zip_longest(file_list, save_list, fillvalue=None):
             if s is not None: f=s # If there is a save_name given, use that one
             if not os.path.exists(os.path.join(data_path, dataset_name, f)):
                 return False
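
itertools.izip_longest was renamed to itertools.zip_longest in Python 3, so the old try/except picked the right name per interpreter. After this change both branches import the Python 3 name (the `as zip_longest` alias in the except branch is redundant), which is fine for a Python 3-only file; code that still had to run on Python 2 would alias in the other direction. A minimal sketch with made-up file lists, not from the patched file:

    # Minimal sketch: use zip_longest on Python 3, fall back to izip_longest on Python 2.
    try:
        from itertools import zip_longest                   # Python 3
    except ImportError:
        from itertools import izip_longest as zip_longest   # Python 2

    files = ['a.csv', 'b.csv', 'c.csv']
    save_names = ['a_local.csv']                  # shorter list is padded with the fillvalue
    for f, s in zip_longest(files, save_names, fillvalue=None):
        print(s if s is not None else f)          # prefer the save name when one is given
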
@@ -138,7 +138,7 @@ def download_url(url, store_directory, save_name=None, messages=True, suffix=''):
             raise ValueError('Tried url ' + url + suffix + ' and received server error ' + str(response.code))
     with open(save_name, 'wb') as f:
         meta = response.info()
-        content_length_str = meta.getheaders("Content-Length")
+        content_length_str = meta.get("Content-Length")
         if content_length_str:
             file_size = int(content_length_str[0])
         else:
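
In Python 2, response.info() returned a mimetools-style message whose getheaders() gave a list of header values; in Python 3 it is an email.message.Message, so get() returns the header as a single string (or None). That also means content_length_str[0] now picks out only the first character of that string, so the size is better converted with int() on the whole value. A minimal sketch of the Python 3 usage (the URL is a placeholder):

    # Minimal sketch of reading Content-Length on Python 3; the URL is a placeholder.
    from urllib.request import urlopen

    response = urlopen('http://example.com/')
    meta = response.info()                          # email.message.Message on Python 3
    content_length_str = meta.get("Content-Length")
    file_size = int(content_length_str) if content_length_str else None
    print(file_size)
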
@@ -220,8 +220,8 @@ def download_data(dataset_name=None):
     if 'suffices' in dr: zip_urls += (dr['suffices'], )
     else: zip_urls += ([],)
-    for url, files, save_names, suffices in itertools.izip_longest(*zip_urls, fillvalue=[]):
-        for f, save_name, suffix in itertools.izip_longest(files, save_names, suffices, fillvalue=None):
+    for url, files, save_names, suffices in itertools.zip_longest(*zip_urls, fillvalue=[]):
+        for f, save_name, suffix in itertools.zip_longest(files, save_names, suffices, fillvalue=None):
             download_url(os.path.join(url,f), dataset_name, save_name, suffix=suffix)
     return True
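
The same rename applies to the fully qualified calls here; the pairing logic itself is unchanged. The outer zip_longest walks the per-URL lists in parallel, and the inner one pads missing save names and suffixes with None so every file is still processed. A minimal sketch with made-up data showing how the padding behaves (list contents are illustrative, not from the data_resources dictionary):

    # Minimal sketch of the pairing logic with made-up data.
    import itertools

    urls = ['http://example.com/data']
    files = [['train.csv', 'test.csv']]
    save_names = [['train_local.csv']]            # no save name for test.csv
    suffices = [[]]                               # no suffix for either file

    zip_urls = (urls, files, save_names, suffices)
    for url, file_list, name_list, suffix_list in itertools.zip_longest(*zip_urls, fillvalue=[]):
        for f, save_name, suffix in itertools.zip_longest(file_list, name_list, suffix_list, fillvalue=None):
            print(url, f, save_name, suffix)
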