Added the exception's error message to the output message printed when multiprocessing.cpu_count() fails.

This commit is contained in:
Samer Afach 2016-11-13 12:36:39 +01:00
parent d0d234a807
commit c83e47d3b1


@@ -171,17 +171,23 @@ def download_files(list_of_urls, to_dir, processes=0):
     list_of_urls = [line.replace(' ', '').replace('\n', '').replace('\t', '') for line in list_of_urls]
     if not os.path.isdir(to_dir):
         mkdir_p(to_dir)
+    # try to detect the number of CPUs automatically
     if processes <= 0:
         try:
             processes = mp.cpu_count()
         except NotImplementedError as e:
             sys.stderr.write("Unable to determine the number of CPUs for parallelization. Proceeding sequentially. "
-                             "Consider inputting the number of CPUs manually.\n")
+                             "Consider inputting the number of CPUs manually. The error says: " + str(e) + "\n")
             _download_files(list_of_urls, to_dir)
             return
+    # if there's only 1 process or 1 URL, there's no need to use multiprocessing
     elif processes == 1 or len(list_of_urls) == 1:
         _download_files(list_of_urls, to_dir)
         return
+    # if number of processes is larger than the number of URLs, reduce the number of processes to save resources
     elif processes > len(list_of_urls):
         processes = len(list_of_urls)
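
For readers skimming the diff, below is a minimal, self-contained sketch of the fallback logic this commit touches. It is not the project's actual module: choose_process_count and its parameters are illustrative names that do not exist in the repository, while the cpu_count() handling and the augmented error message mirror the patched code.

# Hypothetical sketch of the CPU-detection fallback; only the error-message
# handling corresponds directly to the commit above.
import multiprocessing as mp
import sys


def choose_process_count(requested, num_tasks):
    """Return how many worker processes to use; 1 signals a sequential run."""
    if requested <= 0:
        try:
            requested = mp.cpu_count()
        except NotImplementedError as e:
            # Mirror the commit: include the exception text so the user knows
            # why automatic CPU detection failed.
            sys.stderr.write("Unable to determine the number of CPUs for parallelization. "
                             "Proceeding sequentially. Consider inputting the number of CPUs "
                             "manually. The error says: " + str(e) + "\n")
            return 1
    # never spawn more processes than there are URLs to download
    return min(requested, max(num_tasks, 1))


if __name__ == "__main__":
    print(choose_process_count(0, 3))

With str(e) appended to the warning, a platform where cpu_count() is unsupported now reports the underlying reason instead of only the generic message.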