author     Michal Ptacek <m.ptacek@partner.samsung.com>    2019-07-02 08:08:53 +0000
committer  Gerrit Code Review <gerrit@onap.org>            2019-07-02 08:08:53 +0000
commit     651c1973691d2ab9c2fe12cf7dcdb19b5ef399cd (patch)
tree       cb0b06aec0206efe9f59ee6b97a6e57f9ac297cc /build/download/download.py
parent     d84d3421b8e9c09507fae1b7e4bd8a0352249cc0 (diff)
parent     11e84f5a64e549e9f0a82f3f039ff6773fe94704 (diff)
Merge changes from topic "data-download-refactor"
* changes:
  Refactor download.py and remove not used files
  Refactor of rpm downloading
  Refactor pypi packages download
  Refactor and improve git repository cloning
  Refactor and improve docker image downloading
  Refactor npm packages download
  Refactor http files download
Diffstat (limited to 'build/download/download.py')
-rwxr-xr-x  build/download/download.py  224
1 file changed, 130 insertions, 94 deletions
diff --git a/build/download/download.py b/build/download/download.py
index 0d8912ee..0af12989 100755
--- a/build/download/download.py
+++ b/build/download/download.py
@@ -20,44 +20,51 @@
# COPYRIGHT NOTICE ENDS HERE
import argparse
+import datetime
import logging
import sys
-import datetime
import timeit
-import base
-import docker_images
-import git_repos
-import http_files
-import npm_packages
-import pypi_packages
-import rpm_packages
+import docker_downloader
+import git_downloader
+import http_downloader
+import npm_downloader
+import pypi_downloader
+import rpm_downloader
log = logging.getLogger(name=__name__)
+
def parse_args():
- parser=argparse.ArgumentParser(description='Download data from lists')
+ """
+ Parse command line arguments
+ :return: arguments
+ """
+ parser = argparse.ArgumentParser(description='Download data from lists')
list_group = parser.add_argument_group()
list_group.add_argument('--docker', action='append', nargs='+', default=[],
- metavar=('list', 'dir-name'),
- help='Docker type list. If second argument is specified '
- 'it is treated as directory where images will be saved '
- 'otherwise only pull operation is executed')
+ metavar=('list', 'dir-name'),
+ help='Docker type list. If the second argument is specified, '
+ 'it is treated as the directory where images will be saved; '
+ 'otherwise only a pull operation is executed. This can\'t '
+ 'be mixed between multiple docker list specifications: '
+ 'if one of the lists does not have a directory specified, '
+ 'all lists are only pulled.')
list_group.add_argument('--http', action='append', nargs=2, default=[],
- metavar=('list', 'dir-name'),
- help='Http type list and directory to save downloaded files')
+ metavar=('list', 'dir-name'),
+ help='Http type list and directory to save downloaded files')
list_group.add_argument('--npm', action='append', nargs=2, default=[],
- metavar=('list', 'dir-name'),
- help='npm type list and directory to save downloaded files')
+ metavar=('list', 'dir-name'),
+ help='npm type list and directory to save downloaded files')
list_group.add_argument('--rpm', action='append', nargs=2, default=[],
- metavar=('list', 'dir-name'),
- help='rpm type list and directory to save downloaded files')
+ metavar=('list', 'dir-name'),
+ help='rpm type list and directory to save downloaded files')
list_group.add_argument('--git', action='append', nargs=2, default=[],
- metavar=('list', 'dir-name'),
- help='git repo type list and directory to save downloaded files')
+ metavar=('list', 'dir-name'),
+ help='git repo type list and directory to save downloaded files')
list_group.add_argument('--pypi', action='append', nargs=2, default=[],
- metavar=('list', 'dir-name'),
- help='pypi packages type list and directory to save downloaded files')
+ metavar=('list', 'dir-name'),
+ help='pypi packages type list and directory to save downloaded files')
parser.add_argument('--npm-registry', default='https://registry.npmjs.org',
help='npm registry to use (default: https://registry.npmjs.org)')
parser.add_argument('--check', '-c', action='store_true', default=False,
@@ -71,10 +78,77 @@ def parse_args():
if getattr(args, arg):
return args
- parser.error('One of --docker, --npm, --http, --rpm, --git must be specified')
+ parser.error('One of --docker, --npm, --http, --rpm, --git or --pypi must be specified')
+
+
+def log_start(item_type):
+ """
+ Log starting message
+ :param item_type: type of resources
+ :return:
+ """
+ log.info('Starting download of {}.'.format(item_type))
+
+
+def handle_download(downloader, check_mode, errorred_lists, start_time):
+ """
+ Handle download of resources
+ :param downloader: downloader to use
+ :param check_mode: run in check mode (boolean)
+ :param errorred_lists: list of data types of failed lists
+ :param start_time: timeit.default_timer() right before download
+ :return: timeit.default_timer() at the end of download
+ """
+ if check_mode:
+ print(downloader.check_table)
+ else:
+ log_start(downloader.list_type)
+ try:
+ downloader.download()
+ except RuntimeError:
+ errorred_lists.append(downloader.list_type)
+ return log_time_interval(start_time, downloader.list_type)
+
+
+def handle_command_download(downloader_class, check_mode, errorred_lists, start_time, *args):
+ """
+ Handle download of resources where a shell command is used
+ :param downloader_class: subclass of command_downloader.CommandDownloader to use
+ :param check_mode: run in check mode (boolean)
+ :param errorred_lists: list of data types of failed lists
+ :param start_time: timeit.default_timer() right before download
+ :param args: arguments for downloader class initialization
+ :return: timeit.default_timer() at the end of download
+ """
+ try:
+ downloader = downloader_class(*args)
+ return handle_download(downloader, check_mode, errorred_lists, start_time)
+ except FileNotFoundError as err:
+ classname = downloader_class.__name__
+ log.exception('Error initializing: {}: {}'.format(classname, err))
+ return timeit.default_timer()
+
+
+def log_time_interval(start, resource_type=''):
+ """
+ Log how long the download took
+ :param start: timeit.default_timer() when interval started
+ :param resource_type: type of data that was downloaded. (empty string for whole download)
+ :return: timeit.default_timer() after logging
+ """
+ e_time = datetime.timedelta(seconds=timeit.default_timer() - start)
+ if resource_type:
+ msg = 'Download of {} took {}\n'.format(resource_type, e_time)
+ else:
+ msg = 'Execution ended. Total elapsed time {}'.format(e_time)
+ log.info(msg)
+ return timeit.default_timer()
def run_cli():
+ if sys.version_info.major < 3:
+ log.error('Unfortunately Python 2 is not supported for data download.')
+ sys.exit(1)
args = parse_args()
console_handler = logging.StreamHandler(sys.stdout)
@@ -91,82 +165,44 @@ def run_cli():
root_logger = logging.getLogger()
root_logger.addHandler(console_handler)
- list_with_errors = []
- timer_start = timeit.default_timer()
-
- for docker_list in args.docker:
- log.info('Processing {}.'.format(docker_list[0]))
- progress = None if args.check else base.init_progress('docker images')
- save = False
- if len(docker_list) > 1:
- save = True
- else:
- docker_list.append(None)
- try:
- docker_images.download(docker_list[0], save,
- docker_list[1], args.check, progress)
- except RuntimeError:
- list_with_errors.append(docker_list[0])
+ errorred_lists = []
+ timer_start = interval_start = timeit.default_timer()
- for http_list in args.http:
- progress = None if args.check else base.init_progress('http files')
- log.info('Processing {}.'.format(http_list[0]))
- try:
- http_files.download(http_list[0], http_list[1], args.check,
- progress)
- except RuntimeError:
- list_with_errors.append(http_list[0])
+ if args.check:
+ log.info('Check mode. No download will be executed.')
- for npm_list in args.npm:
- progress = None if args.check else base.init_progress('npm packages')
- log.info('Processing {}.'.format(npm_list[0]))
- try:
- npm_packages.download(npm_list[0], args.npm_registry, npm_list[1],
- args.check, progress)
- except RuntimeError:
- list_with_errors.append(npm_list[0])
+ if args.docker:
+ save = all(len(x) == 2 for x in args.docker)
+ docker = docker_downloader.DockerDownloader(save, *args.docker, workers=3)
+ interval_start = handle_download(docker, args.check, errorred_lists, interval_start)
- for rpm_list in args.rpm:
- if args.check:
- log.info('Check mode for rpm packages is not implemented')
- break
- log.info('Processing {}.'.format(rpm_list[0]))
- try:
- rpm_packages.download(rpm_list[0], rpm_list[1])
- except RuntimeError:
- list_with_errors.append(rpm_list[0])
-
- for git_list in args.git:
- if args.check:
- log.info('Check mode for git repositories is not implemented')
- break
- progress = None if args.check else base.init_progress('git repositories')
- log.info('Processing {}.'.format(git_list[0]))
- try:
- git_repos.download(git_list[0], git_list[1], progress)
- except RuntimeError:
- list_with_errors.append(git_list[0])
-
- for pypi_list in args.pypi:
- if args.check:
- log.info('Check mode for pypi packages is not implemented')
- break
- progress = None if args.check else base.init_progress('pypi packages')
- log.info('Processing {}.'.format(pypi_list[0]))
- try:
- pypi_packages.download(pypi_list[0], pypi_list[1], progress)
- except RuntimeError:
- list_with_errors.append(pypi_list[0])
+ if args.http:
+ http = http_downloader.HttpDownloader(*args.http)
+ interval_start = handle_download(http, args.check, errorred_lists, interval_start)
- e_time = datetime.timedelta(seconds=timeit.default_timer() - timer_start)
- log.info(timeit.default_timer() - timer_start)
- log.info('Execution ended. Total elapsed time {}'.format(e_time))
+ if args.npm:
+ npm = npm_downloader.NpmDownloader(args.npm_registry, *args.npm)
+ interval_start = handle_download(npm, args.check, errorred_lists, interval_start)
- if list_with_errors:
- log.error('Errors encountered while processing these lists:'
- '\n{}'.format('\n'.join(list_with_errors)))
- sys.exit(1)
+ if args.rpm:
+ interval_start = handle_command_download(rpm_downloader.RpmDownloader, args.check, errorred_lists,
+ interval_start, *args.rpm)
+
+ if args.git:
+ interval_start = handle_command_download(git_downloader.GitDownloader, args.check, errorred_lists,
+ interval_start, *args.git)
+ if args.pypi:
+ handle_command_download(pypi_downloader.PyPiDownloader, args.check, errorred_lists,
+ interval_start, *args.pypi)
+
+ if not args.check:
+ log_time_interval(timer_start)
+
+ if errorred_lists:
+ log.error('Errors encountered while processing these types:'
+ '\n{}'.format('\n'.join(errorred_lists)))
+ sys.exit(1)
if __name__ == '__main__':
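
The downloader classes themselves are not part of this diff; handle_download() only relies on each of them exposing a list_type attribute, a check_table property (printed in --check mode) and a download() method that raises RuntimeError on failure. The sketch below is a hypothetical minimal class satisfying that interface, plus a call to handle_download() with the same argument shape as in run_cli(). The class name and its internals are illustrative assumptions, not code from the repository, and the import assumes the snippet is run from build/download/ next to download.py and the real downloader modules.

# Hypothetical minimal downloader satisfying the interface used by
# handle_download() in download.py; the real implementations live in
# docker_downloader.py, rpm_downloader.py, etc.
import timeit

from download import handle_download  # assumes cwd is build/download/


class NoopDownloader:
    """Toy downloader exposing the attributes download.py relies on."""

    def __init__(self, data_list, target_dir):
        self.list_type = 'noop files'  # used by log_start() and error reporting
        self._data_list = data_list
        self._target_dir = target_dir

    @property
    def check_table(self):
        # Printed verbatim when --check/-c is given; real downloaders render
        # a table of already-present vs. missing items here.
        return 'Nothing to check for {}'.format(self._data_list)

    def download(self):
        # Real downloaders fetch every item from their list and raise
        # RuntimeError on failure so run_cli() can record the failed list
        # type in errorred_lists and exit non-zero at the end.
        pass


if __name__ == '__main__':
    errorred_lists = []
    start = timeit.default_timer()
    # Same call shape as in run_cli(); check_mode=False performs the "download".
    handle_download(NoopDownloader('noop.list', '/tmp/noop'),
                    False, errorred_lists, start)
    print('failed list types:', errorred_lists)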