diff options
author | Patrik Teivonen <patrik.teivonen@qt.io> | 2022-05-25 14:14:19 +0300 |
---|---|---|
committer | Patrik Teivonen <patrik.teivonen@qt.io> | 2022-08-05 09:33:47 +0000 |
commit | 506b1fe037a1b4fd6f3aec7862ca3fa2ea35006b (patch) | |
tree | b0ff3247aa5b4f5081c17149b9463ea6fcef8636 | |
parent | 6099b89f35499c60eb0ed4be10e40966326ea678 (diff) |
Make imports consistent across Python scripts, isort hook (tag: v6.4.0-beta3-packaging)
-Add isort to Pipfile, isort hook with black formatter profile
-Fix imports in unittest hook
-Group and sort imports by their category (PEP8)
-Blank line between import groups (PEP8)
-Move all imports to toplevel (PEP8)
-Don't unnecessarily append to sys.path (PEP8)
-Import only required functions (this also helps reduce line length)
-Update deprecated imports such as builtins.WindowsError, ConfigParser.readfp
-Remove Python 2, cyclic and unused imports
-Fix platform dependent imports and remove unnecessary try-excepts
-Remove unnecessary comments before imports, use in-line comments when needed
-Rename conflicting functions in imports, variables as absolute imports are recommended (PEP8)
Change-Id: I5ac7a3499ea47a6bdc9a9a2b5211841f65c011ea
Reviewed-by: Akseli Salovaara <akseli.salovaara@qt.io>
52 files changed, 804 insertions, 699 deletions
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a35a47865..55aab8c52 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -12,7 +12,9 @@ repos: fail_fast: true - id: unittest name: Run unit tests (unittest) - entry: bash -c 'cd packaging-tools && pipenv run python3 -m unittest' + # Imports in unit tests failing as packaging-tools is not a valid Python package name + # Change dir to packaging-tools and append to PYTHONPATH environment variable as a workaround + entry: bash -c 'cd packaging-tools && export PYTHONPATH=$(pwd) && pipenv run python3 -m unittest' language: system pass_filenames: false always_run: true @@ -25,3 +27,9 @@ repos: language: system types: [python] fail_fast: true + - id: isort + name: Sort imports (isort) + entry: bash -c 'for x in "$@"; do pipenv run python3 -m isort --line-length 99 --profile black --src packaging-tools "$x"; done' + language: system + types: [python] + fail_fast: true @@ -26,6 +26,7 @@ flake8 = "==4.0.1" black = ">=20.8b1" stestr = "==3.2.1" pre-commit = "==2.17.0" +isort = "==5.10.1" [requires] python_version = "3" diff --git a/packaging-tools/archiveresolver.py b/packaging-tools/archiveresolver.py index 05894d930..4ee16be05 100644 --- a/packaging-tools/archiveresolver.py +++ b/packaging-tools/archiveresolver.py @@ -30,10 +30,11 @@ ############################################################################# import os -import bldinstallercommon -import pkg_constants from urllib.parse import urlparse +from bldinstallercommon import config_section_map, is_content_url_valid, safe_config_key_fetch +from pkg_constants import PKG_TEMPLATE_BASE_DIR_NAME + SERVER_NAMESPACE = 'ArchiveRemoteLocation' PACKAGE_REMOTE_LOCATION_RELEASE = 'release' PACKAGE_ARCHIVE_TAG = 'ARCHIVE_TAG' @@ -73,7 +74,7 @@ class ArchiveLocationResolver: self.configurations_root_dir = configurations_root_dir self.key_substitution_list = key_substitution_list # get packages tempalates src dir first - pkg_templates_dir = 
os.path.normpath(bldinstallercommon.config_section_map(target_config, 'PackageTemplates')['template_dirs']) + pkg_templates_dir = os.path.normpath(config_section_map(target_config, 'PackageTemplates')['template_dirs']) self.pkg_templates_dir_list = pkg_templates_dir.replace(' ', '').rstrip(',\n').split(',') # next read server list if server_base_url_override: @@ -83,8 +84,8 @@ class ArchiveLocationResolver: for section in target_config.sections(): if section.startswith(SERVER_NAMESPACE): server_name = section.split('.')[-1] - base_url = bldinstallercommon.safe_config_key_fetch(target_config, section, 'base_url') - base_path = bldinstallercommon.safe_config_key_fetch(target_config, section, 'base_path') + base_url = safe_config_key_fetch(target_config, section, 'base_url') + base_path = safe_config_key_fetch(target_config, section, 'base_path') base_path.replace(' ', '') # if base path is defined, then the following logic applies: # if script is used in testclient mode fetch the packages from "RnD" location @@ -126,19 +127,19 @@ class ArchiveLocationResolver: if temp != archive_uri: archive_uri = temp # 1. check if given archive_uri denotes a package under package templates directory - base_path = os.path.join(self.configurations_root_dir, pkg_constants.PKG_TEMPLATE_BASE_DIR_NAME) + base_path = os.path.join(self.configurations_root_dir, PKG_TEMPLATE_BASE_DIR_NAME) package_path = package_name + os.sep + 'data' + os.sep + archive_uri # find the correct template subdirectory for subdir in self.pkg_templates_dir_list: path_temp = os.path.join(base_path, subdir) if not os.path.isdir(path_temp): - path_temp = path_temp.replace(os.sep + pkg_constants.PKG_TEMPLATE_BASE_DIR_NAME, '') + path_temp = path_temp.replace(os.sep + PKG_TEMPLATE_BASE_DIR_NAME, '') if os.path.isdir(path_temp): temp = os.path.join(path_temp, package_path) if os.path.isfile(temp): return temp # 2. 
check if given URI is valid full URL - res = bldinstallercommon.is_content_url_valid(archive_uri) + res = is_content_url_valid(archive_uri) if res: return archive_uri else: diff --git a/packaging-tools/asynchronousfilereader.py b/packaging-tools/asynchronousfilereader.py index cfbd12763..4f07206c6 100644 --- a/packaging-tools/asynchronousfilereader.py +++ b/packaging-tools/asynchronousfilereader.py @@ -31,16 +31,11 @@ SOFTWARE. __version__ = '0.2.1' -import threading -try: - # Python 2 - from Queue import Queue -except ImportError: - # Python 3 - from queue import Queue +from queue import Queue +from threading import Thread -class AsynchronousFileReader(threading.Thread): +class AsynchronousFileReader(Thread): """ Helper class to implement asynchronous reading of a file in a separate thread. Pushes read lines on a queue to @@ -53,7 +48,7 @@ class AsynchronousFileReader(threading.Thread): queue = Queue() self.queue = queue - threading.Thread.__init__(self) + Thread.__init__(self) if autostart: self.start() diff --git a/packaging-tools/bld_ifw_tools.py b/packaging-tools/bld_ifw_tools.py index be539542d..ae2ec7c93 100755 --- a/packaging-tools/bld_ifw_tools.py +++ b/packaging-tools/bld_ifw_tools.py @@ -29,21 +29,32 @@ # ############################################################################# -import sys +import argparse import os import re -import argparse -import multiprocessing -import bldinstallercommon -import pkg_constants -import shutil import shlex +import shutil import subprocess -from read_remote_config import get_pkg_value -from bld_utils import is_windows, is_macos, is_linux +import sys +from multiprocessing import cpu_count from pathlib import Path -from bldinstallercommon import locate_path + +from bld_utils import is_linux, is_macos, is_windows +from bldinstallercommon import ( + clone_repository, + extract_file, + get_tag_from_branch, + is_content_url_valid, + list_as_string, + locate_executable, + locate_path, + move_tree, + remove_tree, + 
retrieve_url, +) from installer_utils import PackagingError +from pkg_constants import IFW_BUILD_ARTIFACTS_DIR +from read_remote_config import get_pkg_value from runner import do_execute_sub_process ROOT_DIR = os.path.dirname(os.path.realpath(__file__)) @@ -202,13 +213,13 @@ class IfwOptions: self.qt_qmake_bin = 'qmake.exe' self.qt_configure_bin = self.qt_source_dir + os.sep + 'configure.bat' else: - self.make_cmd = 'make -j' + str(multiprocessing.cpu_count() + 1) + self.make_cmd = 'make -j' + str(cpu_count() + 1) self.make_doc_cmd = 'make' self.make_install_cmd = 'make install' self.qt_qmake_bin = 'qmake' self.qt_configure_bin = self.qt_source_dir + os.sep + 'configure' - self.build_artifacts_dir = os.path.join(ROOT_DIR, pkg_constants.IFW_BUILD_ARTIFACTS_DIR) + self.build_artifacts_dir = os.path.join(ROOT_DIR, IFW_BUILD_ARTIFACTS_DIR) self.mac_deploy_qt_archive_name = 'macdeployqt.7z' self.mac_qt_menu_nib_archive_name = 'qt_menu.nib.7z' # determine filenames used later on @@ -239,7 +250,7 @@ class IfwOptions: def sanity_check(self): # check qt src package url - res = bldinstallercommon.is_content_url_valid(self.qt_source_package_uri) + res = is_content_url_valid(self.qt_source_package_uri) if not(res): print('*** Qt src package uri is invalid: {0}'.format(self.qt_source_package_uri)) sys.exit(-1) @@ -310,7 +321,7 @@ def build_ifw(options, create_installer=False, build_ifw_examples=False): configure_options = get_dynamic_qt_configure_options() + '-prefix ' + options.qt_build_dir_dynamic + os.sep + 'qtbase' # Although we have a shadow build qt sources are still taminated. Unpack sources again. 
if os.path.exists(options.qt_source_dir): - bldinstallercommon.remove_tree(options.qt_source_dir) + remove_tree(options.qt_source_dir) prepare_qt_sources(options) build_qt(options, options.qt_build_dir_dynamic, configure_options, options.qt_build_modules_docs) build_ifw_docs(options) @@ -342,23 +353,23 @@ def prepare_qt_sources(options): def prepare_compressed_package(src_pkg_uri, src_pkg_saveas, destination_dir): print('Fetching package from: {0}'.format(src_pkg_uri)) if not os.path.isfile(src_pkg_saveas): - if not bldinstallercommon.is_content_url_valid(src_pkg_uri): + if not is_content_url_valid(src_pkg_uri): print('*** Src package uri is invalid! Abort!') sys.exit(-1) - bldinstallercommon.retrieve_url(src_pkg_uri, src_pkg_saveas) + retrieve_url(src_pkg_uri, src_pkg_saveas) else: print('Found old local package, using that: {0}'.format(src_pkg_saveas)) print('Done') print('--------------------------------------------------------------------') Path(destination_dir).mkdir(parents=True, exist_ok=True) - bldinstallercommon.extract_file(src_pkg_saveas, destination_dir) + extract_file(src_pkg_saveas, destination_dir) dir_contents = os.listdir(destination_dir) items = len(dir_contents) if items == 1: dir_name = dir_contents[0] full_dir_name = destination_dir + os.sep + dir_name - bldinstallercommon.move_tree(full_dir_name, destination_dir) - bldinstallercommon.remove_tree(full_dir_name) + move_tree(full_dir_name, destination_dir) + remove_tree(full_dir_name) else: print('*** Invalid dir structure encountered?!') sys.exit(-1) @@ -411,7 +422,7 @@ def prepare_installer_framework(options): Path(options.installer_framework_build_dir).mkdir(parents=True, exist_ok=True) if options.qt_installer_framework_uri.endswith('.git'): # clone repos - bldinstallercommon.clone_repository(options.qt_installer_framework_uri, options.qt_installer_framework_branch, options.installer_framework_source_dir, True) + clone_repository(options.qt_installer_framework_uri, 
options.qt_installer_framework_branch, options.installer_framework_source_dir, True) else: # fetch src package @@ -419,10 +430,10 @@ def prepare_installer_framework(options): def start_IFW_build(options, cmd_args, installer_framework_build_dir): - print("cmd_args: " + bldinstallercommon.list_as_string(cmd_args)) + print("cmd_args: " + list_as_string(cmd_args)) do_execute_sub_process(cmd_args, installer_framework_build_dir) cmd_args = options.make_cmd - print("cmd_args: " + bldinstallercommon.list_as_string(cmd_args)) + print("cmd_args: " + list_as_string(cmd_args)) do_execute_sub_process(cmd_args.split(' '), installer_framework_build_dir) @@ -571,7 +582,7 @@ def build_and_archive_qt(options): configure_options = get_dynamic_qt_configure_options() + '-prefix ' + options.qt_build_dir_dynamic + os.sep + 'qtbase' # Although we have a shadow build qt sources are still contaminated. Unpack sources again. if os.path.exists(options.qt_source_dir): - bldinstallercommon.remove_tree(options.qt_source_dir) + remove_tree(options.qt_source_dir) prepare_qt_sources(options) build_qt(options, options.qt_build_dir_dynamic, configure_options, options.qt_build_modules_docs) @@ -590,10 +601,10 @@ def clean_build_environment(options): if os.path.isfile(options.installer_framework_payload_arch): os.remove(options.installer_framework_payload_arch) if os.path.exists(options.build_artifacts_dir): - bldinstallercommon.remove_tree(options.build_artifacts_dir) + remove_tree(options.build_artifacts_dir) Path(options.build_artifacts_dir).mkdir(parents=True, exist_ok=True) if os.path.exists(options.installer_framework_build_dir): - bldinstallercommon.remove_tree(options.installer_framework_build_dir) + remove_tree(options.installer_framework_build_dir) if os.path.exists(options.installer_framework_pkg_dir): shutil.rmtree(options.installer_framework_pkg_dir) @@ -604,11 +615,11 @@ def clean_build_environment(options): return if os.path.exists(options.installer_framework_source_dir): - 
bldinstallercommon.remove_tree(options.installer_framework_source_dir) + remove_tree(options.installer_framework_source_dir) if os.path.exists(options.qt_source_dir): - bldinstallercommon.remove_tree(options.qt_source_dir) + remove_tree(options.qt_source_dir) if os.path.exists(options.qt_build_dir): - bldinstallercommon.remove_tree(options.qt_source_dir) + remove_tree(options.qt_source_dir) if os.path.isfile(options.qt_source_package_uri_saveas): os.remove(options.qt_source_package_uri_saveas) if os.path.isfile(options.qt_installer_framework_uri_saveas): @@ -633,7 +644,7 @@ def archive_installer_framework(installer_framework_build_dir, installer_framewo # create a package with a tagged name. # Package with the tagged name is needed for creating e.g. offline installers from stable builds if options.qt_installer_framework_uri.endswith('.git') and create_tagged_package: - tag = bldinstallercommon.get_tag_from_branch(options.installer_framework_source_dir, options.qt_installer_framework_branch) + tag = get_tag_from_branch(options.installer_framework_source_dir, options.qt_installer_framework_branch) if tag: print('Create archive from tag {0}'.format(tag)) installer_framework_tagged_archive = 'installer-framework-build-' + tag + "-" + options.plat_suffix + '-' + options.architecture + '.7z' @@ -651,13 +662,13 @@ def archive_installerbase(options): cmd_args_clean = [] bin_temp = '' if is_linux() or is_macos(): - bin_path = bldinstallercommon.locate_executable(options.installer_framework_build_dir, ['installerbase']) + bin_path = locate_executable(options.installer_framework_build_dir, ['installerbase']) bin_temp = ROOT_DIR + os.sep + '.tempSDKMaintenanceTool' shutil.copy(bin_path, bin_temp) cmd_args_archive = [ARCHIVE_PROGRAM, 'a', options.installer_base_archive_name, bin_temp] cmd_args_clean = ['rm', bin_temp] if is_windows(): - bin_path = bldinstallercommon.locate_executable(options.installer_framework_build_dir, ['installerbase.exe']) + bin_path = 
locate_executable(options.installer_framework_build_dir, ['installerbase.exe']) bin_temp = ROOT_DIR + os.sep + 'tempSDKMaintenanceToolBase.exe' shutil.copy(bin_path, bin_temp) if options.signserver and options.signpwd: @@ -680,11 +691,11 @@ def archive_binarycreator(options): print('Archive Installerbase and Binarycreator') cmd_args_archive = [] if is_linux() or is_macos(): - bin_path = bldinstallercommon.locate_executable(options.installer_framework_build_dir, ['installerbase']) - binarycreator_path = bldinstallercommon.locate_executable(options.installer_framework_build_dir, ['binarycreator']) + bin_path = locate_executable(options.installer_framework_build_dir, ['installerbase']) + binarycreator_path = locate_executable(options.installer_framework_build_dir, ['binarycreator']) elif is_windows(): - bin_path = bldinstallercommon.locate_executable(options.installer_framework_build_dir, ['installerbase.exe']) - binarycreator_path = bldinstallercommon.locate_executable(options.installer_framework_build_dir, ['binarycreator.exe']) + bin_path = locate_executable(options.installer_framework_build_dir, ['installerbase.exe']) + binarycreator_path = locate_executable(options.installer_framework_build_dir, ['binarycreator.exe']) else: raise Exception("Not a supported platform") cmd_args_archive = ['7z', 'a', options.binarycreator_archive_name, bin_path, binarycreator_path] diff --git a/packaging-tools/bld_lib.py b/packaging-tools/bld_lib.py index 93009735b..51a22ae8e 100644 --- a/packaging-tools/bld_lib.py +++ b/packaging-tools/bld_lib.py @@ -29,34 +29,33 @@ # ############################################################################# -import os -import sys import argparse import logging -import urllib.request -import tarfile -import shutil -import glob -import subprocess +import os +import platform as plat # import as plat to not shadow the "import platform" import re +import shutil +import sys +import tarfile +from glob import glob +from shutil import which +from 
subprocess import CalledProcessError, check_call from time import gmtime, strftime from typing import List, Tuple from urllib.parse import urlparse -from shutil import which -from remote_uploader import RemoteUploader -from read_remote_config import get_pkg_value -from bld_utils import is_windows +from urllib.request import urlretrieve +from rainbow_logging_handler import RainbowLoggingHandler + +from bld_utils import is_windows +from read_remote_config import get_pkg_value +from remote_uploader import RemoteUploader LOG_FMT_CI = "%(asctime)s %(levelname)s:%(filename)s:%(lineno)d(%(process)d): %(message)s" log = logging.getLogger("Bld") log.setLevel(logging.INFO) # Unify format of all messages -try: - from rainbow_logging_handler import RainbowLoggingHandler - handler = RainbowLoggingHandler(sys.stderr, color_asctime=(None, None, False)) -except ImportError: - handler = logging.StreamHandler() +handler = RainbowLoggingHandler(sys.stderr, color_asctime=(None, None, False)) formatter = logging.Formatter(LOG_FMT_CI) handler.setFormatter(formatter) @@ -97,7 +96,7 @@ def downloadQtPkg(args: argparse.Namespace, currentDir: str) -> Tuple[str, str]: log.info("Using existing: %s", saveAs) else: log.info("Downloading: %s into: %s", args.qtpkg, saveAs) - urllib.request.urlretrieve(args.qtpkg, saveAs) + urlretrieve(args.qtpkg, saveAs) return saveAs, qtVersion @@ -113,7 +112,7 @@ def extractArchive(saveAs: str, currentDir: str) -> str: elif saveAs.endswith(".7z"): try: os.chdir(qtDestDir) - subprocess.check_call(['7z', 'x', saveAs]) + check_call(['7z', 'x', saveAs]) except Exception as e: log.error("Extracting 7z file failed: %s", str(e)) raise @@ -138,7 +137,7 @@ def build(qtDestDir: str, currentDir: str) -> str: f.write("[Paths]\n") f.write("Prefix=..\n") - proFile = glob.glob(os.path.join(args.src_path, "*.pro")) + proFile = glob(os.path.join(args.src_path, "*.pro")) assert proFile, "Could not find .pro file(s) from: {0}".format(args.src_path) proFile = proFile[0] 
log.info("Using .pro file: %s", proFile) @@ -153,12 +152,12 @@ def build(qtDestDir: str, currentDir: str) -> str: try: os.chdir(bldDir) - subprocess.check_call([qmakeTool, proFile]) - subprocess.check_call([makeToolName]) + check_call([qmakeTool, proFile]) + check_call([makeToolName]) # on windows chhop out the drive letter (e.g. 'C:'" installRoot = installRootDir[2:] if is_windows() else installRootDir - subprocess.check_call([makeToolName, 'install', 'INSTALL_ROOT=' + installRoot]) - except subprocess.CalledProcessError as buildError: + check_call([makeToolName, 'install', 'INSTALL_ROOT=' + installRoot]) + except CalledProcessError as buildError: log.error("Failed to build the project: %s", str(buildError)) raise except Exception as e: @@ -180,14 +179,12 @@ def archive(args: argparse.Namespace, installRootDir: str, currentDir: str) -> s for lib in libs: shutil.copy2(lib, archivePath) - # import as plat to not shadow the "import platform" - import platform as plat arch = "x86_64" if sys.maxsize > 2**32 else "x86" artifactsFileName = "artifacts-" + plat.system().lower() + "-" + arch + ".7z" artifactsFilePath = os.path.join(currentDir, artifactsFileName) try: os.chdir(archivePath) - subprocess.check_call(['7z', 'a', '-m0=lzma2', '-mmt=16', artifactsFilePath, '*']) + check_call(['7z', 'a', '-m0=lzma2', '-mmt=16', artifactsFilePath, '*']) except Exception as e: print(e) raise diff --git a/packaging-tools/bld_module.py b/packaging-tools/bld_module.py index 07d244ca1..16665ed10 100755 --- a/packaging-tools/bld_module.py +++ b/packaging-tools/bld_module.py @@ -29,23 +29,38 @@ # ############################################################################# -# built in imports -import argparse # commandline argument parser -import multiprocessing +import argparse import os -import sys import shutil -import fileinput +import sys +from fileinput import FileInput from functools import reduce +from multiprocessing import cpu_count from pathlib import Path -# own imports -from 
threadedwork import ThreadedWork -from bld_utils import runCommand, runBuildCommand, runInstallCommand, stripVars, is_windows, is_linux, is_macos -import bldinstallercommon -from bldinstallercommon import locate_path, locate_paths +from bld_utils import ( + is_linux, + is_macos, + is_windows, + runBuildCommand, + runCommand, + runInstallCommand, + stripVars, +) +from bldinstallercommon import ( + clone_repository, + create_download_and_extract_tasks, + create_qt_download_task, + locate_path, + locate_paths, + patch_qt, + remove_tree, + rename_android_soname_files, + search_for_files, +) from installer_utils import PackagingError from runner import do_execute_sub_process +from threadedwork import ThreadedWork SCRIPT_ROOT_DIR = os.path.dirname(os.path.realpath(__file__)) MODULE_SRC_DIR_NAME = 'module_src' @@ -79,7 +94,7 @@ def erase_qmake_prl_build_dir(search_path): # erase lines starting with 'QMAKE_PRL_BUILD_DIR' from .prl files for item in file_list: found = False - for line in fileinput.FileInput(item, inplace=1): + for line in FileInput(item, inplace=1): if line.startswith('QMAKE_PRL_BUILD_DIR'): found = True print(''.rstrip('\n')) @@ -95,11 +110,11 @@ def erase_qmake_prl_build_dir(search_path): def patch_build_time_paths(search_path, search_strings, qt_install_prefix): extension_list = ['*.prl', '*.pri', '*.pc', '*.la'] search_regexp = '|'.join(search_strings) - file_list = bldinstallercommon.search_for_files(search_path, extension_list, search_regexp) + file_list = search_for_files(search_path, extension_list, search_regexp) for item in file_list: print('Replacing {0} paths from file: {1}'.format(search_strings, item)) - for line in fileinput.FileInput(item, inplace=1): + for line in FileInput(item, inplace=1): patched_line = reduce(lambda accum, value: accum.replace(value, qt_install_prefix), search_strings, line) @@ -184,12 +199,12 @@ tempPath = os.path.abspath(os.path.join(os.path.dirname(__file__), 'temp')) # clone module repo if callerArguments.module_url 
!= '': Path(MODULE_SRC_DIR).mkdir(parents=True, exist_ok=True) - bldinstallercommon.clone_repository(callerArguments.module_url, callerArguments.module_branch, os.path.join(os.path.dirname(__file__), MODULE_SRC_DIR_NAME)) + clone_repository(callerArguments.module_url, callerArguments.module_branch, os.path.join(os.path.dirname(__file__), MODULE_SRC_DIR_NAME)) qtModuleSourceDirectory = MODULE_SRC_DIR elif callerArguments.module7z != '': Path(MODULE_SRC_DIR).mkdir(parents=True, exist_ok=True) myGetQtModule = ThreadedWork("get and extract module src") - myGetQtModule.addTaskObject(bldinstallercommon.create_download_extract_task(callerArguments.module7z, MODULE_SRC_DIR, tempPath, callerArguments)) + myGetQtModule.addTaskObject(create_download_and_extract_tasks(callerArguments.module7z, MODULE_SRC_DIR, tempPath, callerArguments)) myGetQtModule.run() qtModuleSourceDirectory = MODULE_SRC_DIR else: @@ -210,9 +225,9 @@ if is_windows(): # clean step if callerArguments.clean: print("##### {0} #####".format("clean old builds")) - bldinstallercommon.remove_tree(callerArguments.qt5path) - bldinstallercommon.remove_tree(qtModuleInstallDirectory) - bldinstallercommon.remove_tree(tempPath) + remove_tree(callerArguments.qt5path) + remove_tree(qtModuleInstallDirectory) + remove_tree(tempPath) if not os.path.lexists(callerArguments.qt5path) and not callerArguments.qt5_module_urls: parser.print_help() @@ -226,7 +241,7 @@ if not os.path.lexists(callerArguments.qt5path): # get Qt myGetQtBinaryWork = ThreadedWork("get and extract Qt 5 binary") myGetQtBinaryWork.addTaskObject( - bldinstallercommon.create_qt_download_task( + create_qt_download_task( callerArguments.qt5_module_urls, callerArguments.qt5path, tempPath, callerArguments ) @@ -237,7 +252,7 @@ if not os.path.lexists(callerArguments.qt5path): qt_install_prefix = get_qt_install_prefix(callerArguments.qt5path) # "install" Qt - bldinstallercommon.patch_qt(callerArguments.qt5path) + patch_qt(callerArguments.qt5path) # lets start 
building @@ -262,7 +277,7 @@ if is_macos(): environment["DYLD_FRAMEWORK_PATH"] = os.path.join(callerArguments.qt5path, 'lib') if not is_windows(): - environment["MAKEFLAGS"] = "-j" + str(multiprocessing.cpu_count() + 1) + environment["MAKEFLAGS"] = "-j" + str(cpu_count() + 1) if callerArguments.debug: buildType = 'debug' @@ -315,7 +330,7 @@ if ret: # patch .so filenames on Windows/Android if is_windows() and os.environ.get('DO_PATCH_ANDROID_SONAME_FILES'): - bldinstallercommon.rename_android_soname_files(qtModuleInstallDirectory) + rename_android_soname_files(qtModuleInstallDirectory) # doc collection if callerArguments.collectDocs: diff --git a/packaging-tools/bld_openssl.py b/packaging-tools/bld_openssl.py index 74e4e4628..4993f6ec0 100644 --- a/packaging-tools/bld_openssl.py +++ b/packaging-tools/bld_openssl.py @@ -32,8 +32,6 @@ # ############################################################################# -# import the print function which is used in python 3.x -from __future__ import print_function import argparse import os import platform diff --git a/packaging-tools/bld_python.py b/packaging-tools/bld_python.py index 9cf57d193..15916e87a 100644 --- a/packaging-tools/bld_python.py +++ b/packaging-tools/bld_python.py @@ -29,19 +29,19 @@ # ############################################################################# -import os -import sys -import re -import asyncio import argparse +import os import platform -import multiprocessing -import subprocess -from shutil import which, rmtree, copytree +import re +import sys +from asyncio import get_event_loop +from multiprocessing import cpu_count +from shutil import copytree, rmtree, which +from subprocess import check_output + +from installer_utils import cd, download_archive, extract_archive, is_valid_url_path from logging_util import init_logger from runner import async_exec_cmd, exec_cmd -from installer_utils import cd, is_valid_url_path, extract_archive, download_archive - log = init_logger(__name__, 
debug_mode=False) @@ -82,7 +82,7 @@ async def create_symlink(pythonDir: str): pythonExe = os.path.join(pythonDir, 'python.exe') assert os.path.isfile(pythonExe), "The 'python' executable did not exist: {0}".format(pythonExe) versionCmd = [pythonExe, '--version'] - versionOutput = subprocess.check_output(versionCmd, shell=True).decode("utf-8") + versionOutput = check_output(versionCmd, shell=True).decode("utf-8") match = re.search(r'(\d+)\.(\d+)\.(\d+)', versionOutput) if match: destination = os.path.join(pythonDir, 'python' + match.group(1) + match.group(2) + '.exe') @@ -119,7 +119,7 @@ async def _build_python(srcDir: str, bldDir: str, prefix: str) -> str: log.info(" Build dir: %s", bldDir) log.info(" Prefix: %s", prefix) system = platform.system().lower() - cpuCount = str(multiprocessing.cpu_count()) + cpuCount = str(cpu_count()) if "darwin" in system: opensslQueryCmd = ['brew', '--prefix', 'openssl'] opensslPath = exec_cmd(opensslQueryCmd) @@ -180,5 +180,5 @@ if __name__ == "__main__": log.error("Could not find '{0}' from the system. This tool is needed. 
Aborting..".format(requiredTool)) sys.exit(1) - loop = asyncio.get_event_loop() + loop = get_event_loop() loop.run_until_complete(build_python(args.src, args.prefix)) diff --git a/packaging-tools/bld_sdktool.py b/packaging-tools/bld_sdktool.py index 4689685bf..289b6d45d 100644 --- a/packaging-tools/bld_sdktool.py +++ b/packaging-tools/bld_sdktool.py @@ -30,16 +30,16 @@ ############################################################################# import os +from collections import namedtuple from pathlib import Path -import bldinstallercommon -from bld_utils import is_windows, is_linux -import collections +from bld_utils import is_linux, is_windows +from bldinstallercommon import extract_file, remove_one_tree_level, retrieve_url from runner import do_execute_sub_process -BuildParams = collections.namedtuple('BuildParams', - ['src_path', 'build_path', 'target_path', - 'make_command', 'redirect_output']) +BuildParams = namedtuple('BuildParams', + ['src_path', 'build_path', 'target_path', + 'make_command', 'redirect_output']) def qt_static_configure_options(): @@ -85,10 +85,10 @@ def get_and_extract_qt_src(url, temp, path): Path(temp).mkdir(parents=True, exist_ok=True) ext = package_extension(url) file_path = os.path.join(temp, 'qtsrc' + ext) - bldinstallercommon.retrieve_url(url, file_path) + retrieve_url(url, file_path) Path(path).mkdir(parents=True, exist_ok=True) - bldinstallercommon.extract_file(file_path, path) - bldinstallercommon.remove_one_tree_level(path) + extract_file(file_path, path) + remove_one_tree_level(path) def configure_qt(params, src, build): diff --git a/packaging-tools/bld_utils.py b/packaging-tools/bld_utils.py index 811aec7e7..e5c5d3b0f 100644 --- a/packaging-tools/bld_utils.py +++ b/packaging-tools/bld_utils.py @@ -29,27 +29,27 @@ # ############################################################################# -# built in imports -from distutils.spawn import find_executable # runCommand method import os +import shutil import sys +from builtins 
import OSError +from collections import deque +from copy import deepcopy +from distutils.spawn import find_executable # runCommand method +from socket import setdefaulttimeout +from subprocess import PIPE, STDOUT, Popen from sys import platform -import time -import shutil -import subprocess -import threading -import collections -import urllib.parse -import urllib.request -import urllib.error -import copy -import builtins +from threading import currentThread +from time import sleep +from urllib.error import HTTPError +from urllib.parse import urljoin, urlparse +from urllib.request import pathname2url, urlopen + # 3rd party module to read process output in a convenient way from asynchronousfilereader import AsynchronousFileReader # make a timeout for download jobs -import socket -socket.setdefaulttimeout(30) +setdefaulttimeout(30) def is_windows() -> bool: @@ -71,8 +71,8 @@ def is_linux() -> bool: # but should not do that on the further used environment dict def deep_copy_arguments(to_call): def f(*args, **kwargs): - return to_call(*(copy.deepcopy(x) for x in args), - **{k: copy.deepcopy(v) for k, v in kwargs.items()}) + return to_call(*(deepcopy(x) for x in args), + **{k: deepcopy(v) for k, v in kwargs.items()}) return f @@ -139,7 +139,7 @@ def urllib2_response_read(response, file_path, block_size, total_size): def download(url, target, read_block_size=1048576): try: if os.path.isdir(os.path.abspath(target)): - filename = os.path.basename(urllib.parse.urlparse(url).path) + filename = os.path.basename(urlparse(url).path) target = os.path.join(os.path.abspath(target), filename) if os.path.lexists(target): raise Exception("Can not download '{0}' to '{1}' as target. 
The file already exists.".format(url, target)) @@ -175,14 +175,14 @@ def download(url, target, read_block_size=1048576): try: # use urlopen which raise an error if that file is not existing - response = urllib.request.urlopen(url) + response = urlopen(url) total_size = response.info().get('Content-Length').strip() print("Downloading file from '{0}' with size {1} bytes to {2}".format(url, total_size, target)) # run the download received_size = urllib2_response_read(response, savefile_tmp, read_block_size, total_size) if received_size != int(total_size): raise Exception("Broken download, got a wrong size after download from '{0}'(total size: {1}, but {2} received).".format(url, total_size, received_size)) - except urllib.error.HTTPError as error: + except HTTPError as error: raise Exception("Can not download '{0}' to '{1}' as target(error code: '{2}').".format(url, target, error.code)) renamed = False @@ -199,10 +199,10 @@ def download(url, target, read_block_size=1048576): renamed = True # make sure that another output starts in a new line sys.stdout.write(os.linesep) - except builtins.WindowsError as e: + except OSError as e: # if it still exists just try that after a microsleep and stop this after 720 tries if os.path.lexists(savefile_tmp) and tryRenameCounter < 720: - time.sleep(2) + sleep(2) continue else: if not os.path.lexists(target): @@ -243,12 +243,12 @@ def getEnvironment(extra_environment=None, callerArguments=None): @deep_copy_arguments def runCommand(command, currentWorkingDirectory, callerArguments=None, extra_environment=None, onlyErrorCaseOutput=False, expectedExitCodes=[0]): - if builtins.type(expectedExitCodes) is not list: - raise TypeError("{}({}) is not {}".format("expectedExitCodes", builtins.type(expectedExitCodes), list)) - if builtins.type(onlyErrorCaseOutput) is not bool: - raise TypeError("{}({}) is not {}".format("onlyErrorCaseOutput", builtins.type(onlyErrorCaseOutput), bool)) + if type(expectedExitCodes) is not list: + raise 
TypeError("{}({}) is not {}".format("expectedExitCodes", type(expectedExitCodes), list)) + if type(onlyErrorCaseOutput) is not bool: + raise TypeError("{}({}) is not {}".format("onlyErrorCaseOutput", type(onlyErrorCaseOutput), bool)) - if builtins.type(command) is list: + if type(command) is list: commandAsList = command else: commandAsList = command[:].split(' ') @@ -282,21 +282,21 @@ def runCommand(command, currentWorkingDirectory, callerArguments=None, extra_env useShell = True if sys.platform.startswith('win') else False lastStdOutLines = [] lastStdErrLines = [] - if threading.currentThread().name == "MainThread" and not onlyErrorCaseOutput: - process = subprocess.Popen( + if currentThread().name == "MainThread" and not onlyErrorCaseOutput: + process = Popen( commandAsList, shell=useShell, cwd=currentWorkingDirectory, bufsize=-1, env=environment ) else: - process = subprocess.Popen( + process = Popen( commandAsList, shell=useShell, - stdout=subprocess.PIPE, stderr=subprocess.PIPE, + stdout=PIPE, stderr=PIPE, cwd=currentWorkingDirectory, bufsize=-1, env=environment ) maxSavedLineNumbers = 1000 - lastStdOutLines = collections.deque(maxlen=maxSavedLineNumbers) - lastStdErrLines = collections.deque(maxlen=maxSavedLineNumbers) + lastStdOutLines = deque(maxlen=maxSavedLineNumbers) + lastStdErrLines = deque(maxlen=maxSavedLineNumbers) # Launch the asynchronous readers of the process' stdout and stderr. stdout = AsynchronousFileReader(process.stdout) @@ -308,18 +308,18 @@ def runCommand(command, currentWorkingDirectory, callerArguments=None, extra_env for line in stdout.readlines(): line = line.decode() lastStdOutLines.append(line) - if threading.currentThread().name != "MainThread": + if currentThread().name != "MainThread": sys.stdout.write(line) # Show what we received from standard error. 
for line in stderr.readlines(): line = line.decode() lastStdErrLines.append(line) - if threading.currentThread().name != "MainThread": + if currentThread().name != "MainThread": sys.stdout.write(line) # Sleep a bit before polling the readers again. - time.sleep(1) + sleep(1) # Let's be tidy and join the threads we've started. stdout.join() @@ -338,18 +338,18 @@ def runCommand(command, currentWorkingDirectory, callerArguments=None, extra_env # sys.stderr.write("set " + key + "=" + environment[key] + os.linesep) if exitCode not in expectedExitCodes: lastOutput = "" - type = "" - if threading.currentThread().name != "MainThread" or onlyErrorCaseOutput: + exit_type = "" + if currentThread().name != "MainThread" or onlyErrorCaseOutput: if len(lastStdErrLines) != 0: lastOutput += "".join(str(lastStdErrLines)) - type = "error " + exit_type = "error " elif len(lastStdOutLines) != 0: lastOutput += "".join(str(lastStdOutLines)) prettyLastOutput = os.linesep + '======================= error =======================' + os.linesep prettyLastOutput += "Working Directory: " + currentWorkingDirectory + os.linesep prettyLastOutput += "Last command: " + ' '.join(commandAsList) + os.linesep if lastOutput: - prettyLastOutput += "last {0}output:{1}{2}".format(type, os.linesep, lastOutput) + prettyLastOutput += "last {0}output:{1}{2}".format(exit_type, os.linesep, lastOutput) else: prettyLastOutput += " - no process output caught - " raise Exception("Different exit code then expected({0}): {1}{2}".format(expectedExitCodes, exitCode, prettyLastOutput)) @@ -369,7 +369,7 @@ def runInstallCommand(arguments=['install'], currentWorkingDirectory=None, calle extra_environment["MAKEFLAGS"] = "-j1" if arguments: - installcommand.extend(arguments if builtins.type(arguments) is list else arguments.split()) + installcommand.extend(arguments if type(arguments) is list else arguments.split()) return runCommand(installcommand, currentWorkingDirectory, callerArguments, 
extra_environment=extra_environment, onlyErrorCaseOutput=onlyErrorCaseOutput) @@ -380,15 +380,15 @@ def runBuildCommand(arguments=None, currentWorkingDirectory=None, callerArgument buildcommand = callerArguments.buildcommand.split() if arguments: - buildcommand.extend(arguments if builtins.type(arguments) is list else arguments.split()) + buildcommand.extend(arguments if type(arguments) is list else arguments.split()) return runCommand(buildcommand, currentWorkingDirectory, callerArguments, extra_environment=extra_environment, onlyErrorCaseOutput=onlyErrorCaseOutput, expectedExitCodes=expectedExitCodes) @deep_copy_arguments def getReturnValue(command, currentWorkingDirectory=None, extra_environment=None, callerArguments=None): commandAsList = command[:].split(' ') - return subprocess.Popen( - commandAsList, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, + return Popen( + commandAsList, stdout=PIPE, stderr=STDOUT, cwd=currentWorkingDirectory, env=getEnvironment(extra_environment, callerArguments) ).communicate()[0].strip() @@ -419,4 +419,4 @@ def isGitDirectory(repository_path): def file_url(file_path): - return urllib.parse.urljoin('file:', urllib.request.pathname2url(file_path)) + return urljoin('file:', pathname2url(file_path)) diff --git a/packaging-tools/bldinstallercommon.py b/packaging-tools/bldinstallercommon.py index d5f1fd67f..a5c328f62 100644 --- a/packaging-tools/bldinstallercommon.py +++ b/packaging-tools/bldinstallercommon.py @@ -30,30 +30,26 @@ ############################################################################# import errno -import fnmatch import os import re import shutil -import subprocess -from subprocess import STDOUT -import tempfile -import sys import stat -import traceback -import urllib.request -import urllib.error -import urllib.parse +import sys +from fnmatch import fnmatch from pathlib import Path - -from bld_utils import runCommand, download, is_windows, is_macos, is_linux -from threadedwork import Task, ThreadedWork -from 
typing import Callable, Union, List +from subprocess import PIPE, STDOUT, Popen, check_call +from tempfile import mkdtemp +from traceback import print_exc +from typing import Callable, List, Union +from urllib.parse import urlparse +from urllib.request import urlcleanup, urlopen, urlretrieve + +from bld_utils import download, is_linux, is_macos, is_windows, runCommand from installer_utils import PackagingError from runner import do_execute_sub_process +from threadedwork import Task, ThreadedWork - -# need to include this for win platforms as long path names -# cause problems +# need to include this for win platforms as long path names cause problems if is_windows(): import win32api @@ -70,7 +66,7 @@ def is_content_url_valid(url): return True # throws error if url does not point to valid object try: - response = urllib.request.urlopen(url) + response = urlopen(url) total_size = response.info().get('Content-Length').strip() return int(total_size) > 0 except Exception: @@ -103,8 +99,8 @@ def dlProgress(count, blockSize, totalSize): def retrieve_url(url, savefile): try: savefile_tmp = savefile + '.tmp' - urllib.request.urlcleanup() - urllib.request.urlretrieve(url, savefile_tmp, reporthook=dlProgress) + urlcleanup() + urlretrieve(url, savefile_tmp, reporthook=dlProgress) shutil.move(savefile_tmp, savefile) except Exception: exc = sys.exc_info()[0] @@ -152,7 +148,7 @@ def move_tree(srcdir, dstdir, pattern=None): if os.path.isdir(srcfname) and not os.path.islink(srcfname): os.mkdir(dstfname) move_tree(srcfname, dstfname) - elif pattern is None or fnmatch.fnmatch(name, pattern): + elif pattern is None or fnmatch(name, pattern): if os.path.islink(srcfname): # shutil.move fails moving directory symlinks over file system bounds... 
linkto = os.readlink(srcfname) os.symlink(linkto, dstfname) @@ -191,7 +187,7 @@ def remove_one_tree_level(directory): dir_name = dircontents[0] full_dir_name = os.path.join(directory, dir_name) # avoid directory name collision by first moving to temporary dir - tempdir_base = tempfile.mkdtemp() + tempdir_base = mkdtemp() tempdir = os.path.join(tempdir_base, 'a') # dummy name shutil.move(full_dir_name, tempdir) move_tree(tempdir, directory) @@ -220,7 +216,7 @@ def remove_tree(path): try: runCommand(['rmdir', path, '/S', '/Q'], os.getcwd(), onlyErrorCaseOutput=True) except Exception: - traceback.print_exc() + print_exc() pass else: # shutil.rmtree(path) @@ -365,8 +361,8 @@ def requires_rpath(file_path): if not os.access(file_path, os.X_OK): return False return ( - re.search(r':*.R.*PATH=', subprocess.Popen( - ['chrpath', '-l', file_path], stdout=subprocess.PIPE + re.search(r':*.R.*PATH=', Popen( + ['chrpath', '-l', file_path], stdout=PIPE ).stdout.read().decode()) is not None ) return False @@ -379,7 +375,7 @@ def sanity_check_rpath_max_length(file_path, new_rpath): if is_linux(): if not os.access(file_path, os.X_OK): return False - result = re.search(r':*.R.*PATH=.*', subprocess.Popen(['chrpath', '-l', file_path], stdout=subprocess.PIPE).stdout.read().decode()) + result = re.search(r':*.R.*PATH=.*', Popen(['chrpath', '-l', file_path], stdout=PIPE).stdout.read().decode()) if not result: print('*** No RPath found from given file: ' + file_path) else: @@ -477,8 +473,8 @@ def handle_component_rpath(component_root_path, destination_lib_paths): # look for existing $ORIGIN path in the binary origin_rpath = re.search( r'\$ORIGIN[^:\n]*', - subprocess.Popen( - ['chrpath', '-l', file_full_path], stdout=subprocess.PIPE + Popen( + ['chrpath', '-l', file_full_path], stdout=PIPE ).stdout.read().decode() ) @@ -565,12 +561,12 @@ def git_archive_repo(repo_and_ref): if os.path.isfile(archive_name): os.remove(archive_name) # create temp directory - checkout_dir = tempfile.mkdtemp() + 
checkout_dir = mkdtemp() # clone given repo to temp clone_repository(repository, ref, checkout_dir, full_clone=True, init_subrepos=True) # git archive repo with given name archive_file = open(archive_name, 'w') - subprocess.check_call("git --no-pager archive %s" % (ref), stdout=archive_file, stderr=STDOUT, shell=True, cwd=checkout_dir) + check_call("git --no-pager archive %s" % (ref), stdout=archive_file, stderr=STDOUT, shell=True, cwd=checkout_dir) archive_file.close() print('Created archive: {0}'.format(archive_name)) shutil.rmtree(checkout_dir, ignore_errors=True) @@ -685,7 +681,7 @@ def create_extract_function(file_path, target_path, caller_arguments=None): # function ############################### def create_download_and_extract_tasks(url, target_path, temp_path, caller_arguments): - filename = os.path.basename(urllib.parse.urlparse(url).path) + filename = os.path.basename(urlparse(url).path) sevenzip_file = os.path.join(temp_path, filename) download_task = Task('download "{0}" to "{1}"'.format(url, sevenzip_file)) download_task.addFunction(download, url, sevenzip_file) @@ -698,7 +694,7 @@ def create_download_and_extract_tasks(url, target_path, temp_path, caller_argume # function ############################### def create_download_extract_task(url, target_path, temp_path, caller_arguments): - filename = os.path.basename(urllib.parse.urlparse(url).path) + filename = os.path.basename(urlparse(url).path) sevenzip_file = os.path.join(temp_path, filename) download_extract_task = Task("download {0} to {1} and extract it to {2}".format(url, sevenzip_file, target_path)) download_extract_task.addFunction(download, url, sevenzip_file) diff --git a/packaging-tools/build_clang.py b/packaging-tools/build_clang.py index 77f7bd6e1..663433d76 100644 --- a/packaging-tools/build_clang.py +++ b/packaging-tools/build_clang.py @@ -30,24 +30,24 @@ ############################################################################# import os -import shutil -import bld_utils -import 
bldinstallercommon +from shutil import rmtree + import environmentfrombatchfile -import threadedwork +from bld_utils import is_linux, is_macos, is_windows, runCommand +from bldinstallercommon import create_download_extract_task, create_qt_download_task from read_remote_config import get_pkg_value -from bld_utils import is_windows, is_macos, is_linux from runner import do_execute_sub_process +from threadedwork import ThreadedWork def git_clone_and_checkout(base_path, remote_repository_url, directory, revision): - bld_utils.runCommand(['git', 'clone', - '--depth', '1', - '--config', 'core.eol=lf', - '--config', 'core.autocrlf=input', - '--branch', revision, - '--recursive', - remote_repository_url, directory], base_path) + runCommand(['git', 'clone', + '--depth', '1', + '--config', 'core.eol=lf', + '--config', 'core.autocrlf=input', + '--branch', revision, + '--recursive', + remote_repository_url, directory], base_path) def get_clang(base_path, llvm_repository_url, llvm_revision): @@ -191,11 +191,11 @@ def mingw_training(base_path, qtcreator_path, environment, bitness): qt_mingw_module_urls = [qt_base_url + '/' + module + '/' + module + qt_mingw_postfix for module in qt_modules] qt_temp = os.path.join(base_path, 'qt_download') qt_mingw_temp = os.path.join(base_path, 'qt_download_mingw') - download_packages_work = threadedwork.ThreadedWork("get and extract Qt") - download_packages_work.addTaskObject(bldinstallercommon.create_qt_download_task(qt_module_urls, qt_dir, qt_temp, None)) - download_packages_work.addTaskObject(bldinstallercommon.create_qt_download_task(qt_mingw_module_urls, qt_mingw_dir, qt_mingw_temp, None)) + download_packages_work = ThreadedWork("get and extract Qt") + download_packages_work.addTaskObject(create_qt_download_task(qt_module_urls, qt_dir, qt_temp, None)) + download_packages_work.addTaskObject(create_qt_download_task(qt_mingw_module_urls, qt_mingw_dir, qt_mingw_temp, None)) - 
download_packages_work.addTaskObject(bldinstallercommon.create_download_extract_task( + download_packages_work.addTaskObject(create_download_extract_task( 'https://download.sysinternals.com/files/DebugView.zip', debugview_dir, base_path, @@ -205,7 +205,7 @@ def mingw_training(base_path, qtcreator_path, environment, bitness): cmake_arch_suffix = 'win64-x64' if bitness == 64 else 'win32-x86' cmake_base_url = 'http://' + pkg_server + '/packages/jenkins/cmake/' \ + cmake_version() + '/cmake-' + cmake_version() + '-' + cmake_arch_suffix + '.zip' - download_packages_work.addTaskObject(bldinstallercommon.create_download_extract_task( + download_packages_work.addTaskObject(create_download_extract_task( cmake_base_url, cmake_dir, base_path, None)) download_packages_work.run() @@ -246,10 +246,10 @@ def mingw_training(base_path, qtcreator_path, environment, bitness): '-S' + qtcreator_path, '-B' + creator_build_dir] - bld_utils.runCommand(qtc_cmake, creator_build_dir, None, environment) - bld_utils.runCommand([cmake_command, '--build', creator_build_dir], creator_build_dir, None, environment) - bld_utils.runCommand([cmake_command, '--install', creator_build_dir, '--prefix', creator_install_dir], creator_build_dir, None, environment) - bld_utils.runCommand([cmake_command, '--install', creator_build_dir, '--prefix', creator_install_dir, '--component', 'Dependencies'], creator_build_dir, None, environment) + runCommand(qtc_cmake, creator_build_dir, None, environment) + runCommand([cmake_command, '--build', creator_build_dir], creator_build_dir, None, environment) + runCommand([cmake_command, '--install', creator_build_dir, '--prefix', creator_install_dir], creator_build_dir, None, environment) + runCommand([cmake_command, '--install', creator_build_dir, '--prefix', creator_install_dir, '--component', 'Dependencies'], creator_build_dir, None, environment) # Remove the regular libclang.dll which got deployed via 'Dependencies' qtcreator install target 
os.remove(os.path.join(creator_install_dir, 'bin', 'libclang.dll')) @@ -257,7 +257,7 @@ def mingw_training(base_path, qtcreator_path, environment, bitness): # Train mingw libclang library with build QtCreator # First time open the project, then close it. This will generate initial settings and .user files. Second time do the actual training. for batchFile in ['qtc.openProject.batch', 'qtc.fileTextEditorCpp.batch']: - bld_utils.runCommand( + runCommand( [os.path.join(training_dir, 'runBatchFiles.bat'), msvc_version(), 'x64' if bitness == 64 else 'x86', batchFile], base_path, callerArguments=None, extra_environment=None, onlyErrorCaseOutput=False, expectedExitCodes=[0, 1] ) @@ -438,14 +438,14 @@ def check_clang(toolchain, build_path, environment): def package_clang(install_path, result_file_path): (basepath, dirname) = os.path.split(install_path) zip_command = ['7z', 'a', '-mmt4', result_file_path, dirname] - bld_utils.runCommand(zip_command, basepath) + runCommand(zip_command, basepath) def upload_clang(file_path, remote_path): (path, filename) = os.path.split(file_path) scp_bin = '%SCP%' if is_windows() else 'scp' scp_command = [scp_bin, filename, remote_path] - bld_utils.runCommand(scp_command, path) + runCommand(scp_command, path) def profile_data(toolchain): @@ -517,7 +517,7 @@ def main(): # Third time will use the training data collected and produce the optimized output if os.path.exists(profile_data_path): - shutil.rmtree(profile_data_path) + rmtree(profile_data_path) os.makedirs(profile_data_path) # Update the regular build, so that we can see the differences diff --git a/packaging-tools/build_clang_qdoc.py b/packaging-tools/build_clang_qdoc.py index 3280dca20..92f2fd0e8 100644 --- a/packaging-tools/build_clang_qdoc.py +++ b/packaging-tools/build_clang_qdoc.py @@ -30,20 +30,20 @@ ############################################################################# import os -import bld_utils + import environmentfrombatchfile -from bld_utils import is_windows, 
is_linux +from bld_utils import is_linux, is_windows, runCommand from read_remote_config import get_pkg_value from runner import do_execute_sub_process def git_clone_and_checkout(base_path, remote_repository_url, directory, revision): - bld_utils.runCommand(['git', 'clone', - '--config', 'core.eol=lf', - '--config', 'core.autocrlf=input', - '--branch', revision, - '--recursive', - remote_repository_url, directory], base_path) + runCommand(['git', 'clone', + '--config', 'core.eol=lf', + '--config', 'core.autocrlf=input', + '--branch', revision, + '--recursive', + remote_repository_url, directory], base_path) def get_clang(base_path, llvm_revision): @@ -240,14 +240,14 @@ def check_clang(toolchain, build_path, environment): def package_clang(install_path, result_file_path): (basepath, dirname) = os.path.split(install_path) zip_command = ['cmake', '-E', 'tar', 'cvf', result_file_path, '--format=7zip', dirname] - bld_utils.runCommand(zip_command, basepath) + runCommand(zip_command, basepath) def upload_clang(file_path, remote_path): (path, filename) = os.path.split(file_path) scp_bin = '%SCP%' if is_windows() else 'scp' scp_command = [scp_bin, filename, remote_path] - bld_utils.runCommand(scp_command, path) + runCommand(scp_command, path) def main(): diff --git a/packaging-tools/build_wrapper.py b/packaging-tools/build_wrapper.py index da694a0c2..9b2434114 100644 --- a/packaging-tools/build_wrapper.py +++ b/packaging-tools/build_wrapper.py @@ -32,26 +32,43 @@ """Scripts to generate SDK installer based on open source InstallerFramework""" -from configparser import ConfigParser -import optionparser import argparse -import collections -from glob import glob import json import os -import sys import re import shutil +import sys +from collections import namedtuple +from configparser import ConfigParser +from getpass import getuser +from glob import glob +from time import gmtime, strftime from urllib.parse import urlparse from urllib.request import urlopen -from time import 
gmtime, strftime -import bld_utils -from bld_utils import is_windows, is_macos, is_linux -import bldinstallercommon -from threadedwork import ThreadedWork, Task -import bld_sdktool + +from bld_sdktool import build_sdktool, zip_sdktool +from bld_utils import ( + download, + file_url, + get_commit_SHA, + is_linux, + is_macos, + is_windows, + runCommand, +) +from bldinstallercommon import ( + clone_repository, + copy_tree, + create_download_and_extract_tasks, + create_download_extract_task, + create_extract_function, + git_archive_repo, + safe_config_key_fetch, +) +from optionparser import getPkgOptions from read_remote_config import get_pkg_value from runner import do_execute_sub_process +from threadedwork import Task, ThreadedWork # ---------------------------------------------------------------------- SCRIPT_ROOT_DIR = os.path.dirname(os.path.realpath(__file__)) @@ -180,20 +197,20 @@ def create_download_documentation_task(base_url, download_path): dest_doc_path = os.path.join(download_path, 'doc') os.rename(source_path, dest_doc_path) # limit compression to 2 cores to limit memory footprint for 32bit Windows - bld_utils.runCommand(['7z', 'a', '-mx1', '-mmt2', '-md32m', '-ms=1g', target_filepath, dest_doc_path], - dest_doc_path, None) + runCommand(['7z', 'a', '-mx1', '-mmt2', '-md32m', '-ms=1g', target_filepath, dest_doc_path], + dest_doc_path, None) download_task = Task("downloading documentation from {0}".format(base_url)) for item in file_list: url = base_url + '/doc/' + item download_filepath = os.path.join(download_path, item) - download_task.addFunction(bld_utils.download, url, download_filepath) - download_task.addFunction(bldinstallercommon.create_extract_function(download_filepath, extract_path, None)) + download_task.addFunction(download, url, download_filepath) + download_task.addFunction(create_extract_function(download_filepath, extract_path, None)) download_task.addFunction(create_remove_one_dir_level_function(os.path.join(extract_path, 
item.rstrip(".zip")))) repackage_task = Task("repackaging documentation as {0}".format(target_filepath)) repackage_task.addFunction(repackage) - return (download_task, repackage_task, bld_utils.file_url(target_filepath)) + return (download_task, repackage_task, file_url(target_filepath)) def create_download_openssl_task(url, download_path): @@ -220,18 +237,18 @@ def create_download_openssl_task(url, download_path): else: source_path = linuxdir pattern = '*.so*' - bld_utils.runCommand(['7z', 'a', '-mmt2', target_filepath, pattern], - source_path, None) + runCommand(['7z', 'a', '-mmt2', target_filepath, pattern], + source_path, None) download_task = Task('downloading openssl from {0}'.format(url)) - download_task.addFunction(bld_utils.download, url, download_filepath) + download_task.addFunction(download, url, download_filepath) repackage_task = Task("repackaging openssl as {0}".format(target_filepath)) - repackage_task.addFunction(bldinstallercommon.create_extract_function(download_filepath, extract_path, None)) + repackage_task.addFunction(create_extract_function(download_filepath, extract_path, None)) repackage_task.addFunction(repackage) - return (download_task, repackage_task, bld_utils.file_url(target_filepath)) + return (download_task, repackage_task, file_url(target_filepath)) -PluginConf = collections.namedtuple('PluginConf', ['git_url', 'branch_or_tag', 'checkout_dir']) +PluginConf = namedtuple('PluginConf', ['git_url', 'branch_or_tag', 'checkout_dir']) def parseQtCreatorPlugins(pkgConfFile): @@ -239,7 +256,7 @@ def parseQtCreatorPlugins(pkgConfFile): pluginList = [] if not pkgConfFile: return pluginList - pluginOptions = optionparser.getPkgOptions(pkgConfFile) + pluginOptions = getPkgOptions(pkgConfFile) sectionName = "QtCreator.Build.Plugin" keyName = "plugins" if not pluginOptions.optionExists(sectionName, keyName): @@ -255,14 +272,14 @@ def parseQtCreatorPlugins(pkgConfFile): return pluginList -QtcPlugin = collections.namedtuple('QtcPlugin', ['name', - 
'path', - 'version', - 'dependencies', - 'modules', - 'additional_arguments', - 'build', - 'package_commercial']) +QtcPlugin = namedtuple('QtcPlugin', ['name', + 'path', + 'version', + 'dependencies', + 'modules', + 'additional_arguments', + 'build', + 'package_commercial']) def make_QtcPlugin(name, path, version, dependencies=None, modules=None, @@ -321,7 +338,7 @@ def create_qtcreator_source_package(source_path, plugin_name, version, edition, if create_tar or create_zip: if not os.path.exists(target_base): os.makedirs(target_base) - bldinstallercommon.copy_tree(source_path, target_base) + copy_tree(source_path, target_base) if create_tar: check_call_log(['tar', 'czf', file_base + '.tar.gz', '--exclude', '.git', file_base], target_path, log_filepath=log_filepath) @@ -539,8 +556,8 @@ def handle_qt_creator_build(optionDict, qtCreatorPlugins): if os.path.exists(qtCreatorSourceDirectory): shutil.rmtree(qtCreatorSourceDirectory) os.makedirs(qtCreatorSourceDirectory) - bldinstallercommon.clone_repository(optionDict['QT_CREATOR_GIT_URL'], optionDict['QT_CREATOR_GIT_BRANCH'], - qtCreatorSourceDirectory, full_clone=True, init_subrepos=True) + clone_repository(optionDict['QT_CREATOR_GIT_URL'], optionDict['QT_CREATOR_GIT_BRANCH'], + qtCreatorSourceDirectory, full_clone=True, init_subrepos=True) # Get Qt Creator plugin sources if not present yet for pluginConf in qtCreatorPlugins: checkoutDir = os.path.join(work_dir, pluginConf.checkout_dir) @@ -548,7 +565,7 @@ def handle_qt_creator_build(optionDict, qtCreatorPlugins): if os.path.exists(checkoutDir): shutil.rmtree(checkoutDir) os.makedirs(checkoutDir) - bldinstallercommon.clone_repository(pluginConf.git_url, pluginConf.branch_or_tag, checkoutDir, full_clone=True) + clone_repository(pluginConf.git_url, pluginConf.branch_or_tag, checkoutDir, full_clone=True) # Build time variables qtcreator_source = os.path.join(work_dir, 'qt-creator') @@ -612,7 +629,7 @@ def handle_qt_creator_build(optionDict, qtCreatorPlugins): extract_work = 
Task('Extract packages') def add_download_extract(url, target_path): - (download, extract) = bldinstallercommon.create_download_and_extract_tasks( + (download, extract) = create_download_and_extract_tasks( url, target_path, download_temp, None) download_work.addTaskObject(download) extract_work.addFunction(extract.do) @@ -684,7 +701,7 @@ def handle_qt_creator_build(optionDict, qtCreatorPlugins): qt_module_urls.append(documentation_local_url) if qt_extra_module_url: qt_module_urls.append(qt_extra_module_url) - qt_module_local_urls = [bld_utils.file_url(os.path.join(qt_temp, os.path.basename(url))) + qt_module_local_urls = [file_url(os.path.join(qt_temp, os.path.basename(url))) for url in qt_module_urls] # download and install qt @@ -781,7 +798,7 @@ def handle_qt_creator_build(optionDict, qtCreatorPlugins): additional_arguments=plugin_telemetry_args)]), # Build Qt Creator plugins - icu_local_url = bld_utils.file_url(os.path.join(qt_temp, os.path.basename(icu_libs))) if is_linux() else None + icu_local_url = file_url(os.path.join(qt_temp, os.path.basename(icu_libs))) if is_linux() else None # extract qtcreator bin and dev packages qtcreator_path = os.path.join(work_dir, 'qtc_build') check_call_log(['7z', 'x', '-y', os.path.join(work_dir, 'qt-creator_build', 'qtcreator.7z'), '-o' + qtcreator_path], @@ -792,7 +809,7 @@ def handle_qt_creator_build(optionDict, qtCreatorPlugins): openssl_url=openssl_local_url, additional_config=qtc_additional_config, log_filepath=log_filepath) - qtcreator_sha = bld_utils.get_commit_SHA(qtcreator_source) + qtcreator_sha = get_commit_SHA(qtcreator_source) with open(os.path.join(work_dir, 'QTC_SHA1'), 'w') as f: f.write(qtcreator_sha + '\n') @@ -801,7 +818,7 @@ def handle_qt_creator_build(optionDict, qtCreatorPlugins): sha1s = collect_qt_creator_plugin_sha1s(additional_plugins) licensemanaging_source = os.path.join(work_dir, 'license-managing') if os.path.exists(licensemanaging_source): - sha1s.append('license-managing: ' + 
bld_utils.get_commit_SHA(licensemanaging_source)) + sha1s.append('license-managing: ' + get_commit_SHA(licensemanaging_source)) sha1s.append('qt-creator: ' + qtcreator_sha) with open(os.path.join(work_dir, 'SHA1'), 'w') as f: f.writelines([sha + '\n' for sha in sha1s]) @@ -821,14 +838,14 @@ def handle_qt_creator_build(optionDict, qtCreatorPlugins): sdktool_build_path = os.path.join(work_dir, 'sdktool_build') sdktool_target_path = os.path.join(sdktool_build_path, 'target') with BuildLog(log_filepath) as f: - bld_sdktool.build_sdktool(sdktool_qtbase_src, os.path.join(sdktool_build_path, 'qt'), - os.path.join(work_dir, 'qt-creator', 'src', 'tools', 'sdktool'), - os.path.join(sdktool_build_path, 'src', 'tools', 'sdktool'), - sdktool_target_path, - 'nmake' if is_windows() else 'make', - redirect_output=f) - bld_sdktool.zip_sdktool(sdktool_target_path, os.path.join(work_dir, 'sdktool.7z'), - redirect_output=f) + build_sdktool(sdktool_qtbase_src, os.path.join(sdktool_build_path, 'qt'), + os.path.join(work_dir, 'qt-creator', 'src', 'tools', 'sdktool'), + os.path.join(sdktool_build_path, 'src', 'tools', 'sdktool'), + sdktool_target_path, + 'nmake' if is_windows() else 'make', + redirect_output=f) + zip_sdktool(sdktool_target_path, os.path.join(work_dir, 'sdktool.7z'), + redirect_output=f) # repackage and sign opensource and enterprise packages on macOS # these are then for direct packaging in the offline installers @@ -955,12 +972,12 @@ def handle_sdktool_build(optionDict): download_temp = os.path.join(work_dir, 'downloads') sdktool_build_path = os.path.join(work_dir, 'sdktool_build') sdktool_target_path = os.path.join(sdktool_build_path, 'target') - bld_sdktool.build_sdktool(sdktool_qtbase_src, os.path.join(sdktool_build_path, 'qt'), - os.path.join(work_dir, 'qt-creator', 'src', 'tools', 'sdktool'), - os.path.join(sdktool_build_path, 'src', 'tools', 'sdktool'), - sdktool_target_path, - 'nmake' if is_windows() else 'make') - bld_sdktool.zip_sdktool(sdktool_target_path, 
os.path.join(work_dir, 'sdktool.7z')) + build_sdktool(sdktool_qtbase_src, os.path.join(sdktool_build_path, 'qt'), + os.path.join(work_dir, 'qt-creator', 'src', 'tools', 'sdktool'), + os.path.join(sdktool_build_path, 'src', 'tools', 'sdktool'), + sdktool_target_path, + 'nmake' if is_windows() else 'make') + zip_sdktool(sdktool_target_path, os.path.join(work_dir, 'sdktool.7z')) file_upload_list = [('sdktool.7z', target_env_dir + '/sdktool.7z')] if is_windows(): # wininterrupt & qtcreatorcdbext cmd_args = [sys.executable, '-u', os.path.join(qtcreator_src, 'scripts', 'build.py'), @@ -973,7 +990,7 @@ def handle_sdktool_build(optionDict): python_url = optionDict.get('PYTHON_URL') if python_url: python_path = os.path.join(download_temp, 'python') - download_packages_work.addTaskObject(bldinstallercommon.create_download_extract_task( + download_packages_work.addTaskObject(create_download_extract_task( python_url, python_path, download_temp, None)) cmd_args.extend(['--python-path', python_path]) @@ -1014,8 +1031,8 @@ def create_remote_dirs(optionDict, server, dir_path): ############################### # git archive given repository ############################### -def git_archive_repo(optionDict, repo_and_ref): - archive_name = bldinstallercommon.git_archive_repo(repo_and_ref) +def do_git_archive_repo(optionDict, repo_and_ref): + archive_name = git_archive_repo(repo_and_ref) (repository, ref) = repo_and_ref.split("#") project_name = repository.split("/")[-1].split(".")[0] # Create remote dest directories @@ -1050,7 +1067,7 @@ def initPkgOptions(args): optionDict = {} # Are we using local conf file for pkg options? 
if args.pkg_conf_file: - options = optionparser.getPkgOptions(args.pkg_conf_file) + options = getPkgOptions(args.pkg_conf_file) optionDict = mergeTwoDicts(optionDict, options.configMap()) optionDict['TARGET_ENV'] = args.target_env if args.target_env else getDefaultTargetEnv() optionDict['BUILD_NUMBER'] = str(strftime('%Y%m%d%H%M%S', gmtime())) @@ -1082,7 +1099,6 @@ def initPkgOptions(args): optionDict['PROD_ADDR'] = get_pkg_value("PROD_ADDR") if LOCAL_MODE: - from getpass import getuser optionDict['PACKAGE_STORAGE_SERVER_USER'] = getuser() # current user optionDict['PACKAGE_STORAGE_SERVER'] = "127.0.0.1" optionDict['PACKAGE_STORAGE_SERVER_ADDR'] = optionDict['PACKAGE_STORAGE_SERVER_USER'] + "@" + optionDict['PACKAGE_STORAGE_SERVER'] @@ -1118,8 +1134,8 @@ def initPkgOptions(args): parser.read(path) for s in parser.sections(): if s == 'release.global': - version = bldinstallercommon.safe_config_key_fetch(parser, s, 'version') - version_tag = bldinstallercommon.safe_config_key_fetch(parser, s, 'version_tag') + version = safe_config_key_fetch(parser, s, 'version') + version_tag = safe_config_key_fetch(parser, s, 'version_tag') optionDict['VERSION'] = version optionDict['VERSION_TAG'] = version_tag optionDict['VERSION_FULL'] = version if not version_tag else version + '-' + version_tag @@ -1167,6 +1183,6 @@ if __name__ == '__main__': elif args.command == bld_licheck: handle_qt_licheck_build(optionDict) elif args.command == archive_repository: - git_archive_repo(optionDict, args.archive_repo) + do_git_archive_repo(optionDict, args.archive_repo) else: print('Unsupported command') diff --git a/packaging-tools/content_cleaner.py b/packaging-tools/content_cleaner.py index 71c600c78..26a465da1 100755 --- a/packaging-tools/content_cleaner.py +++ b/packaging-tools/content_cleaner.py @@ -29,12 +29,13 @@ # ############################################################################# +import argparse import os import sys -import argparse -from pathlib import Path from contextlib 
import contextmanager -from typing import List, Generator +from pathlib import Path +from typing import Generator, List + from logging_util import init_logger log = init_logger(__name__, debug_mode=False) diff --git a/packaging-tools/create_conan_executable.py b/packaging-tools/create_conan_executable.py index 1ccc6b7a5..27696c4ff 100644 --- a/packaging-tools/create_conan_executable.py +++ b/packaging-tools/create_conan_executable.py @@ -29,19 +29,19 @@ # ############################################################################# -import os -import sys -import asyncio import argparse +import os import platform -import shutil +import sys +from asyncio import get_event_loop from pathlib import Path -from logging_util import init_logger +from shutil import rmtree from typing import Dict, List -from runner import async_exec_cmd -from python_env import create_venv -from installer_utils import is_valid_url_path, cd +from installer_utils import cd, is_valid_url_path +from logging_util import init_logger +from python_env import create_venv +from runner import async_exec_cmd log = init_logger(__name__, debug_mode=False) @@ -74,7 +74,7 @@ async def pip_install_url(pipenv: str, pip_packages: List[str], env: Dict[str, s for pkg in pip_packages or []: if is_valid_url_path(pkg): destinationDir = os.path.join(os.getcwd(), "_git_tmp", pkg.split("/")[-1]) - shutil.rmtree(destinationDir, ignore_errors=True) + rmtree(destinationDir, ignore_errors=True) await clone_repo(pkg, destinationDir, env) chekout_folders.append(destinationDir) else: @@ -130,7 +130,7 @@ async def run( ) -> str: work_dir = Path().home() / "_tmp_work_dir_" if work_dir.exists(): - shutil.rmtree(work_dir) + rmtree(work_dir) work_dir.mkdir(parents=True) with cd(work_dir): @@ -212,7 +212,7 @@ if __name__ == "__main__": args = parser.parse_args(sys.argv[1:]) hidden_imports = get_known_hidden_imports() + args.hidden_imports - loop = asyncio.get_event_loop() + loop = get_event_loop() loop.run_until_complete( run( 
args.python_src, diff --git a/packaging-tools/create_installer.py b/packaging-tools/create_installer.py index f81052b3a..2762b7b8a 100644 --- a/packaging-tools/create_installer.py +++ b/packaging-tools/create_installer.py @@ -30,33 +30,49 @@ """Scripts to generate SDK installer based on open source InstallerFramework""" -import configparser +import argparse import os +import re import shutil import sys -import re -import subprocess -from time import gmtime, strftime -import argparse -import multiprocessing # to get the cpu core count -from bld_utils import is_windows, is_macos, is_linux -if is_windows(): - import win32api +from configparser import ConfigParser, ExtendedInterpolation +from distutils.spawn import find_executable +from logging import getLogger +from multiprocessing import cpu_count from pathlib import Path +from subprocess import check_call +from time import gmtime, strftime -from threadedwork import ThreadedWork -import bld_utils -import bldinstallercommon -from bldinstallercommon import locate_path, locate_paths import pkg_constants from archiveresolver import ArchiveLocationResolver -from sdkcomponent import SdkComponent -from patch_qt import patchFiles, patchQtEdition -import logging +from bld_utils import download, is_linux, is_macos, is_windows +from bldinstallercommon import ( + copy_tree, + ensure_text_file_endings, + extract_file, + handle_component_rpath, + is_content_url_valid, + is_text_file, + locate_executable, + locate_path, + locate_paths, + remove_one_tree_level, + remove_tree, + replace_in_files, + retrieve_url, + safe_config_key_fetch, +) from installer_utils import PackagingError +from patch_qt import patchFiles, patchQtEdition +from pkg_constants import INSTALLER_OUTPUT_DIR_NAME from runner import do_execute_sub_process +from sdkcomponent import SdkComponent +from threadedwork import ThreadedWork + +if is_windows(): + import win32api -log = logging.getLogger("create_installer") +log = getLogger("create_installer") 
log.setLevel("INFO") # ---------------------------------------------------------------------- @@ -80,7 +96,6 @@ class CreateInstallerError(Exception): ############################################################## def check_required_tools(): """Check that valid tools are present in the build environment.""" - from distutils.spawn import find_executable if not find_executable('7z'): raise CreateInstallerError("7z tool not found in the PATH") @@ -93,7 +108,7 @@ def clean_work_dirs(task): log.info("Cleaning work environment") for item in [task.packages_full_path_dst, task.repo_output_dir, task.config_dir_dst]: if os.path.exists(item): - bldinstallercommon.remove_tree(item) + remove_tree(item) log.debug("Deleted directory: {0}".format(item)) @@ -109,7 +124,7 @@ def set_config_directory(task): raise CreateInstallerError("No such 'config' template directory: '{0}'".format(config_template_src)) Path(task.config_dir_dst).mkdir(parents=True, exist_ok=True) - bldinstallercommon.copy_tree(config_template_src, task.config_dir_dst) + copy_tree(config_template_src, task.config_dir_dst) log.info("Copied: '{0}' into: {1}".format(config_template_src, task.config_dir_dst)) @@ -137,13 +152,13 @@ def set_config_xml(task): shutil.copy(config_template_source, config_template_dest) log.info("Copied '{0}' into: '{1}'".format(config_template_source, config_template_dest)) - update_repository_url = bldinstallercommon.safe_config_key_fetch(task.config, 'SdkUpdateRepository', 'repository_url_release') + update_repository_url = safe_config_key_fetch(task.config, 'SdkUpdateRepository', 'repository_url_release') fileslist = [config_template_dest] - bldinstallercommon.replace_in_files(fileslist, UPDATE_REPOSITORY_URL_TAG, update_repository_url) + replace_in_files(fileslist, UPDATE_REPOSITORY_URL_TAG, update_repository_url) # substitute values also from global substitution list for item in task.substitution_list: - bldinstallercommon.replace_in_files(fileslist, item[0], item[1]) + 
replace_in_files(fileslist, item[0], item[1]) return config_template_dest @@ -168,11 +183,11 @@ def substitute_global_tags(task): path = os.path.join(root, name) fileslist.append(path) - bldinstallercommon.replace_in_files(fileslist, PACKAGE_CREATION_DATE_TAG, task.build_timestamp) + replace_in_files(fileslist, PACKAGE_CREATION_DATE_TAG, task.build_timestamp) if task.force_version_number_increase: - bldinstallercommon.replace_in_files(fileslist, VERSION_NUMBER_AUTO_INCREASE_TAG, task.version_number_auto_increase_value) + replace_in_files(fileslist, VERSION_NUMBER_AUTO_INCREASE_TAG, task.version_number_auto_increase_value) for item in task.substitution_list: - bldinstallercommon.replace_in_files(fileslist, item[0], item[1]) + replace_in_files(fileslist, item[0], item[1]) ############################################################## @@ -198,7 +213,7 @@ def substitute_component_tags(tag_pair_list, meta_dir_dest): value = pair[1] if tag and value: log.info("Matching '{0}' and '{1}' in files list".format(tag, value)) - bldinstallercommon.replace_in_files(fileslist, tag, value) + replace_in_files(fileslist, tag, value) else: log.warning("Ignoring incomplete tag pair: {0} = {1}".format(tag, value)) @@ -217,11 +232,11 @@ def parse_component_data(task, configuration_file, configurations_base_path): allos_conf_file_dir = os.path.normpath(task.configurations_dir + os.sep + 'all-os') file_full_path = locate_path(allos_conf_file_dir, [configuration_file], filters=[os.path.isfile]) log.info("Reading target configuration file: {0}".format(file_full_path)) - configuration = configparser.ConfigParser(interpolation=configparser.ExtendedInterpolation()) - configuration.readfp(open(file_full_path)) + configuration = ConfigParser(interpolation=ExtendedInterpolation()) + configuration.read_file(open(file_full_path)) # parse package ignore list first - sdk_component_exclude_list = bldinstallercommon.safe_config_key_fetch(configuration, 'PackageIgnoreList', 'packages') + 
sdk_component_exclude_list = safe_config_key_fetch(configuration, 'PackageIgnoreList', 'packages') if sdk_component_exclude_list: sdk_component_exclude_list = sdk_component_exclude_list.replace(' ', '') pkg_list = sdk_component_exclude_list.split(',') @@ -256,7 +271,7 @@ def parse_component_data(task, configuration_file, configurations_base_path): log.warning("Ignored component in non-strict mode (missing archive data or metadata?): {0}".format(section)) task.sdk_component_list_skipped.append(sdk_component) # check for extra configuration files if defined - extra_conf_list = bldinstallercommon.safe_config_key_fetch(configuration, 'PackageConfigurationFiles', 'file_list') + extra_conf_list = safe_config_key_fetch(configuration, 'PackageConfigurationFiles', 'file_list') if extra_conf_list: extra_conf_list = extra_conf_list.rstrip(',\n') file_list = extra_conf_list.split(',') @@ -308,7 +323,7 @@ def create_metadata_map(sdk_component): def get_component_sha1_file(sdk_component, sha1_file_dest): """download component sha1 file""" - bld_utils.download(sdk_component.component_sha1_uri, sha1_file_dest) + download(sdk_component.component_sha1_uri, sha1_file_dest) # read sha1 from the file with open(sha1_file_dest, "r") as sha1_file: @@ -335,12 +350,12 @@ def get_component_data(task, sdk_component, archive, install_dir, data_dir_dest, log.info("No repackaging actions required for the package, just download it directly to data directory") downloadedArchive = os.path.normpath(data_dir_dest + os.sep + archive.archive_name) # start download - bld_utils.download(archive.archive_uri, downloadedArchive) + download(archive.archive_uri, downloadedArchive) return downloadedArchive = os.path.normpath(install_dir + os.sep + package_raw_name) # start download - bld_utils.download(archive.archive_uri, downloadedArchive) + download(archive.archive_uri, downloadedArchive) # repackage content so that correct dir structure will get into the package @@ -349,15 +364,15 @@ def 
get_component_data(task, sdk_component, archive, install_dir, data_dir_dest, # extract contents if archive.extract_archive == 'yes': - extracted = bldinstallercommon.extract_file(downloadedArchive, install_dir) + extracted = extract_file(downloadedArchive, install_dir) # remove old package if extracted: os.remove(downloadedArchive) else: # ok we could not extract the file, so propably not even archived file, # check the case if we downloaded a text file, must ensure proper file endings - if bldinstallercommon.is_text_file(downloadedArchive): - bldinstallercommon.ensure_text_file_endings(downloadedArchive) + if is_text_file(downloadedArchive): + ensure_text_file_endings(downloadedArchive) # perform custom action script for the extracted archive if archive.archive_action: @@ -366,14 +381,14 @@ def get_component_data(task, sdk_component, archive, install_dir, data_dir_dest, script_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), script_file) if not os.path.exists(script_path): raise CreateInstallerError("Unable to locate custom archive action script: {0}".format(script_path)) - subprocess.check_call([script_path, '--input-dir=' + install_dir, script_args.strip()]) + check_call([script_path, '--input-dir=' + install_dir, script_args.strip()]) # strip out unnecessary folder structure based on the configuration count = 0 iterations = int(archive.package_strip_dirs) while(count < iterations): count = count + 1 - bldinstallercommon.remove_one_tree_level(install_dir) + remove_one_tree_level(install_dir) # perform package finalization tasks for the given archive if 'delete_doc_directory' in archive.package_finalize_items: try: @@ -414,7 +429,7 @@ def get_component_data(task, sdk_component, archive, install_dir, data_dir_dest, if not archive.rpath_target.startswith(os.sep): archive.rpath_target = os.sep + archive.rpath_target if is_linux(): - bldinstallercommon.handle_component_rpath(install_dir, archive.rpath_target) + handle_component_rpath(install_dir, 
archive.rpath_target) if archive.component_sha1_file: # read sha1 from the file @@ -501,7 +516,7 @@ def remove_debug_information_files_by_file_type(install_dir, dbg_file_suffix): # On macOS, debug symbols are in dSYM folder bundles instead of files. dbg_file_list = locate_paths(dbg_info_dir, ["*dSYM"], [os.path.isdir]) for debug_information in dbg_file_list: - bldinstallercommon.remove_tree(debug_information) + remove_tree(debug_information) else: for path in locate_paths(dbg_info_dir, ["*." + dbg_file_suffix], [os.path.isfile]): Path(path).unlink() @@ -561,8 +576,8 @@ def create_target_components(task): # download and extract lrelease binary for creating translation binaries if task.create_repository and os.environ.get("LRELEASE_TOOL"): if not os.path.isfile(os.path.join(task.script_root_dir, "lrelease")): - bld_utils.download(os.environ.get("LRELEASE_TOOL"), task.script_root_dir) - bldinstallercommon.extract_file(os.path.basename(os.environ.get("LRELEASE_TOOL")), task.script_root_dir) + download(os.environ.get("LRELEASE_TOOL"), task.script_root_dir) + extract_file(os.path.basename(os.environ.get("LRELEASE_TOOL")), task.script_root_dir) getComponentDataWork = ThreadedWork("get components data") for sdk_component in task.sdk_component_list: sdk_component.print_component_data() @@ -578,10 +593,10 @@ def create_target_components(task): Path(meta_dir_dest).mkdir(parents=True, exist_ok=True) # Copy Meta data metadata_content_source_root = os.path.normpath(sdk_component.pkg_template_dir + os.sep + 'meta') - bldinstallercommon.copy_tree(metadata_content_source_root, meta_dir_dest) + copy_tree(metadata_content_source_root, meta_dir_dest) if os.path.isfile(os.path.join(task.script_root_dir, "lrelease")): # create translation binaries, files are created if translation source files exist for component - subprocess.check_call([os.path.join(task.script_root_dir, "update_component_translations.sh"), "-r", os.path.join(task.script_root_dir, "lrelease"), dest_base]) + 
check_call([os.path.join(task.script_root_dir, "update_component_translations.sh"), "-r", os.path.join(task.script_root_dir, "lrelease"), dest_base]) # add files into tag substitution task.directories_for_substitutions.append(meta_dir_dest) # handle archives @@ -614,18 +629,18 @@ def create_target_components(task): data_content_source_root = os.path.normpath(sdk_component.pkg_template_dir + os.sep + 'data') if os.path.exists(data_content_source_root): Path(data_dir_dest).mkdir(parents=True, exist_ok=True) - bldinstallercommon.copy_tree(data_content_source_root, data_dir_dest) + copy_tree(data_content_source_root, data_dir_dest) if not task.dry_run: # start the work threaded, more than 8 parallel downloads are not so useful - getComponentDataWork.run(min([task.max_cpu_count, multiprocessing.cpu_count()])) + getComponentDataWork.run(min([task.max_cpu_count, cpu_count()])) for sdk_component in task.sdk_component_list: # substitute tags substitute_component_tags(create_metadata_map(sdk_component), sdk_component.meta_dir_dest) if hasattr(sdk_component, 'temp_data_dir') and os.path.exists(sdk_component.temp_data_dir): # lastly remove temp dir after all data is prepared - if not bldinstallercommon.remove_tree(sdk_component.temp_data_dir): + if not remove_tree(sdk_component.temp_data_dir): raise CreateInstallerError("Unable to remove directory: {0}".format(sdk_component.temp_data_dir)) # substitute downloadable archive names in installscript.qs substitute_component_tags(sdk_component.generate_downloadable_archive_list(), sdk_component.meta_dir_dest) @@ -747,7 +762,7 @@ def create_installer_binary(task): do_execute_sub_process(cmd_args, task.script_root_dir) # move results to dedicated directory - output_dir = os.path.join(task.script_root_dir, pkg_constants.INSTALLER_OUTPUT_DIR_NAME) + output_dir = os.path.join(task.script_root_dir, INSTALLER_OUTPUT_DIR_NAME) Path(output_dir).mkdir(parents=True, exist_ok=True) file_name = os.path.join(task.script_root_dir, 
task.installer_name) old_existing_file_name = os.path.join(output_dir, task.installer_name) @@ -830,7 +845,7 @@ def inject_update_rcc_to_archive(archive_file_path, file_to_be_injected): shutil.copy(archive_file_path, tmp_dir) # extract copied_archive_file = os.path.join(tmp_dir, archive_file_name) - bldinstallercommon.extract_file(copied_archive_file, tmp_dir) + extract_file(copied_archive_file, tmp_dir) os.remove(copied_archive_file) # add file shutil.copy(file_to_be_injected, tmp_dir) @@ -842,7 +857,7 @@ def inject_update_rcc_to_archive(archive_file_path, file_to_be_injected): # copy re-compressed package to correct location shutil.copy(os.path.join(tmp_dir, archive_file_name), os.path.dirname(archive_file_path)) # delete tmp location - bldinstallercommon.shutil.rmtree(tmp_dir) + shutil.rmtree(tmp_dir) ############################################################## @@ -909,8 +924,8 @@ class QtInstallerTask: def __init__(self, args): log.info("Parsing: {0}".format(args.configuration_file)) - self.config = configparser.ConfigParser(interpolation=configparser.ExtendedInterpolation()) - self.config.readfp(open(args.configuration_file)) + self.config = ConfigParser(interpolation=ExtendedInterpolation()) + self.config.read_file(open(args.configuration_file)) self.configurations_dir = args.configurations_dir self.configuration_file = args.configuration_file @@ -1038,10 +1053,10 @@ class QtInstallerTask: ############################################################## def set_ifw_tools(self): executable_suffix = ".exe" if is_windows() else "" - self.archivegen_tool = bldinstallercommon.locate_executable(self.ifw_tools_dir, ['archivegen' + executable_suffix]) - self.binarycreator_tool = bldinstallercommon.locate_executable(self.ifw_tools_dir, ['binarycreator' + executable_suffix]) - self.installerbase_tool = bldinstallercommon.locate_executable(self.ifw_tools_dir, ['installerbase' + executable_suffix]) - self.repogen_tool = 
bldinstallercommon.locate_executable(self.ifw_tools_dir, ['repogen' + executable_suffix]) + self.archivegen_tool = locate_executable(self.ifw_tools_dir, ['archivegen' + executable_suffix]) + self.binarycreator_tool = locate_executable(self.ifw_tools_dir, ['binarycreator' + executable_suffix]) + self.installerbase_tool = locate_executable(self.ifw_tools_dir, ['installerbase' + executable_suffix]) + self.repogen_tool = locate_executable(self.ifw_tools_dir, ['repogen' + executable_suffix]) # check assert os.path.isfile(self.archivegen_tool), "Archivegen tool not found: {0}".format(self.archivegen_tool) assert os.path.isfile(self.binarycreator_tool), "Binary creator tool not found: {0}".format(self.binarycreator_tool) @@ -1060,13 +1075,13 @@ class QtInstallerTask: # create needed dirs Path(self.ifw_tools_dir).mkdir(parents=True, exist_ok=True) log.info("Downloading: {0}".format(self.ifw_tools_uri)) - if not bldinstallercommon.is_content_url_valid(self.ifw_tools_uri): + if not is_content_url_valid(self.ifw_tools_uri): raise CreateInstallerError("Package URL is invalid: {0}".format(self.ifw_tools_uri)) - bldinstallercommon.retrieve_url(self.ifw_tools_uri, package_save_as_temp) + retrieve_url(self.ifw_tools_uri, package_save_as_temp) if not (os.path.isfile(package_save_as_temp)): raise CreateInstallerError("Downloading failed! 
Aborting!") # extract ifw archive - bldinstallercommon.extract_file(package_save_as_temp, self.ifw_tools_dir) + extract_file(package_save_as_temp, self.ifw_tools_dir) log.info("IFW tools extracted into: {0}".format(self.ifw_tools_dir)) diff --git a/packaging-tools/dump_debug_infos.py b/packaging-tools/dump_debug_infos.py index 44d8929d2..623d55da1 100644 --- a/packaging-tools/dump_debug_infos.py +++ b/packaging-tools/dump_debug_infos.py @@ -31,9 +31,9 @@ import argparse import os -import shutil import subprocess import sys +from shutil import rmtree def is_file_with_debug_information_windows(path): @@ -150,7 +150,7 @@ def _main(): if os.path.exists(args.output_path): if args.clean_output_path: - shutil.rmtree(args.output_path, ignore_errors=True) + rmtree(args.output_path, ignore_errors=True) os.makedirs(args.output_path) for search_path in args.search_pathes.split(","): diff --git a/packaging-tools/environmentfrombatchfile.py b/packaging-tools/environmentfrombatchfile.py index cd28ebdb3..4dd8ffb2a 100644 --- a/packaging-tools/environmentfrombatchfile.py +++ b/packaging-tools/environmentfrombatchfile.py @@ -29,9 +29,9 @@ # ############################################################################# -import itertools -import subprocess import os +from itertools import takewhile +from subprocess import PIPE, Popen # http://stackoverflow.com/questions/1214496/how-to-get-environment-from-a-subprocess-in-python # def validate_pair(ob): @@ -80,7 +80,7 @@ def get(env_cmd, initial=None, arguments=None): # if not isinstance(env_cmd, (list, tuple)): # env_cmd = [env_cmd] # construct the command that will alter the environment - # env_cmd = subprocess.list2cmdline(env_cmd) + # env_cmd = list2cmdline(env_cmd) # create a tag so we can tell in the output when the proc is done tag = 'Done running command' @@ -88,11 +88,11 @@ def get(env_cmd, initial=None, arguments=None): cmd = 'cmd.exe /s /c "\"{env_cmd}\" {arguments}&& echo "{tag}" && set"'.format(**vars()) # launch the 
process - proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, env=initial, universal_newlines=True) + proc = Popen(cmd, stdout=PIPE, env=initial, universal_newlines=True) # parse the output sent to stdout lines = proc.stdout # consume whatever output occurs until the tag is reached - consume(itertools.takewhile(lambda line: tag not in line, lines)) + consume(takewhile(lambda line: tag not in line, lines)) # parse key/values into pairs pairs = (line.rstrip().split('=', 1) for line in lines) # make sure the pairs are valid diff --git a/packaging-tools/install_qt.py b/packaging-tools/install_qt.py index eb16ee529..d581581fc 100644 --- a/packaging-tools/install_qt.py +++ b/packaging-tools/install_qt.py @@ -32,7 +32,7 @@ import argparse import os -import bldinstallercommon +from bldinstallercommon import create_qt_download_task, patch_qt from threadedwork import ThreadedWork @@ -78,13 +78,13 @@ def install_qt(args): download_packages_work = ThreadedWork('get and extract Qt 5 binaries') need_to_install_qt = not os.path.lexists(args.qt_path) if need_to_install_qt: - download_packages_work.addTaskObject(bldinstallercommon.create_qt_download_task( + download_packages_work.addTaskObject(create_qt_download_task( args.qt_modules, args.qt_path, args.temp_path, args)) # run task if needed if download_packages_work.taskNumber != 0: download_packages_work.run() - bldinstallercommon.patch_qt(args.qt_path) + patch_qt(args.qt_path) def main(): diff --git a/packaging-tools/installer_utils.py b/packaging-tools/installer_utils.py index d1bc89275..8e6a01055 100644 --- a/packaging-tools/installer_utils.py +++ b/packaging-tools/installer_utils.py @@ -30,11 +30,13 @@ ############################################################################# import os -import wget -import fnmatch from contextlib import contextmanager -from urllib.parse import urlparse +from fnmatch import fnmatch from typing import Generator, List +from urllib.parse import urlparse + +import wget + from logging_util 
import init_logger from runner import async_exec_cmd @@ -77,7 +79,7 @@ def download_archive(url: str, destDir: str) -> str: def get_extract_cmd(artifact: str) -> List[str]: if artifact.endswith(".7z") or artifact.endswith(".zip"): return ['7z', 'x', artifact] - elif any(fnmatch.fnmatch(artifact, p) for p in ["*.tar*", "*.tgz"]): + elif any(fnmatch(artifact, p) for p in ["*.tar*", "*.tgz"]): return ['tar', '-xf', artifact] else: raise PackagingError("Could not find suitable extractor for: {0}".format(artifact)) diff --git a/packaging-tools/libclang_training/runBatchFiles.py b/packaging-tools/libclang_training/runBatchFiles.py index 9811835f6..25faecf49 100644 --- a/packaging-tools/libclang_training/runBatchFiles.py +++ b/packaging-tools/libclang_training/runBatchFiles.py @@ -56,10 +56,10 @@ Notes: """ import os -import shutil -import subprocess import sys -import time +from shutil import copyfile +from subprocess import STDOUT, Popen +from time import sleep, time import libclangtimings2csv import mergeCsvFiles @@ -149,8 +149,8 @@ class DebugView: def startAsync(self): args = [self.executable, '/accepteula', '/l', self.logFilePath] verboseStart(args) - self.proc = subprocess.Popen(args, shell=False) - time.sleep(2) + self.proc = Popen(args, shell=False) + sleep(2) def stop(self): if self.proc: @@ -178,7 +178,7 @@ def runSyncAndLogOutputWindows(args, batchFilePath, logFilePath): debugView.startAsync() verboseStart(args) - p = subprocess.Popen(args, env=createEnvironment(batchFilePath)) + p = Popen(args, env=createEnvironment(batchFilePath)) p.communicate() debugView.stop() @@ -190,10 +190,10 @@ def runSyncAndLogOutputUnix(args, batchFilePath, logFilePath): logFile = open(logFilePath, 'w') verboseStart(args) - p = subprocess.Popen(args, - stdout=logFile, - stderr=subprocess.STDOUT, - env=createEnvironment(batchFilePath)) + p = Popen(args, + stdout=logFile, + stderr=STDOUT, + env=createEnvironment(batchFilePath)) p.communicate() checkExitCodeOrDie(p.returncode, args) @@ 
-241,10 +241,10 @@ def createDir(dirPath): def createBackupFile(filePath): if os.path.exists(filePath): - backupPath = filePath[:-4] + ".backup_" + str(time.time()) + ".log" + backupPath = filePath[:-4] + ".backup_" + str(time()) + ".log" if Config.Verbose: print('info: creating backup of already existing "%s"' % (filePath)) - shutil.copyfile(filePath, backupPath) + copyfile(filePath, backupPath) def printDuration(s): @@ -254,12 +254,12 @@ def printDuration(s): def processBatchFileTimed(libClangId, batchFilePath): - timeStarted = time.time() + timeStarted = time() print("processing", batchFilePath, end=' ') runRecord = processBatchFile(libClangId, batchFilePath) - printDuration(time.time() - timeStarted) + printDuration(time() - timeStarted) return runRecord @@ -288,7 +288,7 @@ def getLibClangId(libClangDll): def switchLibClang(libClangDll): print('copying "%s" -> "%s"' % (libClangDll, Config.TargetLibClangDll)) - shutil.copyfile(libClangDll, Config.TargetLibClangDll) + copyfile(libClangDll, Config.TargetLibClangDll) def runQtCreatorWithLibClang(libClangDll): diff --git a/packaging-tools/logging_util.py b/packaging-tools/logging_util.py index 988647832..e18721711 100755 --- a/packaging-tools/logging_util.py +++ b/packaging-tools/logging_util.py @@ -30,6 +30,7 @@ ############################################################################# import logging + import colorlog diff --git a/packaging-tools/notarize.py b/packaging-tools/notarize.py index 560edf22d..32d5ad316 100755 --- a/packaging-tools/notarize.py +++ b/packaging-tools/notarize.py @@ -29,14 +29,14 @@ # ############################################################################# -import sys -import time import argparse import asyncio -import subprocess import logging +import sys from shutil import which -from time import gmtime, strftime +from subprocess import STDOUT, CalledProcessError, TimeoutExpired +from time import gmtime, sleep, strftime + from read_remote_config import get_pkg_value LOG_FMT_CI = 
"%(asctime)s %(levelname)s:%(filename)s:%(lineno)d(%(process)d): %(message)s" @@ -66,20 +66,20 @@ def parseValueFromData(key, data): async def requestCmd(args, cmd): - p = await asyncio.create_subprocess_exec(*cmd, stdout=asyncio.subprocess.PIPE, stderr=subprocess.STDOUT) + p = await asyncio.create_subprocess_exec(*cmd, stdout=asyncio.subprocess.PIPE, stderr=STDOUT) attempts = 3 while attempts: try: data = await asyncio.wait_for(p.communicate(), timeout=args.timeout) break - except (asyncio.TimeoutError, subprocess.TimeoutExpired): + except (asyncio.TimeoutError, TimeoutExpired): log.warning("Timeout (%ss)", str(args.timeout)) attempts -= 1 if attempts: log.info("Waiting a bit before next attempt..") await asyncio.sleep(60) - except subprocess.CalledProcessError as commandErr: + except CalledProcessError as commandErr: log.critical("Failed to run command: %s", str(commandErr)) raise except Exception as e: @@ -147,7 +147,7 @@ async def embedNotarization(args): if retry_count: log.warning(f"Trying again after {delay}s") - time.sleep(delay) + sleep(delay) delay = delay + delay / 2 # 60, 90, 135, 202, 303 else: log.critical("Execution of the remote script probably failed!") diff --git a/packaging-tools/optionparser.py b/packaging-tools/optionparser.py index 650efc3c5..853a826e7 100644 --- a/packaging-tools/optionparser.py +++ b/packaging-tools/optionparser.py @@ -28,10 +28,10 @@ # $QT_END_LICENSE$ # ############################################################################# -from configparser import ConfigParser import argparse import os import sys +from configparser import ConfigParser class PackagingOptions: @@ -39,7 +39,7 @@ class PackagingOptions: def __init__(self, confFile): if not os.path.isfile(confFile): raise IOError("Not a valid file: {0}".format(confFile)) - self.config = ConfigParser.ConfigParser(os.environ) + self.config = ConfigParser(os.environ) self.config.optionxform = str self.config.read(confFile) diff --git a/packaging-tools/patch_qt.py 
b/packaging-tools/patch_qt.py index 030a48482..7db3ff64c 100755 --- a/packaging-tools/patch_qt.py +++ b/packaging-tools/patch_qt.py @@ -31,7 +31,7 @@ import os import re -import fileinput +from fileinput import FileInput def _fileIterator(artifactsDir): @@ -66,7 +66,7 @@ def patchQtEdition(artifactsDir, licheckFileName, releaseDate): def _patchQtEdition(filePath, licheckFileName, releaseDate): - for line in fileinput.FileInput(filePath, inplace=True): + for line in FileInput(filePath, inplace=True): if 'QT_EDITION' in line: edition_line = 'QT_EDITION = Enterprise' licheck_line = 'QT_LICHECK = ' + licheckFileName @@ -79,7 +79,7 @@ def _patchQtEdition(filePath, licheckFileName, releaseDate): def patchQConfigPri(filePath): - for line in fileinput.FileInput(filePath, inplace=True): + for line in FileInput(filePath, inplace=True): patchedLine = patchQConfigPriFromLine(line) print(patchedLine.rstrip('\n')) @@ -95,7 +95,7 @@ def patchQConfigPriFromLine(line): def eraseQmakePrlBuildDir(filePath): # Erase lines starting with 'QMAKE_PRL_BUILD_DIR' from .prl files - for line in fileinput.FileInput(filePath, inplace=True): + for line in FileInput(filePath, inplace=True): patchedLine = patchQmakePrlBuildDirFromLine(line) print(patchedLine.rstrip('\n')) @@ -105,7 +105,7 @@ def patchQmakePrlBuildDirFromLine(line): def patchAbsoluteLibPathsFromFile(filePath): - for line in fileinput.FileInput(filePath, inplace=True): + for line in FileInput(filePath, inplace=True): patchedLine = patchAbsoluteLibPathsFromLine(line, filePath.split(".")[-1]) print(patchedLine.rstrip('\n')) diff --git a/packaging-tools/python_env.py b/packaging-tools/python_env.py index fb03e0415..ad82b76ac 100644 --- a/packaging-tools/python_env.py +++ b/packaging-tools/python_env.py @@ -29,18 +29,18 @@ # ############################################################################# -import os -import sys -import asyncio import argparse +import os import platform +import sys +from asyncio import get_event_loop from 
shutil import rmtree from typing import Dict, Tuple + from bld_python import build_python +from installer_utils import download_archive, is_valid_url_path from logging_util import init_logger from runner import async_exec_cmd, exec_cmd -from installer_utils import download_archive, is_valid_url_path - log = init_logger(__name__, debug_mode=False) @@ -140,5 +140,5 @@ if __name__ == "__main__": help="Path to get-pip.py needed for installing pip on Windows", ) args = parser.parse_args(sys.argv[1:]) - loop = asyncio.get_event_loop() + loop = get_event_loop() loop.run_until_complete(create_venv(args.python_src, args.get_pip_file)) diff --git a/packaging-tools/read_remote_config.py b/packaging-tools/read_remote_config.py index 219489e91..e07787877 100755 --- a/packaging-tools/read_remote_config.py +++ b/packaging-tools/read_remote_config.py @@ -29,24 +29,12 @@ # ############################################################################# +import argparse import os import sys -import argparse - -try: - import urllib.request as request -except ImportError: - # Python 2.x - import urllib2 as request - -try: - from configparser import ConfigParser - from io import StringIO -except ImportError: - # Python 2.x - from ConfigParser import ConfigParser - from StringIO import StringIO - +from configparser import ConfigParser +from io import StringIO +from urllib.request import urlopen _pkg_remote_settings = None @@ -56,18 +44,14 @@ class RemotePkgConfigError(Exception): def read_packaging_keys_config_url(url): - return request.urlopen(url).read().decode('utf-8').strip() + return urlopen(url).read().decode('utf-8').strip() def parse_packaging_keys_config(config): buf = StringIO(config) settings = ConfigParser() - try: - settings.read_file(buf) - except Exception: - # Python 2.x - settings.readfp(buf) + settings.read_file(buf) return settings diff --git a/packaging-tools/release_repo_meta_update.py b/packaging-tools/release_repo_meta_update.py index fc757e5f8..f62b36f5f 100755 --- 
a/packaging-tools/release_repo_meta_update.py +++ b/packaging-tools/release_repo_meta_update.py @@ -29,22 +29,23 @@ # ############################################################################# -import os -import sys import argparse -import asyncio -import time -import datetime +import os import shutil +import sys +from asyncio import get_event_loop +from datetime import datetime from pathlib import Path -from runner import exec_cmd -from typing import List, Dict, Tuple -from logging_util import init_logger -from installer_utils import is_valid_url_path, download_archive, extract_archive +from time import time +from typing import Dict, List, Tuple + from bldinstallercommon import locate_path +from installer_utils import download_archive, extract_archive, is_valid_url_path +from logging_util import init_logger +from runner import exec_cmd log = init_logger(__name__, debug_mode=False) -session_timestamp = datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d--%H:%M:%S') +session_timestamp = datetime.fromtimestamp(time()).strftime('%Y-%m-%d--%H:%M:%S') convert_suffix = "____unified_metadata_update" backup_suffix = "____split_metadata_backup-" @@ -203,7 +204,7 @@ def scan_repositories(search_path: str) -> Tuple[List[str], List[str], List[str] def convert_repos(search_path: str, ifw_tools_url: str) -> None: - loop = asyncio.get_event_loop() + loop = get_event_loop() repogen = loop.run_until_complete(fetch_repogen(ifw_tools_url)) log.info(f"Using repogen from: {repogen}") done_repos, pending_repos, unconverted_repos, broken_repos = scan_repositories(search_path) @@ -224,7 +225,7 @@ def convert_repos(search_path: str, ifw_tools_url: str) -> None: def revert_repos(search_path: str, ifw_tools_url: str, time_stamp: str, dry_run: bool) -> None: - loop = asyncio.get_event_loop() + loop = get_event_loop() repogen = loop.run_until_complete(fetch_repogen(ifw_tools_url)) log.info(f"Using repogen from: {repogen}") converted_repos, pending_repos, unconverted_repos, 
broken_repos = scan_repositories(search_path) diff --git a/packaging-tools/release_repo_updater.py b/packaging-tools/release_repo_updater.py index 6b81cec36..e713260d3 100755 --- a/packaging-tools/release_repo_updater.py +++ b/packaging-tools/release_repo_updater.py @@ -29,35 +29,39 @@ # ############################################################################# +import argparse +import json import os +import platform import re -import sys -import json -import time import shutil -import asyncio -import tempfile -import platform -import datetime -import argparse +import subprocess +import sys +from asyncio import get_event_loop from configparser import ConfigParser, ExtendedInterpolation -from typing import List, Dict, Tuple -from time import gmtime, strftime +from datetime import datetime from pathlib import Path -import subprocess -import release_task_reader -from urllib.request import urlretrieve, urlopen +from tempfile import TemporaryDirectory +from time import gmtime, sleep, strftime, time +from typing import Dict, List, Tuple from urllib.error import HTTPError, URLError -from release_task_reader import ReleaseTask -from installer_utils import PackagingError -from runner import exec_cmd, async_exec_cmd -from logging_util import init_logger +from urllib.request import urlopen, urlretrieve + +from bld_utils import is_linux from bldinstallercommon import locate_path +from installer_utils import PackagingError, download_archive, extract_archive, is_valid_url_path +from logging_util import init_logger from read_remote_config import get_pkg_value -import sign_installer +from release_task_reader import ReleaseTask, parse_config +from runner import async_exec_cmd, exec_cmd +from sign_installer import create_mac_dmg, sign_mac_app +from sign_windows_installer import sign_executable + +if is_linux(): + import sh log = init_logger(__name__, debug_mode=False) -timestamp = datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d--%H:%M:%S') +timestamp = 
datetime.fromtimestamp(time()).strftime('%Y-%m-%d--%H:%M:%S') class event_register(object): @@ -73,7 +77,6 @@ class event_register(object): def initialize(cls, event_injector_path: str): if not cls.python_path: if platform.system() == "Linux": - import sh cls.python_path = sh.which("python3") if platform.system() == "Windows": cls.python_path = os.path.join(os.getenv("PYTHON3_PATH"), "python.exe") @@ -169,7 +172,7 @@ def execute_remote_cmd(remoteServer: str, remoteServerHome: str, cmd: List[str], def create_remote_script(server: str, cmd: List[str], remoteScriptPath: str, scriptFileName: str) -> str: - with tempfile.TemporaryDirectory(dir=os.getcwd()) as tmpBaseDir: + with TemporaryDirectory(dir=os.getcwd()) as tmpBaseDir: tempFilePath = os.path.join(tmpBaseDir, scriptFileName) with open(tempFilePath, 'w+') as f: f.write("#!/usr/bin/env bash\n") @@ -191,14 +194,13 @@ def execute_remote_script(server: str, remoteScriptPath: str, timeout=60 * 60) - break if retry_count: log.warning(f"Trying again after {delay}s") - time.sleep(delay) + sleep(delay) delay = delay + delay / 2 # 60, 90, 135, 202, 303 else: log.critical(f"Execution of the remote script probably failed: {cmd}") async def upload_ifw_to_remote(ifwTools: str, remoteServer: str, remoteServerHome: str) -> str: - from installer_utils import is_valid_url_path, download_archive, extract_archive assert is_valid_url_path(ifwTools) log.info("Preparing ifw tools: %s", ifwTools) # fetch the tool first @@ -613,13 +615,12 @@ def upload_offline_to_remote(installerPath: str, remoteUploadPath: str, stagingS def sign_offline_installer(installer_path: str, installer_name: str) -> None: if platform.system() == "Windows": log.info("Sign Windows installer") - from sign_windows_installer import sign_executable sign_executable(os.path.join(installer_path, installer_name) + '.exe') elif platform.system() == "Darwin": log.info("Sign macOS .app bundle") - sign_installer.sign_mac_app(os.path.join(installer_path, installer_name + 
'.app'), get_pkg_value("SIGNING_IDENTITY")) + sign_mac_app(os.path.join(installer_path, installer_name + '.app'), get_pkg_value("SIGNING_IDENTITY")) log.info("Create macOS dmg file") - sign_installer.create_mac_dmg(os.path.join(installer_path, installer_name) + '.app') + create_mac_dmg(os.path.join(installer_path, installer_name) + '.app') log.info("Notarize macOS installer") notarize_dmg(os.path.join(installer_path, installer_name + '.dmg'), installer_name) else: @@ -830,10 +831,10 @@ if __name__ == "__main__": export_data = load_export_summary_data(Path(args.config)) if args.event_injector else None - loop = asyncio.get_event_loop() + loop = get_event_loop() if args.build_offline: # get offline tasks - tasks = release_task_reader.parse_config(args.config, task_filters=append_to_task_filters(args.task_filters, "offline")) + tasks = parse_config(args.config, task_filters=append_to_task_filters(args.task_filters, "offline")) loop.run_until_complete(build_offline_tasks(args.staging_server, args.staging_server_root, tasks, args.license, installerConfigBaseDir, args.artifact_share_url, args.ifw_tools, args.offline_installer_id, args.update_staging, @@ -841,7 +842,7 @@ if __name__ == "__main__": else: # this is either repository build or repository sync build # get repository tasks - tasks = release_task_reader.parse_config(args.config, task_filters=append_to_task_filters(args.task_filters, "repository")) + tasks = parse_config(args.config, task_filters=append_to_task_filters(args.task_filters, "repository")) ret = loop.run_until_complete(handle_update(args.staging_server, args.staging_server_root, args.license, tasks, args.repo_domain, installerConfigBaseDir, args.artifact_share_url, args.update_staging, args.update_production, args.sync_s3, args.sync_ext, diff --git a/packaging-tools/release_task_reader.py b/packaging-tools/release_task_reader.py index 3a07adfe6..0ea07057e 100755 --- a/packaging-tools/release_task_reader.py +++ b/packaging-tools/release_task_reader.py 
@@ -29,12 +29,13 @@ # ############################################################################# +import argparse import os import re import sys -import argparse -from typing import List, Dict from configparser import ConfigParser, ExtendedInterpolation +from typing import Dict, List + from logging_util import init_logger log = init_logger(__name__, debug_mode=False) diff --git a/packaging-tools/remote_uploader.py b/packaging-tools/remote_uploader.py index ae6c42caa..5b20489da 100755 --- a/packaging-tools/remote_uploader.py +++ b/packaging-tools/remote_uploader.py @@ -29,12 +29,12 @@ # ############################################################################# -import os -import sys import argparse +import os import platform -import subprocess +import sys from shutil import which +from subprocess import CalledProcessError, check_call class RemoteUploaderError(Exception): @@ -82,7 +82,7 @@ class RemoteUploader: cmd = self.ssh_cmd + ['mkdir', '-p', remoteDir] print("Executing: ", ' '.join(cmd)) if not self.dryRun: - subprocess.check_call(cmd, timeout=60) # give it 60s + check_call(cmd, timeout=60) # give it 60s def _copyToRemote(self, fileName, destDirName): assert self.init_finished, "RemoteUploader not initialized!" 
@@ -96,7 +96,7 @@ class RemoteUploader: cmd = self.copy_cmd + [fileName, remoteDestination] print("Executing: ", ' '.join(cmd)) if not self.dryRun: - subprocess.check_call(cmd, timeout=60 * 10) # give it 10 mins + check_call(cmd, timeout=60 * 10) # give it 10 mins def copyToRemote(self, path: str, destDirName=""): items = [path] if os.path.isfile(path) else [os.path.join(path, x) for x in os.listdir(path)] @@ -111,8 +111,8 @@ class RemoteUploader: cmd = self.ssh_cmd + ['ln'] + options + [self.remoteTargetDir, self.remoteLatestLink] print("Executing: ", ' '.join(cmd)) if not self.dryRun: - subprocess.check_call(cmd, timeout=60) # give it 60s - except subprocess.CalledProcessError: + check_call(cmd, timeout=60) # give it 60s + except CalledProcessError: print("Failed to execute: ", ' '.join(cmd)) raise diff --git a/packaging-tools/runner.py b/packaging-tools/runner.py index 71c70d2bb..21d3005b1 100755 --- a/packaging-tools/runner.py +++ b/packaging-tools/runner.py @@ -29,17 +29,16 @@ # ############################################################################# +import asyncio import os import sys -import asyncio -import subprocess -from subprocess import PIPE, STDOUT, Popen +from subprocess import PIPE, STDOUT, CalledProcessError, Popen, TimeoutExpired, check_output from traceback import print_exc -from typing import List, Dict +from typing import Dict, List +from bld_utils import is_windows from logging_util import init_logger - log = init_logger(__name__, debug_mode=False) MAX_DEBUG_PRINT_LENGTH = 10000 @@ -52,7 +51,7 @@ if sys.platform == 'win32': def exec_cmd(cmd: List[str], timeout=60, env: Dict[str, str] = None) -> str: env = env if env else os.environ.copy() log.info("Calling: %s", ' '.join(cmd)) - output = subprocess.check_output(' '.join(cmd), shell=True, env=env, timeout=timeout).decode("utf-8").strip() + output = check_output(' '.join(cmd), shell=True, env=env, timeout=timeout).decode("utf-8").strip() print(output) return output @@ -63,10 +62,10 @@ async 
def async_exec_cmd(cmd: List[str], timeout: int = 60 * 60, env: Dict[str, try: log.info("Calling: %s", ' '.join(cmd)) await asyncio.wait_for(p.communicate(), timeout=timeout) - except (asyncio.TimeoutError, subprocess.TimeoutExpired): + except (asyncio.TimeoutError, TimeoutExpired): log.error("Timeout (%ss) for: %s", str(timeout), cmd) raise - except subprocess.CalledProcessError as commandErr: + except CalledProcessError as commandErr: log.error("Failed to run command: %s", str(commandErr)) raise except Exception as e: @@ -76,10 +75,7 @@ async def async_exec_cmd(cmd: List[str], timeout: int = 60 * 60, env: Dict[str, def do_execute_sub_process(args, execution_path, abort_on_fail=True, get_output=False, extra_env=dict(os.environ), redirect_output=None, args_log=None): - # Temporarily adding imports here, to prevent circular import - from bld_utils import is_windows - from bldinstallercommon import list_as_string - _args_log = args_log or list_as_string(args) + _args_log = args_log or ' '.join([str(i) for i in args]) print(' --------------------------------------------------------------------') print(' Executing: [' + _args_log + ']') print(' Execution path: [' + execution_path + ']') @@ -125,7 +121,7 @@ def do_execute_sub_process(args, execution_path, abort_on_fail=True, get_output= sys.stdout.flush() except Exception: sys.stderr.write(' ERROR - ERROR - ERROR - ERROR - ERROR - ERROR !!!' 
+ os.linesep) - sys.stderr.write(' Executing: [' + list_as_string(args) + ']' + os.linesep) + sys.stderr.write(' Executing: [' + _args_log + ']' + os.linesep) sys.stderr.write(' Execution path: [' + execution_path + ']' + os.linesep) print_exc() sys.stderr.flush() diff --git a/packaging-tools/sdkcomponent.py b/packaging-tools/sdkcomponent.py index e62641ffa..08c48bbeb 100644 --- a/packaging-tools/sdkcomponent.py +++ b/packaging-tools/sdkcomponent.py @@ -29,9 +29,10 @@ # ############################################################################# -import os import ntpath -import bldinstallercommon +import os + +from bldinstallercommon import config_section_map, is_content_url_valid, safe_config_key_fetch ONLINE_ARCHIVE_LIST_TAG = '<!--ONLINE_ARCHIVE_LIST-->' @@ -41,21 +42,21 @@ class SdkComponent: class DownloadableArchive: """DownloadableArchive subclass contains all required info about data packages for one SDK component""" def __init__(self, archive, package_name, parent_target_install_base, archive_server_name, target_config, archive_location_resolver, key_value_substitution_list): - self.archive_uri = bldinstallercommon.config_section_map(target_config, archive)['archive_uri'] - self.archive_action = bldinstallercommon.safe_config_key_fetch(target_config, archive, 'archive_action') - self.extract_archive = bldinstallercommon.safe_config_key_fetch(target_config, archive, 'extract_archive') - self.package_strip_dirs = bldinstallercommon.safe_config_key_fetch(target_config, archive, 'package_strip_dirs') - self.package_finalize_items = bldinstallercommon.safe_config_key_fetch(target_config, archive, 'package_finalize_items') + self.archive_uri = config_section_map(target_config, archive)['archive_uri'] + self.archive_action = safe_config_key_fetch(target_config, archive, 'archive_action') + self.extract_archive = safe_config_key_fetch(target_config, archive, 'extract_archive') + self.package_strip_dirs = safe_config_key_fetch(target_config, archive, 
'package_strip_dirs') + self.package_finalize_items = safe_config_key_fetch(target_config, archive, 'package_finalize_items') # parent's 'target_install_base' self.parent_target_install_base = parent_target_install_base # in case the individual archive needs to be installed outside the root dir specified by the parent component - self.target_install_base = bldinstallercommon.safe_config_key_fetch(target_config, archive, 'target_install_base') + self.target_install_base = safe_config_key_fetch(target_config, archive, 'target_install_base') # this is relative to 1) current archive's 'target_install_base' 2) parent components 'target_install_base'. (1) takes priority - self.target_install_dir = bldinstallercommon.safe_config_key_fetch(target_config, archive, 'target_install_dir').lstrip(os.path.sep) - self.rpath_target = bldinstallercommon.safe_config_key_fetch(target_config, archive, 'rpath_target') - self.component_sha1_file = bldinstallercommon.safe_config_key_fetch(target_config, archive, 'component_sha1_file') + self.target_install_dir = safe_config_key_fetch(target_config, archive, 'target_install_dir').lstrip(os.path.sep) + self.rpath_target = safe_config_key_fetch(target_config, archive, 'rpath_target') + self.component_sha1_file = safe_config_key_fetch(target_config, archive, 'component_sha1_file') self.nomalize_archive_uri(package_name, archive_server_name, archive_location_resolver) - self.archive_name = bldinstallercommon.safe_config_key_fetch(target_config, archive, 'archive_name') + self.archive_name = safe_config_key_fetch(target_config, archive, 'archive_name') if not self.archive_name: self.archive_name = self.path_leaf(self.archive_uri) # Parse unnecessary extensions away from filename (QTBUG-39219) @@ -76,7 +77,7 @@ class SdkComponent: def check_archive_data(self): if self.archive_uri.startswith('http'): - res = bldinstallercommon.is_content_url_valid(self.archive_uri) + res = is_content_url_valid(self.archive_uri) if not res: return '*** Archive 
check fail! ***\n*** Unable to locate archive: ' + self.archive_uri elif not os.path.isfile(self.archive_uri): @@ -93,32 +94,32 @@ class SdkComponent: return self.parent_target_install_base + os.path.sep + self.target_install_dir def __init__(self, section_name, target_config, packages_full_path_list, archive_location_resolver, key_value_substitution_list, is_offline_build): - self.static_component = bldinstallercommon.safe_config_key_fetch(target_config, section_name, 'static_component') - self.root_component = bldinstallercommon.safe_config_key_fetch(target_config, section_name, 'root_component') + self.static_component = safe_config_key_fetch(target_config, section_name, 'static_component') + self.root_component = safe_config_key_fetch(target_config, section_name, 'root_component') self.package_name = section_name self.package_subst_name = section_name self.packages_full_path_list = packages_full_path_list - self.archives = bldinstallercommon.safe_config_key_fetch(target_config, section_name, 'archives') + self.archives = safe_config_key_fetch(target_config, section_name, 'archives') self.archives = self.archives.replace(' ', '').replace('\n', '') - self.archives_extract_dir = bldinstallercommon.safe_config_key_fetch(target_config, section_name, 'archives_extract_dir') - self.archive_server_name = bldinstallercommon.safe_config_key_fetch(target_config, section_name, 'archive_server_name') + self.archives_extract_dir = safe_config_key_fetch(target_config, section_name, 'archives_extract_dir') + self.archive_server_name = safe_config_key_fetch(target_config, section_name, 'archive_server_name') self.downloadable_archive_list = [] - self.target_install_base = bldinstallercommon.safe_config_key_fetch(target_config, section_name, 'target_install_base') - self.version = bldinstallercommon.safe_config_key_fetch(target_config, section_name, 'version') - self.version_tag = bldinstallercommon.safe_config_key_fetch(target_config, section_name, 'version_tag') - 
self.package_default = bldinstallercommon.safe_config_key_fetch(target_config, section_name, 'package_default') - self.install_priority = bldinstallercommon.safe_config_key_fetch(target_config, section_name, 'install_priority') - self.sorting_priority = bldinstallercommon.safe_config_key_fetch(target_config, section_name, 'sorting_priority') + self.target_install_base = safe_config_key_fetch(target_config, section_name, 'target_install_base') + self.version = safe_config_key_fetch(target_config, section_name, 'version') + self.version_tag = safe_config_key_fetch(target_config, section_name, 'version_tag') + self.package_default = safe_config_key_fetch(target_config, section_name, 'package_default') + self.install_priority = safe_config_key_fetch(target_config, section_name, 'install_priority') + self.sorting_priority = safe_config_key_fetch(target_config, section_name, 'sorting_priority') self.component_sha1 = "" - self.component_sha1_uri = bldinstallercommon.safe_config_key_fetch(target_config, section_name, 'component_sha1_uri') + self.component_sha1_uri = safe_config_key_fetch(target_config, section_name, 'component_sha1_uri') if (self.component_sha1_uri): self.component_sha1_uri = archive_location_resolver.resolve_full_uri(self.package_name, self.archive_server_name, self.component_sha1_uri) self.optional_for_offline = False self.key_value_substitution_list = key_value_substitution_list self.archive_skip = False - self.include_filter = bldinstallercommon.safe_config_key_fetch(target_config, section_name, 'include_filter') + self.include_filter = safe_config_key_fetch(target_config, section_name, 'include_filter') if is_offline_build: - tmp = bldinstallercommon.safe_config_key_fetch(target_config, section_name, 'optional_for_offline') + tmp = safe_config_key_fetch(target_config, section_name, 'optional_for_offline') for item in self.key_value_substitution_list: tmp = tmp.replace(item[0], item[1]) if tmp.lower() in ['yes', 'true', '1']: diff --git 
a/packaging-tools/send_header_diff_kmail.py b/packaging-tools/send_header_diff_kmail.py index 45f2334e7..10c3cba0b 100755 --- a/packaging-tools/send_header_diff_kmail.py +++ b/packaging-tools/send_header_diff_kmail.py @@ -53,8 +53,8 @@ import argparse import os -import subprocess import sys +from subprocess import check_call def send_headers(version, message_id, simulate): @@ -72,7 +72,7 @@ def send_headers(version, message_id, simulate): if simulate: print("Simulate:", " ".join(args)) else: - subprocess.check_call(args) + check_call(args) if __name__ == "__main__": diff --git a/packaging-tools/sign_installer.py b/packaging-tools/sign_installer.py index 2d8950bcf..75ab6cd3e 100755 --- a/packaging-tools/sign_installer.py +++ b/packaging-tools/sign_installer.py @@ -29,12 +29,13 @@ # ############################################################################# -import os -import sys -import shutil import argparse -import subprocess import logging +import os +import sys +from shutil import rmtree +from subprocess import DEVNULL, check_call + from read_remote_config import get_pkg_value log = logging.getLogger("Sign-utility") @@ -45,10 +46,10 @@ def sign_mac_app(app_path: str, signing_identity: str) -> None: assert app_path.endswith(".app"), f"Not a valid path to .app bundle: {app_path}" # we need to unlock the keychain first unlock_script = "/Users/qt/unlock-keychain.sh" - subprocess.check_call([unlock_script]) + check_call([unlock_script]) # "-o runtime" is required for notarization cmd_args = ['codesign', '-o', 'runtime', '--verbose=3', '-r', get_pkg_value("SIGNING_FLAGS"), '-s', signing_identity, app_path] - subprocess.check_call(cmd_args) + check_call(cmd_args) log.info(f"Successfully signed: {app_path}") @@ -58,7 +59,7 @@ def create_mac_dmg(app_path: str) -> None: destination_dmg_path = app_path.split(".app")[0] + '.dmg' cmd_args = ['hdiutil', 'create', '-srcfolder', app_path, '-volname', installer_name_base] cmd_args += ['-format', 'UDBZ', destination_dmg_path, 
'-ov', '-scrub', '-size', '4g'] - subprocess.check_call(cmd_args) + check_call(cmd_args) log.info(f"Successfully created: {destination_dmg_path}") @@ -69,15 +70,15 @@ def sign_windows_executable(file_path: str): dst = os.path.join(signToolsTempDir, item) curl_cmd_args = ['curl', "--fail", "-L", "--retry", "5", "--retry-delay", "30", "-o", dst, '--create-dirs', get_pkg_value("SIGN_TOOLS_ADDR") + item] - subprocess.check_call(curl_cmd_args) + check_call(curl_cmd_args) cmd_args = [os.path.join(signToolsTempDir, 'signtool32.exe'), 'sign', '/v', '/du', get_pkg_value("SIGNING_SERVER"), '/p', get_pkg_value("SIGNING_PASSWORD")] cmd_args += ['/tr', get_pkg_value("TIMESTAMP_SERVER"), '/f', os.path.join(signToolsTempDir, 'keys.pfx'), '/td', "sha256", '/fd', "sha256", file_path] log_entry = cmd_args[:] log_entry[4] = "****" log_entry[6] = "****" log.info("Calling: {0}".format(' '.join(log_entry))) - subprocess.check_call(cmd_args, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) - shutil.rmtree(signToolsTempDir) + check_call(cmd_args, stdout=DEVNULL, stderr=DEVNULL) + rmtree(signToolsTempDir) log.info(f"Successfully signed: {file_path}") diff --git a/packaging-tools/sign_windows_installer.py b/packaging-tools/sign_windows_installer.py index e04ead070..6ad0951e1 100755 --- a/packaging-tools/sign_windows_installer.py +++ b/packaging-tools/sign_windows_installer.py @@ -29,20 +29,24 @@ # ############################################################################# +import argparse import os +import subprocess import sys -import time +from configparser import ConfigParser +from datetime import datetime +from subprocess import DEVNULL +from time import time + import pysftp -import paramiko -import argparse -import datetime -import subprocess -import configparser from cryptography.fernet import Fernet +from paramiko import SSHException + from installer_utils import PackagingError from logging_util import init_logger + log = init_logger(__name__, debug_mode=False) -timestamp = 
datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d--%H:%M:%S') +timestamp = datetime.fromtimestamp(time()).strftime('%Y-%m-%d--%H:%M:%S') def _get_home_dir() -> str: @@ -69,7 +73,7 @@ def _get_decrypt_key() -> bytes: def _handle_signing(file_path: str): - config = configparser.ConfigParser() + config = ConfigParser() config.read(os.path.basename(os.getenv("WINDOWS_SIGNKEYS_PATH"))) section = config.sections()[0] if section in config: @@ -86,12 +90,12 @@ def _handle_signing(file_path: str): log_entry[9] = "****" log_entry[11] = "****" log.info(f"Calling: {' '.join(log_entry)}") - sign_result = subprocess.run(cmd_args_sign, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) + sign_result = subprocess.run(cmd_args_sign, stdout=DEVNULL, stderr=DEVNULL) if sign_result.returncode != 0: raise PackagingError(f"Package {file_path} signing with error {sign_result.returncode}") log.info(f"Successfully signed: {file_path}") cmd_args_verify = [os.path.basename(os.getenv("WINDOWS_SIGNTOOL_X64_PATH")), "verify", "-pa", file_path] - verify_result = subprocess.run(cmd_args_verify, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) + verify_result = subprocess.run(cmd_args_verify, stdout=DEVNULL, stderr=DEVNULL) if verify_result.returncode != 0: raise PackagingError(f"Failed to verify {file_path} with error {verify_result.returncode}") log.info(f"Successfully verified: {file_path}") @@ -116,7 +120,7 @@ def download_signing_tools(path_to_key: str): with pysftp.Connection(os.getenv("SFTP_ADDRESS"), username=os.getenv("SFTP_USER"), private_key=path_to_key, cnopts=cnopts) as sftp: sftp.get(os.getenv("WINDOWS_SIGNKEYS_PATH")) sftp.get(os.getenv("WINDOWS_SIGNTOOL_X64_PATH")) - except paramiko.SSHException: + except SSHException: raise PackagingError("FTP authentication failed!") from None diff --git a/packaging-tools/tests/test_bld_python.py b/packaging-tools/tests/test_bld_python.py index 79aef747a..693e447d0 100755 --- a/packaging-tools/tests/test_bld_python.py +++ 
b/packaging-tools/tests/test_bld_python.py @@ -29,18 +29,19 @@ # ############################################################################# -from tests import testhelpers import os import unittest -import tempfile +from tempfile import TemporaryDirectory + from bld_python import BldPythonError, locate_source_root +from tests.testhelpers import asyncio_test class TestBldPython(unittest.TestCase): - @testhelpers.asyncio_test + @asyncio_test async def test_locate_source_root(self) -> None: - with tempfile.TemporaryDirectory(dir=os.getcwd()) as tmpBaseDir: + with TemporaryDirectory(dir=os.getcwd()) as tmpBaseDir: tempDir = os.path.join(tmpBaseDir, "foo", "bar", "test", "dir") os.makedirs(tempDir) tempFilePath = os.path.join(tempDir, "configure") diff --git a/packaging-tools/tests/test_bldinstallercommon.py b/packaging-tools/tests/test_bldinstallercommon.py index a9c7f581d..89264ccf5 100644 --- a/packaging-tools/tests/test_bldinstallercommon.py +++ b/packaging-tools/tests/test_bldinstallercommon.py @@ -29,15 +29,21 @@ # ############################################################################# -from ddt import ddt, data import os import unittest +from pathlib import Path from tempfile import TemporaryDirectory + +from ddt import data, ddt + +from bld_utils import is_windows from bldinstallercommon import ( - replace_in_files, search_for_files, locate_executable, locate_path, locate_paths + locate_executable, + locate_path, + locate_paths, + replace_in_files, + search_for_files, ) -from pathlib import Path -from bld_utils import is_windows from installer_utils import PackagingError diff --git a/packaging-tools/tests/test_build_wrapper.py b/packaging-tools/tests/test_build_wrapper.py index c8f781c2d..d24f5b222 100644 --- a/packaging-tools/tests/test_build_wrapper.py +++ b/packaging-tools/tests/test_build_wrapper.py @@ -30,11 +30,13 @@ ############################################################################# import os -import getpass -import glob import unittest 
-import shutil -from ddt import ddt, data, unpack # type: ignore +from getpass import getuser +from glob import glob +from shutil import rmtree + +from ddt import data, ddt, unpack # type: ignore + from build_wrapper import init_snapshot_dir_and_upload_files @@ -54,10 +56,10 @@ class TestBuildWrapper(unittest.TestCase): optionDict['WORK_DIR'] = os.getcwd() optionDict['SSH_COMMAND'] = 'ssh' optionDict['SCP_COMMAND'] = 'scp' - user = getpass.getuser() + user = getuser() optionDict['PACKAGE_STORAGE_SERVER_ADDR'] = user + '@127.0.0.1' optionDict['PACKAGE_STORAGE_SERVER_BASE_DIR'] = temp_dir - filesToUpload = [os.path.basename(x) for x in glob.glob('./*.sh')] + filesToUpload = [os.path.basename(x) for x in glob('./*.sh')] if subdir: init_snapshot_dir_and_upload_files(optionDict, projectName, versioOrBranch, buildNumber, filesToUpload, subdir) else: @@ -72,10 +74,10 @@ class TestBuildWrapper(unittest.TestCase): self.assertTrue(os.path.islink(remote_path_latest_link)) searchDir = os.path.join(remote_path_latest_link, subdir, '*.sh') - uploadedFiles = [os.path.basename(x) for x in glob.glob(searchDir)] + uploadedFiles = [os.path.basename(x) for x in glob(searchDir)] self.assertListEqual(sorted(filesToUpload), sorted(uploadedFiles)) - shutil.rmtree(remote_path_base) + rmtree(remote_path_base) if __name__ == '__main__': diff --git a/packaging-tools/tests/test_content_cleaner.py b/packaging-tools/tests/test_content_cleaner.py index eaf6631e9..0b900eced 100644 --- a/packaging-tools/tests/test_content_cleaner.py +++ b/packaging-tools/tests/test_content_cleaner.py @@ -32,9 +32,11 @@ import os import unittest -import tempfile +from tempfile import TemporaryDirectory from typing import List -from ddt import ddt, data, unpack + +from ddt import data, ddt, unpack + from content_cleaner import preserve_content, remove_content, remove_empty_directories @@ -102,7 +104,7 @@ class TestContentCleaner(unittest.TestCase): expected_result: List[str], preserve_rules: List[str], ) -> None: - 
with tempfile.TemporaryDirectory(dir=os.getcwd()) as tmp_base_dir: + with TemporaryDirectory(dir=os.getcwd()) as tmp_base_dir: test_base_dir = os.path.join(tmp_base_dir, "test-base-dir") self.generate_test_content(test_base_dir, test_content) preserve_content(test_base_dir, preserve_rules) @@ -145,7 +147,7 @@ class TestContentCleaner(unittest.TestCase): verify_removed_files: List[str], ) -> None: try: - with tempfile.TemporaryDirectory(dir=os.getcwd()) as tmp_base_dir: + with TemporaryDirectory(dir=os.getcwd()) as tmp_base_dir: test_base_dir = os.path.join(tmp_base_dir, "test-base-dir") self.generate_test_content(test_base_dir, test_content) remove_content(test_base_dir, remove_rules) @@ -165,7 +167,7 @@ class TestContentCleaner(unittest.TestCase): @unpack def test_remove_empty_directories(self, test_content: str, remove_dir: bool) -> None: try: - with tempfile.TemporaryDirectory(dir=os.getcwd()) as tmp_base_dir: + with TemporaryDirectory(dir=os.getcwd()) as tmp_base_dir: test_base_dir = os.path.join(tmp_base_dir, "test-base-dir") self.generate_test_content(test_base_dir, test_content) remove_empty_directories(test_base_dir) diff --git a/packaging-tools/tests/test_create_installer.py b/packaging-tools/tests/test_create_installer.py index 1c7f32cf2..6bcc04b92 100644 --- a/packaging-tools/tests/test_create_installer.py +++ b/packaging-tools/tests/test_create_installer.py @@ -29,14 +29,16 @@ # ############################################################################# -from ddt import ddt, data import os import unittest -from tempfile import TemporaryDirectory from pathlib import Path -from create_installer import remove_all_debug_libraries +from tempfile import TemporaryDirectory + +from ddt import data, ddt + +from bld_utils import is_macos, is_windows from bldinstallercommon import locate_paths -from bld_utils import is_windows, is_macos +from create_installer import remove_all_debug_libraries @ddt diff --git a/packaging-tools/tests/test_installer_utils.py 
b/packaging-tools/tests/test_installer_utils.py index 63e204ac4..11d465f7e 100644 --- a/packaging-tools/tests/test_installer_utils.py +++ b/packaging-tools/tests/test_installer_utils.py @@ -29,27 +29,39 @@ # ############################################################################# -from tests import testhelpers -import os import io -import unittest -import tempfile +import os import tarfile -from installer_utils import cd, PackagingError, get_extract_cmd, extract_archive, download_archive, is_valid_url_path +import unittest +from tempfile import TemporaryDirectory + +from installer_utils import ( + PackagingError, + cd, + download_archive, + extract_archive, + get_extract_cmd, + is_valid_url_path, +) from read_remote_config import get_pkg_value +from tests.testhelpers import ( + asyncio_test, + asyncio_test_parallel_data, + isInternalFileServerReachable, +) class TestInstallerUtils(unittest.TestCase): - @testhelpers.asyncio_test + @asyncio_test async def test_cd(self) -> None: cwd = os.getcwd() - with tempfile.TemporaryDirectory(dir=cwd) as tmpBaseDir: + with TemporaryDirectory(dir=cwd) as tmpBaseDir: with cd(tmpBaseDir): self.assertEqual(tmpBaseDir, os.getcwd()) self.assertEqual(cwd, os.getcwd()) - @testhelpers.asyncio_test_parallel_data( + @asyncio_test_parallel_data( ("https://www.qt.io", False), ("https://", False), (None, False), @@ -59,7 +71,7 @@ class TestInstallerUtils(unittest.TestCase): self.assertEqual(is_valid_url_path(url), expectedResult, "URL: {0} - expected result: {1} - result was: {2}".format(url, expectedResult, not expectedResult)) - @testhelpers.asyncio_test_parallel_data( + @asyncio_test_parallel_data( ("https://www.qt.io/some/file.zip", "7z"), ("https://www.qt.io/some/file.tar.gz", "tar"), ("https://www.qt.io/some/file.7z", "7z"), @@ -69,14 +81,14 @@ class TestInstallerUtils(unittest.TestCase): extractCmd = get_extract_cmd(archive) self.assertEqual(extractCmd[0], expectedExtractor, "Not a valid extractor Callable obtained for: 
{0}".format(archive)) - @testhelpers.asyncio_test + @asyncio_test async def test_invalid_extractor(self) -> None: with self.assertRaises(PackagingError): get_extract_cmd("https://www.qt.io/some/file.foo.bar") - @testhelpers.asyncio_test + @asyncio_test async def test_extract_archive(self) -> None: - with tempfile.TemporaryDirectory(dir=os.getcwd()) as tmpBaseDir: + with TemporaryDirectory(dir=os.getcwd()) as tmpBaseDir: # create some test paths tempPath = os.path.join("foo", "bar") absoluteTempPath = os.path.join(tmpBaseDir, tempPath) @@ -99,11 +111,11 @@ class TestInstallerUtils(unittest.TestCase): await extract_archive(tarArchivePath, destDir) self.assertTrue(os.path.isfile(os.path.join(destDir, tempPath, "foobar.txt"))) - @unittest.skipUnless(testhelpers.isInternalFileServerReachable(), + @unittest.skipUnless(isInternalFileServerReachable(), "Skipping because file server is not accessible") - @testhelpers.asyncio_test + @asyncio_test async def test_download_archive(self) -> None: - with tempfile.TemporaryDirectory(dir=os.getcwd()) as tmpBaseDir: + with TemporaryDirectory(dir=os.getcwd()) as tmpBaseDir: pkg_srv = get_pkg_value("PACKAGE_STORAGE_SERVER_PATH_HTTP") test_file_url = pkg_srv + "/archive/packaging/qtsdk_testing.txt" downloadedFile = download_archive(test_file_url, tmpBaseDir) diff --git a/packaging-tools/tests/test_packaging.py b/packaging-tools/tests/test_packaging.py index a0a5d3451..e32db027d 100755 --- a/packaging-tools/tests/test_packaging.py +++ b/packaging-tools/tests/test_packaging.py @@ -30,13 +30,21 @@ ############################################################################# import os +import platform import sys import unittest -import tempfile -import shutil -import fileinput -from patch_qt import patchAbsoluteLibPathsFromLine, patchQmakePrlBuildDirFromLine, patchQConfigPriFromLine, patchQtEdition +from fileinput import FileInput +from shutil import rmtree +from tempfile import mkdtemp + from create_installer import 
parsePackageFinalizeItems +from patch_qt import ( + patchAbsoluteLibPathsFromLine, + patchQConfigPriFromLine, + patchQmakePrlBuildDirFromLine, + patchQtEdition, +) +from runner import do_execute_sub_process class TestPackaging(unittest.TestCase): @@ -111,7 +119,7 @@ class TestPackaging(unittest.TestCase): self.assertEqual(matchCount, len(data[2])) def test_patchQtEdition(self): - tempDir = tempfile.mkdtemp(dir=os.getcwd()) + tempDir = mkdtemp(dir=os.getcwd()) tempFile = os.path.join(tempDir, "qconfig.pri") try: @@ -132,19 +140,17 @@ class TestPackaging(unittest.TestCase): expectedData.append("nonsense") idx = 0 - for line in fileinput.FileInput(tempFile, inplace=False): + for line in FileInput(tempFile, inplace=False): print("Received data: [{0}] expected data: [{1}]".format(line.strip(), expectedData[idx])) self.assertEqual(line.strip(), expectedData[idx], "Received data: [{0}] differs from expected data: [{1}]".format(line, expectedData[idx])) idx += 1 finally: - shutil.rmtree(tempDir) + rmtree(tempDir) @unittest.skipUnless(os.environ.get("PKG_TEST_QT_CONFIG_BASE_PATH"), "Skipping because 'PKG_TEST_QT_CONFIG_BASE_PATH' is not set") @unittest.skipUnless(os.environ.get("PKG_TEST_QT_ARTIFACTS_URL"), "Skipping because 'PKG_TEST_QT_CONFIG_BASE_PATH' is not set") @unittest.skipUnless(os.environ.get("PKG_TEST_QT_IFW_TOOL_URL"), "Skipping because 'PKG_TEST_QT_IFW_TOOL_URL' is not set") def test_createInstaller(self): - from runner import do_execute_sub_process - import platform extension = '.run' if platform.system().lower().startswith('linux') else '' testsDir = os.path.dirname(os.path.abspath(__file__)) path = os.path.join(os.environ.get("PKG_TEST_QT_CONFIG_BASE_PATH"), "offline_installer_jobs", "5.9.3") diff --git a/packaging-tools/tests/test_release_repo_meta_update.py b/packaging-tools/tests/test_release_repo_meta_update.py index ce8732fa5..3619b8672 100755 --- a/packaging-tools/tests/test_release_repo_meta_update.py +++ 
b/packaging-tools/tests/test_release_repo_meta_update.py @@ -29,16 +29,23 @@ # ############################################################################# -from tests import testhelpers import os import unittest -import tempfile -from ddt import ddt +from tempfile import TemporaryDirectory from typing import List + +from ddt import ddt + from release_repo_meta_update import ( - IfwRepoUpdateError, scan_repositories, swap_repositories, create_converted_repositories, - check_repos_which_can_be_updated, convert_suffix, backup_suffix + IfwRepoUpdateError, + backup_suffix, + check_repos_which_can_be_updated, + convert_suffix, + create_converted_repositories, + scan_repositories, + swap_repositories, ) +from tests.testhelpers import asyncio_test @ddt @@ -96,9 +103,9 @@ class TestReleaseRepoMetaUpdate(unittest.TestCase): with open(tmp, 'w+') as f: f.write("\n") - @testhelpers.asyncio_test + @asyncio_test async def test_scan_repositories(self) -> None: - with tempfile.TemporaryDirectory(dir=os.getcwd(), prefix="_repo_tmp_") as tmpBaseDir: + with TemporaryDirectory(dir=os.getcwd(), prefix="_repo_tmp_") as tmpBaseDir: self._write_test_repo(tmpBaseDir, self.paths) done_repos, pending_repos, unconverted_repos, broken_repos = scan_repositories(tmpBaseDir) @@ -112,9 +119,9 @@ class TestReleaseRepoMetaUpdate(unittest.TestCase): self.assertListEqual(sorted([repo.split(tmpBaseDir)[-1] for repo in done_repos]), sorted(["/repo5", "/repo7"])) - @testhelpers.asyncio_test + @asyncio_test async def test_check_repos_which_can_be_updated(self) -> None: - with tempfile.TemporaryDirectory(dir=os.getcwd(), prefix="_repo_tmp_") as tmpBaseDir: + with TemporaryDirectory(dir=os.getcwd(), prefix="_repo_tmp_") as tmpBaseDir: self._write_test_repo(tmpBaseDir, self.paths) done_repos, pending_repos, unconverted_repos, broken_repos = scan_repositories(tmpBaseDir) @@ -124,18 +131,18 @@ class TestReleaseRepoMetaUpdate(unittest.TestCase): self.assertListEqual(sorted([repo.split(tmpBaseDir)[-1] for repo 
in existing_pending_repos]), sorted(["/repo2", "/repo3", "/repo7", "/repo9"])) - @testhelpers.asyncio_test + @asyncio_test async def test_swap_repositories_invalid(self) -> None: - with tempfile.TemporaryDirectory(dir=os.getcwd(), prefix="_repo_tmp_") as tmpBaseDir: + with TemporaryDirectory(dir=os.getcwd(), prefix="_repo_tmp_") as tmpBaseDir: self._write_test_repo(tmpBaseDir, self.paths) done_repos, pending_repos, unconverted_repos, broken_repos = scan_repositories(tmpBaseDir) with self.assertRaises(IfwRepoUpdateError): await create_converted_repositories(repogen="foobar-repogen", repositories_to_migrate=unconverted_repos, dry_run=True) - @testhelpers.asyncio_test + @asyncio_test async def test_swap_repositories_valid(self) -> None: - with tempfile.TemporaryDirectory(dir=os.getcwd(), prefix="_repo_tmp_") as tmpBaseDir: + with TemporaryDirectory(dir=os.getcwd(), prefix="_repo_tmp_") as tmpBaseDir: self._write_test_repo(tmpBaseDir, self.non_migrated_paths) done_repos, pending_repos, unconverted_repos, broken_repos = scan_repositories(tmpBaseDir) successful_conversions, failed_conversions = await create_converted_repositories(repogen="foobar-repogen", diff --git a/packaging-tools/tests/test_release_repo_updater.py b/packaging-tools/tests/test_release_repo_updater.py index 7d9290f74..9f61f7081 100755 --- a/packaging-tools/tests/test_release_repo_updater.py +++ b/packaging-tools/tests/test_release_repo_updater.py @@ -29,21 +29,37 @@ # ############################################################################# -from tests import testhelpers import os import unittest +from configparser import ConfigParser +from shutil import rmtree +from tempfile import TemporaryDirectory + from ddt import ddt -import tempfile -import shutil -import configparser + from installer_utils import PackagingError -from release_task_reader import parse_data -from release_repo_updater import upload_ifw_to_remote, upload_pending_repository_content, \ - reset_new_remote_repository, 
create_remote_repository_backup, \ - remote_file_exists, build_online_repositories, \ - ensure_ext_repo_paths, parse_ext, check_repogen_output, append_to_task_filters, \ - format_task_filters, has_connection_error from read_remote_config import get_pkg_value +from release_repo_updater import ( + append_to_task_filters, + build_online_repositories, + check_repogen_output, + create_remote_repository_backup, + ensure_ext_repo_paths, + format_task_filters, + has_connection_error, + parse_ext, + remote_file_exists, + reset_new_remote_repository, + string_to_bool, + upload_ifw_to_remote, + upload_pending_repository_content, +) +from release_task_reader import parse_data +from tests.testhelpers import ( + asyncio_test, + asyncio_test_parallel_data, + isInternalFileServerReachable, +) def _write_dummy_file(path: str) -> None: @@ -104,23 +120,23 @@ class TestReleaseRepoUpdater(unittest.TestCase): def setUpClass(cls) -> None: cls.server = "127.0.0.1" - @testhelpers.asyncio_test + @asyncio_test async def test_remote_file_exists(self): self.assertTrue(remote_file_exists(self.server, os.path.abspath(__file__))) self.assertFalse(remote_file_exists(self.server, "/some/bogus/directory/foo.txt")) - @unittest.skipUnless(testhelpers.isInternalFileServerReachable(), "Skipping because file server is not accessible") - @testhelpers.asyncio_test + @unittest.skipUnless(isInternalFileServerReachable(), "Skipping because file server is not accessible") + @asyncio_test async def test_upload_ifw_to_remote(self) -> None: try: repogen = await _get_repogen() finally: self.assertTrue(os.path.isfile(repogen)) - shutil.rmtree(os.path.dirname(repogen)) + rmtree(os.path.dirname(repogen)) - @testhelpers.asyncio_test + @asyncio_test async def test_upload_pending_repository_content(self) -> None: - with tempfile.TemporaryDirectory(dir=os.getcwd(), prefix="_repo_tmp_") as tmpBaseDir: + with TemporaryDirectory(dir=os.getcwd(), prefix="_repo_tmp_") as tmpBaseDir: sourceRepo = os.path.join(tmpBaseDir, 
"repository") destinationRepo = os.path.join(tmpBaseDir, "destination_online_repository") @@ -131,9 +147,9 @@ class TestReleaseRepoUpdater(unittest.TestCase): upload_pending_repository_content(self.server, sourceRepo, destinationRepo) self.assertListEqual(sorted(os.listdir(sourceRepo)), sorted(os.listdir(destinationRepo))) - @testhelpers.asyncio_test + @asyncio_test async def test_reset_new_remote_repository(self) -> None: - with tempfile.TemporaryDirectory(dir=os.getcwd(), prefix="_repo_tmp_") as tmpBaseDir: + with TemporaryDirectory(dir=os.getcwd(), prefix="_repo_tmp_") as tmpBaseDir: remoteSourceRepoPath = os.path.join(tmpBaseDir, "repository") remoteTargetRepoPath = os.path.join(tmpBaseDir, "destination_online_repository") @@ -150,9 +166,9 @@ class TestReleaseRepoUpdater(unittest.TestCase): reset_new_remote_repository(self.server, remoteSourceRepoPath, remoteTargetRepoPath) self.assertTrue(os.path.exists(remoteTargetRepoPath + "____snapshot_backup")) - @testhelpers.asyncio_test + @asyncio_test async def test_create_remote_repository_backup(self) -> None: - with tempfile.TemporaryDirectory(dir=os.getcwd(), prefix="_repo_tmp_") as tmpBaseDir: + with TemporaryDirectory(dir=os.getcwd(), prefix="_repo_tmp_") as tmpBaseDir: remoteSourceRepoPath = os.path.join(tmpBaseDir, "repository") _write_dummy_file(os.path.join(remoteSourceRepoPath, "qt.foo.bar1", "meta", "package.xml")) @@ -163,20 +179,19 @@ class TestReleaseRepoUpdater(unittest.TestCase): self.assertFalse(os.path.exists(remoteSourceRepoPath)) self.assertListEqual(sorted(["Updates.xml", "qt.foo.bar1", "qt.foo.bar2"]), sorted(os.listdir(remoteRepoBackupPath))) - @testhelpers.asyncio_test_parallel_data((True, True), (False, False), ("yes", True), ("1", True), ("y", True), - ("false", False), ("n", False), ("0", False), ("no", False)) + @asyncio_test_parallel_data((True, True), (False, False), ("yes", True), ("1", True), ("y", True), + ("false", False), ("n", False), ("0", False), ("no", False)) async def 
test_string_to_bool(self, value: str, expectedResult: bool) -> None: - from release_repo_updater import string_to_bool self.assertEqual(string_to_bool(value), expectedResult) - @testhelpers.asyncio_test + @asyncio_test async def test_build_online_repositories_dryrun(self) -> None: sample_config = """ [task.repository.linux.x86_64.repo1] config_file: foobar_config_file repo_path: foo/bar/path_1 """ - config = configparser.ConfigParser() + config = ConfigParser() config.read_string(sample_config) # parse all tasks i.e. no filters @@ -186,14 +201,14 @@ class TestReleaseRepoUpdater(unittest.TestCase): task = tasks.pop() self.assertTrue(task.source_online_repository_path.endswith("foo/bar/path_1/online_repository")) - @testhelpers.asyncio_test + @asyncio_test async def test_ensure_ext_repo_paths(self) -> None: - with tempfile.TemporaryDirectory(dir=os.getcwd(), prefix="_repo_tmp_") as tmpBaseDir: + with TemporaryDirectory(dir=os.getcwd(), prefix="_repo_tmp_") as tmpBaseDir: expectedRepo = os.path.join(tmpBaseDir, "some", "test", "path") await ensure_ext_repo_paths(self.server, self.server, expectedRepo) self.assertTrue(os.path.isdir(expectedRepo)) - @testhelpers.asyncio_test_parallel_data( + @asyncio_test_parallel_data( ("user@server.com:/foo/bar"), ("server.com:/foo/bar"), ("user@server.com:/"), unpack=False @@ -201,7 +216,7 @@ class TestReleaseRepoUpdater(unittest.TestCase): async def test_parse_ext_valid(self, ext) -> None: parse_ext(ext) - @testhelpers.asyncio_test_parallel_data( + @asyncio_test_parallel_data( ("user@server.com"), ("server.com:/foo/bar:"), ("user@server.com:some/path"), unpack=False @@ -210,7 +225,7 @@ class TestReleaseRepoUpdater(unittest.TestCase): with self.assertRaises(PackagingError): parse_ext(ext) - @testhelpers.asyncio_test_parallel_data( + @asyncio_test_parallel_data( ("Error: Repository parameter missing argument"), ("Invalid content in ..."), ("Repository target directory /foobar/bar/foo already exists."), unpack=False @@ -219,7 +234,7 @@ 
class TestReleaseRepoUpdater(unittest.TestCase): with self.assertRaises(PackagingError): check_repogen_output(repogen_output) - @testhelpers.asyncio_test_parallel_data( + @asyncio_test_parallel_data( ("Update component a.b.c.d"), ("Cannot find new components to update"), unpack=False ) @@ -227,7 +242,7 @@ class TestReleaseRepoUpdater(unittest.TestCase): # should not throw exception check_repogen_output(repogen_output) - @testhelpers.asyncio_test_parallel_data( + @asyncio_test_parallel_data( ([], ["repository"]), (["linux,common"], ["repository,linux,common"]), (["", "linux,common"], ["repository", "repository,linux,common"]) @@ -235,7 +250,7 @@ class TestReleaseRepoUpdater(unittest.TestCase): async def test_append_to_task_filters(self, task_filters: str, expected_result: bool) -> None: self.assertEqual(append_to_task_filters(task_filters, "repository"), expected_result) - @testhelpers.asyncio_test_parallel_data( + @asyncio_test_parallel_data( (["task.repository.linux.x64.feature1"], ["task,repository,linux,x64,feature1"]), (["task.repository.linux.x64.feature1", "windows.x64,feature2"], ["task,repository,linux,x64,feature1", "windows,x64,feature2"]), @@ -247,7 +262,7 @@ class TestReleaseRepoUpdater(unittest.TestCase): print("test") self.assertEqual(format_task_filters(task_filters), expected_result) - @testhelpers.asyncio_test_parallel_data( + @asyncio_test_parallel_data( ("qtsdkrepository/windows_x86/desktop/tools_maintenance/log-s3-2020-12-03--10:18:11-xml.t" "xt:fatal error: Could not connect to the endpoint URL: 'https://qt-cdn.s3.eu-west-1.ama" "zonaws.com/?list-type=2&prefix=qtsdkrepository%2Fwindows_x86%2Fdesktop%2Ftools_maintena" diff --git a/packaging-tools/tests/test_release_task_reader.py b/packaging-tools/tests/test_release_task_reader.py index 23da3f0db..98230442c 100755 --- a/packaging-tools/tests/test_release_task_reader.py +++ b/packaging-tools/tests/test_release_task_reader.py @@ -29,25 +29,27 @@ # 
############################################################################# -from tests import testhelpers import unittest -from ddt import ddt +from configparser import ConfigParser from typing import List -import release_task_reader -import configparser + +from ddt import ddt + +from release_task_reader import ReleaseTaskError, get_filter_parts, parse_data +from tests.testhelpers import asyncio_test, asyncio_test_parallel_data @ddt class TestReleaseTaskReader(unittest.TestCase): - @testhelpers.asyncio_test_parallel_data(("linux,x64,common", ["linux", "x64", "common"]), - ("linux, x64, common ", ["linux", "x64", "common"]), - ("linux; , x64 common ", ["linux", "x64", "common"]), - (": ,,; linux x64 common ", ["linux", "x64", "common"])) + @asyncio_test_parallel_data(("linux,x64,common", ["linux", "x64", "common"]), + ("linux, x64, common ", ["linux", "x64", "common"]), + ("linux; , x64 common ", ["linux", "x64", "common"]), + (": ,,; linux x64 common ", ["linux", "x64", "common"])) async def test_get_filter_parts(self, task_filters: str, expected_result: List[str]) -> None: - self.assertEqual(release_task_reader.get_filter_parts(task_filters), expected_result) + self.assertEqual(get_filter_parts(task_filters), expected_result) - @testhelpers.asyncio_test + @asyncio_test async def test_release_task_reader(self) -> None: sample_config = """ [task.repository.linux.x86_64] @@ -79,15 +81,15 @@ class TestReleaseTaskReader(unittest.TestCase): substitutions: arg13, arg23, arg33 rta_key_list: key13, key23 """ - config = configparser.ConfigParser() + config = ConfigParser() config.read_string(sample_config) # parse all tasks i.e. 
no filters - tasks = release_task_reader.parse_data(config, task_filters=[]) + tasks = parse_data(config, task_filters=[]) self.assertTrue(len(tasks) == 4, "Did not parse all tasks from sample config") # parse only "repository" tasks - tasks = release_task_reader.parse_data(config, task_filters=["repository"]) + tasks = parse_data(config, task_filters=["repository"]) self.assertTrue(len(tasks) == 1) self.assertEqual(tasks[0].is_repository_task(), True) self.assertEqual(tasks[0].is_offline_installer_task(), False) @@ -100,16 +102,16 @@ class TestReleaseTaskReader(unittest.TestCase): self.assertEqual(sorted(tasks[0].get_rta_key_list()), sorted(["key1", "key2", "key3", "key4"])) # parse only "offline" tasks with multiple filters - tasks = release_task_reader.parse_data(config, task_filters=["offline,linux,x86_64"]) + tasks = parse_data(config, task_filters=["offline,linux,x86_64"]) self.assertTrue(len(tasks) == 2) - tasks = release_task_reader.parse_data(config, task_filters=["offline,linux,x86_64,foobar"]) + tasks = parse_data(config, task_filters=["offline,linux,x86_64,foobar"]) self.assertTrue(len(tasks) == 1) # parse "offline" tasks with multiple filters and "online" tasks - tasks = release_task_reader.parse_data(config, task_filters=["offline,linux,x86_64", "online,linux,x86_64"]) + tasks = parse_data(config, task_filters=["offline,linux,x86_64", "online,linux,x86_64"]) self.assertTrue(len(tasks) == 3) - @testhelpers.asyncio_test + @asyncio_test async def test_release_task_reader_invalid_config(self) -> None: sample_config = """ [task.repository] @@ -119,10 +121,10 @@ class TestReleaseTaskReader(unittest.TestCase): repo_path: foo/bar/path rta_key_list: key1, key2 """ - config = configparser.ConfigParser() + config = ConfigParser() config.read_string(sample_config) - with self.assertRaises(release_task_reader.ReleaseTaskError): - release_task_reader.parse_data(config, task_filters=[]) + with self.assertRaises(ReleaseTaskError): + parse_data(config, 
task_filters=[]) if __name__ == '__main__': diff --git a/packaging-tools/tests/test_runCommand.py b/packaging-tools/tests/test_runCommand.py index dd68bad28..cfc7ee314 100644 --- a/packaging-tools/tests/test_runCommand.py +++ b/packaging-tools/tests/test_runCommand.py @@ -28,12 +28,16 @@ # $QT_END_LICENSE$ # ############################################################################# + +import argparse import ctypes -import time +import os import sys import unittest -import argparse # commandline argument parser -import os +from time import sleep + +from bld_utils import runCommand +from threadedwork import ThreadedWork if sys.platform.startswith("win"): # Don't display the Windows GPF dialog if the invoked program dies. @@ -69,7 +73,6 @@ def printLines(count): def useRunCommand(testArguments, *arguments): - from bld_utils import runCommand return runCommand("{0} {1}".format(baseCommand(), testArguments), *arguments) @@ -126,7 +129,6 @@ class TestRunCommand(unittest.TestCase): def test_withThreadedWork(self): currentMethodName = sys._getframe().f_code.co_name - from threadedwork import ThreadedWork testWork = ThreadedWork("{} - run some command threaded".format(currentMethodName)) taskStringList = [] taskStringList.append("--sleep 1 --printLines 10") @@ -140,7 +142,6 @@ class TestRunCommand(unittest.TestCase): def test_withThreadedWork_unexpected_exitCode(self): currentMethodName = sys._getframe().f_code.co_name - from threadedwork import ThreadedWork testWork = ThreadedWork("{} - run some command threaded".format(currentMethodName)) # this exchange the current os._exit(-1) implementation only for this testing case separatorLine = "{0}>>>>>>>>>>>>>>>>>>>>>>>>>>>>>{0}".format(os.linesep) @@ -158,7 +159,6 @@ class TestRunCommand(unittest.TestCase): def test_withThreadedWork_crash(self): currentMethodName = sys._getframe().f_code.co_name - from threadedwork import ThreadedWork testWork = ThreadedWork("{} - run some command threaded".format(currentMethodName)) # this 
exchange the current os._exit(-1) implementation only for this testing case separatorLine = "{0}>>>>>>>>>>>>>>>>>>>>>>>>>>>>>{0}".format(os.linesep) @@ -188,7 +188,7 @@ if __name__ == '__main__': parser.add_argument('--testMethod') callerArguments = parser.parse_args() if callerArguments.sleep: - time.sleep(callerArguments.sleep) + sleep(callerArguments.sleep) if callerArguments.printLines: printLines(callerArguments.printLines) if callerArguments.crash: diff --git a/packaging-tools/tests/test_runner.py b/packaging-tools/tests/test_runner.py index 95e8e8fa0..9ac08fe5d 100755 --- a/packaging-tools/tests/test_runner.py +++ b/packaging-tools/tests/test_runner.py @@ -29,38 +29,40 @@ # ############################################################################# -import asyncio import os import sys import unittest +from asyncio import TimeoutError from pathlib import Path from tempfile import TemporaryDirectory + from ddt import data, ddt + from bld_utils import is_windows from runner import async_exec_cmd, do_execute_sub_process, exec_cmd -from tests import testhelpers +from tests.testhelpers import asyncio_test @ddt class TestRunner(unittest.TestCase): @unittest.skipIf(is_windows(), "Windows not supported for this test yet") - @testhelpers.asyncio_test + @asyncio_test async def test_async_exec_cmd(self) -> None: await async_exec_cmd(['echo', "TEST"]) cmd = ['sleep', '2'] - with self.assertRaises(asyncio.TimeoutError): + with self.assertRaises(TimeoutError): await async_exec_cmd(cmd, timeout=1) @unittest.skipIf(is_windows(), "Windows not supported for this test yet") - @testhelpers.asyncio_test + @asyncio_test async def test_exec_cmd(self) -> None: output = exec_cmd(['echo', "TEST"]) self.assertEqual(output, "TEST") cmd = ['sleep', '2'] - with self.assertRaises(asyncio.TimeoutError): + with self.assertRaises(TimeoutError): await async_exec_cmd(cmd, timeout=1) @data( diff --git a/packaging-tools/tests/testhelpers.py b/packaging-tools/tests/testhelpers.py index 
43478cbdb..04090db85 100644 --- a/packaging-tools/tests/testhelpers.py +++ b/packaging-tools/tests/testhelpers.py @@ -29,12 +29,15 @@ # ############################################################################# -import sh import asyncio -import typing import subprocess +from typing import Any, Callable + +from bld_utils import is_windows from read_remote_config import get_pkg_value +if not is_windows(): + import sh _asyncio_test_loop = asyncio.get_event_loop() @@ -49,7 +52,7 @@ def asyncio_test_parallel_data(*data_args, unpack=True): # then double pack so we can unpack anyway data_args = ((d,) for d in data_args) - def decorator(f: typing.Callable[..., typing.Any]): + def decorator(f: Callable[..., Any]): assert asyncio.iscoroutinefunction(f) def wrapped(*args, **kwargs): diff --git a/packaging-tools/threadedwork.py b/packaging-tools/threadedwork.py index f35140cf5..ae5c991c4 100644 --- a/packaging-tools/threadedwork.py +++ b/packaging-tools/threadedwork.py @@ -28,16 +28,15 @@ # $QT_END_LICENSE$ # ############################################################################# +import builtins as __builtin__ import itertools -# to get the cpu number -import multiprocessing import os -import threading -import traceback -import time import sys -import queue -import builtins as __builtin__ +import threading +from multiprocessing import cpu_count +from queue import Queue +from time import sleep +from traceback import format_exc # we are using RLock, because threadedPrint is using the same lock outputLock = threading.RLock() @@ -98,7 +97,7 @@ __builtin__.org_stdout = sys.stdout __builtin__.org_sterr = sys.stderr -def enableThreadedPrint(enable=True, threadCount=multiprocessing.cpu_count()): +def enableThreadedPrint(enable=True, threadCount=cpu_count()): if enable: global outputStates global outputFormatString @@ -161,7 +160,7 @@ class Task(): sys.__stdout__.flush() sys.__stderr__.write(format(taskFunction)) sys.__stderr__.write(os.linesep) - 
sys.__stderr__.write(traceback.format_exc()) + sys.__stderr__.write(format_exc()) sys.__stderr__.flush() self.exitFunction(*(self.exitFunctionArguments)) print("Done") @@ -171,7 +170,7 @@ class ThreadedWork(): def __init__(self, description): self.description = os.linesep + "##### {} #####".format(description) - self.queue = queue.Queue() + self.queue = Queue() self.legend = [] self.taskNumber = 0 self.exitFunction = None @@ -194,7 +193,7 @@ class ThreadedWork(): def run(self, maxThreads=None): if not maxThreads: - maxThreads = min(multiprocessing.cpu_count(), self.taskNumber) + maxThreads = min(cpu_count(), self.taskNumber) print(self.description) print(os.linesep.join(self.legend)) @@ -214,7 +213,7 @@ class ThreadedWork(): while consumer.is_alive(): try: # wait 1 second, then go back and ask if thread is still alive - time.sleep(1) + sleep(1) except KeyboardInterrupt: # if ctrl-C is pressed within that second, # catch the KeyboardInterrupt exception sys.exit(0) |