author     Patrik Teivonen <patrik.teivonen@qt.io>    2022-06-16 14:01:51 +0300
committer  Patrik Teivonen <patrik.teivonen@qt.io>    2022-07-11 09:41:00 +0000
commit     43f6e2ced0724182f5d31e3d1a7c19093626a341 (patch)
tree       f926ac578ed320a8aedaa8d76c3265751bcada8e
parent     8399adccfaf316ac8bd51f511a3e0cd8328aaa51 (diff)
PEP8: Fix whitespace
Fix whitespace and enable the following checks in flake8:
- E225: Missing whitespace around operator
- E226: Missing whitespace around arithmetic operator
- E231: Missing whitespace after ',', ';', or ':'
- E221: Multiple spaces before operator
- E201: Whitespace after '('
- E252: Missing whitespace around parameter equals
- E251: Unexpected spaces around keyword / parameter equals
- E202: Whitespace before ')'
- E222: Multiple spaces after operator
- E211: Whitespace before '('

Change-Id: Ib14a4a83dbc06cdfac511eb09912d7b9232df7e6
Reviewed-by: Akseli Salovaara <akseli.salovaara@qt.io>
Reviewed-by: Iikka Eklund <iikka.eklund@qt.io>
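For orientation, here is a minimal before/after sketch of the whitespace patterns the newly enabled checks reject. It is not taken from this commit; the names are made up for illustration.

# Rejected by the newly enabled checks (E225/E226: operators, E231: comma,
# E251: keyword equals, E201/E202/E211: parentheses):
count, block_size = 4, 1024
total=count*block_size
print ( 'items:', ['a','b'] )

def fetch(url, retries = 3):
    return url, retries

# PEP8-clean equivalents, matching the style applied in this commit:
total = count * block_size
print('items:', ['a', 'b'])

def fetch(url, retries=3):
    return url, retries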
-rw-r--r--   .pre-commit-config.yaml                                     2
-rw-r--r--   packaging-tools/archiveresolver.py                         24
-rwxr-xr-x   packaging-tools/bld_ifw_tools.py                          130
-rwxr-xr-x   packaging-tools/bld_module.py                              36
-rw-r--r--   packaging-tools/bld_openssl.py                              2
-rw-r--r--   packaging-tools/bld_utils.py                               36
-rw-r--r--   packaging-tools/bldinstallercommon.py                      54
-rw-r--r--   packaging-tools/build_clang.py                              2
-rw-r--r--   packaging-tools/build_wrapper.py                           32
-rw-r--r--   packaging-tools/create_installer.py                        28
-rw-r--r--   packaging-tools/dump_debug_infos.py                         4
-rw-r--r--   packaging-tools/environmentfrombatchfile.py                 4
-rw-r--r--   packaging-tools/libclang_training/libclangtimings2csv.py    4
-rw-r--r--   packaging-tools/libclang_training/mergeCsvFiles.py          6
-rw-r--r--   packaging-tools/libclang_training/runBatchFiles.py          6
-rwxr-xr-x   packaging-tools/notarize.py                                 4
-rw-r--r--   packaging-tools/pkg_constants.py                           12
-rwxr-xr-x   packaging-tools/release_repo_updater.py                    30
-rwxr-xr-x   packaging-tools/runner.py                                   4
-rw-r--r--   packaging-tools/sdkcomponent.py                           114
-rw-r--r--   packaging-tools/tests/test_build_wrapper.py                 6
-rw-r--r--   packaging-tools/tests/test_runCommand.py                    6
-rw-r--r--   packaging-tools/threadedwork.py                             8
23 files changed, 277 insertions, 277 deletions
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 755625509..bcc7d9f49 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -21,7 +21,7 @@ repos:
name: Check PEP8 compliance (flake8)
# Disable E203 for compatibility with blackformatter, and W503 as it goes against PEP8
# Disable checks that are not relevant to this patch, they will be introduced later
- entry: bash -c 'pipenv run python3 -m flake8 --max-line-length=99 --ignore=E111,E117,E121,E123,E124,E125,E126,E127,E128,E201,E202,E203,E211,E221,E222,E225,E226,E231,E251,E252,E501,W503 "$@"'
+ entry: bash -c 'pipenv run python3 -m flake8 --max-line-length=99 --ignore=E111,E117,E121,E123,E124,E125,E126,E127,E128,E203,E501,W503 "$@"'
language: system
types: [python]
fail_fast: true
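If needed, the reduced ignore list from the updated hook can also be exercised outside pre-commit. The following is a rough sketch using flake8's legacy Python API; it assumes flake8 is installed in the environment, and the target path is only a placeholder.

# Sketch only: mirrors the flake8 options from the updated pre-commit entry.
from flake8.api import legacy as flake8

style_guide = flake8.get_style_guide(
    max_line_length=99,
    ignore=["E111", "E117", "E121", "E123", "E124", "E125", "E126",
            "E127", "E128", "E203", "E501", "W503"],
)
report = style_guide.check_files(["packaging-tools/bld_utils.py"])  # placeholder path
print(report.total_errors)  # expected to be 0 once the whitespace fixes are applied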
diff --git a/packaging-tools/archiveresolver.py b/packaging-tools/archiveresolver.py
index bd6639770..05894d930 100644
--- a/packaging-tools/archiveresolver.py
+++ b/packaging-tools/archiveresolver.py
@@ -34,9 +34,9 @@ import bldinstallercommon
import pkg_constants
from urllib.parse import urlparse
-SERVER_NAMESPACE = 'ArchiveRemoteLocation'
+SERVER_NAMESPACE = 'ArchiveRemoteLocation'
PACKAGE_REMOTE_LOCATION_RELEASE = 'release'
-PACKAGE_ARCHIVE_TAG = 'ARCHIVE_TAG'
+PACKAGE_ARCHIVE_TAG = 'ARCHIVE_TAG'
###############################
@@ -73,18 +73,18 @@ class ArchiveLocationResolver:
self.configurations_root_dir = configurations_root_dir
self.key_substitution_list = key_substitution_list
# get packages tempalates src dir first
- pkg_templates_dir = os.path.normpath(bldinstallercommon.config_section_map(target_config,'PackageTemplates')['template_dirs'])
+ pkg_templates_dir = os.path.normpath(bldinstallercommon.config_section_map(target_config, 'PackageTemplates')['template_dirs'])
self.pkg_templates_dir_list = pkg_templates_dir.replace(' ', '').rstrip(',\n').split(',')
# next read server list
if server_base_url_override:
- server_obj = ArchiveLocationResolver.ArchiveRemoteLocation('default_server_name', server_base_url_override, '')
+ server_obj = ArchiveLocationResolver.ArchiveRemoteLocation('default_server_name', server_base_url_override, '')
self.server_list.append(server_obj)
else:
for section in target_config.sections():
if section.startswith(SERVER_NAMESPACE):
server_name = section.split('.')[-1]
- base_url = bldinstallercommon.safe_config_key_fetch(target_config, section, 'base_url')
- base_path = bldinstallercommon.safe_config_key_fetch(target_config, section, 'base_path')
+ base_url = bldinstallercommon.safe_config_key_fetch(target_config, section, 'base_url')
+ base_path = bldinstallercommon.safe_config_key_fetch(target_config, section, 'base_path')
base_path.replace(' ', '')
# if base path is defined, then the following logic applies:
# if script is used in testclient mode fetch the packages from "RnD" location
@@ -92,7 +92,7 @@ class ArchiveLocationResolver:
# If the base_path is not defined, use the address as-is
if base_path:
base_path = base_path + PACKAGE_REMOTE_LOCATION_RELEASE
- server_obj = ArchiveLocationResolver.ArchiveRemoteLocation(server_name, base_url, base_path)
+ server_obj = ArchiveLocationResolver.ArchiveRemoteLocation(server_name, base_url, base_path)
self.server_list.append(server_obj)
if len(self.server_list) == 1:
self.default_server = self.server_list[0]
@@ -156,9 +156,9 @@ class ArchiveLocationResolver:
# Print out server list
###############################
def print_server_list(self):
- print ('--------------------------------------------------')
- print (' Server list:')
+ print('--------------------------------------------------')
+ print(' Server list:')
for server in self.server_list:
- print (' ---------------------------------------------')
- print (' Server name: ' + server.server_name)
- print (' Server url: ' + server.server_url)
+ print(' ---------------------------------------------')
+ print(' Server name: ' + server.server_name)
+ print(' Server url: ' + server.server_url)
diff --git a/packaging-tools/bld_ifw_tools.py b/packaging-tools/bld_ifw_tools.py
index c051e87e9..01eebb250 100755
--- a/packaging-tools/bld_ifw_tools.py
+++ b/packaging-tools/bld_ifw_tools.py
@@ -150,10 +150,10 @@ def get_build_env(openssl_dir):
###############################
class IfwOptions:
- default_qt_src_pkg = 'http://download.qt.io/official_releases/qt/' + QT_VERSION + '/' + QT_VERSION_MINOR + '/single/qt-everywhere-src-' + QT_VERSION_MINOR + ARCH_EXT
- default_qt_installer_framework_url = 'git://code.qt.io/installer-framework/installer-framework.git'
- default_qt_installer_framework_branch_qt = '3.2'
- default_qt_installer_framework_qmake_args = ['-r', '-config', 'release', '-config', 'static']
+ default_qt_src_pkg = 'http://download.qt.io/official_releases/qt/' + QT_VERSION + '/' + QT_VERSION_MINOR + '/single/qt-everywhere-src-' + QT_VERSION_MINOR + ARCH_EXT
+ default_qt_installer_framework_url = 'git://code.qt.io/installer-framework/installer-framework.git'
+ default_qt_installer_framework_branch_qt = '3.2'
+ default_qt_installer_framework_qmake_args = ['-r', '-config', 'release', '-config', 'static']
def __init__(self,
qt_source_package_uri,
@@ -167,73 +167,73 @@ class IfwOptions:
qt_binaries_dynamic,
signserver,
signpwd,
- incremental_build = False,
- archive_qt = False
+ incremental_build=False,
+ archive_qt=False
):
- self.signserver = signserver
- self.signpwd = signpwd
- self.incremental_mode = incremental_build
- self.qt_source_dir = os.path.join(ROOT_DIR, 'qt-src')
- self.qt_build_dir = os.path.join(ROOT_DIR, 'qt-bld')
- self.qt_build_dir_dynamic = os.path.join(ROOT_DIR, 'qt-bld-dynamic')
- self.installer_framework_source_dir = os.path.join(ROOT_DIR, 'ifw-src')
- self.installer_framework_build_dir = os.path.join(ROOT_DIR, 'ifw-bld')
- self.installer_framework_pkg_dir = os.path.join(ROOT_DIR, 'ifw-pkg')
- self.installer_framework_target_dir = os.path.join(ROOT_DIR, 'ifw-target')
- self.qt_installer_framework_uri = qt_installer_framework_uri
- self.qt_installer_framework_uri_saveas = os.path.join(ROOT_DIR, os.path.basename(self.qt_installer_framework_uri))
- self.qt_installer_framework_branch = qt_installer_framework_branch
- self.qt_installer_framework_branch_pretty = qt_installer_framework_branch.replace("/", "_")
- self.qt_installer_framework_qmake_args = qt_installer_framework_qmake_args
- self.openssl_dir = openssl_dir
- self.qt_binaries_static = qt_binaries_static
+ self.signserver = signserver
+ self.signpwd = signpwd
+ self.incremental_mode = incremental_build
+ self.qt_source_dir = os.path.join(ROOT_DIR, 'qt-src')
+ self.qt_build_dir = os.path.join(ROOT_DIR, 'qt-bld')
+ self.qt_build_dir_dynamic = os.path.join(ROOT_DIR, 'qt-bld-dynamic')
+ self.installer_framework_source_dir = os.path.join(ROOT_DIR, 'ifw-src')
+ self.installer_framework_build_dir = os.path.join(ROOT_DIR, 'ifw-bld')
+ self.installer_framework_pkg_dir = os.path.join(ROOT_DIR, 'ifw-pkg')
+ self.installer_framework_target_dir = os.path.join(ROOT_DIR, 'ifw-target')
+ self.qt_installer_framework_uri = qt_installer_framework_uri
+ self.qt_installer_framework_uri_saveas = os.path.join(ROOT_DIR, os.path.basename(self.qt_installer_framework_uri))
+ self.qt_installer_framework_branch = qt_installer_framework_branch
+ self.qt_installer_framework_branch_pretty = qt_installer_framework_branch.replace("/", "_")
+ self.qt_installer_framework_qmake_args = qt_installer_framework_qmake_args
+ self.openssl_dir = openssl_dir
+ self.qt_binaries_static = qt_binaries_static
if self.qt_binaries_static:
- self.qt_binaries_static_saveas = os.path.join(ROOT_DIR, os.path.basename(self.qt_binaries_static))
- self.qt_binaries_dynamic = qt_binaries_dynamic
+ self.qt_binaries_static_saveas = os.path.join(ROOT_DIR, os.path.basename(self.qt_binaries_static))
+ self.qt_binaries_dynamic = qt_binaries_dynamic
if self.qt_binaries_dynamic:
- self.qt_binaries_dynamic_saveas = os.path.join(ROOT_DIR, os.path.basename(self.qt_binaries_dynamic))
- self.qt_build_modules = ["qtbase", "qtdeclarative", "qttools", "qttranslations"]
- self.qt_build_modules_docs = ["qtbase", "qttools"]
+ self.qt_binaries_dynamic_saveas = os.path.join(ROOT_DIR, os.path.basename(self.qt_binaries_dynamic))
+ self.qt_build_modules = ["qtbase", "qtdeclarative", "qttools", "qttranslations"]
+ self.qt_build_modules_docs = ["qtbase", "qttools"]
if is_windows():
self.qt_build_modules.append("qtwinextras")
- self.make_cmd = 'jom.exe'
- self.make_doc_cmd = 'jom.exe'
- self.make_install_cmd = 'jom.exe install'
- self.qt_qmake_bin = 'qmake.exe'
- self.qt_configure_bin = self.qt_source_dir + os.sep + 'configure.bat'
+ self.make_cmd = 'jom.exe'
+ self.make_doc_cmd = 'jom.exe'
+ self.make_install_cmd = 'jom.exe install'
+ self.qt_qmake_bin = 'qmake.exe'
+ self.qt_configure_bin = self.qt_source_dir + os.sep + 'configure.bat'
else:
- self.make_cmd = 'make -j' + str(multiprocessing.cpu_count() + 1)
- self.make_doc_cmd = 'make'
- self.make_install_cmd = 'make install'
- self.qt_qmake_bin = 'qmake'
- self.qt_configure_bin = self.qt_source_dir + os.sep + 'configure'
-
- self.build_artifacts_dir = os.path.join(ROOT_DIR, pkg_constants.IFW_BUILD_ARTIFACTS_DIR)
- self.mac_deploy_qt_archive_name = 'macdeployqt.7z'
- self.mac_qt_menu_nib_archive_name = 'qt_menu.nib.7z'
+ self.make_cmd = 'make -j' + str(multiprocessing.cpu_count() + 1)
+ self.make_doc_cmd = 'make'
+ self.make_install_cmd = 'make install'
+ self.qt_qmake_bin = 'qmake'
+ self.qt_configure_bin = self.qt_source_dir + os.sep + 'configure'
+
+ self.build_artifacts_dir = os.path.join(ROOT_DIR, pkg_constants.IFW_BUILD_ARTIFACTS_DIR)
+ self.mac_deploy_qt_archive_name = 'macdeployqt.7z'
+ self.mac_qt_menu_nib_archive_name = 'qt_menu.nib.7z'
# determine filenames used later on
self.architecture = 'x64'
- self.plat_suffix = get_platform_suffix()
- self.installer_framework_archive_name = 'installer-framework-build-' + self.qt_installer_framework_branch_pretty + "-" + self.plat_suffix + '-' + self.architecture + '.7z'
- self.installer_base_archive_name = 'installerbase-' + self.qt_installer_framework_branch_pretty + "-" + self.plat_suffix + '-' + self.architecture + '.7z'
- self.binarycreator_archive_name = 'binarycreator-' + self.qt_installer_framework_branch_pretty + "-" + self.plat_suffix + '-' + self.architecture + '.7z'
- self.installer_framework_payload_arch = 'installer-framework-build-stripped-' + self.qt_installer_framework_branch_pretty + "-" + self.plat_suffix + '-' + self.architecture + '.7z'
- self.qt_source_package_uri = qt_source_package_uri
- self.qt_source_package_uri_saveas = os.path.join(ROOT_DIR, os.path.basename(self.qt_source_package_uri))
+ self.plat_suffix = get_platform_suffix()
+ self.installer_framework_archive_name = 'installer-framework-build-' + self.qt_installer_framework_branch_pretty + "-" + self.plat_suffix + '-' + self.architecture + '.7z'
+ self.installer_base_archive_name = 'installerbase-' + self.qt_installer_framework_branch_pretty + "-" + self.plat_suffix + '-' + self.architecture + '.7z'
+ self.binarycreator_archive_name = 'binarycreator-' + self.qt_installer_framework_branch_pretty + "-" + self.plat_suffix + '-' + self.architecture + '.7z'
+ self.installer_framework_payload_arch = 'installer-framework-build-stripped-' + self.qt_installer_framework_branch_pretty + "-" + self.plat_suffix + '-' + self.architecture + '.7z'
+ self.qt_source_package_uri = qt_source_package_uri
+ self.qt_source_package_uri_saveas = os.path.join(ROOT_DIR, os.path.basename(self.qt_source_package_uri))
# Set Qt build prefix
- qt_prefix = ' -prefix ' + self.qt_build_dir + os.sep + 'qtbase'
- self.qt_configure_options = qt_configure_options + qt_prefix
+ qt_prefix = ' -prefix ' + self.qt_build_dir + os.sep + 'qtbase'
+ self.qt_configure_options = qt_configure_options + qt_prefix
# Product key checker
- self.product_key_checker_pri = product_key_checker_pri
+ self.product_key_checker_pri = product_key_checker_pri
if product_key_checker_pri:
if os.path.isfile(product_key_checker_pri):
self.qt_installer_framework_qmake_args += ['PRODUCTKEYCHECK_PRI_FILE=' + self.product_key_checker_pri]
# macOS specific
if is_macos():
- self.qt_installer_framework_qmake_args += ['"LIBS+=-framework IOKit"']
- self.archive_qt = archive_qt
- self.qt_static_binary_name = 'qt-bin-' + QT_VERSION + '-' + self.plat_suffix + '_static.7z'
- self.qt_shared_binary_name = 'qt-bin-' + QT_VERSION + '-' + self.plat_suffix + '_shared.7z'
+ self.qt_installer_framework_qmake_args += ['"LIBS+=-framework IOKit"']
+ self.archive_qt = archive_qt
+ self.qt_static_binary_name = 'qt-bin-' + QT_VERSION + '-' + self.plat_suffix + '_static.7z'
+ self.qt_shared_binary_name = 'qt-bin-' + QT_VERSION + '-' + self.plat_suffix + '_shared.7z'
# sanity check
self.sanity_check()
@@ -389,7 +389,7 @@ def build_qt(options, qt_build_dir, qt_configure_options, qt_modules):
print('Building Qt')
cmd_args = options.make_cmd
for module in qt_modules:
- cmd_args += " module-"+module
+ cmd_args += " module-" + module
do_execute_sub_process(cmd_args.split(' '), options.qt_source_dir, True, False, get_build_env(options.openssl_dir))
print('--------------------------------------------------------------------')
print('Installing Qt')
@@ -471,7 +471,7 @@ def build_installer_framework_examples(options):
dirs.remove('translations') # for now don't visit translation example as qm files needs to be generated first
for directory in dirs:
print("********** building example " + directory)
- config_file = os.path.join(root, directory, 'config', 'config.xml')
+ config_file = os.path.join(root, directory, 'config', 'config.xml')
package_dir = os.path.join(root, directory, 'packages')
target_filename = os.path.join(root, directory, 'installer')
do_execute_sub_process(args=(file_binarycreator, '--offline-only', '-c', config_file, '-p', package_dir, target_filename), execution_path=package_dir)
@@ -516,7 +516,7 @@ def create_installer_package(options):
current_dir = os.getcwd()
os.chdir(package_dir)
- shutil.copytree(os.path.join(options.installer_framework_build_dir, 'bin'), os.path.join(package_dir, 'bin'), ignore = shutil.ignore_patterns("*.exe.manifest","*.exp","*.lib"))
+ shutil.copytree(os.path.join(options.installer_framework_build_dir, 'bin'), os.path.join(package_dir, 'bin'), ignore=shutil.ignore_patterns("*.exe.manifest", "*.exp", "*.lib"))
if is_linux():
do_execute_sub_process(args=('strip', os.path.join(package_dir, 'bin/archivegen')), execution_path=package_dir)
do_execute_sub_process(args=('strip', os.path.join(package_dir, 'bin/binarycreator')), execution_path=package_dir)
@@ -745,7 +745,7 @@ def patch_win32_mkspecs(mkspecsdir):
for root, dummy, files in os.walk(mkspecsdir):
for file in files:
if "win32" in root and file == "qmake.conf":
- patch(os.path.join(root, file), {"-MD" : "-MT", "embed_manifest_dll" : "", "embed_manifest_exe" : "" })
+ patch(os.path.join(root, file), {"-MD" : "-MT", "embed_manifest_dll" : "", "embed_manifest_exe" : ""})
def get_platform_suffix():
@@ -763,7 +763,7 @@ def get_platform_suffix():
# Setup argument parser
###############################
def setup_argument_parser():
- parser = argparse.ArgumentParser(prog = os.path.basename(sys.argv[0]),
+ parser = argparse.ArgumentParser(prog=os.path.basename(sys.argv[0]),
add_help=True, description="Build Qt Installer-Framework",
epilog="Builds Qt Installer Framework against static Qt libraries and archives the build artifacts for further usage. \n"
"To create an installer which installs the built Installer Framework libraries and tools use \"--create_installer\" option.",
@@ -796,11 +796,11 @@ if __name__ == "__main__":
PARSER = setup_argument_parser()
# parse args
CARGS = PARSER.parse_args()
- qt_src = IfwOptions.default_qt_src_pkg if not CARGS.qt_archive_uri else CARGS.qt_archive_uri
+ qt_src = IfwOptions.default_qt_src_pkg if not CARGS.qt_archive_uri else CARGS.qt_archive_uri
qt_configure_options = get_static_qt_configure_options(CARGS.openssl_dir) if not CARGS.qt_configure_options else CARGS.qt_configure_options
- ifw_branch = IfwOptions.default_qt_installer_framework_branch_qt if not CARGS.ifw_branch else CARGS.ifw_branch
- signserver = '' if not CARGS.sign_server else CARGS.sign_server
- signpwd = '' if not CARGS.sign_server_pwd else CARGS.sign_server_pwd
+ ifw_branch = IfwOptions.default_qt_installer_framework_branch_qt if not CARGS.ifw_branch else CARGS.ifw_branch
+ signserver = '' if not CARGS.sign_server else CARGS.sign_server
+ signpwd = '' if not CARGS.sign_server_pwd else CARGS.sign_server_pwd
qt_conf_args = CARGS.qt_configure_options
ifw_qmake_args = CARGS.ifw_qmake_args
diff --git a/packaging-tools/bld_module.py b/packaging-tools/bld_module.py
index 68d0b35fa..7383328c0 100755
--- a/packaging-tools/bld_module.py
+++ b/packaging-tools/bld_module.py
@@ -47,9 +47,9 @@ from bldinstallercommon import locate_path, locate_paths
from installer_utils import PackagingError
from runner import do_execute_sub_process
-SCRIPT_ROOT_DIR = os.path.dirname(os.path.realpath(__file__))
-MODULE_SRC_DIR_NAME = 'module_src'
-MODULE_SRC_DIR = os.path.join(SCRIPT_ROOT_DIR, MODULE_SRC_DIR_NAME)
+SCRIPT_ROOT_DIR = os.path.dirname(os.path.realpath(__file__))
+MODULE_SRC_DIR_NAME = 'module_src'
+MODULE_SRC_DIR = os.path.join(SCRIPT_ROOT_DIR, MODULE_SRC_DIR_NAME)
###############################
@@ -79,7 +79,7 @@ def erase_qmake_prl_build_dir(search_path):
# erase lines starting with 'QMAKE_PRL_BUILD_DIR' from .prl files
for item in file_list:
found = False
- for line in fileinput.FileInput(item, inplace = 1):
+ for line in fileinput.FileInput(item, inplace=1):
if line.startswith('QMAKE_PRL_BUILD_DIR'):
found = True
print(''.rstrip('\n'))
@@ -99,7 +99,7 @@ def patch_build_time_paths(search_path, search_strings, qt_install_prefix):
for item in file_list:
print('Replacing {0} paths from file: {1}'.format(search_strings, item))
- for line in fileinput.FileInput(item, inplace = 1):
+ for line in fileinput.FileInput(item, inplace=1):
patched_line = reduce(lambda accum, value: accum.replace(value, qt_install_prefix),
search_strings,
line)
@@ -107,7 +107,7 @@ def patch_build_time_paths(search_path, search_strings, qt_install_prefix):
# install an argument parser
-parser = argparse.ArgumentParser(prog = os.path.basename(sys.argv[0]),
+parser = argparse.ArgumentParser(prog=os.path.basename(sys.argv[0]),
add_help=True, description="build Qt 5 based Qt Module", formatter_class=argparse.RawTextHelpFormatter)
if is_windows():
parser.epilog = "example on windows: " + os.linesep + "\tpython {0} --clean " \
@@ -201,7 +201,7 @@ if is_windows():
# check whether this is a QNX build
if any('qnx' in qt5_url.lower() for qt5_url in callerArguments.qt5_module_urls):
# apply the workaround from QTBUG-38555
- qtModuleInstallDirectory = qtModuleInstallDirectory.replace('\\','/').replace('/', '\\', 1)
+ qtModuleInstallDirectory = qtModuleInstallDirectory.replace('\\', '/').replace('/', '\\', 1)
# clean step
@@ -289,16 +289,16 @@ else: # --> qmake
generateCommand.append(os.environ["EXTRA_QMAKE_ARGS"])
generateCommand.append(qtModuleProFile)
-runCommand(generateCommand, currentWorkingDirectory = qtModuleBuildDirectory,
- callerArguments = callerArguments, extra_environment = environment)
+runCommand(generateCommand, currentWorkingDirectory=qtModuleBuildDirectory,
+ callerArguments=callerArguments, extra_environment=environment)
-ret = runBuildCommand(currentWorkingDirectory = qtModuleBuildDirectory, callerArguments = callerArguments)
+ret = runBuildCommand(currentWorkingDirectory=qtModuleBuildDirectory, callerArguments=callerArguments)
if ret:
raise RuntimeError('Failure running the last command: %i' % ret)
ret = runInstallCommand(['install', 'INSTALL_ROOT=' + qtModuleInstallDirectory],
- currentWorkingDirectory = qtModuleBuildDirectory,
- callerArguments = callerArguments, extra_environment = environment)
+ currentWorkingDirectory=qtModuleBuildDirectory,
+ callerArguments=callerArguments, extra_environment=environment)
if ret:
raise RuntimeError('Failure running the last command: %i' % ret)
@@ -318,21 +318,21 @@ if callerArguments.collectDocs:
if callerArguments.makeDocs:
# build docs first
ret = runInstallCommand('docs',
- currentWorkingDirectory = qtModuleBuildDirectory,
- callerArguments = callerArguments, extra_environment = environment)
+ currentWorkingDirectory=qtModuleBuildDirectory,
+ callerArguments=callerArguments, extra_environment=environment)
if ret:
raise RuntimeError('Failure running the last command: %i' % ret)
# then make install those
ret = runInstallCommand(['install_docs', 'INSTALL_ROOT=' + qtModuleInstallDirectory],
- currentWorkingDirectory = qtModuleBuildDirectory,
- callerArguments = callerArguments, extra_environment = environment)
+ currentWorkingDirectory=qtModuleBuildDirectory,
+ callerArguments=callerArguments, extra_environment=environment)
if ret:
raise RuntimeError('Failure running the last command: %i' % ret)
# make separate "doc.7z" for later use if needed
doc_dir = locate_path(qtModuleInstallDirectory, ["doc"], filters=[os.path.isdir])
archive_name = callerArguments.module_name + '-' + os.environ['LICENSE'] + '-doc-' + os.environ['MODULE_VERSION'] + '.7z'
ret = runCommand(['7z', 'a', os.path.join('doc_archives', archive_name), doc_dir],
- currentWorkingDirectory = os.path.dirname(os.path.realpath(__file__)))
+ currentWorkingDirectory=os.path.dirname(os.path.realpath(__file__)))
if ret:
raise RuntimeError('Failure running the last command: %i' % ret)
@@ -359,6 +359,6 @@ if callerArguments.use_cmake:
archive_cmd.append(os.path.join(dir_to_archive, '*'))
else:
archive_cmd.append(dir_to_archive)
-ret = runCommand(archive_cmd, currentWorkingDirectory = os.path.dirname(os.path.realpath(__file__)))
+ret = runCommand(archive_cmd, currentWorkingDirectory=os.path.dirname(os.path.realpath(__file__)))
if ret:
raise RuntimeError('Failure running the last command: %i' % ret)
diff --git a/packaging-tools/bld_openssl.py b/packaging-tools/bld_openssl.py
index ab3333a18..c4c3d6f33 100644
--- a/packaging-tools/bld_openssl.py
+++ b/packaging-tools/bld_openssl.py
@@ -74,7 +74,7 @@ def check_environment():
def setup_argument_parser():
- parser = argparse.ArgumentParser(prog = os.path.basename(sys.argv[0]),
+ parser = argparse.ArgumentParser(prog=os.path.basename(sys.argv[0]),
add_help=True, description='Build openssl from sources',
formatter_class=argparse.RawTextHelpFormatter)
diff --git a/packaging-tools/bld_utils.py b/packaging-tools/bld_utils.py
index 44beb5380..596bd6910 100644
--- a/packaging-tools/bld_utils.py
+++ b/packaging-tools/bld_utils.py
@@ -136,7 +136,7 @@ def urllib2_response_read(response, file_path, block_size, total_size):
return bytes_count
-def download(url, target, read_block_size = 1048576):
+def download(url, target, read_block_size=1048576):
try:
if os.path.isdir(os.path.abspath(target)):
filename = os.path.basename(urllib.parse.urlparse(url).path)
@@ -225,7 +225,7 @@ def setValueOnEnvironmentDict(environment, key, value):
@deep_copy_arguments
-def getEnvironment(extra_environment = None, callerArguments = None):
+def getEnvironment(extra_environment=None, callerArguments=None):
# first take the one from the system and use the plain dictionary data for that
environment = dict(os.environ)
@@ -242,7 +242,7 @@ def getEnvironment(extra_environment = None, callerArguments = None):
@deep_copy_arguments
-def runCommand(command, currentWorkingDirectory, callerArguments = None, extra_environment = None, onlyErrorCaseOutput=False, expectedExitCodes=[0]):
+def runCommand(command, currentWorkingDirectory, callerArguments=None, extra_environment=None, onlyErrorCaseOutput=False, expectedExitCodes=[0]):
if builtins.type(expectedExitCodes) is not list:
raise TypeError("{}({}) is not {}".format("expectedExitCodes", builtins.type(expectedExitCodes), list))
if builtins.type(onlyErrorCaseOutput) is not bool:
@@ -284,15 +284,15 @@ def runCommand(command, currentWorkingDirectory, callerArguments = None, extra_e
lastStdErrLines = []
if threading.currentThread().name == "MainThread" and not onlyErrorCaseOutput:
process = subprocess.Popen(commandAsList, shell=useShell,
- cwd = currentWorkingDirectory, bufsize = -1, env = environment)
+ cwd=currentWorkingDirectory, bufsize=-1, env=environment)
else:
process = subprocess.Popen(commandAsList, shell=useShell,
- stdout = subprocess.PIPE, stderr = subprocess.PIPE,
- cwd = currentWorkingDirectory, bufsize = -1, env = environment)
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE,
+ cwd=currentWorkingDirectory, bufsize=-1, env=environment)
maxSavedLineNumbers = 1000
- lastStdOutLines = collections.deque(maxlen = maxSavedLineNumbers)
- lastStdErrLines = collections.deque(maxlen = maxSavedLineNumbers)
+ lastStdOutLines = collections.deque(maxlen=maxSavedLineNumbers)
+ lastStdErrLines = collections.deque(maxlen=maxSavedLineNumbers)
# Launch the asynchronous readers of the process' stdout and stderr.
stdout = AsynchronousFileReader(process.stdout)
@@ -353,7 +353,7 @@ def runCommand(command, currentWorkingDirectory, callerArguments = None, extra_e
@deep_copy_arguments
-def runInstallCommand(arguments = ['install'], currentWorkingDirectory = None, callerArguments = None, extra_environment = None, onlyErrorCaseOutput = False):
+def runInstallCommand(arguments=['install'], currentWorkingDirectory=None, callerArguments=None, extra_environment=None, onlyErrorCaseOutput=False):
if hasattr(callerArguments, 'installcommand') and callerArguments.installcommand:
installcommand = callerArguments.installcommand.split()
else:
@@ -366,36 +366,36 @@ def runInstallCommand(arguments = ['install'], currentWorkingDirectory = None, c
if arguments:
installcommand.extend(arguments if builtins.type(arguments) is list else arguments.split())
- return runCommand(installcommand, currentWorkingDirectory, callerArguments, extra_environment = extra_environment, onlyErrorCaseOutput = onlyErrorCaseOutput)
+ return runCommand(installcommand, currentWorkingDirectory, callerArguments, extra_environment=extra_environment, onlyErrorCaseOutput=onlyErrorCaseOutput)
@deep_copy_arguments
-def runBuildCommand(arguments = None, currentWorkingDirectory = None, callerArguments = None, extra_environment = None, onlyErrorCaseOutput = False, expectedExitCodes=[0]):
+def runBuildCommand(arguments=None, currentWorkingDirectory=None, callerArguments=None, extra_environment=None, onlyErrorCaseOutput=False, expectedExitCodes=[0]):
buildcommand = ['make']
if hasattr(callerArguments, 'buildcommand') and callerArguments.buildcommand:
buildcommand = callerArguments.buildcommand.split()
if arguments:
buildcommand.extend(arguments if builtins.type(arguments) is list else arguments.split())
- return runCommand(buildcommand, currentWorkingDirectory, callerArguments, extra_environment = extra_environment, onlyErrorCaseOutput = onlyErrorCaseOutput, expectedExitCodes = expectedExitCodes)
+ return runCommand(buildcommand, currentWorkingDirectory, callerArguments, extra_environment=extra_environment, onlyErrorCaseOutput=onlyErrorCaseOutput, expectedExitCodes=expectedExitCodes)
@deep_copy_arguments
-def getReturnValue(command, currentWorkingDirectory = None, extra_environment = None, callerArguments = None):
+def getReturnValue(command, currentWorkingDirectory=None, extra_environment=None, callerArguments=None):
commandAsList = command[:].split(' ')
- return subprocess.Popen(commandAsList, stdout=subprocess.PIPE, stderr = subprocess.STDOUT,
- cwd = currentWorkingDirectory, env = getEnvironment(extra_environment, callerArguments)).communicate()[0].strip()
+ return subprocess.Popen(commandAsList, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
+ cwd=currentWorkingDirectory, env=getEnvironment(extra_environment, callerArguments)).communicate()[0].strip()
-def gitSHA(path, callerArguments = None):
+def gitSHA(path, callerArguments=None):
gitBinary = "git"
if isGitDirectory(path):
- return getReturnValue(gitBinary + " rev-list -n1 HEAD", currentWorkingDirectory = path, callerArguments = callerArguments).strip()
+ return getReturnValue(gitBinary + " rev-list -n1 HEAD", currentWorkingDirectory=path, callerArguments=callerArguments).strip()
return ''
# get commit SHA either directly from git, or from a .tag file in the source directory
-def get_commit_SHA(source_path, callerArguments = None):
+def get_commit_SHA(source_path, callerArguments=None):
buildGitSHA = gitSHA(source_path, callerArguments)
if not buildGitSHA:
tagfile = os.path.join(source_path, '.tag')
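One detail worth noting about the keyword-argument changes above: E251 removes spaces around '=' for plain defaults, while E252 requires them once a parameter carries a type annotation. An illustrative sketch follows; the signatures are hypothetical, not code from this repository.

# E251: no spaces around '=' when the parameter is unannotated
def run_command(command, cwd=None, only_error_case_output=False):
    return command, cwd, only_error_case_output

run_command(["make"], cwd="/tmp", only_error_case_output=True)

# E252: spaces are required around '=' when the parameter is annotated
def run_command_typed(command: list, cwd: str = ".", timeout: int = 60 * 60) -> int:
    return timeout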
diff --git a/packaging-tools/bldinstallercommon.py b/packaging-tools/bldinstallercommon.py
index 91613df80..7442d209e 100644
--- a/packaging-tools/bldinstallercommon.py
+++ b/packaging-tools/bldinstallercommon.py
@@ -57,8 +57,8 @@ from runner import do_execute_sub_process
if is_windows():
import win32api
-DEBUG_RPATH = False
-MAX_DEBUG_PRINT_LENGTH = 10000
+DEBUG_RPATH = False
+MAX_DEBUG_PRINT_LENGTH = 10000
###############################
@@ -86,13 +86,13 @@ CURRENT_DOWNLOAD_PERCENT = 0
def dlProgress(count, blockSize, totalSize):
global CURRENT_DOWNLOAD_PERCENT
- percent = int(count*blockSize*100/totalSize)
+ percent = int(count * blockSize * 100 / totalSize)
# produce only reasonable amount of prints into stdout
if percent > CURRENT_DOWNLOAD_PERCENT:
CURRENT_DOWNLOAD_PERCENT = percent
sys.stdout.write("\r" + " Downloading: %d%%" % percent)
sys.stdout.flush()
- if count*blockSize >= totalSize:
+ if count * blockSize >= totalSize:
CURRENT_DOWNLOAD_PERCENT = 0
print('\n')
@@ -206,7 +206,7 @@ def remove_one_tree_level(directory):
def handle_remove_readonly(func, path, exc):
excvalue = exc[1]
if func in (os.rmdir, os.remove) and excvalue.errno == errno.EACCES:
- os.chmod(path, stat.S_IRWXU| stat.S_IRWXG| stat.S_IRWXO) # 0777
+ os.chmod(path, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO) # 0777
func(path)
else:
raise
@@ -328,7 +328,7 @@ def locate_paths(search_dir: Union[str, Path], patterns: List[str],
###############################
# original snippet: http://code.activestate.com/recipes/173220-test-if-a-file-or-string-is-text-or-binary/
text_characters = "".join(list(map(chr, list(range(32, 127)))) + list("\n\r\t\b"))
-trans_table = str.maketrans("", "", text_characters)
+trans_table = str.maketrans("", "", text_characters)
def is_text(s):
@@ -345,12 +345,12 @@ def is_text(s):
t = s.translate(trans_table)
# If more than 30% non-text characters, then
# this is considered a binary file
- if len(t)/len(s) > 0.30:
+ if len(t) / len(s) > 0.30:
return 0
return 1
-def is_text_file(filename, blocksize = 512):
+def is_text_file(filename, blocksize=512):
try:
return is_text(open(filename).read(blocksize))
except UnicodeDecodeError:
@@ -383,7 +383,7 @@ def sanity_check_rpath_max_length(file_path, new_rpath):
else:
rpath = result.group()
index = rpath.index('=')
- rpath = rpath[index+1:]
+ rpath = rpath[index + 1:]
space_for_new_rpath = len(rpath)
if len(new_rpath) > space_for_new_rpath:
print('*** Warning - Not able to process RPath for file: ' + file_path)
@@ -399,10 +399,10 @@ def sanity_check_rpath_max_length(file_path, new_rpath):
def pathsplit(p, rest=[]):
(h, t) = os.path.split(p)
if len(h) < 1:
- return [t]+rest
+ return [t] + rest
if len(t) < 1:
- return [h]+rest
- return pathsplit(h, [t]+rest)
+ return [h] + rest
+ return pathsplit(h, [t] + rest)
def commonpath(l1, l2, common=[]):
@@ -412,7 +412,7 @@ def commonpath(l1, l2, common=[]):
return (common, l1, l2)
if l1[0] != l2[0]:
return (common, l1, l2)
- return commonpath(l1[1:], l2[1:], common+[l1[0]])
+ return commonpath(l1[1:], l2[1:], common + [l1[0]])
def calculate_relpath(p1, p2):
@@ -420,9 +420,9 @@ def calculate_relpath(p1, p2):
p = []
if len(l1) > 0:
tmp = '..' + os.sep
- p = [ tmp * len(l1) ]
+ p = [tmp * len(l1)]
p = p + l2
- return os.path.join( *p )
+ return os.path.join(*p)
##############################################################
@@ -432,7 +432,7 @@ def calculate_rpath(file_full_path, destination_lib_path):
if not os.path.isfile(file_full_path):
raise IOError('*** Not a valid file: %s' % file_full_path)
- bin_path = os.path.dirname(file_full_path)
+ bin_path = os.path.dirname(file_full_path)
path_to_lib = os.path.abspath(destination_lib_path)
full_rpath = ''
if path_to_lib == bin_path:
@@ -492,7 +492,7 @@ def handle_component_rpath(component_root_path, destination_lib_paths):
###############################
# function
###############################
-def clone_repository(repo_url, repo_branch_or_tag, destination_folder, full_clone = False, init_subrepos = False):
+def clone_repository(repo_url, repo_branch_or_tag, destination_folder, full_clone=False, init_subrepos=False):
print('--------------------------------------------------------------------')
print('Cloning repository: ' + repo_url)
print(' branch/tag: ' + repo_branch_or_tag)
@@ -608,7 +608,7 @@ def list_as_string(argument_list):
###############################
# function
###############################
-def remote_path_exists(remote_addr, path_to_check, ssh_command = 'ssh'):
+def remote_path_exists(remote_addr, path_to_check, ssh_command='ssh'):
text_to_print = 'REMOTE_PATH_EXISTS'
cmd_args = [ssh_command, remote_addr, 'bash', '-c', '\"if [ -e ' + path_to_check + ' ] ; then echo ' + text_to_print + ' ; fi\"']
output = do_execute_sub_process(cmd_args, os.getcwd(), get_output=True)
@@ -619,7 +619,7 @@ def remote_path_exists(remote_addr, path_to_check, ssh_command = 'ssh'):
###############################
# function
###############################
-def create_mac_disk_image(execution_path, file_directory, file_base_name, image_size = '4g'):
+def create_mac_disk_image(execution_path, file_directory, file_base_name, image_size='4g'):
# create disk image
cmd_args = ['hdiutil', 'create', '-srcfolder',
os.path.join(file_directory, file_base_name + '.app'),
@@ -634,15 +634,15 @@ def create_mac_disk_image(execution_path, file_directory, file_base_name, image_
# function
###############################
def rename_android_soname_files(qt5_base_path):
- print ('---------- Renaming .so name files in ' + qt5_base_path + ' ----------------')
+ print('---------- Renaming .so name files in ' + qt5_base_path + ' ----------------')
# QTBUG-33793
# temporary solution for Android on Windows compilations
# rename the .so files for Android on Windows
# find the lib directory under the install directory for essentials
try:
- print ('Trying to locate /lib from: ' + qt5_base_path)
+ print('Trying to locate /lib from: ' + qt5_base_path)
lib_dir = locate_path(qt5_base_path, ["lib"], filters=[os.path.isdir])
- print ('Match found: ' + lib_dir)
+ print('Match found: ' + lib_dir)
# regex for Qt version, eg. 5.2.0
# assuming that Qt version will always have one digit, eg, 5.2.0
p = re.compile(r'\d\.\d\.\d')
@@ -657,10 +657,10 @@ def rename_android_soname_files(qt5_base_path):
old_filepath = os.path.join(lib_dir, name)
new_filepath = os.path.join(lib_dir, filename + '.so')
shutil.move(old_filepath, new_filepath)
- print ('---> Old file name : ' + old_filepath)
- print ('---> New file name : ' + new_filepath)
+ print('---> Old file name : ' + old_filepath)
+ print('---> New file name : ' + new_filepath)
else:
- print ('*** Warning! The file : ' + filename + ' does not match the pattern')
+ print('*** Warning! The file : ' + filename + ' does not match the pattern')
except PackagingError:
print('*** No .so files found to be renamed as /lib was not found. Skipping.')
@@ -668,12 +668,12 @@ def rename_android_soname_files(qt5_base_path):
###############################
# function
###############################
-def create_extract_function(file_path, target_path, caller_arguments = None):
+def create_extract_function(file_path, target_path, caller_arguments=None):
Path(target_path).mkdir(parents=True, exist_ok=True)
working_dir = os.path.dirname(file_path)
if file_path.endswith('.tar.gz'):
return lambda: runCommand(['tar', 'zxf', file_path, '-C', target_path], working_dir, caller_arguments)
- return lambda: runCommand(['7z', 'x', '-y', file_path, '-o'+target_path], working_dir, caller_arguments)
+ return lambda: runCommand(['7z', 'x', '-y', file_path, '-o' + target_path], working_dir, caller_arguments)
###############################
diff --git a/packaging-tools/build_clang.py b/packaging-tools/build_clang.py
index 1e62a9242..e4addec3e 100644
--- a/packaging-tools/build_clang.py
+++ b/packaging-tools/build_clang.py
@@ -258,7 +258,7 @@ def mingw_training(base_path, qtcreator_path, environment, bitness):
# First time open the project, then close it. This will generate initial settings and .user files. Second time do the actual training.
for batchFile in ['qtc.openProject.batch', 'qtc.fileTextEditorCpp.batch']:
bld_utils.runCommand([os.path.join(training_dir, 'runBatchFiles.bat'), msvc_version(), 'x64' if bitness == 64 else 'x86', batchFile],
- base_path, callerArguments = None, extra_environment = None, onlyErrorCaseOutput=False, expectedExitCodes=[0,1])
+ base_path, callerArguments=None, extra_environment=None, onlyErrorCaseOutput=False, expectedExitCodes=[0, 1])
def is_msvc_toolchain(toolchain):
diff --git a/packaging-tools/build_wrapper.py b/packaging-tools/build_wrapper.py
index 3eb6369a3..70b07d010 100644
--- a/packaging-tools/build_wrapper.py
+++ b/packaging-tools/build_wrapper.py
@@ -54,10 +54,10 @@ from read_remote_config import get_pkg_value
from runner import do_execute_sub_process
# ----------------------------------------------------------------------
-SCRIPT_ROOT_DIR = os.path.dirname(os.path.realpath(__file__))
-WORK_DIR = os.getenv('PKG_NODE_ROOT') if os.getenv("PKG_NODE_ROOT") else os.path.abspath(os.path.join(__file__, '../../../'))
-LOCAL_MODE = os.getenv('LOCAL_MODE') # if set, installers will be copied to a local directory
-LOCAL_INSTALLER_DIR = os.getenv('LOCAL_INSTALLER_DIR', os.path.join(WORK_DIR, 'installers'))
+SCRIPT_ROOT_DIR = os.path.dirname(os.path.realpath(__file__))
+WORK_DIR = os.getenv('PKG_NODE_ROOT') if os.getenv("PKG_NODE_ROOT") else os.path.abspath(os.path.join(__file__, '../../../'))
+LOCAL_MODE = os.getenv('LOCAL_MODE') # if set, installers will be copied to a local directory
+LOCAL_INSTALLER_DIR = os.getenv('LOCAL_INSTALLER_DIR', os.path.join(WORK_DIR, 'installers'))
if LOCAL_MODE:
assert os.path.exists(LOCAL_INSTALLER_DIR), "Local installer destination directory does not exist: %s" % LOCAL_INSTALLER_DIR
@@ -90,12 +90,12 @@ def lock_keychain():
###########################################
# init snapshot build dir and upload files
###########################################
-def init_snapshot_dir_and_upload_files(optionDict, project_name, project_version_or_branch, build_number, file_upload_list, subdir = ''):
+def init_snapshot_dir_and_upload_files(optionDict, project_name, project_version_or_branch, build_number, file_upload_list, subdir=''):
if subdir != "" and subdir[0] != "/":
subdir = "/" + subdir
- remote_path_base = optionDict['PACKAGE_STORAGE_SERVER_BASE_DIR'] + '/' + project_name + '/' + project_version_or_branch
- remote_path_snapshot_dir = remote_path_base + '/' + build_number
- remote_path_latest_link = remote_path_base + '/' + 'latest'
+ remote_path_base = optionDict['PACKAGE_STORAGE_SERVER_BASE_DIR'] + '/' + project_name + '/' + project_version_or_branch
+ remote_path_snapshot_dir = remote_path_base + '/' + build_number
+ remote_path_latest_link = remote_path_base + '/' + 'latest'
# ensure remote directory exists
create_remote_dirs(optionDict, optionDict['PACKAGE_STORAGE_SERVER_ADDR'], remote_path_snapshot_dir + subdir)
# upload files
@@ -128,7 +128,7 @@ def handle_qt_licheck_build(optionDict):
else:
cmd_args = ['make', '-j6', '-f', 'Makefile_macos']
do_execute_sub_process(cmd_args, exe_dir, True)
- cmd_args = ['rsync', '-r', 'licheck_mac', upload_path +'licheck_mac']
+ cmd_args = ['rsync', '-r', 'licheck_mac', upload_path + 'licheck_mac']
do_execute_sub_process(cmd_args, exe_dir, True)
else:
# opensource, do nothing
@@ -438,8 +438,8 @@ def parse_qt_creator_plugin_conf(plugin_conf_file_path, optionDict):
return not platforms or platform_name in platforms
def fixup_plugin(plugin):
- plugin = plugin._replace(modules = [module % optionDict for module in plugin.modules])
- plugin = plugin._replace(additional_arguments = [arg % optionDict for arg in plugin.additional_arguments])
+ plugin = plugin._replace(modules=[module % optionDict for module in plugin.modules])
+ plugin = plugin._replace(additional_arguments=[arg % optionDict for arg in plugin.additional_arguments])
return plugin
return [fixup_plugin(make_QtcPlugin_from_json(plugin)) for plugin in plugins_json if valid_for_platform(plugin)]
@@ -1129,11 +1129,11 @@ def initPkgOptions(args):
if __name__ == '__main__':
# Define supported build steps
- bld_qtcreator = 'build_creator'
- bld_qtc_sdktool = 'build_sdktool'
- bld_licheck = 'licheck_bld'
- archive_repository = 'archive_repo'
- CMD_LIST = (bld_qtcreator, bld_qtc_sdktool, bld_licheck, archive_repository)
+ bld_qtcreator = 'build_creator'
+ bld_qtc_sdktool = 'build_sdktool'
+ bld_licheck = 'licheck_bld'
+ archive_repository = 'archive_repo'
+ CMD_LIST = (bld_qtcreator, bld_qtc_sdktool, bld_licheck, archive_repository)
parser = argparse.ArgumentParser(prog="Build Wrapper", description="Manage all packaging related build steps.")
parser.add_argument("-c", "--command", dest="command", required=True, choices=CMD_LIST, help=CMD_LIST)
diff --git a/packaging-tools/create_installer.py b/packaging-tools/create_installer.py
index 9457156f7..f81052b3a 100644
--- a/packaging-tools/create_installer.py
+++ b/packaging-tools/create_installer.py
@@ -60,15 +60,15 @@ log = logging.getLogger("create_installer")
log.setLevel("INFO")
# ----------------------------------------------------------------------
-TARGET_INSTALL_DIR_NAME_TAG = '%TARGET_INSTALL_DIR%'
-ARCHIVES_EXTRACT_DIR_NAME_TAG = '%ARCHIVES_EXTRACT_DIR%'
-PACKAGE_DEFAULT_TAG = '%PACKAGE_DEFAULT_TAG%'
-UPDATE_REPOSITORY_URL_TAG = '%UPDATE_REPOSITORY_URL%'
-PACKAGE_CREATION_DATE_TAG = '%PACKAGE_CREATION_DATE%'
-INSTALL_PRIORITY_TAG = '%INSTALL_PRIORITY%'
-SORTING_PRIORITY_TAG = '%SORTING_PRIORITY%'
-VERSION_NUMBER_AUTO_INCREASE_TAG = '%VERSION_NUMBER_AUTO_INCREASE%'
-COMPONENT_SHA1_TAG = '%COMPONENT_SHA1%'
+TARGET_INSTALL_DIR_NAME_TAG = '%TARGET_INSTALL_DIR%'
+ARCHIVES_EXTRACT_DIR_NAME_TAG = '%ARCHIVES_EXTRACT_DIR%'
+PACKAGE_DEFAULT_TAG = '%PACKAGE_DEFAULT_TAG%'
+UPDATE_REPOSITORY_URL_TAG = '%UPDATE_REPOSITORY_URL%'
+PACKAGE_CREATION_DATE_TAG = '%PACKAGE_CREATION_DATE%'
+INSTALL_PRIORITY_TAG = '%INSTALL_PRIORITY%'
+SORTING_PRIORITY_TAG = '%SORTING_PRIORITY%'
+VERSION_NUMBER_AUTO_INCREASE_TAG = '%VERSION_NUMBER_AUTO_INCREASE%'
+COMPONENT_SHA1_TAG = '%COMPONENT_SHA1%'
class CreateInstallerError(Exception):
@@ -431,7 +431,7 @@ def get_component_data(task, sdk_component, archive, install_dir, data_dir_dest,
content_list = [(compress_content_dir + os.sep + x) for x in content_list]
saveas = os.path.normpath(data_dir_dest + os.sep + archive.archive_name)
- cmd_args = [ task.archivegen_tool, saveas] + content_list
+ cmd_args = [task.archivegen_tool, saveas] + content_list
do_execute_sub_process(cmd_args, data_dir_dest)
@@ -533,7 +533,7 @@ def remove_all_debug_libraries(install_dir):
for debug_library_name in set(debug_files_list_intersection):
# remove one 'd' from library names ending letter 'd' also in release builds
# and exclude from removed libraries
- altered_library_name = debug_library_name[:-5] + debug_library_name[-5+1:]
+ altered_library_name = debug_library_name[:-5] + debug_library_name[-5 + 1:]
all_debug_files_list.remove(altered_library_name)
# remove all debug libraries with filenames ending *d.dll | *d.lib
for item in all_debug_files_list:
@@ -709,9 +709,9 @@ def create_installer_binary(task):
# extension is exe, dmg, or run
# tag is alpha1, beta2, rc1, etc (no tag for final).
# platform is win, linux, mac, etc.
- platform = task.config.get('PlatformIdentifier', 'identifier')
- installer_type = 'offline' if task.offline_installer else 'online'
- extension = '.run' if is_linux() else ''
+ platform = task.config.get('PlatformIdentifier', 'identifier')
+ installer_type = 'offline' if task.offline_installer else 'online'
+ extension = '.run' if is_linux() else ''
if not task.installer_name:
task.installer_name = task.installer_name + '-' + platform + '-' + task.license_type
diff --git a/packaging-tools/dump_debug_infos.py b/packaging-tools/dump_debug_infos.py
index 8aa9181e3..000f886d6 100644
--- a/packaging-tools/dump_debug_infos.py
+++ b/packaging-tools/dump_debug_infos.py
@@ -92,13 +92,13 @@ def dump_syms(dump_syms_path, architectures, search_pathes, output_path, verbose
start_slash = 1
sym_path_base = base_path[start_slash + len(search_path):].replace("/", "_")
sym_filename = "{}.sym".format(sym_path_base)
- sym_path = os.path.join(output_path, sym_filename)
+ sym_path = os.path.join(output_path, sym_filename)
if dump_sym(dump_syms_path, architectures[0], absolute_path, sym_path, verbose):
sym_filenames.append(sym_filename)
if len(architectures) == 2:
arch_argument_len = len("--arch ")
sym_filename = "{}_{}.sym".format(sym_path_base, architectures[1][arch_argument_len:])
- sym_path = os.path.join(output_path, sym_filename)
+ sym_path = os.path.join(output_path, sym_filename)
if dump_sym(dump_syms_path, architectures[1], absolute_path, sym_path, verbose):
sym_filenames.append(sym_filename)
return sym_filenames
diff --git a/packaging-tools/environmentfrombatchfile.py b/packaging-tools/environmentfrombatchfile.py
index 4113b720c..bbc92ec05 100644
--- a/packaging-tools/environmentfrombatchfile.py
+++ b/packaging-tools/environmentfrombatchfile.py
@@ -63,7 +63,7 @@ def sanity_check_env(env_cmd, env_dict):
.format(env_cmd))
-def get(env_cmd, initial = None, arguments = None):
+def get(env_cmd, initial=None, arguments=None):
"""
Take a command (either a single command or list of arguments)
and return the environment created after running that command.
@@ -87,7 +87,7 @@ def get(env_cmd, initial = None, arguments = None):
cmd = 'cmd.exe /s /c "\"{env_cmd}\" {arguments}&& echo "{tag}" && set"'.format(**vars())
# launch the process
- proc = subprocess.Popen(cmd, stdout = subprocess.PIPE, env = initial, universal_newlines = True)
+ proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, env=initial, universal_newlines=True)
# parse the output sent to stdout
lines = proc.stdout
# consume whatever output occurs until the tag is reached
diff --git a/packaging-tools/libclang_training/libclangtimings2csv.py b/packaging-tools/libclang_training/libclangtimings2csv.py
index 7190b8dde..c2655beff 100644
--- a/packaging-tools/libclang_training/libclangtimings2csv.py
+++ b/packaging-tools/libclang_training/libclangtimings2csv.py
@@ -56,7 +56,7 @@ def constructTimeNeededMatcher():
# Note: There is always at least the wall clock time at the utmost right,
# the others in front (up to 3) are optional.
startIndicator = r'\s*:'
- notRelevantParts = r'(\s*\d+\.\d+ \(\d+\.\d+\%\)){0,3}'
+ notRelevantParts = r'(\s*\d+\.\d+ \(\d+\.\d+\%\)){0,3}'
wallClockTime = r'\s*(\d+\.\d+) \(\d+\.\d+\%\)'
regex = startIndicator \
@@ -106,7 +106,7 @@ def recordsToString(records):
return string
-def convert(inputFile, columnLabel = None):
+def convert(inputFile, columnLabel=None):
if not columnLabel:
columnLabel = os.path.basename(inputFile)
fileContent = open(inputFile, 'r').read()
diff --git a/packaging-tools/libclang_training/mergeCsvFiles.py b/packaging-tools/libclang_training/mergeCsvFiles.py
index befef88ac..5cacd062e 100644
--- a/packaging-tools/libclang_training/mergeCsvFiles.py
+++ b/packaging-tools/libclang_training/mergeCsvFiles.py
@@ -54,7 +54,7 @@ def readCsv(filePath, delimiter):
records = []
for line in lines:
- identifier,value = line.split(delimiter)
+ identifier, value = line.split(delimiter)
identifier = identifier.strip()
value = value.strip()
records.append((identifier, value))
@@ -90,14 +90,14 @@ def checkConsistency(files):
# Ensure same size of records
for f in files:
if not len(f.values) == referenceEntrySize:
- print('error: number of entries mismatch between "%s" and "%s".' %(referenceEntry.filePath, f.filePath), file=sys.stderr)
+ print('error: number of entries mismatch between "%s" and "%s".' % (referenceEntry.filePath, f.filePath), file=sys.stderr)
sys.exit(1)
# Ensure same identifier on the left
for f in files:
identifiers = [v[0] for v in f.values]
if not identifiers == referenceEntryIdentifiers:
- print('error: mismatch between identifers in first column between "%s" and "%s".' %(referenceEntry.filePath, f.filePath), file=sys.stderr)
+ print('error: mismatch between identifers in first column between "%s" and "%s".' % (referenceEntry.filePath, f.filePath), file=sys.stderr)
sys.exit(1)
return referenceEntryIdentifiers
diff --git a/packaging-tools/libclang_training/runBatchFiles.py b/packaging-tools/libclang_training/runBatchFiles.py
index f68c7babe..9811835f6 100644
--- a/packaging-tools/libclang_training/runBatchFiles.py
+++ b/packaging-tools/libclang_training/runBatchFiles.py
@@ -243,14 +243,14 @@ def createBackupFile(filePath):
if os.path.exists(filePath):
backupPath = filePath[:-4] + ".backup_" + str(time.time()) + ".log"
if Config.Verbose:
- print('info: creating backup of already existing "%s"' %(filePath))
+ print('info: creating backup of already existing "%s"' % (filePath))
shutil.copyfile(filePath, backupPath)
def printDuration(s):
hours, remainder = divmod(s, 3600)
minutes, seconds = divmod(remainder, 60)
- print('...needed %d:%02d:%02d' %(hours, minutes, seconds))
+ print('...needed %d:%02d:%02d' % (hours, minutes, seconds))
def processBatchFileTimed(libClangId, batchFilePath):
@@ -287,7 +287,7 @@ def getLibClangId(libClangDll):
def switchLibClang(libClangDll):
- print('copying "%s" -> "%s"' %(libClangDll, Config.TargetLibClangDll))
+ print('copying "%s" -> "%s"' % (libClangDll, Config.TargetLibClangDll))
shutil.copyfile(libClangDll, Config.TargetLibClangDll)
diff --git a/packaging-tools/notarize.py b/packaging-tools/notarize.py
index db3e6980e..560edf22d 100755
--- a/packaging-tools/notarize.py
+++ b/packaging-tools/notarize.py
@@ -148,7 +148,7 @@ async def embedNotarization(args):
if retry_count:
log.warning(f"Trying again after {delay}s")
time.sleep(delay)
- delay = delay + delay/2 # 60, 90, 135, 202, 303
+ delay = delay + delay / 2 # 60, 90, 135, 202, 303
else:
log.critical("Execution of the remote script probably failed!")
raise NotarizationError("Failed to 'staple' the: {0}".format(args.dmg))
@@ -167,7 +167,7 @@ if __name__ == "__main__":
parser.add_argument("--user", dest="user", type=str, default=get_pkg_value("AC_USERNAME"), help="App Store Connect Username")
parser.add_argument("--passwd", dest="passwd", type=str, default=get_pkg_value("AC_PASSWORD"), help="App Store Connect Password")
parser.add_argument("--bundle-id", dest="bundle_id", default=strftime('%Y-%m-%d-%H-%M-%S', gmtime()), type=str, help="Give unique id for this bundle")
- parser.add_argument("--timeout", dest="timeout", type=int, default=60*60*3, help="Timeout value for the remote requests")
+ parser.add_argument("--timeout", dest="timeout", type=int, default=60 * 60 * 3, help="Timeout value for the remote requests")
args = parser.parse_args(sys.argv[1:])
if not which("xcrun"):
diff --git a/packaging-tools/pkg_constants.py b/packaging-tools/pkg_constants.py
index 1e77b088b..417a85701 100644
--- a/packaging-tools/pkg_constants.py
+++ b/packaging-tools/pkg_constants.py
@@ -29,10 +29,10 @@
#
#############################################################################
-RTA_DESCRIPTION_FILE_DIR_NAME = 'rta_description_files'
+RTA_DESCRIPTION_FILE_DIR_NAME = 'rta_description_files'
RTA_DESCRIPTION_FILE_NAME_BASE = 'rta_description_file'
-INSTALLER_OUTPUT_DIR_NAME = 'installer_output'
-IFW_BUILD_ARTIFACTS_DIR = 'ifw_build_artifacts'
-ICU_BUILD_OUTPUT_DIR = 'icu_build_artifacts'
-ICU_INSTALL_DIR_NAME = 'icu_install'
-PKG_TEMPLATE_BASE_DIR_NAME = 'pkg_templates'
+INSTALLER_OUTPUT_DIR_NAME = 'installer_output'
+IFW_BUILD_ARTIFACTS_DIR = 'ifw_build_artifacts'
+ICU_BUILD_OUTPUT_DIR = 'icu_build_artifacts'
+ICU_INSTALL_DIR_NAME = 'icu_install'
+PKG_TEMPLATE_BASE_DIR_NAME = 'pkg_templates'
diff --git a/packaging-tools/release_repo_updater.py b/packaging-tools/release_repo_updater.py
index 2cc4bacc0..68e4896de 100755
--- a/packaging-tools/release_repo_updater.py
+++ b/packaging-tools/release_repo_updater.py
@@ -160,7 +160,7 @@ def has_connection_error(output: str) -> bool:
return False
-def execute_remote_cmd(remoteServer: str, remoteServerHome: str, cmd: List[str], scriptFileName: str, timeout=60*60) -> None:
+def execute_remote_cmd(remoteServer: str, remoteServerHome: str, cmd: List[str], scriptFileName: str, timeout=60 * 60) -> None:
remoteTmpDir = os.path.join(remoteServerHome, "remote_scripts", timestamp)
create_remote_paths(remoteServer, [remoteTmpDir])
remoteScript = create_remote_script(remoteServer, cmd, remoteTmpDir, scriptFileName)
@@ -181,7 +181,7 @@ def create_remote_script(server: str, cmd: List[str], remoteScriptPath: str, scr
return os.path.join(remoteScriptPath, scriptFileName)
-def execute_remote_script(server: str, remoteScriptPath: str, timeout=60*60) -> None:
+def execute_remote_script(server: str, remoteScriptPath: str, timeout=60 * 60) -> None:
cmd = get_remote_login_cmd(server) + [remoteScriptPath]
retry_count = 5
delay = 60
@@ -192,7 +192,7 @@ def execute_remote_script(server: str, remoteScriptPath: str, timeout=60*60) ->
if retry_count:
log.warning(f"Trying again after {delay}s")
time.sleep(delay)
- delay = delay + delay/2 # 60, 90, 135, 202, 303
+ delay = delay + delay / 2 # 60, 90, 135, 202, 303
else:
log.critical(f"Execution of the remote script probably failed: {cmd}")
@@ -216,7 +216,7 @@ async def upload_ifw_to_remote(ifwTools: str, remoteServer: str, remoteServerHom
create_remote_paths(remoteServer, [remoteTmpDir])
# upload content
cmd = ['rsync', '-avzh', repogenDir + "/", remoteServer + ":" + remoteTmpDir]
- exec_cmd(cmd, timeout=60*60)
+ exec_cmd(cmd, timeout=60 * 60)
# return path on remote poiting to repogen
return os.path.join(remoteTmpDir, "bin", "repogen")
@@ -259,7 +259,7 @@ async def ensure_ext_repo_paths(server: str, ext: str, repo: str) -> None:
log.info("Ensure repository paths on ext: %s:%s", ext, repo)
login = get_remote_login_cmd(server) + get_remote_login_cmd(ext)
cmd = login + ["mkdir", "-p", repo]
- await async_exec_cmd(cmd, timeout=60*60*10)
+ await async_exec_cmd(cmd, timeout=60 * 60 * 10)
def is_safe_directory(paths: List[str]) -> None:
@@ -294,7 +294,7 @@ def upload_pending_repository_content(server: str, sourcePath: str, remoteDestin
create_remote_paths(server, [remoteDestinationPath])
# upload content
cmd = ['rsync', '-avzh', sourcePath + "/", server + ":" + remoteDestinationPath]
- exec_cmd(cmd, timeout=60*60) # give it 60 mins
+ exec_cmd(cmd, timeout=60 * 60) # give it 60 mins
def reset_new_remote_repository(server: str, remoteSourceRepoPath: str, remoteTargetRepoPath: str) -> None:
@@ -381,7 +381,7 @@ def spawn_remote_background_task(server: str, serverHome: str, remoteCmd: List[s
tip = ""
cmd = remoteCmd + ["2>&1", "|", "tee", remoteLogFile]
remoteScriptFileName = "sync-production-" + tip + "-" + timestamp + ".sh"
- execute_remote_cmd(server, serverHome, cmd, remoteScriptFileName, timeout=60*60*2) # 2h timeout for uploading data to CDN
+ execute_remote_cmd(server, serverHome, cmd, remoteScriptFileName, timeout=60 * 60 * 2) # 2h timeout for uploading data to CDN
async def update_repository(stagingServer: str, repoLayout: QtRepositoryLayout, task: ReleaseTask,
@@ -453,7 +453,7 @@ async def build_online_repositories(tasks: List[ReleaseTask], license: str, inst
cmd += ["--add-substitution=" + substitution]
try:
- await async_exec_cmd(cmd, timeout=60*60*3) # 3h for one repo build
+ await async_exec_cmd(cmd, timeout=60 * 60 * 3) # 3h for one repo build
except Exception as e:
log.error(str(e))
raise
@@ -583,13 +583,13 @@ def update_remote_latest_available_dir(newInstaller: str, remoteUploadPath: str,
try:
cmd_rm = get_remote_login_cmd(stagingServerRoot) + ['rm', previous_installer_path.split(':')[1]]
log.info(f"Running remove cmd: {cmd_rm}")
- exec_cmd(cmd_rm, timeout=60*60) # 1h
+ exec_cmd(cmd_rm, timeout=60 * 60) # 1h
except Exception:
log.info("Running cmd failed - this happens only if latest_available is empty")
pass
cmd_cp = get_remote_login_cmd(stagingServerRoot) + ['cp', remoteUploadPath.split(':')[1] + name + '*', latest_available_path.split(':')[1]]
log.info(f"Running copy cmd: {cmd_cp}")
- exec_cmd(cmd_cp, timeout=60*60) # 1h
+ exec_cmd(cmd_cp, timeout=60 * 60) # 1h
def upload_offline_to_remote(installerPath: str, remoteUploadPath: str, stagingServer: str, task: ReleaseTask,
@@ -604,7 +604,7 @@ def upload_offline_to_remote(installerPath: str, remoteUploadPath: str, stagingS
remote_destination = stagingServer + ":" + remoteUploadPath
cmd = ['scp', installer, remote_destination]
log.info(f"Uploading offline installer: {installer} to: {remote_destination}")
- exec_cmd(cmd, timeout=60*60) # 1h
+ exec_cmd(cmd, timeout=60 * 60) # 1h
update_remote_latest_available_dir(installer, remote_destination, task, stagingServer, installerBuildId)
if enable_oss_snapshots and license == "opensource":
upload_snapshots_to_remote(stagingServer, remoteUploadPath, task, installerBuildId, file_name_final)
@@ -632,7 +632,7 @@ def notarize_dmg(dmgPath, installerBasename) -> None:
bundleId = installerBasename + "-" + strftime('%Y-%m-%d-%H-%M', gmtime())
bundleId = bundleId.replace('_', '-').replace(' ', '') # replace illegal characters for bundleId
cmd = [sys.executable, script_path, '--dmg=' + dmgPath, '--bundle-id=' + bundleId]
- exec_cmd(cmd, timeout=60*60*3)
+ exec_cmd(cmd, timeout=60 * 60 * 3)
async def build_offline_tasks(stagingServer: str, stagingServerRoot: str, tasks: List[ReleaseTask], license: str,
@@ -670,7 +670,7 @@ async def _build_offline_tasks(stagingServer: str, stagingServerRoot: str, tasks
cmd += ["--force-version-number-increase"]
cmd.extend(["--add-substitution=" + s for s in task.get_installer_string_replacement_list()])
try:
- await async_exec_cmd(cmd, timeout=60*60*3) # 3h
+ await async_exec_cmd(cmd, timeout=60 * 60 * 3) # 3h
except Exception as e:
log.error(str(e))
raise
@@ -699,10 +699,10 @@ def upload_snapshots_to_remote(staging_server: str, remote_upload_path: str, tas
login = get_remote_login_cmd(staging_server) + get_remote_login_cmd(get_pkg_value("SNAPSHOT_SERVER"))
cmd_mkdir = login + ["mkdir", "-p", snapshot_upload_path]
log.info(f"Creating offline snapshot directory: {cmd_mkdir}")
- exec_cmd(cmd_mkdir, timeout=60*60)
+ exec_cmd(cmd_mkdir, timeout=60 * 60)
cmd_scp_installer = get_remote_login_cmd(staging_server) + ["scp", "-r", remote_installer_path] + [get_pkg_value("SNAPSHOT_SERVER") + ":" + snapshot_upload_path + "/"]
log.info(f"Uploading offline snapshot: {cmd_scp_installer}")
- exec_cmd(cmd_scp_installer, timeout=60*60*2)
+ exec_cmd(cmd_scp_installer, timeout=60 * 60 * 2)
def load_export_summary_data(config_file: Path) -> Dict[str, str]:
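
The timeout edits in this file are all the same E226 fix: the value is unchanged, only the arithmetic operator gains surrounding spaces. A minimal sketch with hypothetical constants:

    ONE_HOUR = 60 * 60        # E226-clean spelling of 3600 seconds
    TWO_HOURS = 60 * 60 * 2   # longer limit, e.g. for CDN uploads
    assert ONE_HOUR == 3600 and TWO_HOURS == 7200
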
diff --git a/packaging-tools/runner.py b/packaging-tools/runner.py
index ca0847247..71c70d2bb 100755
--- a/packaging-tools/runner.py
+++ b/packaging-tools/runner.py
@@ -49,7 +49,7 @@ if sys.platform == 'win32':
asyncio.set_event_loop(loop)
-def exec_cmd(cmd: List[str], timeout=60, env: Dict[str, str]=None) -> str:
+def exec_cmd(cmd: List[str], timeout=60, env: Dict[str, str] = None) -> str:
env = env if env else os.environ.copy()
log.info("Calling: %s", ' '.join(cmd))
output = subprocess.check_output(' '.join(cmd), shell=True, env=env, timeout=timeout).decode("utf-8").strip()
@@ -57,7 +57,7 @@ def exec_cmd(cmd: List[str], timeout=60, env: Dict[str, str]=None) -> str:
return output
-async def async_exec_cmd(cmd: List[str], timeout: int=60 * 60, env: Dict[str, str]=None) -> None:
+async def async_exec_cmd(cmd: List[str], timeout: int = 60 * 60, env: Dict[str, str] = None) -> None:
env = env if env else os.environ.copy()
p = await asyncio.create_subprocess_exec(*cmd, stdout=None, stderr=STDOUT, env=env)
try:
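
runner.py shows the annotated-default case: when a parameter has a type annotation, E252 wants spaces around the '=' of its default value, while plain keyword arguments at the call site keep no spaces per E251. A minimal sketch with a hypothetical function, not the real exec_cmd:

    from typing import Dict, Optional

    # E252-clean: annotated defaults take ' = '
    def run_fake(cmd: str, timeout: int = 60, env: Optional[Dict[str, str]] = None) -> str:
        return f"{cmd} (timeout={timeout}, env keys={len(env or {})})"

    # E251-clean: no spaces around '=' for the keyword argument
    print(run_fake("echo hi", timeout=5))
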
diff --git a/packaging-tools/sdkcomponent.py b/packaging-tools/sdkcomponent.py
index c5ecea980..e62641ffa 100644
--- a/packaging-tools/sdkcomponent.py
+++ b/packaging-tools/sdkcomponent.py
@@ -33,7 +33,7 @@ import os
import ntpath
import bldinstallercommon
-ONLINE_ARCHIVE_LIST_TAG = '<!--ONLINE_ARCHIVE_LIST-->'
+ONLINE_ARCHIVE_LIST_TAG = '<!--ONLINE_ARCHIVE_LIST-->'
class SdkComponent:
@@ -41,21 +41,21 @@ class SdkComponent:
class DownloadableArchive:
"""DownloadableArchive subclass contains all required info about data packages for one SDK component"""
def __init__(self, archive, package_name, parent_target_install_base, archive_server_name, target_config, archive_location_resolver, key_value_substitution_list):
- self.archive_uri = bldinstallercommon.config_section_map(target_config, archive)['archive_uri']
- self.archive_action = bldinstallercommon.safe_config_key_fetch(target_config, archive, 'archive_action')
- self.extract_archive = bldinstallercommon.safe_config_key_fetch(target_config, archive, 'extract_archive')
- self.package_strip_dirs = bldinstallercommon.safe_config_key_fetch(target_config, archive, 'package_strip_dirs')
+ self.archive_uri = bldinstallercommon.config_section_map(target_config, archive)['archive_uri']
+ self.archive_action = bldinstallercommon.safe_config_key_fetch(target_config, archive, 'archive_action')
+ self.extract_archive = bldinstallercommon.safe_config_key_fetch(target_config, archive, 'extract_archive')
+ self.package_strip_dirs = bldinstallercommon.safe_config_key_fetch(target_config, archive, 'package_strip_dirs')
self.package_finalize_items = bldinstallercommon.safe_config_key_fetch(target_config, archive, 'package_finalize_items')
# parent's 'target_install_base'
self.parent_target_install_base = parent_target_install_base
# in case the individual archive needs to be installed outside the root dir specified by the parent component
- self.target_install_base = bldinstallercommon.safe_config_key_fetch(target_config, archive, 'target_install_base')
+ self.target_install_base = bldinstallercommon.safe_config_key_fetch(target_config, archive, 'target_install_base')
# this is relative to 1) current archive's 'target_install_base' 2) parent components 'target_install_base'. (1) takes priority
- self.target_install_dir = bldinstallercommon.safe_config_key_fetch(target_config, archive, 'target_install_dir').lstrip(os.path.sep)
- self.rpath_target = bldinstallercommon.safe_config_key_fetch(target_config, archive, 'rpath_target')
- self.component_sha1_file = bldinstallercommon.safe_config_key_fetch(target_config, archive, 'component_sha1_file')
+ self.target_install_dir = bldinstallercommon.safe_config_key_fetch(target_config, archive, 'target_install_dir').lstrip(os.path.sep)
+ self.rpath_target = bldinstallercommon.safe_config_key_fetch(target_config, archive, 'rpath_target')
+ self.component_sha1_file = bldinstallercommon.safe_config_key_fetch(target_config, archive, 'component_sha1_file')
self.nomalize_archive_uri(package_name, archive_server_name, archive_location_resolver)
- self.archive_name = bldinstallercommon.safe_config_key_fetch(target_config, archive, 'archive_name')
+ self.archive_name = bldinstallercommon.safe_config_key_fetch(target_config, archive, 'archive_name')
if not self.archive_name:
self.archive_name = self.path_leaf(self.archive_uri)
# Parse unnecessary extensions away from filename (QTBUG-39219)
@@ -69,7 +69,7 @@ class SdkComponent:
for item in key_value_substitution_list:
self.target_install_base = self.target_install_base.replace(item[0], item[1])
self.target_install_dir = self.target_install_dir.replace(item[0], item[1])
- self.archive_name = self.archive_name.replace(item[0], item[1])
+ self.archive_name = self.archive_name.replace(item[0], item[1])
def nomalize_archive_uri(self, package_name, archive_server_name, archive_location_resolver):
self.archive_uri = archive_location_resolver.resolve_full_uri(package_name, archive_server_name, self.archive_uri)
@@ -93,41 +93,41 @@ class SdkComponent:
return self.parent_target_install_base + os.path.sep + self.target_install_dir
def __init__(self, section_name, target_config, packages_full_path_list, archive_location_resolver, key_value_substitution_list, is_offline_build):
- self.static_component = bldinstallercommon.safe_config_key_fetch(target_config, section_name, 'static_component')
- self.root_component = bldinstallercommon.safe_config_key_fetch(target_config, section_name, 'root_component')
- self.package_name = section_name
- self.package_subst_name = section_name
- self.packages_full_path_list = packages_full_path_list
- self.archives = bldinstallercommon.safe_config_key_fetch(target_config, section_name, 'archives')
- self.archives = self.archives.replace(' ', '').replace('\n', '')
- self.archives_extract_dir = bldinstallercommon.safe_config_key_fetch(target_config, section_name, 'archives_extract_dir')
- self.archive_server_name = bldinstallercommon.safe_config_key_fetch(target_config, section_name, 'archive_server_name')
- self.downloadable_archive_list = []
- self.target_install_base = bldinstallercommon.safe_config_key_fetch(target_config, section_name, 'target_install_base')
- self.version = bldinstallercommon.safe_config_key_fetch(target_config, section_name, 'version')
- self.version_tag = bldinstallercommon.safe_config_key_fetch(target_config, section_name, 'version_tag')
- self.package_default = bldinstallercommon.safe_config_key_fetch(target_config, section_name, 'package_default')
- self.install_priority = bldinstallercommon.safe_config_key_fetch(target_config, section_name, 'install_priority')
- self.sorting_priority = bldinstallercommon.safe_config_key_fetch(target_config, section_name, 'sorting_priority')
- self.component_sha1 = ""
- self.component_sha1_uri = bldinstallercommon.safe_config_key_fetch(target_config, section_name, 'component_sha1_uri')
+ self.static_component = bldinstallercommon.safe_config_key_fetch(target_config, section_name, 'static_component')
+ self.root_component = bldinstallercommon.safe_config_key_fetch(target_config, section_name, 'root_component')
+ self.package_name = section_name
+ self.package_subst_name = section_name
+ self.packages_full_path_list = packages_full_path_list
+ self.archives = bldinstallercommon.safe_config_key_fetch(target_config, section_name, 'archives')
+ self.archives = self.archives.replace(' ', '').replace('\n', '')
+ self.archives_extract_dir = bldinstallercommon.safe_config_key_fetch(target_config, section_name, 'archives_extract_dir')
+ self.archive_server_name = bldinstallercommon.safe_config_key_fetch(target_config, section_name, 'archive_server_name')
+ self.downloadable_archive_list = []
+ self.target_install_base = bldinstallercommon.safe_config_key_fetch(target_config, section_name, 'target_install_base')
+ self.version = bldinstallercommon.safe_config_key_fetch(target_config, section_name, 'version')
+ self.version_tag = bldinstallercommon.safe_config_key_fetch(target_config, section_name, 'version_tag')
+ self.package_default = bldinstallercommon.safe_config_key_fetch(target_config, section_name, 'package_default')
+ self.install_priority = bldinstallercommon.safe_config_key_fetch(target_config, section_name, 'install_priority')
+ self.sorting_priority = bldinstallercommon.safe_config_key_fetch(target_config, section_name, 'sorting_priority')
+ self.component_sha1 = ""
+ self.component_sha1_uri = bldinstallercommon.safe_config_key_fetch(target_config, section_name, 'component_sha1_uri')
if (self.component_sha1_uri):
self.component_sha1_uri = archive_location_resolver.resolve_full_uri(self.package_name, self.archive_server_name, self.component_sha1_uri)
- self.optional_for_offline = False
+ self.optional_for_offline = False
self.key_value_substitution_list = key_value_substitution_list
- self.archive_skip = False
- self.include_filter = bldinstallercommon.safe_config_key_fetch(target_config, section_name, 'include_filter')
+ self.archive_skip = False
+ self.include_filter = bldinstallercommon.safe_config_key_fetch(target_config, section_name, 'include_filter')
if is_offline_build:
tmp = bldinstallercommon.safe_config_key_fetch(target_config, section_name, 'optional_for_offline')
for item in self.key_value_substitution_list:
tmp = tmp.replace(item[0], item[1])
if tmp.lower() in ['yes', 'true', '1']:
self.optional_for_offline = True
- self.downloadable_arch_list_qs = []
- self.pkg_template_dir = ''
- self.sanity_check_error_msg = ''
- self.target_config = target_config
- self.archive_location_resolver = archive_location_resolver
+ self.downloadable_arch_list_qs = []
+ self.pkg_template_dir = ''
+ self.sanity_check_error_msg = ''
+ self.target_config = target_config
+ self.archive_location_resolver = archive_location_resolver
# substitute key-value pairs if any
for item in self.key_value_substitution_list:
self.target_install_base = self.target_install_base.replace(item[0], item[1])
@@ -156,9 +156,9 @@ class SdkComponent:
else:
# sanity check, duplicate template should not exist to avoid
# problems!
- print ('*** Found duplicate template for: ' + self.package_name)
- print ('*** Ignoring: ' + template_full_path)
- print ('*** Using: ' + self.pkg_template_dir)
+ print('*** Found duplicate template for: ' + self.package_name)
+ print('*** Ignoring: ' + template_full_path)
+ print('*** Using: ' + self.pkg_template_dir)
self.parse_archives(self.target_config, self.archive_location_resolver)
self.check_component_data(self.target_config)
@@ -220,7 +220,7 @@ class SdkComponent:
archives_list = self.archives.split(',')
for archive in archives_list:
if not archive:
- print ("Warning: There appears to be ',' issues in the config file archive list for component: ", self.package_name)
+ print("Warning: There appears to be ',' issues in the config file archive list for component: ", self.package_name)
continue
# check that archive template exists
if not target_config.has_section(archive):
@@ -244,24 +244,24 @@ class SdkComponent:
return temp_list
def print_component_data(self):
- print ('=============================================================')
- print (' [' + self.package_name + ']')
+ print('=============================================================')
+ print(' [' + self.package_name + ']')
if self.static_component:
- print (' Static component: ' + self.static_component)
+ print(' Static component: ' + self.static_component)
return
if self.root_component:
- print (' Root component: ' + self.root_component)
- print (' Include filter: ' + self.include_filter)
- print (' Target install base: ' + self.target_install_base)
- print (' Version: ' + self.version)
- print (' Version tag: ' + self.version_tag)
- print (' Package default: ' + self.package_default)
+ print(' Root component: ' + self.root_component)
+ print(' Include filter: ' + self.include_filter)
+ print(' Target install base: ' + self.target_install_base)
+ print(' Version: ' + self.version)
+ print(' Version tag: ' + self.version_tag)
+ print(' Package default: ' + self.package_default)
if self.downloadable_archive_list:
- print (' Archives:')
+ print(' Archives:')
for archive in self.downloadable_archive_list:
- print (' ---------------------------------------------------------------')
- print (' Downloadable archive name: ' + archive.archive_name)
- print (' Archive strip dirs: ' + archive.package_strip_dirs)
- print (' Archive target install dir: ' + archive.get_archive_installation_directory())
- print (' Archive RPath target: ' + archive.rpath_target)
- print (' Archive URI: ' + archive.archive_uri)
+ print(' ---------------------------------------------------------------')
+ print(' Downloadable archive name: ' + archive.archive_name)
+ print(' Archive strip dirs: ' + archive.package_strip_dirs)
+ print(' Archive target install dir: ' + archive.get_archive_installation_directory())
+ print(' Archive RPath target: ' + archive.rpath_target)
+ print(' Archive URI: ' + archive.archive_uri)
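
The sdkcomponent.py changes are mostly E221 alignment removal plus E211: 'print (...)' with a space before the parenthesis is legal Python 3 but flagged, since print() is an ordinary function call. A minimal sketch:

    template = 'pkg_templates'
    print ('*** Using: ' + template)   # E211: whitespace before '('
    print('*** Using: ' + template)    # fixed
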
diff --git a/packaging-tools/tests/test_build_wrapper.py b/packaging-tools/tests/test_build_wrapper.py
index 85bbdd3ec..c8f781c2d 100644
--- a/packaging-tools/tests/test_build_wrapper.py
+++ b/packaging-tools/tests/test_build_wrapper.py
@@ -63,9 +63,9 @@ class TestBuildWrapper(unittest.TestCase):
else:
init_snapshot_dir_and_upload_files(optionDict, projectName, versioOrBranch, buildNumber, filesToUpload)
- remote_path_base = os.path.join(temp_dir, projectName, versioOrBranch)
- remote_path_snapshot_dir = os.path.join(remote_path_base, buildNumber)
- remote_path_latest_link = os.path.join(remote_path_base, 'latest')
+ remote_path_base = os.path.join(temp_dir, projectName, versioOrBranch)
+ remote_path_snapshot_dir = os.path.join(remote_path_base, buildNumber)
+ remote_path_latest_link = os.path.join(remote_path_base, 'latest')
print(remote_path_latest_link)
self.assertTrue(os.path.isdir(remote_path_base))
self.assertTrue(os.path.isdir(remote_path_snapshot_dir))
diff --git a/packaging-tools/tests/test_runCommand.py b/packaging-tools/tests/test_runCommand.py
index 9e5afcc68..0340f8095 100644
--- a/packaging-tools/tests/test_runCommand.py
+++ b/packaging-tools/tests/test_runCommand.py
@@ -47,7 +47,7 @@ if sys.platform.startswith("win"):
def baseCommand():
- return " ".join([sys.executable,os.path.abspath(__file__)])
+ return " ".join([sys.executable, os.path.abspath(__file__)])
def crash():
@@ -116,7 +116,7 @@ class TestRunCommand(unittest.TestCase):
# onlyErrorCaseOutput=
True,
# expectedExitCodes=
- [0,5]), 5)
+ [0, 5]), 5)
def test_withThreadedWork(self):
currentMethodName = sys._getframe().f_code.co_name
@@ -174,7 +174,7 @@ if __name__ == '__main__':
if not sys.argv[1:]:
unittest.main()
else:
- parser = argparse.ArgumentParser(prog = os.path.basename(sys.argv[0]))
+ parser = argparse.ArgumentParser(prog=os.path.basename(sys.argv[0]))
parser.add_argument('--sleep', type=int)
parser.add_argument('--printLines', type=int)
parser.add_argument('--crash', action='store_true', default=False)
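
test_runCommand.py picks up E231 (a space after each ',') and E251 (no spaces around '=' for the plain keyword argument prog=...). A minimal sketch:

    import sys

    exit_codes = [0, 5]                  # E231: space required after ','
    parts = [sys.executable, __file__]
    print(" ".join(parts), exit_codes)
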
diff --git a/packaging-tools/threadedwork.py b/packaging-tools/threadedwork.py
index 6ea1b63be..f35140cf5 100644
--- a/packaging-tools/threadedwork.py
+++ b/packaging-tools/threadedwork.py
@@ -98,7 +98,7 @@ __builtin__.org_stdout = sys.stdout
__builtin__.org_sterr = sys.stderr
-def enableThreadedPrint(enable = True, threadCount = multiprocessing.cpu_count()):
+def enableThreadedPrint(enable=True, threadCount=multiprocessing.cpu_count()):
if enable:
global outputStates
global outputFormatString
@@ -134,7 +134,7 @@ class TaskFunction():
class Task():
- def __init__(self, description, function = None, *arguments):
+ def __init__(self, description, function=None, *arguments):
self.taskNumber = 0 # will be set from outside
self.description = description
self.listOfFunctions = []
@@ -188,7 +188,7 @@ class ThreadedWork():
if self.exitFunction:
task.exitFunction = self.exitFunction
task.exitFunctionArguments = self.exitFunctionArguments
- self.legend.append(("{:d}: " + os.linesep +"\t{}" + os.linesep).format(task.taskNumber, task.description))
+ self.legend.append(("{:d}: " + os.linesep + "\t{}" + os.linesep).format(task.taskNumber, task.description))
self.queue.put(task)
self.taskNumber = self.taskNumber + 1
@@ -231,7 +231,7 @@ class Consumer(threading.Thread):
self.workerThreadId = workerThreadId
threading.Thread.__init__(self)
- def run(self, stableRunIndicator = True):
+ def run(self, stableRunIndicator=True):
if stableRunIndicator:
threadData.progressIndicator = itertools.cycle(['..'])
else:
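
threadedwork.py covers the complementary E251 case: defaults without a type annotation lose the spaces around '=' (function = None becomes function=None). A minimal sketch with a hypothetical class, not the real Task:

    import multiprocessing

    class Demo:
        # E251-clean: no spaces around '=' for unannotated defaults
        def __init__(self, description, function=None, workers=multiprocessing.cpu_count()):
            self.description = description
            self.function = function
            self.workers = workers

    print(Demo("demo").workers)
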