Diffstat (limited to 'packaging-tools')
-rwxr-xr-x  packaging-tools/bld_ifw_tools.py                          |  64
-rw-r--r--  packaging-tools/bld_lib.py                                | 116
-rwxr-xr-x  packaging-tools/bld_module.py                             | 190
-rw-r--r--  packaging-tools/bld_openssl.py                            |   4
-rw-r--r--  packaging-tools/bld_python.py                             |  98
-rw-r--r--  packaging-tools/bld_utils.py                              | 122
-rw-r--r--  packaging-tools/bldinstallercommon.py                     |   6
-rw-r--r--  packaging-tools/build_clang.py                            |   4
-rw-r--r--  packaging-tools/build_wrapper.py                          | 188
-rwxr-xr-x  packaging-tools/content_cleaner.py                        |   8
-rw-r--r--  packaging-tools/create_conan_executable.py                |  22
-rw-r--r--  packaging-tools/create_installer.py                       |  62
-rw-r--r--  packaging-tools/installer_utils.py                        |  16
-rw-r--r--  packaging-tools/libclang_training/libclangtimings2csv.py  |  42
-rw-r--r--  packaging-tools/libclang_training/mergeCsvFiles.py        |  36
-rw-r--r--  packaging-tools/libclang_training/runBatchFiles.py        | 116
-rwxr-xr-x  packaging-tools/notarize.py                               |  22
-rwxr-xr-x  packaging-tools/patch_qt.py                               |  36
-rw-r--r--  packaging-tools/python_env.py                             |  36
-rwxr-xr-x  packaging-tools/release_repo_meta_update.py               |   8
-rwxr-xr-x  packaging-tools/release_repo_updater.py                   | 188
-rwxr-xr-x  packaging-tools/release_task_reader.py                    |  22
-rwxr-xr-x  packaging-tools/remote_uploader.py                        |  10
-rwxr-xr-x  packaging-tools/runner.py                                 |   4
-rwxr-xr-x  packaging-tools/sign_installer.py                         |  14
-rwxr-xr-x  packaging-tools/tests/test_bld_python.py                  |  20
-rw-r--r--  packaging-tools/tests/test_build_wrapper.py               |  24
-rw-r--r--  packaging-tools/tests/test_installer_utils.py             |  48
-rwxr-xr-x  packaging-tools/tests/test_packaging.py                   | 146
-rwxr-xr-x  packaging-tools/tests/test_release_repo_meta_update.py    |  36
-rwxr-xr-x  packaging-tools/tests/test_release_repo_updater.py        |  70
-rw-r--r--  packaging-tools/tests/test_runCommand.py                  | 120
-rw-r--r--  packaging-tools/tests/testhelpers.py                      |   4
-rw-r--r--  packaging-tools/threadedwork.py                           |  56
34 files changed, 979 insertions(+), 979 deletions(-)
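
The symmetric count (979 insertions, 979 deletions) reflects a pure rename refactoring: camelCase local variables are converted to snake_case across the tree, one line removed per line added. A minimal sketch of the mechanical rule the commit applies by hand (the helper and regex are illustrative, not part of the patch; the patch also hand-picks nicer names where the mechanical result would be awkward, e.g. lastStdOutLines becomes last_stdout_lines rather than last_std_out_lines):

    import re

    def to_snake_case(name: str) -> str:
        # Insert '_' before an upper-case letter that follows a lower-case
        # letter or digit, then lower-case everything.
        return re.sub(r'(?<=[a-z0-9])([A-Z])', r'_\1', name).lower()

    assert to_snake_case("moduleDir") == "module_dir"
    assert to_snake_case("qtModuleSourceDirectory") == "qt_module_source_directory"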
diff --git a/packaging-tools/bld_ifw_tools.py b/packaging-tools/bld_ifw_tools.py
index 0efad273e..01fd96273 100755
--- a/packaging-tools/bld_ifw_tools.py
+++ b/packaging-tools/bld_ifw_tools.py
@@ -406,8 +406,8 @@ def build_qt(options, qt_build_dir, qt_configure_options, qt_modules):
print('Installing Qt')
cmd_args = options.make_install_cmd
for module in qt_modules:
- moduleDir = os.path.join(options.qt_source_dir, module)
- do_execute_sub_process(cmd_args.split(' '), moduleDir)
+ module_dir = os.path.join(options.qt_source_dir, module)
+ do_execute_sub_process(cmd_args.split(' '), module_dir)
###############################
@@ -556,10 +556,10 @@ def create_installer_package(options):
print(f"Installer package is at: {target_dir}")
artifacts = os.listdir(options.installer_framework_target_dir)
for artifact in artifacts:
- destFileName = os.path.join(options.build_artifacts_dir, artifact)
+ dest_file_name = os.path.join(options.build_artifacts_dir, artifact)
if is_linux():
- destFileName += '.run'
- shutil.move(os.path.join(options.installer_framework_target_dir, artifact), destFileName)
+ dest_file_name += '.run'
+ shutil.move(os.path.join(options.installer_framework_target_dir, artifact), dest_file_name)
os.chdir(current_dir)
@@ -711,17 +711,17 @@ def archive_binarycreator(options):
def sign_windows_installerbase(file_name):
print('--------------------------------------------------------------------')
print('Sign Windows Installerbase')
- signToolsTempDir = r'C:\Utils\sign_tools_temp'
+ sign_tools_temp_dir = r'C:\Utils\sign_tools_temp'
for item in ["signtool32.exe", "keys.pfx", "capicom.dll"]:
- dst = os.path.join(signToolsTempDir, item)
+ dst = os.path.join(sign_tools_temp_dir, item)
curl_cmd_args = ['curl', "--fail", "-L", "--retry", "5", "--retry-delay", "30", "-o", dst,
'--create-dirs', get_pkg_value("SIGN_TOOLS_ADDR") + item]
subprocess.check_call(curl_cmd_args)
signing_server = get_pkg_value("SIGNING_SERVER")
signing_pass = get_pkg_value("SIGNING_PASSWORD")
- cmd_args = [os.path.join(signToolsTempDir, 'signtool32.exe'), 'sign', '/v', '/du', signing_server, '/p', signing_pass]
- cmd_args += ['/tr', "http://timestamp.digicert.com", '/f', os.path.join(signToolsTempDir, 'keys.pfx')]
+ cmd_args = [os.path.join(sign_tools_temp_dir, 'signtool32.exe'), 'sign', '/v', '/du', signing_server, '/p', signing_pass]
+ cmd_args += ['/tr', "http://timestamp.digicert.com", '/f', os.path.join(sign_tools_temp_dir, 'keys.pfx')]
cmd_args += ['/td', "sha256", '/fd', "sha256", file_name]
log_entry = cmd_args[:]
@@ -729,7 +729,7 @@ def sign_windows_installerbase(file_name):
log_entry[6] = "****"
print("Calling: %s", " ".join(log_entry))
subprocess.check_call(cmd_args, stderr=subprocess.STDOUT) # check_call() will consume output
- shutil.rmtree(signToolsTempDir)
+ shutil.rmtree(sign_tools_temp_dir)
print(f"Successfully signed: {file_name}")
@@ -805,41 +805,41 @@ def setup_argument_parser():
def main() -> None:
"""Main"""
# init things
- PARSER = setup_argument_parser()
+ parser = setup_argument_parser()
# parse args
- CARGS = PARSER.parse_args()
- qt_src = IfwOptions.default_qt_src_pkg if not CARGS.qt_archive_uri else CARGS.qt_archive_uri
- qt_configure_options = get_static_qt_configure_options(CARGS.openssl_dir) if not CARGS.qt_configure_options else CARGS.qt_configure_options
- ifw_branch = IfwOptions.default_qt_installer_framework_branch_qt if not CARGS.ifw_branch else CARGS.ifw_branch
- signserver = '' if not CARGS.sign_server else CARGS.sign_server
- signpwd = '' if not CARGS.sign_server_pwd else CARGS.sign_server_pwd
-
- qt_conf_args = CARGS.qt_configure_options
- ifw_qmake_args = CARGS.ifw_qmake_args
- if CARGS.debug:
+ cargs = parser.parse_args()
+ qt_src = IfwOptions.default_qt_src_pkg if not cargs.qt_archive_uri else cargs.qt_archive_uri
+ qt_configure_options = get_static_qt_configure_options(cargs.openssl_dir) if not cargs.qt_configure_options else cargs.qt_configure_options
+ ifw_branch = IfwOptions.default_qt_installer_framework_branch_qt if not cargs.ifw_branch else cargs.ifw_branch
+ signserver = '' if not cargs.sign_server else cargs.sign_server
+ signpwd = '' if not cargs.sign_server_pwd else cargs.sign_server_pwd
+
+ qt_conf_args = cargs.qt_configure_options
+ ifw_qmake_args = cargs.ifw_qmake_args
+ if cargs.debug:
qt_conf_args = qt_conf_args.replace('-release', '-debug')
ifw_qmake_args = ifw_qmake_args.replace('-config release', '-config debug')
# create options object
- OPTIONS = IfwOptions(
+ options = IfwOptions(
qt_src,
qt_configure_options,
- CARGS.ifw_uri,
+ cargs.ifw_uri,
ifw_branch,
ifw_qmake_args,
- CARGS.openssl_dir,
- CARGS.product_key_checker_pri,
- CARGS.qt_binaries_static,
- CARGS.qt_binaries_dynamic,
+ cargs.openssl_dir,
+ cargs.product_key_checker_pri,
+ cargs.qt_binaries_static,
+ cargs.qt_binaries_dynamic,
signserver,
signpwd,
- CARGS.incremental,
- CARGS.archive_qt
+ cargs.incremental,
+ cargs.archive_qt
)
# build ifw tools
- if CARGS.archive_qt:
- build_and_archive_qt(OPTIONS)
+ if cargs.archive_qt:
+ build_and_archive_qt(options)
else:
- build_ifw(OPTIONS, CARGS.create_installer, CARGS.build_ifw_examples)
+ build_ifw(options, cargs.create_installer, cargs.build_ifw_examples)
if __name__ == "__main__":
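
The signing hunk above masks the password before echoing the command (log_entry[6] = "****"), which silently breaks if the argument order ever changes. A value-based masking helper avoids the positional index; this is a sketch, not what the patch does:

    def masked(cmd_args, secrets):
        # Replace any argument whose value is a known secret before logging.
        return ["****" if arg in secrets else arg for arg in cmd_args]

    cmd = ["signtool32.exe", "sign", "/v", "/du", "server", "/p", "hunter2"]
    print("Calling:", " ".join(masked(cmd, {"hunter2"})))
    # Calling: signtool32.exe sign /v /du server /p ****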
diff --git a/packaging-tools/bld_lib.py b/packaging-tools/bld_lib.py
index 3ff027aba..a5881c76e 100644
--- a/packaging-tools/bld_lib.py
+++ b/packaging-tools/bld_lib.py
@@ -88,78 +88,78 @@ def parse_qt_version(download_url_path: str) -> str:
def download_qt_pkg(args: argparse.Namespace, current_dir: str) -> Tuple[str, str]:
- urlRes = urlparse(args.qtpkg)
- assert urlRes.scheme and urlRes.netloc and urlRes.path, f"Invalid URL: {args.qtpkg}"
- qtVersion = parse_qt_version(urlRes.path)
+ url_res = urlparse(args.qtpkg)
+ assert url_res.scheme and url_res.netloc and url_res.path, f"Invalid URL: {args.qtpkg}"
+ qt_version = parse_qt_version(url_res.path)
- saveAs = os.path.join(current_dir, os.path.basename(urlRes.path))
- if os.path.exists(saveAs):
- log.info("Using existing: %s", saveAs)
+ save_as = os.path.join(current_dir, os.path.basename(url_res.path))
+ if os.path.exists(save_as):
+ log.info("Using existing: %s", save_as)
else:
- log.info("Downloading: %s into: %s", args.qtpkg, saveAs)
- urlretrieve(args.qtpkg, saveAs)
+ log.info("Downloading: %s into: %s", args.qtpkg, save_as)
+ urlretrieve(args.qtpkg, save_as)
- return saveAs, qtVersion
+ return save_as, qt_version
def extract_archive(save_as: str, current_dir: str) -> str:
- qtDestDir = os.path.join(current_dir, "qt_pkg")
- if not os.path.exists(qtDestDir):
- os.makedirs(qtDestDir)
- log.info("Extracting to: %s", qtDestDir)
+ qt_dest_dir = os.path.join(current_dir, "qt_pkg")
+ if not os.path.exists(qt_dest_dir):
+ os.makedirs(qt_dest_dir)
+ log.info("Extracting to: %s", qt_dest_dir)
if save_as.endswith("tar.gz"):
with tarfile.open(save_as, "r:gz") as tar:
- tar.extractall(qtDestDir)
+ tar.extractall(qt_dest_dir)
elif save_as.endswith(".7z"):
try:
- os.chdir(qtDestDir)
+ os.chdir(qt_dest_dir)
check_call(['7z', 'x', save_as])
except Exception as e:
log.error("Extracting 7z file failed: %s", str(e))
raise
finally:
os.chdir(current_dir)
- return qtDestDir
+ return qt_dest_dir
def build(args: argparse.Namespace, qt_dest_dir: str, current_dir: str) -> str:
if is_windows():
- qmakeToolName = "qmake.exe"
- makeToolName = "nmake"
+ qmake_tool_name = "qmake.exe"
+ make_tool_name = "nmake"
else:
- qmakeToolName = "qmake"
- makeToolName = "make"
+ qmake_tool_name = "qmake"
+ make_tool_name = "make"
- qmakeTool = find_file(qt_dest_dir, qmakeToolName)
- assert qmakeTool, f"Could not find: {qmakeToolName} from: {qt_dest_dir}"
+ qmake_tool = find_file(qt_dest_dir, qmake_tool_name)
+ assert qmake_tool, f"Could not find: {qmake_tool_name} from: {qt_dest_dir}"
# patch
- with open(os.path.join(os.path.dirname(qmakeTool), "qt.conf"), "w+", encoding="utf-8") as f:
+ with open(os.path.join(os.path.dirname(qmake_tool), "qt.conf"), "w+", encoding="utf-8") as f:
f.write("[Paths]\n")
f.write("Prefix=..\n")
pro_files_list = glob(os.path.join(args.src_path, "*.pro"))
assert pro_files_list, f"Could not find .pro file(s) from: {args.src_path}"
- proFile = pro_files_list[0]
- log.info("Using .pro file: %s", proFile)
+ pro_file = pro_files_list[0]
+ log.info("Using .pro file: %s", pro_file)
- installRootDir = os.path.join(current_dir, "lib_install_root")
- shutil.rmtree(installRootDir, ignore_errors=True)
- os.makedirs(installRootDir)
+ install_root_dir = os.path.join(current_dir, "lib_install_root")
+ shutil.rmtree(install_root_dir, ignore_errors=True)
+ os.makedirs(install_root_dir)
- bldDir = os.path.join(current_dir, "lib_bld")
- shutil.rmtree(bldDir, ignore_errors=True) # ignore if path did not exist
- os.makedirs(bldDir)
+ bld_dir = os.path.join(current_dir, "lib_bld")
+ shutil.rmtree(bld_dir, ignore_errors=True) # ignore if path did not exist
+ os.makedirs(bld_dir)
try:
- os.chdir(bldDir)
- check_call([qmakeTool, proFile])
- check_call([makeToolName])
+ os.chdir(bld_dir)
+ check_call([qmake_tool, pro_file])
+ check_call([make_tool_name])
# on Windows chop out the drive letter (e.g. 'C:')
- installRoot = installRootDir[2:] if is_windows() else installRootDir
- check_call([makeToolName, 'install', 'INSTALL_ROOT=' + installRoot])
- except CalledProcessError as buildError:
- log.error("Failed to build the project: %s", str(buildError))
+ install_root = install_root_dir[2:] if is_windows() else install_root_dir
+ check_call([make_tool_name, 'install', 'INSTALL_ROOT=' + install_root])
+ except CalledProcessError as build_error:
+ log.error("Failed to build the project: %s", str(build_error))
raise
except Exception as e:
log.error("Something bad happened: %s", str(e))
@@ -167,47 +167,47 @@ def build(args: argparse.Namespace, qt_dest_dir: str, current_dir: str) -> str:
finally:
os.chdir(current_dir)
- return installRootDir
+ return install_root_dir
def archive(args: argparse.Namespace, install_root_dir: str, current_dir: str) -> str:
# strip out drive letter on Windows e.g. 'C:'
- srcPath = args.src_path[2:] if is_windows() else args.src_path
- archivePath = os.path.join(install_root_dir, srcPath.lstrip(os.path.sep))
- log.info("Archiving from: %s", archivePath)
+ src_path = args.src_path[2:] if is_windows() else args.src_path
+ archive_path = os.path.join(install_root_dir, src_path.lstrip(os.path.sep))
+ log.info("Archiving from: %s", archive_path)
libs = collect_libs(install_root_dir)
for lib in libs:
- shutil.copy2(lib, archivePath)
+ shutil.copy2(lib, archive_path)
arch = "x86_64" if sys.maxsize > 2**32 else "x86"
- artifactsFileName = "artifacts-" + plat.system().lower() + "-" + arch + ".7z"
- artifactsFilePath = os.path.join(current_dir, artifactsFileName)
+ artifacts_file_name = "artifacts-" + plat.system().lower() + "-" + arch + ".7z"
+ artifacts_file_path = os.path.join(current_dir, artifacts_file_name)
try:
- os.chdir(archivePath)
- check_call(['7z', 'a', '-m0=lzma2', '-mmt=16', artifactsFilePath, '*'])
+ os.chdir(archive_path)
+ check_call(['7z', 'a', '-m0=lzma2', '-mmt=16', artifacts_file_path, '*'])
except Exception as e:
print(str(e))
raise
finally:
os.chdir(current_dir)
- log.info("Created artifact: %s", artifactsFilePath)
- return artifactsFilePath
+ log.info("Created artifact: %s", artifacts_file_path)
+ return artifacts_file_path
def handle_build(args: argparse.Namespace) -> None:
- currentDir = os.getcwd()
+ current_dir = os.getcwd()
- saveAs, qtVersion = download_qt_pkg(args, currentDir)
- qtDestDir = extract_archive(saveAs, currentDir)
- installRootDir = build(args, qtDestDir, currentDir)
- artifactsFilePath = archive(args, installRootDir, currentDir)
+ save_as, qt_version = download_qt_pkg(args, current_dir)
+ qt_dest_dir = extract_archive(save_as, current_dir)
+ install_root_dir = build(args, qt_dest_dir, current_dir)
+ artifacts_file_path = archive(args, install_root_dir, current_dir)
- remoteUploader = RemoteUploader(False, args.remote_server, args.username, args.remote_base_path)
- remoteUploader.init_snapshot_upload_path(args.project_name, qtVersion, args.build_id)
- remoteUploader.copy_to_remote(artifactsFilePath)
- remoteUploader.update_latest_symlink()
+ remote_uploader = RemoteUploader(False, args.remote_server, args.username, args.remote_base_path)
+ remote_uploader.init_snapshot_upload_path(args.project_name, qt_version, args.build_id)
+ remote_uploader.copy_to_remote(artifacts_file_path)
+ remote_uploader.update_latest_symlink()
def main() -> None:
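
bld_lib.py (like bld_module.py below) slices off the drive letter with [2:] before passing INSTALL_ROOT to make, because on Windows the install root gets concatenated with an already drive-qualified prefix. os.path.splitdrive expresses the same intent explicitly; a sketch under that assumption, not what the patch uses:

    import os

    path = r"C:\Users\qt\lib_install_root"
    drive, tail = os.path.splitdrive(path)   # on Windows: ("C:", r"\Users\qt\lib_install_root")
    install_root = tail if os.name == "nt" else path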
diff --git a/packaging-tools/bld_module.py b/packaging-tools/bld_module.py
index 624dff1a5..6c5a53b7f 100755
--- a/packaging-tools/bld_module.py
+++ b/packaging-tools/bld_module.py
@@ -179,176 +179,176 @@ def main() -> None:
parser.add_argument('--environment_batch', help="batch file that sets up environment")
parser.add_argument('--environment_batch_argument', help="if the batch file needs an argument just add it with this argument")
- callerArguments = parser.parse_args()
+ caller_arguments = parser.parse_args()
- # cleanup some values inside the callerArguments object
- strip_vars(callerArguments, "\"")
- if callerArguments.qt5path != os.path.abspath(callerArguments.qt5path):
- print(f"changing the value of --qt5path from {callerArguments.qt5path} to {os.path.abspath(callerArguments.qt5path)}")
- callerArguments.qt5path = os.path.abspath(callerArguments.qt5path)
+ # cleanup some values inside the caller_arguments object
+ strip_vars(caller_arguments, "\"")
+ if caller_arguments.qt5path != os.path.abspath(caller_arguments.qt5path):
+ print(f"changing the value of --qt5path from {caller_arguments.qt5path} to {os.path.abspath(caller_arguments.qt5path)}")
+ caller_arguments.qt5path = os.path.abspath(caller_arguments.qt5path)
- if not callerArguments.module_name:
- callerArguments.module_name = os.environ['MODULE_NAME']
+ if not caller_arguments.module_name:
+ caller_arguments.module_name = os.environ['MODULE_NAME']
- tempPath = os.path.abspath(os.path.join(os.path.dirname(__file__), 'temp'))
+ temp_path = os.path.abspath(os.path.join(os.path.dirname(__file__), 'temp'))
# clone module repo
- if callerArguments.module_url != '':
+ if caller_arguments.module_url != '':
Path(MODULE_SRC_DIR).mkdir(parents=True, exist_ok=True)
- clone_repository(callerArguments.module_url, callerArguments.module_branch, os.path.join(os.path.dirname(__file__), MODULE_SRC_DIR_NAME))
- qtModuleSourceDirectory = MODULE_SRC_DIR
- elif callerArguments.module7z != '':
+ clone_repository(caller_arguments.module_url, caller_arguments.module_branch, os.path.join(os.path.dirname(__file__), MODULE_SRC_DIR_NAME))
+ qt_module_source_directory = MODULE_SRC_DIR
+ elif caller_arguments.module7z != '':
Path(MODULE_SRC_DIR).mkdir(parents=True, exist_ok=True)
- myGetQtModule = ThreadedWork("get and extract module src")
- myGetQtModule.add_task_object(create_download_and_extract_tasks(callerArguments.module7z, MODULE_SRC_DIR, tempPath))
- myGetQtModule.run()
- qtModuleSourceDirectory = MODULE_SRC_DIR
+ my_get_qt_module = ThreadedWork("get and extract module src")
+ my_get_qt_module.add_task_object(create_download_and_extract_tasks(caller_arguments.module7z, MODULE_SRC_DIR, temp_path))
+ my_get_qt_module.run()
+ qt_module_source_directory = MODULE_SRC_DIR
else:
- print(f"Using local copy of {callerArguments.module_name}")
- qtModuleSourceDirectory = callerArguments.module_dir
+ print(f"Using local copy of {caller_arguments.module_name}")
+ qt_module_source_directory = caller_arguments.module_dir
# install directory
- qtModuleInstallDirectory = qtModuleSourceDirectory + '_install'
+ qt_module_install_directory = qt_module_source_directory + '_install'
if is_windows():
# rip out drive letter from path on Windows
- qtModuleInstallDirectory = qtModuleInstallDirectory[2:]
+ qt_module_install_directory = qt_module_install_directory[2:]
# check whether this is a QNX build
- if any('qnx' in qt5_url.lower() for qt5_url in callerArguments.qt5_module_urls):
+ if any('qnx' in qt5_url.lower() for qt5_url in caller_arguments.qt5_module_urls):
# apply the workaround from QTBUG-38555
- qtModuleInstallDirectory = qtModuleInstallDirectory.replace('\\', '/').replace('/', '\\', 1)
+ qt_module_install_directory = qt_module_install_directory.replace('\\', '/').replace('/', '\\', 1)
# clean step
- if callerArguments.clean:
+ if caller_arguments.clean:
print("##### clean old builds #####")
- remove_tree(callerArguments.qt5path)
- remove_tree(qtModuleInstallDirectory)
- remove_tree(tempPath)
+ remove_tree(caller_arguments.qt5path)
+ remove_tree(qt_module_install_directory)
+ remove_tree(temp_path)
- if not os.path.lexists(callerArguments.qt5path) and not callerArguments.qt5_module_urls:
+ if not os.path.lexists(caller_arguments.qt5path) and not caller_arguments.qt5_module_urls:
parser.print_help()
- print(f"error: Please add the missing qt5_module_url arguments if the {callerArguments.qt5path} does not exist {os.linesep}{os.linesep}")
+ print(f"error: Please add the missing qt5_module_url arguments if the {caller_arguments.qt5path} does not exist {os.linesep}{os.linesep}")
raise RuntimeError()
- qmakeBinary = os.path.abspath(os.path.join(callerArguments.qt5path, 'bin', 'qmake'))
+ qmake_binary = os.path.abspath(os.path.join(caller_arguments.qt5path, 'bin', 'qmake'))
- if not os.path.lexists(callerArguments.qt5path):
+ if not os.path.lexists(caller_arguments.qt5path):
# get Qt
- myGetQtBinaryWork = ThreadedWork("get and extract Qt 5 binary")
- myGetQtBinaryWork.add_task_object(
+ my_get_qt_binary_work = ThreadedWork("get and extract Qt 5 binary")
+ my_get_qt_binary_work.add_task_object(
create_qt_download_task(
- callerArguments.qt5_module_urls,
- callerArguments.qt5path, tempPath, callerArguments
+ caller_arguments.qt5_module_urls,
+ caller_arguments.qt5path, temp_path, caller_arguments
)
)
- myGetQtBinaryWork.run()
+ my_get_qt_binary_work.run()
# Save QT_INSTALL_PREFIX
- qt_install_prefix = get_qt_install_prefix(callerArguments.qt5path)
+ qt_install_prefix = get_qt_install_prefix(caller_arguments.qt5path)
# "install" Qt
- patch_qt(callerArguments.qt5path)
+ patch_qt(caller_arguments.qt5path)
# lets start building
# prepare the environment for example setting LD_LIBRARY_PATH
# or adding qmake path into the Path environment variable (Qt deployment step)
- pathKeyList = []
- qtBinPath = os.path.abspath(os.path.join(callerArguments.qt5path, 'bin'))
- pathKeyList.append(qtBinPath)
- pythonExecutablePath = os.path.dirname(sys.executable)
- pathKeyList.append(pythonExecutablePath)
+ path_key_list = []
+ qt_bin_path = os.path.abspath(os.path.join(caller_arguments.qt5path, 'bin'))
+ path_key_list.append(qt_bin_path)
+ python_executable_path = os.path.dirname(sys.executable)
+ path_key_list.append(python_executable_path)
- environment = {'PATH': os.pathsep.join(pathKeyList)}
+ environment = {'PATH': os.pathsep.join(path_key_list)}
if is_linux():
environment["LD_LIBRARY_PATH"] = os.pathsep.join(
- [os.path.join(callerArguments.qt5path, 'lib')]
+ [os.path.join(caller_arguments.qt5path, 'lib')]
+ os.environ.get("LD_LIBRARY_PATH", "").split(os.pathsep)
)
environment["QMAKESPEC"] = "linux-g++"
if is_macos():
- environment["DYLD_FRAMEWORK_PATH"] = os.path.join(callerArguments.qt5path, 'lib')
+ environment["DYLD_FRAMEWORK_PATH"] = os.path.join(caller_arguments.qt5path, 'lib')
if not is_windows():
environment["MAKEFLAGS"] = "-j" + str(cpu_count() + 1)
- if callerArguments.debug:
- buildType = 'debug'
+ if caller_arguments.debug:
+ build_type = 'debug'
else:
- buildType = 'release'
-
- qtModuleBuildDirectory = qtModuleSourceDirectory + '_build'
-
- if callerArguments.use_cmake:
- generateCommand = ['cmake',
- '-DCMAKE_VERBOSE_MAKEFILE=YES',
- # TODO: should get QT_INSTALL_LIBS instead
- '-DCMAKE_INSTALL_RPATH=' + ';'.join([qt_install_prefix, os.path.join(qt_install_prefix, 'lib')]),
- '-DCMAKE_INSTALL_PREFIX=' + qtModuleInstallDirectory,
- '-DCMAKE_BUILD_TYPE=' + buildType.capitalize()]
- cmake_prefix_path = [callerArguments.qt5path]
- for extra_arg in callerArguments.additional_config_args:
+ build_type = 'release'
+
+ qt_module_build_directory = qt_module_source_directory + '_build'
+
+ if caller_arguments.use_cmake:
+ generate_command = ['cmake',
+ '-DCMAKE_VERBOSE_MAKEFILE=YES',
+ # TODO: should get QT_INSTALL_LIBS instead
+ '-DCMAKE_INSTALL_RPATH=' + ';'.join([qt_install_prefix, os.path.join(qt_install_prefix, 'lib')]),
+ '-DCMAKE_INSTALL_PREFIX=' + qt_module_install_directory,
+ '-DCMAKE_BUILD_TYPE=' + build_type.capitalize()]
+ cmake_prefix_path = [caller_arguments.qt5path]
+ for extra_arg in caller_arguments.additional_config_args:
if extra_arg.startswith('-DCMAKE_PREFIX_PATH'):
cmake_prefix_path.extend(extra_arg.split('=', 1)[1].split(';'))
else:
- generateCommand.append(extra_arg)
- generateCommand.append('-DCMAKE_PREFIX_PATH=' + ';'.join(cmake_prefix_path))
- # for now assume that qtModuleSourceDirectory contains CMakeLists.txt directly
- generateCommand.append(qtModuleSourceDirectory)
+ generate_command.append(extra_arg)
+ generate_command.append('-DCMAKE_PREFIX_PATH=' + ';'.join(cmake_prefix_path))
+ # for now assume that qt_module_source_directory contains CMakeLists.txt directly
+ generate_command.append(qt_module_source_directory)
else: # --> qmake
- qtModuleProFile = locate_path(qtModuleSourceDirectory, ["*.pro"], filters=[os.path.isfile])
+ qt_module_pro_file = locate_path(qt_module_source_directory, ["*.pro"], filters=[os.path.isfile])
if is_windows():
# do not shadow-build with qmake on Windows
- qtModuleBuildDirectory = os.path.dirname(qtModuleProFile)
- generateCommand = [qmakeBinary]
- generateCommand.extend(callerArguments.additional_config_args)
+ qt_module_build_directory = os.path.dirname(qt_module_pro_file)
+ generate_command = [qmake_binary]
+ generate_command.extend(caller_arguments.additional_config_args)
if os.environ.get('EXTRA_QMAKE_ARGS'):
- generateCommand.append(os.environ["EXTRA_QMAKE_ARGS"])
- generateCommand.append(qtModuleProFile)
+ generate_command.append(os.environ["EXTRA_QMAKE_ARGS"])
+ generate_command.append(qt_module_pro_file)
- run_command(generateCommand, cwd=qtModuleBuildDirectory, extra_environment=environment)
+ run_command(generate_command, cwd=qt_module_build_directory, extra_environment=environment)
- ret = run_build_command(cwd=qtModuleBuildDirectory, caller_arguments=callerArguments)
+ ret = run_build_command(cwd=qt_module_build_directory, caller_arguments=caller_arguments)
if ret:
raise RuntimeError(f"Failure running the last command: {ret}")
ret = run_install_command(
- ['install', 'INSTALL_ROOT=' + qtModuleInstallDirectory],
- cwd=qtModuleBuildDirectory,
- caller_arguments=callerArguments, extra_environment=environment
+ ['install', 'INSTALL_ROOT=' + qt_module_install_directory],
+ cwd=qt_module_build_directory,
+ caller_arguments=caller_arguments, extra_environment=environment
)
if ret:
raise RuntimeError(f"Failure running the last command: {ret}")
# doc collection
- if callerArguments.collectDocs:
- doc_list = locate_paths(qtModuleSourceDirectory, ['*.qch'], filters=[os.path.isfile])
- doc_install_dir = os.path.join(qtModuleInstallDirectory, 'doc')
+ if caller_arguments.collectDocs:
+ doc_list = locate_paths(qt_module_source_directory, ['*.qch'], filters=[os.path.isfile])
+ doc_install_dir = os.path.join(qt_module_install_directory, 'doc')
Path(doc_install_dir).mkdir(parents=True, exist_ok=True)
for item in doc_list:
shutil.copy(item, doc_install_dir)
# enginio etc. docs creation
- if callerArguments.makeDocs:
+ if caller_arguments.makeDocs:
# build docs first
ret = run_install_command(
- 'docs', cwd=qtModuleBuildDirectory,
- caller_arguments=callerArguments, extra_environment=environment
+ 'docs', cwd=qt_module_build_directory,
+ caller_arguments=caller_arguments, extra_environment=environment
)
if ret:
raise RuntimeError(f"Failure running the last command: {ret}")
# then make install those
ret = run_install_command(
- ['install_docs', 'INSTALL_ROOT=' + qtModuleInstallDirectory],
- cwd=qtModuleBuildDirectory,
- caller_arguments=callerArguments, extra_environment=environment
+ ['install_docs', 'INSTALL_ROOT=' + qt_module_install_directory],
+ cwd=qt_module_build_directory,
+ caller_arguments=caller_arguments, extra_environment=environment
)
if ret:
raise RuntimeError(f"Failure running the last command: {ret}")
# make separate "doc.7z" for later use if needed
- doc_dir = locate_path(qtModuleInstallDirectory, ["doc"], filters=[os.path.isdir])
- archive_name = callerArguments.module_name + '-' + os.environ['LICENSE'] + '-doc-' + os.environ['MODULE_VERSION'] + '.7z'
+ doc_dir = locate_path(qt_module_install_directory, ["doc"], filters=[os.path.isdir])
+ archive_name = caller_arguments.module_name + '-' + os.environ['LICENSE'] + '-doc-' + os.environ['MODULE_VERSION'] + '.7z'
ret = run_command(
['7z', 'a', os.path.join('doc_archives', archive_name), doc_dir],
cwd=os.path.dirname(os.path.realpath(__file__))
@@ -357,25 +357,25 @@ def main() -> None:
raise RuntimeError(f"Failure running the last command: {ret}")
# try to figure out where the actual exported content is
- qt5_install_basename = os.path.basename(callerArguments.qt5path)
+ qt5_install_basename = os.path.basename(caller_arguments.qt5path)
- if callerArguments.use_cmake:
- dir_to_archive = qtModuleInstallDirectory
+ if caller_arguments.use_cmake:
+ dir_to_archive = qt_module_install_directory
else:
- dir_to_archive = locate_path(qtModuleInstallDirectory, [qt5_install_basename], filters=[os.path.isdir])
+ dir_to_archive = locate_path(qt_module_install_directory, [qt5_install_basename], filters=[os.path.isdir])
# if .tag file exists in the source package (sha1) then copy it into the binary archive
try:
- tag_file = locate_path(qtModuleSourceDirectory, [".tag"], filters=[os.path.isfile])
+ tag_file = locate_path(qt_module_source_directory, [".tag"], filters=[os.path.isfile])
shutil.copy2(tag_file, dir_to_archive)
except PackagingError:
pass
# Pre-patch the package for IFW to patch it correctly during installation
- patch_archive(dir_to_archive, [callerArguments.qt5path, dir_to_archive], qt_install_prefix)
+ patch_archive(dir_to_archive, [caller_arguments.qt5path, dir_to_archive], qt_install_prefix)
- archive_cmd = ['7z', 'a', os.path.join('module_archives', 'qt5_' + callerArguments.module_name + '.7z')]
- if callerArguments.use_cmake:
+ archive_cmd = ['7z', 'a', os.path.join('module_archives', 'qt5_' + caller_arguments.module_name + '.7z')]
+ if caller_arguments.use_cmake:
archive_cmd.append(os.path.join(dir_to_archive, '*'))
else:
archive_cmd.append(dir_to_archive)
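
The QNX workaround above (QTBUG-38555) first normalizes every separator to '/', then restores a backslash only for the first one. Worked on an illustrative input:

    path = r"\Users\qt\qtbase_install"              # drive letter already stripped
    path = path.replace('\\', '/').replace('/', '\\', 1)
    # -> "\Users/qt/qtbase_install": leading separator is '\', the rest stay '/'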
diff --git a/packaging-tools/bld_openssl.py b/packaging-tools/bld_openssl.py
index 40d23ad3b..0dc6d9132 100644
--- a/packaging-tools/bld_openssl.py
+++ b/packaging-tools/bld_openssl.py
@@ -60,10 +60,10 @@ def archive(install_dir, archive_prefix):
def check_environment():
- with open(os.devnull, 'w', encoding="utf-8") as FNULL:
+ with open(os.devnull, 'w', encoding="utf-8") as fnull:
def check_cmd(cmd):
- if subprocess.call(cmd, stdout=FNULL, stderr=FNULL) != 0:
+ if subprocess.call(cmd, stdout=fnull, stderr=fnull) != 0:
print(f"*** Cannot execute {cmd[0]}")
exit(1)
check_cmd(['nasm', '-h'])
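
check_environment probes each required tool by running it with all output discarded. Since Python 3.3 the devnull handle does not need to be opened manually; a sketch, not what the patch does:

    import subprocess

    def check_cmd(cmd):
        # Probe a tool; discard its output, only the exit status matters.
        if subprocess.call(cmd, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) != 0:
            raise SystemExit(f"*** Cannot execute {cmd[0]}")

    check_cmd(['nasm', '-h'])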
diff --git a/packaging-tools/bld_python.py b/packaging-tools/bld_python.py
index 1e2796805..8fae4c590 100644
--- a/packaging-tools/bld_python.py
+++ b/packaging-tools/bld_python.py
@@ -52,23 +52,23 @@ class BldPythonError(Exception):
async def prepare_sources(src: str, tmp_base_dir: str) -> str:
log.info("Preparing sources: %s", src)
- srcTmpDir = os.path.join(tmp_base_dir, "src_dir")
- rmtree(srcTmpDir, ignore_errors=True)
+ src_tmp_dir = os.path.join(tmp_base_dir, "src_dir")
+ rmtree(src_tmp_dir, ignore_errors=True)
if os.path.isdir(src):
if "windows" in platform.system().lower():
- copytree(src, srcTmpDir)
+ copytree(src, src_tmp_dir)
else:
return src
elif os.path.isfile(src):
- os.makedirs(srcTmpDir)
- await extract_archive(src, srcTmpDir)
+ os.makedirs(src_tmp_dir)
+ await extract_archive(src, src_tmp_dir)
elif is_valid_url_path(src):
- os.makedirs(srcTmpDir)
- destFile = download_archive(src, tmp_base_dir)
- await extract_archive(destFile, srcTmpDir)
+ os.makedirs(src_tmp_dir)
+ dest_file = download_archive(src, tmp_base_dir)
+ await extract_archive(dest_file, src_tmp_dir)
else:
raise BldPythonError(f"Could not prepare sources from: {src}")
- return srcTmpDir
+ return src_tmp_dir
def locate_source_root(search_dir: str) -> str:
@@ -79,38 +79,38 @@ def locate_source_root(search_dir: str) -> str:
async def create_symlink(python_dir: str):
- pythonExe = os.path.join(python_dir, 'python.exe')
- assert os.path.isfile(pythonExe), f"The 'python' executable did not exist: {pythonExe}"
- versionCmd = [pythonExe, '--version']
- versionOutput = check_output(versionCmd, shell=True).decode("utf-8")
- match = re.search(r'(\d+)\.(\d+)\.(\d+)', versionOutput)
+ python_exe = os.path.join(python_dir, 'python.exe')
+ assert os.path.isfile(python_exe), f"The 'python' executable did not exist: {python_exe}"
+ version_cmd = [python_exe, '--version']
+ version_output = check_output(version_cmd, shell=True).decode("utf-8")
+ match = re.search(r'(\d+)\.(\d+)\.(\d+)', version_output)
if match:
destination = os.path.join(python_dir, 'python' + match.group(1) + match.group(2) + '.exe')
- os.symlink(pythonExe, destination)
- log.info("Symbolic link created from %s to %s", pythonExe, destination)
+ os.symlink(python_exe, destination)
+ log.info("Symbolic link created from %s to %s", python_exe, destination)
else:
- raise BldPythonError(f"Could not parse version output: {versionOutput}")
+ raise BldPythonError(f"Could not parse version output: {version_output}")
async def _build_python_win(src_dir: str) -> str:
log.info("Building..")
log.info("Source dir: %s", src_dir)
- buildBat = os.path.join(src_dir, 'PCbuild', 'build.bat')
- assert os.path.isfile(buildBat), f"The 'build.bat' batch file did not exist: {buildBat}"
- await async_exec_cmd([buildBat])
- destDir = os.path.join(src_dir, 'PCbuild', 'amd64')
- assert os.path.isdir(destDir), f"The build destination directory did not exist: {destDir}"
- await create_symlink(destDir)
- log.info("Python built successfully and installed to: %s", destDir)
+ build_bat = os.path.join(src_dir, 'PCbuild', 'build.bat')
+ assert os.path.isfile(build_bat), f"The 'build.bat' batch file did not exist: {build_bat}"
+ await async_exec_cmd([build_bat])
+ dest_dir = os.path.join(src_dir, 'PCbuild', 'amd64')
+ assert os.path.isdir(dest_dir), f"The build destination directory did not exist: {dest_dir}"
+ await create_symlink(dest_dir)
+ log.info("Python built successfully and installed to: %s", dest_dir)
return src_dir
async def build_python_win(src: str) -> str:
- baseDir = os.path.join(os.getcwd(), "python_bld_tmp")
- os.makedirs(baseDir, exist_ok=True)
- srcDir = await prepare_sources(src, baseDir)
- sourceRootDir = locate_source_root(srcDir)
- return await _build_python_win(sourceRootDir)
+ base_dir = os.path.join(os.getcwd(), "python_bld_tmp")
+ os.makedirs(base_dir, exist_ok=True)
+ src_dir = await prepare_sources(src, base_dir)
+ source_root_dir = locate_source_root(src_dir)
+ return await _build_python_win(source_root_dir)
async def _build_python(src_dir: str, bld_dir: str, prefix: str) -> str:
@@ -119,38 +119,38 @@ async def _build_python(src_dir: str, bld_dir: str, prefix: str) -> str:
log.info(" Build dir: %s", bld_dir)
log.info(" Prefix: %s", prefix)
system = platform.system().lower()
- cpuCount = str(cpu_count())
+ thread_count = str(cpu_count())
if "darwin" in system:
- opensslQueryCmd = ['brew', '--prefix', 'openssl']
- opensslPath = exec_cmd(opensslQueryCmd)
- if not os.path.exists(opensslPath):
+ openssl_query_cmd = ['brew', '--prefix', 'openssl']
+ openssl_path = exec_cmd(openssl_query_cmd)
+ if not os.path.exists(openssl_path):
raise BldPythonError(
"Could not find OpenSSL path. Please check that the required brew formula is installed."
)
- configureCmd = [
+ configure_cmd = [
os.path.join(src_dir, 'configure'),
'--enable-framework',
- '--with-openssl=' + opensslPath,
+ '--with-openssl=' + openssl_path,
'--prefix=' + prefix
]
- makeCmd = ['make', '-j' + cpuCount]
- makeInstallCmd = ['make', 'install']
+ make_cmd = ['make', '-j' + thread_count]
+ make_install_cmd = ['make', 'install']
else:
- configureCmd = [
+ configure_cmd = [
os.path.join(src_dir, 'configure'),
'--enable-shared',
'--prefix=' + prefix
]
- makeCmd = ['make', '-j' + cpuCount]
- makeInstallCmd = ['make', 'install']
+ make_cmd = ['make', '-j' + thread_count]
+ make_install_cmd = ['make', 'install']
rmtree(bld_dir, ignore_errors=True)
os.makedirs(bld_dir)
with cd(bld_dir):
- await async_exec_cmd(configureCmd)
- await async_exec_cmd(makeCmd)
- await async_exec_cmd(makeInstallCmd)
+ await async_exec_cmd(configure_cmd)
+ await async_exec_cmd(make_cmd)
+ await async_exec_cmd(make_install_cmd)
log.info("Python built successfully and installed to: %s", prefix)
return prefix
@@ -161,12 +161,12 @@ async def build_python(src: str, prefix: str) -> str:
if os.path.isdir(prefix):
log.info("Deleting existing Python build from: %s", prefix)
rmtree(prefix, ignore_errors=True)
- baseDir = os.path.join(os.getcwd(), "python_bld_tmp")
- os.makedirs(baseDir, exist_ok=True)
- srcDir = await prepare_sources(src, baseDir)
- bldDir = os.path.join(baseDir, "build_dir")
- sourceRootDir = locate_source_root(srcDir)
- return await _build_python(sourceRootDir, bldDir, prefix)
+ base_dir = os.path.join(os.getcwd(), "python_bld_tmp")
+ os.makedirs(base_dir, exist_ok=True)
+ src_dir = await prepare_sources(src, base_dir)
+ bld_dir = os.path.join(base_dir, "build_dir")
+ source_root_dir = locate_source_root(src_dir)
+ return await _build_python(source_root_dir, bld_dir, prefix)
def main() -> None:
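
create_symlink derives the short pythonXY.exe alias from the interpreter's own version report. The parsing step in isolation (the version string is an example):

    import re

    version_output = "Python 3.11.4"
    match = re.search(r'(\d+)\.(\d+)\.(\d+)', version_output)
    if match:
        short_name = 'python' + match.group(1) + match.group(2) + '.exe'
        # short_name == "python311.exe"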
diff --git a/packaging-tools/bld_utils.py b/packaging-tools/bld_utils.py
index b2dae61fc..1255869f4 100644
--- a/packaging-tools/bld_utils.py
+++ b/packaging-tools/bld_utils.py
@@ -93,17 +93,17 @@ class DirRenamer():
def compress(path, directory_name, sevenzip_target):
- sevenZipExtension = os.extsep + '7z'
- parentDirectoryPath = os.path.abspath(os.path.join(path, '..'))
- if os.path.splitext(sevenzip_target)[1] != sevenZipExtension:
- sevenzip_target = sevenzip_target + sevenZipExtension
- sevenZipFileName = os.path.split(sevenzip_target)[1]
+ sevenzip_extension = os.extsep + '7z'
+ parent_directory_path = os.path.abspath(os.path.join(path, '..'))
+ if os.path.splitext(sevenzip_target)[1] != sevenzip_extension:
+ sevenzip_target = sevenzip_target + sevenzip_extension
+ sevenzip_filename = os.path.split(sevenzip_target)[1]
with DirRenamer(path, directory_name):
- run_command(' '.join(('7z a -mx9', sevenZipFileName, directory_name)), parentDirectoryPath)
+ run_command(' '.join(('7z a -mx9', sevenzip_filename, directory_name)), parent_directory_path)
- currentSevenZipPath = os.path.join(parentDirectoryPath, sevenZipFileName)
- if currentSevenZipPath != sevenzip_target:
- shutil.move(currentSevenZipPath, sevenzip_target)
+ current_sevenzip_path = os.path.join(parent_directory_path, sevenzip_filename)
+ if current_sevenzip_path != sevenzip_target:
+ shutil.move(current_sevenzip_path, sevenzip_target)
def strip_vars(sobject, chars):
@@ -184,12 +184,12 @@ def download(url, target, read_block_size=1048576):
raise Exception(f"Can not download '{url}' to '{target}' as target(error code: '{error.code}').")
renamed = False
- tryRenameCounter = 0
+ try_rename_counter = 0
while renamed is False :
- tryRenameCounter = tryRenameCounter + 1
+ try_rename_counter = try_rename_counter + 1
try:
- if tryRenameCounter > 5 :
- sys.stdout.write(f"r{tryRenameCounter}")
+ if try_rename_counter > 5 :
+ sys.stdout.write(f"r{try_rename_counter}")
if os.path.lexists(target):
raise Exception(f"Please remove savefile first: {target}")
os.rename(savefile_tmp, target)
@@ -199,7 +199,7 @@ def download(url, target, read_block_size=1048576):
sys.stdout.write(os.linesep)
except OSError as e:
# if it still exists just try that after a microsleep and stop this after 720 tries
- if os.path.lexists(savefile_tmp) and tryRenameCounter < 720:
+ if os.path.lexists(savefile_tmp) and try_rename_counter < 720:
sleep(2)
continue
if not os.path.lexists(target):
@@ -230,8 +230,8 @@ def get_environment(extra_environment=None):
return environment
for key in extra_environment.keys():
- keyUpper = key.upper()
- if any((keyUpper == 'PATH', keyUpper == 'INCLUDE', keyUpper == 'LIB')):
+ key_upper = key.upper()
+ if any((key_upper == 'PATH', key_upper == 'INCLUDE', key_upper == 'LIB')):
set_value_on_environment_dict(environment, key, extra_environment[key])
else:
environment[key] = extra_environment[key]
@@ -246,22 +246,22 @@ def run_command(command, cwd, extra_environment=None, only_error_case_output=Fal
raise TypeError(f"only_error_case_output({type(only_error_case_output)}) is not {bool}")
if type(command) is list:
- commandAsList = command
+ command_as_list = command
else:
- commandAsList = command[:].split(' ')
+ command_as_list = command[:].split(' ')
environment = get_environment(extra_environment)
# if we can not find the command, just check the current working dir
- if (not os.path.lexists(commandAsList[0]) and cwd
- and os.path.isfile(os.path.abspath(os.path.join(cwd, commandAsList[0])))):
- commandAsList[0] = os.path.abspath(os.path.join(cwd, commandAsList[0]))
+ if (not os.path.lexists(command_as_list[0]) and cwd
+ and os.path.isfile(os.path.abspath(os.path.join(cwd, command_as_list[0])))):
+ command_as_list[0] = os.path.abspath(os.path.join(cwd, command_as_list[0]))
- pathEnvironment = environment['PATH']
+ path_environment = environment['PATH']
# if we can not find the command, check the environment
- found_executable = shutil.which(str(commandAsList[0]), path=str(pathEnvironment))
- if not os.path.lexists(commandAsList[0]) and found_executable:
- commandAsList[0] = found_executable
+ found_executable = shutil.which(str(command_as_list[0]), path=str(path_environment))
+ if not os.path.lexists(command_as_list[0]) and found_executable:
+ command_as_list[0] = found_executable
if cwd and not os.path.lexists(cwd):
os.makedirs(cwd)
@@ -271,30 +271,30 @@ def run_command(command, cwd, extra_environment=None, only_error_case_output=Fal
print(f"Working Directory: {cwd}")
else:
print("No cwd set!")
- print("Last command: ", ' '.join(commandAsList))
+ print("Last command: ", ' '.join(command_as_list))
sys.stdout.flush()
if cwd and not os.path.lexists(cwd):
raise Exception(f"The current working directory is not existing: {cwd}")
- useShell = is_windows()
- lastStdOutLines = []
- lastStdErrLines = []
+ use_shell = is_windows()
+ last_stdout_lines = []
+ last_stderr_lines = []
if currentThread().name == "MainThread" and not only_error_case_output:
process = Popen(
- commandAsList, shell=useShell,
+ command_as_list, shell=use_shell,
cwd=cwd, bufsize=-1, env=environment
)
else:
process = Popen( # pylint: disable=R1732
- commandAsList, shell=useShell,
+ command_as_list, shell=use_shell,
stdout=PIPE, stderr=PIPE,
cwd=cwd, bufsize=-1, env=environment
)
- maxSavedLineNumbers = 1000
- lastStdOutLines = deque(maxlen=maxSavedLineNumbers)
- lastStdErrLines = deque(maxlen=maxSavedLineNumbers)
+ max_saved_line_numbers = 1000
+ last_stdout_lines = deque(maxlen=max_saved_line_numbers)
+ last_stderr_lines = deque(maxlen=max_saved_line_numbers)
# Launch the asynchronous readers of the process' stdout and stderr.
stdout = AsynchronousFileReader(process.stdout)
@@ -305,14 +305,14 @@ def run_command(command, cwd, extra_environment=None, only_error_case_output=Fal
# Show what we received from standard output.
for line in stdout.readlines():
line = line.decode()
- lastStdOutLines.append(line)
+ last_stdout_lines.append(line)
if currentThread().name != "MainThread":
sys.stdout.write(line)
# Show what we received from standard error.
for line in stderr.readlines():
line = line.decode()
- lastStdErrLines.append(line)
+ last_stderr_lines.append(line)
if currentThread().name != "MainThread":
sys.stdout.write(line)
@@ -328,30 +328,30 @@ def run_command(command, cwd, extra_environment=None, only_error_case_output=Fal
process.stderr.close()
process.wait()
- exitCode = process.returncode
+ exit_code = process.returncode
# lets keep that for debugging
# if environment:
# for key in sorted(environment):
# sys.stderr.write("set " + key + "=" + environment[key] + os.linesep)
- if exitCode not in expected_exit_codes:
- lastOutput = ""
+ if exit_code not in expected_exit_codes:
+ last_output = ""
exit_type = ""
if currentThread().name != "MainThread" or only_error_case_output:
- if len(lastStdErrLines) != 0:
- lastOutput += "".join(str(lastStdErrLines))
+ if len(last_stderr_lines) != 0:
+ last_output += "".join(str(last_stderr_lines))
exit_type = "error "
- elif len(lastStdOutLines) != 0:
- lastOutput += "".join(str(lastStdOutLines))
- prettyLastOutput = os.linesep + '======================= error =======================' + os.linesep
- prettyLastOutput += "Working Directory: " + cwd + os.linesep
- prettyLastOutput += "Last command: " + ' '.join(commandAsList) + os.linesep
- if lastOutput:
- prettyLastOutput += f"last {exit_type}output:{os.linesep}{lastOutput}"
+ elif len(last_stdout_lines) != 0:
+ last_output += "".join(str(last_stdout_lines))
+ pretty_last_output = os.linesep + '======================= error =======================' + os.linesep
+ pretty_last_output += "Working Directory: " + cwd + os.linesep
+ pretty_last_output += "Last command: " + ' '.join(command_as_list) + os.linesep
+ if last_output:
+ pretty_last_output += f"last {exit_type}output:{os.linesep}{last_output}"
else:
- prettyLastOutput += " - no process output caught - "
- raise Exception(f"Different exit code then expected({expected_exit_codes}): {exitCode}{prettyLastOutput}")
- return exitCode
+ pretty_last_output += " - no process output caught - "
+ raise Exception(f"Different exit code then expected({expected_exit_codes}): {exit_code}{pretty_last_output}")
+ return exit_code
@deep_copy_arguments
@@ -384,36 +384,36 @@ def run_build_command(arguments=None, cwd=None, caller_arguments=None, extra_env
@deep_copy_arguments
def get_return_value(command, cwd=None, extra_environment=None):
- commandAsList = command[:].split(' ')
+ command_as_list = command[:].split(' ')
return Popen(
- commandAsList, stdout=PIPE, stderr=STDOUT,
+ command_as_list, stdout=PIPE, stderr=STDOUT,
cwd=cwd, env=get_environment(extra_environment)
).communicate()[0].strip()
def git_sha(path):
- gitBinary = "git"
+ git_binary = "git"
if is_git_directory(path):
- return get_return_value(gitBinary + " rev-list -n1 HEAD", cwd=path).strip()
+ return get_return_value(git_binary + " rev-list -n1 HEAD", cwd=path).strip()
return ''
# get commit SHA either directly from git, or from a .tag file in the source directory
def get_commit_sha(source_path):
- buildGitSHA = git_sha(source_path)
- if not buildGitSHA:
+ build_git_sha = git_sha(source_path)
+ if not build_git_sha:
tagfile = os.path.join(source_path, '.tag')
if os.path.exists(tagfile):
with open(tagfile, 'r', encoding="utf-8") as f:
- buildGitSHA = f.read().strip()
- return buildGitSHA
+ build_git_sha = f.read().strip()
+ return build_git_sha
def is_git_directory(repository_path):
if not repository_path:
return False
- gitConfigDir = os.path.abspath(os.path.join(repository_path, '.git'))
- return os.path.lexists(gitConfigDir)
+ git_config_dir = os.path.abspath(os.path.join(repository_path, '.git'))
+ return os.path.lexists(git_config_dir)
def file_url(file_path):
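
run_command keeps only the tail of the child process output for its error report by bounding the deques with maxlen=1000; older lines fall off the front automatically. A minimal demonstration:

    from collections import deque

    last_lines = deque(maxlen=3)
    for i in range(10):
        last_lines.append(f"line {i}")
    print(list(last_lines))   # ['line 7', 'line 8', 'line 9']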
diff --git a/packaging-tools/bldinstallercommon.py b/packaging-tools/bldinstallercommon.py
index 6ee508dee..0a0d6a732 100644
--- a/packaging-tools/bldinstallercommon.py
+++ b/packaging-tools/bldinstallercommon.py
@@ -717,9 +717,9 @@ def patch_qt(qt5_path):
print("##### patch Qt #####")
qmake_binary = os.path.join(qt5_path, 'bin', 'qmake')
# write qt.conf
- with open(os.path.join(qt5_path, 'bin', 'qt.conf'), "w", encoding="utf-8") as qtConfFile:
- qtConfFile.write("[Paths]" + os.linesep)
- qtConfFile.write("Prefix=.." + os.linesep)
+ with open(os.path.join(qt5_path, 'bin', 'qt.conf'), "w", encoding="utf-8") as qt_conf_file:
+ qt_conf_file.write("[Paths]" + os.linesep)
+ qt_conf_file.write("Prefix=.." + os.linesep)
# fix rpaths
if is_linux():
handle_component_rpath(qt5_path, 'lib')
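
patch_qt writes a two-line qt.conf next to qmake so the toolchain resolves its install prefix relative to the binary instead of a hard-coded build-machine path. The file it produces is just:

    [Paths]
    Prefix=..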
diff --git a/packaging-tools/build_clang.py b/packaging-tools/build_clang.py
index c963bb7f3..ad808a05a 100644
--- a/packaging-tools/build_clang.py
+++ b/packaging-tools/build_clang.py
@@ -253,9 +253,9 @@ def mingw_training(base_path, qtcreator_path, environment, bitness):
# Train MinGW libclang library with the built QtCreator
# First open the project, then close it; this generates the initial settings and .user files. The second run does the actual training.
- for batchFile in ['qtc.openProject.batch', 'qtc.fileTextEditorCpp.batch']:
+ for batch_file in ['qtc.openProject.batch', 'qtc.fileTextEditorCpp.batch']:
run_command(
- [os.path.join(training_dir, 'runBatchFiles.bat'), msvc_version(), 'x64' if bitness == 64 else 'x86', batchFile],
+ [os.path.join(training_dir, 'runBatchFiles.bat'), msvc_version(), 'x64' if bitness == 64 else 'x86', batch_file],
base_path, extra_environment=None, only_error_case_output=False, expected_exit_codes=[0, 1]
)
diff --git a/packaging-tools/build_wrapper.py b/packaging-tools/build_wrapper.py
index 691504876..cea73691f 100644
--- a/packaging-tools/build_wrapper.py
+++ b/packaging-tools/build_wrapper.py
@@ -169,9 +169,9 @@ def move_files_to_parent_dir(source):
def create_download_documentation_task(base_url, download_path):
doc_base_url = base_url + "/doc"
- useLocal = urlparse(doc_base_url).scheme == "file"
- print(f"doc_base_url: {doc_base_url} useLocal: {useLocal}")
- if useLocal:
+ use_local = urlparse(doc_base_url).scheme == "file"
+ print(f"doc_base_url: {doc_base_url} use_local: {use_local}")
+ if use_local:
file_list = os.listdir(doc_base_url[len("file:///"):])
else:
with urlopen(doc_base_url) as urlpath:
@@ -253,23 +253,23 @@ PluginConf = namedtuple('PluginConf', ['git_url', 'branch_or_tag', 'checkout_dir
def parse_qtcreator_plugins(pkg_conf_file):
"""Parse available Qt Creator plugins from configuration file"""
- pluginList = []
+ plugin_list = []
if not pkg_conf_file:
- return pluginList
- pluginOptions = get_pkg_options(pkg_conf_file)
- sectionName = "QtCreator.Build.Plugin"
- keyName = "plugins"
- if not pluginOptions.option_exists(sectionName, keyName):
- return pluginList
- pluginConfList = pluginOptions.config_section_map(sectionName)[keyName]
- for pluginName in pluginConfList.replace(" ", "").replace("\n", "").split(","):
- section = "QtCreator.Build.Plugin." + pluginName
- pluginUrl = pluginOptions.config_section_map(section)["QTC_PLUGIN_GIT_URL"]
- branchOrTag = pluginOptions.config_section_map(section)["QTC_PLUGIN_GIT_BRANCH_OR_TAG"]
- checkoutDirName = pluginOptions.config_section_map(section)["QTC_PLUGIN_CHECKOUT_DIR_NAME"]
- plugin = PluginConf(git_url=pluginUrl, branch_or_tag=branchOrTag, checkout_dir=checkoutDirName)
- pluginList.extend([plugin])
- return pluginList
+ return plugin_list
+ plugin_options = get_pkg_options(pkg_conf_file)
+ section_name = "QtCreator.Build.Plugin"
+ key_name = "plugins"
+ if not plugin_options.option_exists(section_name, key_name):
+ return plugin_list
+ plugin_conf_list = plugin_options.config_section_map(section_name)[key_name]
+ for plugin_name in plugin_conf_list.replace(" ", "").replace("\n", "").split(","):
+ section = "QtCreator.Build.Plugin." + plugin_name
+ plugin_url = plugin_options.config_section_map(section)["QTC_PLUGIN_GIT_URL"]
+ branch_or_tag = plugin_options.config_section_map(section)["QTC_PLUGIN_GIT_BRANCH_OR_TAG"]
+ checkout_dir_name = plugin_options.config_section_map(section)["QTC_PLUGIN_CHECKOUT_DIR_NAME"]
+ plugin = PluginConf(git_url=plugin_url, branch_or_tag=branch_or_tag, checkout_dir=checkout_dir_name)
+ plugin_list.extend([plugin])
+ return plugin_list
QtcPlugin = namedtuple('QtcPlugin', ['name',
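
parse_qtcreator_plugins returns one PluginConf namedtuple per plugin section of the INI file. Construction and field access, with illustrative values:

    from collections import namedtuple

    PluginConf = namedtuple('PluginConf', ['git_url', 'branch_or_tag', 'checkout_dir'])
    plugin = PluginConf(git_url='https://example.com/plugin.git',
                        branch_or_tag='v1.0', checkout_dir='plugin-src')
    print(plugin.checkout_dir)   # plugin-src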
@@ -552,20 +552,20 @@ def handle_qt_creator_build(option_dict, qtcreator_plugins):
# Get Qt Creator sources if not present yet
if 'QT_CREATOR_GIT_URL' in option_dict:
- qtCreatorSourceDirectory = os.path.join(work_dir, 'qt-creator')
- if os.path.exists(qtCreatorSourceDirectory):
- shutil.rmtree(qtCreatorSourceDirectory)
- os.makedirs(qtCreatorSourceDirectory)
+ qtcreator_source_directory = os.path.join(work_dir, 'qt-creator')
+ if os.path.exists(qtcreator_source_directory):
+ shutil.rmtree(qtcreator_source_directory)
+ os.makedirs(qtcreator_source_directory)
clone_repository(option_dict['QT_CREATOR_GIT_URL'], option_dict['QT_CREATOR_GIT_BRANCH'],
- qtCreatorSourceDirectory, full_clone=True, init_subrepos=True)
+ qtcreator_source_directory, full_clone=True, init_subrepos=True)
# Get Qt Creator plugin sources if not present yet
- for pluginConf in qtcreator_plugins:
- checkoutDir = os.path.join(work_dir, pluginConf.checkout_dir)
- if pluginConf.git_url:
- if os.path.exists(checkoutDir):
- shutil.rmtree(checkoutDir)
- os.makedirs(checkoutDir)
- clone_repository(pluginConf.git_url, pluginConf.branch_or_tag, checkoutDir, full_clone=True)
+ for plugin_conf in qtcreator_plugins:
+ checkout_dir = os.path.join(work_dir, plugin_conf.checkout_dir)
+ if plugin_conf.git_url:
+ if os.path.exists(checkout_dir):
+ shutil.rmtree(checkout_dir)
+ os.makedirs(checkout_dir)
+ clone_repository(plugin_conf.git_url, plugin_conf.branch_or_tag, checkout_dir, full_clone=True)
# Build time variables
qtcreator_source = os.path.join(work_dir, 'qt-creator')
@@ -1006,9 +1006,9 @@ def handle_sdktool_build(option_dict):
def notarize_dmg(dmg_path, installer_name_base):
# bundle-id is just a unique identifier without any special meaning, used to track the notarization progress
- bundleId = installer_name_base + "-" + strftime('%Y-%m-%d', gmtime())
- bundleId = bundleId.replace('_', '-').replace(' ', '') # replace illegal characters for bundleId
- args = [sys.executable, 'notarize.py', '--dmg=' + dmg_path, '--bundle-id=' + bundleId]
+ bundle_id = installer_name_base + "-" + strftime('%Y-%m-%d', gmtime())
+ bundle_id = bundle_id.replace('_', '-').replace(' ', '') # replace illegal characters for bundle_id
+ args = [sys.executable, 'notarize.py', '--dmg=' + dmg_path, '--bundle-id=' + bundle_id]
do_execute_sub_process(args, SCRIPT_ROOT_DIR)
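
notarize_dmg builds a throwaway bundle id from the installer base name plus a date stamp, then strips characters the notarization tooling rejects. A worked example with an illustrative name:

    from time import gmtime, strftime

    installer_name_base = "qt_creator opensource"
    bundle_id = installer_name_base + "-" + strftime('%Y-%m-%d', gmtime())
    bundle_id = bundle_id.replace('_', '-').replace(' ', '')
    # e.g. "qt-creatoropensource-2024-05-01"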
@@ -1063,71 +1063,71 @@ def init_pkg_options(args):
return "mac-clang-10.11-x64"
raise RuntimeError("Unsupported host platform")
- optionDict = {}
+ option_dict = {}
# Are we using local conf file for pkg options?
if args.pkg_conf_file:
options = get_pkg_options(args.pkg_conf_file)
- optionDict = merge_two_dicts(optionDict, options.config_map())
- optionDict['TARGET_ENV'] = args.target_env if args.target_env else get_default_target_env()
- optionDict['BUILD_NUMBER'] = str(strftime('%Y%m%d%H%M%S', gmtime()))
- optionDict['PACKAGE_STORAGE_SERVER_ADDR'] = optionDict['PACKAGE_STORAGE_SERVER_USER'] + '@' + optionDict['PACKAGE_STORAGE_SERVER']
+ option_dict = merge_two_dicts(option_dict, options.config_map())
+ option_dict['TARGET_ENV'] = args.target_env if args.target_env else get_default_target_env()
+ option_dict['BUILD_NUMBER'] = str(strftime('%Y%m%d%H%M%S', gmtime()))
+ option_dict['PACKAGE_STORAGE_SERVER_ADDR'] = option_dict['PACKAGE_STORAGE_SERVER_USER'] + '@' + option_dict['PACKAGE_STORAGE_SERVER']
else:
- optionDict = dict(os.environ)
+ option_dict = dict(os.environ)
# Check for command line overrides
- optionDict['LICENSE'] = args.license_
- optionDict['PACKAGE_STORAGE_SERVER_BASE_DIR'] = args.path
- optionDict['OPENSSL_LIBS'] = args.openssl_libs
- optionDict['SNAPSHOT_SERVER_PATH'] = args.snapshot_path
- optionDict['TARGET_ENV'] = args.target_env if args.target_env else os.environ.get('cfg')
- optionDict['BUILD_NUMBER'] = args.build_number if args.build_number else os.environ.get('BUILD_NUMBER')
-
- optionDict['SIGNING_SERVER'] = get_pkg_value("SIGNING_SERVER")
- optionDict['SIGNING_PASSWORD'] = get_pkg_value("SIGNING_PASSWORD")
- optionDict['USP_SERVER_URL'] = get_pkg_value("USP_SERVER_URL")
- optionDict['USP_AUTH_KEY'] = get_pkg_value("USP_AUTH_KEY")
- optionDict['PACKAGE_STORAGE_SERVER_USER'] = get_pkg_value("PACKAGE_STORAGE_SERVER_USER")
- optionDict['PACKAGE_STORAGE_SERVER'] = get_pkg_value("PACKAGE_STORAGE_SERVER")
- optionDict['PACKAGE_STORAGE_SERVER_ADDR'] = args.server or get_pkg_value("PACKAGE_STORAGE_SERVER_ADDR")
- optionDict['PACKAGE_STORAGE_SERVER_PATH_HTTP'] = args.override_server_path_http or get_pkg_value("PACKAGE_STORAGE_SERVER_PATH_HTTP")
- optionDict['SNAPSHOT_SERVER'] = args.snapshot_server or get_pkg_value("SNAPSHOT_SERVER")
- optionDict['EXT_SERVER_BASE_URL'] = get_pkg_value("EXT_SERVER_BASE_URL")
- optionDict['RTA_SERVER_BASE_URL'] = get_pkg_value("RTA_SERVER_BASE_URL")
- optionDict['PKG_STAGING_SERVER'] = get_pkg_value("PKG_STAGING_SERVER")
- optionDict['PKG_STAGING_SERVER_UNAME'] = get_pkg_value("PKG_STAGING_SERVER_UNAME")
- optionDict['PROD_USER'] = get_pkg_value("PROD_USER")
- optionDict['PROD_ADDR'] = get_pkg_value("PROD_ADDR")
+ option_dict['LICENSE'] = args.license_
+ option_dict['PACKAGE_STORAGE_SERVER_BASE_DIR'] = args.path
+ option_dict['OPENSSL_LIBS'] = args.openssl_libs
+ option_dict['SNAPSHOT_SERVER_PATH'] = args.snapshot_path
+ option_dict['TARGET_ENV'] = args.target_env if args.target_env else os.environ.get('cfg')
+ option_dict['BUILD_NUMBER'] = args.build_number if args.build_number else os.environ.get('BUILD_NUMBER')
+
+ option_dict['SIGNING_SERVER'] = get_pkg_value("SIGNING_SERVER")
+ option_dict['SIGNING_PASSWORD'] = get_pkg_value("SIGNING_PASSWORD")
+ option_dict['USP_SERVER_URL'] = get_pkg_value("USP_SERVER_URL")
+ option_dict['USP_AUTH_KEY'] = get_pkg_value("USP_AUTH_KEY")
+ option_dict['PACKAGE_STORAGE_SERVER_USER'] = get_pkg_value("PACKAGE_STORAGE_SERVER_USER")
+ option_dict['PACKAGE_STORAGE_SERVER'] = get_pkg_value("PACKAGE_STORAGE_SERVER")
+ option_dict['PACKAGE_STORAGE_SERVER_ADDR'] = args.server or get_pkg_value("PACKAGE_STORAGE_SERVER_ADDR")
+ option_dict['PACKAGE_STORAGE_SERVER_PATH_HTTP'] = args.override_server_path_http or get_pkg_value("PACKAGE_STORAGE_SERVER_PATH_HTTP")
+ option_dict['SNAPSHOT_SERVER'] = args.snapshot_server or get_pkg_value("SNAPSHOT_SERVER")
+ option_dict['EXT_SERVER_BASE_URL'] = get_pkg_value("EXT_SERVER_BASE_URL")
+ option_dict['RTA_SERVER_BASE_URL'] = get_pkg_value("RTA_SERVER_BASE_URL")
+ option_dict['PKG_STAGING_SERVER'] = get_pkg_value("PKG_STAGING_SERVER")
+ option_dict['PKG_STAGING_SERVER_UNAME'] = get_pkg_value("PKG_STAGING_SERVER_UNAME")
+ option_dict['PROD_USER'] = get_pkg_value("PROD_USER")
+ option_dict['PROD_ADDR'] = get_pkg_value("PROD_ADDR")
if LOCAL_MODE:
- optionDict['PACKAGE_STORAGE_SERVER_USER'] = getuser() # current user
- optionDict['PACKAGE_STORAGE_SERVER'] = "127.0.0.1"
- optionDict['PACKAGE_STORAGE_SERVER_ADDR'] = optionDict['PACKAGE_STORAGE_SERVER_USER'] + "@" + optionDict['PACKAGE_STORAGE_SERVER']
- optionDict['PACKAGE_STORAGE_SERVER_BASE_DIR'] = LOCAL_INSTALLER_DIR
- optionDict['SNAPSHOT_SERVER'] = optionDict['PACKAGE_STORAGE_SERVER_ADDR']
- optionDict['SNAPSHOT_SERVER_PATH'] = LOCAL_INSTALLER_DIR
+ option_dict['PACKAGE_STORAGE_SERVER_USER'] = getuser() # current user
+ option_dict['PACKAGE_STORAGE_SERVER'] = "127.0.0.1"
+ option_dict['PACKAGE_STORAGE_SERVER_ADDR'] = option_dict['PACKAGE_STORAGE_SERVER_USER'] + "@" + option_dict['PACKAGE_STORAGE_SERVER']
+ option_dict['PACKAGE_STORAGE_SERVER_BASE_DIR'] = LOCAL_INSTALLER_DIR
+ option_dict['SNAPSHOT_SERVER'] = option_dict['PACKAGE_STORAGE_SERVER_ADDR']
+ option_dict['SNAPSHOT_SERVER_PATH'] = LOCAL_INSTALLER_DIR
if is_linux():
- optionDict['HOST_PLATFORM'] = 'linux'
+ option_dict['HOST_PLATFORM'] = 'linux'
elif is_macos():
- optionDict['HOST_PLATFORM'] = 'mac'
+ option_dict['HOST_PLATFORM'] = 'mac'
else:
- optionDict['HOST_PLATFORM'] = 'windows'
+ option_dict['HOST_PLATFORM'] = 'windows'
- optionDict['CURL_COMMAND'] = '%CURL%' if is_windows() else 'curl'
- optionDict['RM_COMMAND'] = '%RM%' if is_windows() else 'rm'
- optionDict['SSH_COMMAND'] = '%SSH%' if is_windows() else 'ssh'
- optionDict['SCP_COMMAND'] = '%SCP%' if is_windows() else 'scp'
- optionDict['WORK_DIR'] = WORK_DIR
+ option_dict['CURL_COMMAND'] = '%CURL%' if is_windows() else 'curl'
+ option_dict['RM_COMMAND'] = '%RM%' if is_windows() else 'rm'
+ option_dict['SSH_COMMAND'] = '%SSH%' if is_windows() else 'ssh'
+ option_dict['SCP_COMMAND'] = '%SCP%' if is_windows() else 'scp'
+ option_dict['WORK_DIR'] = WORK_DIR
# Parse version number info if this build utilizes release description file
- if 'RELEASE_DESCRIPTION_FILE' in optionDict:
+ if 'RELEASE_DESCRIPTION_FILE' in option_dict:
# Path relative to script or absolute file path
- path = optionDict['RELEASE_DESCRIPTION_FILE']
- path = path if (os.path.isabs(path) and os.path.isfile(path)) else os.path.join(optionDict['WORK_DIR'], path)
- optionDict['RELEASE_DESCRIPTION_FILE'] = path
+ path = option_dict['RELEASE_DESCRIPTION_FILE']
+ path = path if (os.path.isabs(path) and os.path.isfile(path)) else os.path.join(option_dict['WORK_DIR'], path)
+ option_dict['RELEASE_DESCRIPTION_FILE'] = path
# resolve configuration files base dir
- confBaseDir = optionDict['CONFIGURATIONS_FILE_BASE_DIR']
- confBaseDir = confBaseDir if (os.path.isabs(confBaseDir) and os.path.isdir(confBaseDir)) else os.path.join(optionDict['WORK_DIR'], confBaseDir)
- optionDict['CONFIGURATIONS_FILE_BASE_DIR'] = confBaseDir
+ conf_base_dir = option_dict['CONFIGURATIONS_FILE_BASE_DIR']
+ conf_base_dir = conf_base_dir if (os.path.isabs(conf_base_dir) and os.path.isdir(conf_base_dir)) else os.path.join(option_dict['WORK_DIR'], conf_base_dir)
+ option_dict['CONFIGURATIONS_FILE_BASE_DIR'] = conf_base_dir
parser = ConfigParser()
parser.read(path)
@@ -1135,10 +1135,10 @@ def init_pkg_options(args):
if s == 'release.global':
version = safe_config_key_fetch(parser, s, 'version')
version_tag = safe_config_key_fetch(parser, s, 'version_tag')
- optionDict['VERSION'] = version
- optionDict['VERSION_TAG'] = version_tag
- optionDict['VERSION_FULL'] = version if not version_tag else version + '-' + version_tag
- return optionDict
+ option_dict['VERSION'] = version
+ option_dict['VERSION_TAG'] = version_tag
+ option_dict['VERSION_FULL'] = version if not version_tag else version + '-' + version_tag
+ return option_dict
def main() -> None:
@@ -1148,10 +1148,10 @@ def main() -> None:
bld_qtc_sdktool = 'build_sdktool'
bld_licheck = 'licheck_bld'
archive_repository = 'archive_repo'
- CMD_LIST = (bld_qtcreator, bld_qtc_sdktool, bld_licheck, archive_repository)
+ cmd_list = (bld_qtcreator, bld_qtc_sdktool, bld_licheck, archive_repository)
parser = argparse.ArgumentParser(prog="Build Wrapper", description="Manage all packaging related build steps.")
- parser.add_argument("-c", "--command", dest="command", required=True, choices=CMD_LIST, help=str(CMD_LIST))
+ parser.add_argument("-c", "--command", dest="command", required=True, choices=cmd_list, help=str(cmd_list))
parser.add_argument("--pkg-conf-file", dest="pkg_conf_file", default="", help="instead of reading various config options from env variables read them from the given file.")
parser.add_argument("-l", "--license", dest="license_", default="", help="license type: enterprise or opensource")
parser.add_argument("-b", "--build_number", dest="build_number", default="", help="Unique build number identifier")
@@ -1170,20 +1170,20 @@ def main() -> None:
args = parser.parse_args(sys.argv[1:])
# Init configuration options first
- optionDict = init_pkg_options(args)
+ option_dict = init_pkg_options(args)
# Execute given command
# QtCreator specific
if args.command == bld_qtcreator:
- handle_qt_creator_build(optionDict, parse_qtcreator_plugins(args.pkg_conf_file))
+ handle_qt_creator_build(option_dict, parse_qtcreator_plugins(args.pkg_conf_file))
# sdktool
elif args.command == bld_qtc_sdktool:
- handle_sdktool_build(optionDict)
+ handle_sdktool_build(option_dict)
# Qt Installer-Framework specific
elif args.command == bld_licheck:
- handle_qt_licheck_build(optionDict)
+ handle_qt_licheck_build(option_dict)
elif args.command == archive_repository:
- do_git_archive_repo(optionDict, args.archive_repo)
+ do_git_archive_repo(option_dict, args.archive_repo)
else:
print('Unsupported command')
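The command dispatch in main() above could also be phrased as a lookup table; a minimal sketch reusing only names visible in this hunk (the table itself is illustrative, not part of the module):

```python
# Hypothetical dispatch-table variant of the if/elif chain above; handler
# names and argument shapes are exactly the ones shown in this diff.
handlers = {
    bld_qtcreator: lambda: handle_qt_creator_build(option_dict, parse_qtcreator_plugins(args.pkg_conf_file)),
    bld_qtc_sdktool: lambda: handle_sdktool_build(option_dict),
    bld_licheck: lambda: handle_qt_licheck_build(option_dict),
    archive_repository: lambda: do_git_archive_repo(option_dict, args.archive_repo),
}
if args.command in handlers:
    handlers[args.command]()
else:
    print('Unsupported command')
```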
diff --git a/packaging-tools/content_cleaner.py b/packaging-tools/content_cleaner.py
index 904088a28..6b7671ebd 100755
--- a/packaging-tools/content_cleaner.py
+++ b/packaging-tools/content_cleaner.py
@@ -69,10 +69,10 @@ def expand_rules(rules: List[str]) -> List[str]:
def remove_empty_directories(root_path: str):
for root, dirs, _ in os.walk(root_path, topdown=True):
for name in dirs:
- dirPath = os.path.join(root, name)
- if not os.listdir(dirPath): # to check whether the dir is empty
- log.info("Removing empty directory: %s", dirPath)
- os.removedirs(dirPath)
+ dir_path = os.path.join(root, name)
+ if not os.listdir(dir_path): # to check whether the dir is empty
+ log.info("Removing empty directory: %s", dir_path)
+ os.removedirs(dir_path)
def preserve_content(input_dir: str, preserve_rules: List[str]) -> None:
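A subtlety in remove_empty_directories() above: os.removedirs() also prunes parent directories that become empty, while the top-down walk visits parents first. A bottom-up variant (a sketch, not the module's code) collapses nested empty trees in one pass:

```python
import os

def remove_empty_directories_bottom_up(root_path: str) -> None:
    # topdown=False yields leaf directories before their parents;
    # os.removedirs() then keeps deleting upwards while parents stay empty.
    for root, dirs, _ in os.walk(root_path, topdown=False):
        for name in dirs:
            dir_path = os.path.join(root, name)
            # the isdir guard skips entries an earlier removedirs() already pruned
            if os.path.isdir(dir_path) and not os.listdir(dir_path):
                os.removedirs(dir_path)
```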
diff --git a/packaging-tools/create_conan_executable.py b/packaging-tools/create_conan_executable.py
index b4a7769e7..3f6a26066 100644
--- a/packaging-tools/create_conan_executable.py
+++ b/packaging-tools/create_conan_executable.py
@@ -71,10 +71,10 @@ async def pip_install_url(pipenv: str, pip_packages: List[str], env: Dict[str, s
    checkout_folders: List[str] = []
for pkg in pip_packages or []:
if is_valid_url_path(pkg):
- destinationDir = os.path.join(os.getcwd(), "_git_tmp", pkg.split("/")[-1])
- rmtree(destinationDir, ignore_errors=True)
- await clone_repo(pkg, destinationDir, env)
- chekout_folders.append(destinationDir)
+ destination_dir = os.path.join(os.getcwd(), "_git_tmp", pkg.split("/")[-1])
+ rmtree(destination_dir, ignore_errors=True)
+ await clone_repo(pkg, destination_dir, env)
+ checkout_folders.append(destination_dir)
else:
            checkout_folders.append(pkg)
@@ -92,7 +92,7 @@ async def generate_executable(
pipenv: str, env: Dict[str, str], venv_folder: str, source_file: str, hidden_imports: List[str]
) -> str:
    # if the path does not point to an actual file, we assume it exists under the virtualenv
- _fileName = (
+ _file_name = (
source_file
if os.path.isfile(source_file)
else locate_file_from_venv(venv_folder, source_file)
@@ -105,17 +105,17 @@ async def generate_executable(
venv_folder,
"--onefile",
"--console",
- _fileName,
+ _file_name,
" ".join([f"--hidden-import={s}" for s in hidden_imports]),
]
# give it 15 mins
await async_exec_cmd(cmd=cmd, timeout=60 * 15, env=env)
- destPath = os.path.join(os.getcwd(), "dist")
- generatedFiles = [os.path.join(destPath, x) for x in os.listdir(destPath)]
- assert generatedFiles, f"No generated executables found from: {destPath}"
- log.info("Created executable: %s", generatedFiles)
- return destPath
+ dest_path = os.path.join(os.getcwd(), "dist")
+ generated_files = [os.path.join(dest_path, x) for x in os.listdir(dest_path)]
+ assert generated_files, f"No generated executables found from: {dest_path}"
+ log.info("Created executable: %s", generated_files)
+ return dest_path
async def run(
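For orientation, this is roughly how the coroutine above is driven; every argument value below is a placeholder, only the signature comes from this hunk:

```python
import asyncio
import os

# Hypothetical driver for generate_executable(); all values are placeholders.
dist_path = asyncio.run(
    generate_executable(
        pipenv="pipenv",
        env=dict(os.environ),
        venv_folder="/tmp/venv",
        source_file="conan",
        hidden_imports=["setuptools"],
    )
)
print(f"PyInstaller artifacts collected under: {dist_path}")
```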
diff --git a/packaging-tools/create_installer.py b/packaging-tools/create_installer.py
index 1b30c6cc9..46b4c14b9 100644
--- a/packaging-tools/create_installer.py
+++ b/packaging-tools/create_installer.py
@@ -244,8 +244,8 @@ def parse_component_data(task, configuration_file, configurations_base_path):
task.sdk_component_ignore_list.append(item)
# parse sdk components
for section in configuration.sections():
- sectionNameSpace = section.split(".")[0]
- if sectionNameSpace in task.package_namespace:
+ section_namespace = section.split(".")[0]
+ if section_namespace in task.package_namespace:
if section not in task.sdk_component_ignore_list:
sdk_component = SdkComponent(section, configuration, task.packages_dir_name_list,
task.archive_location_resolver, task.substitution_list, task.offline_installer)
@@ -346,14 +346,14 @@ def get_component_data(task, sdk_component, archive, install_dir, data_dir_dest,
and sdk_component.target_install_base == '/' \
and not archive.target_install_dir:
log.info("No repackaging actions required for the package, just download it directly to data directory")
- downloadedArchive = os.path.normpath(data_dir_dest + os.sep + archive.archive_name)
+ downloaded_archive = os.path.normpath(data_dir_dest + os.sep + archive.archive_name)
# start download
- download(archive.archive_uri, downloadedArchive)
+ download(archive.archive_uri, downloaded_archive)
return
- downloadedArchive = os.path.normpath(install_dir + os.sep + package_raw_name)
+ downloaded_archive = os.path.normpath(install_dir + os.sep + package_raw_name)
# start download
- download(archive.archive_uri, downloadedArchive)
+ download(archive.archive_uri, downloaded_archive)
# repackage content so that correct dir structure will get into the package
@@ -362,15 +362,15 @@ def get_component_data(task, sdk_component, archive, install_dir, data_dir_dest,
# extract contents
if archive.extract_archive == 'yes':
- extracted = extract_file(downloadedArchive, install_dir)
+ extracted = extract_file(downloaded_archive, install_dir)
# remove old package
if extracted:
- os.remove(downloadedArchive)
+ os.remove(downloaded_archive)
else:
            # ok, we could not extract the file, so it is probably not an archive;
            # if we downloaded a text file, we must ensure proper line endings
- if is_text_file(downloadedArchive):
- ensure_text_file_endings(downloadedArchive)
+ if is_text_file(downloaded_archive):
+ ensure_text_file_endings(downloaded_archive)
# perform custom action script for the extracted archive
if archive.archive_action:
@@ -450,20 +450,20 @@ def get_component_data(task, sdk_component, archive, install_dir, data_dir_dest,
def handle_set_executable(base_dir, package_finalize_items):
for item in parse_package_finalize_items(package_finalize_items, 'set_executable'):
- expectedPath = os.path.join(base_dir, item)
- if not os.path.exists(expectedPath):
- raise CreateInstallerError(f'Can not set executable bit as path not found: "{expectedPath}"')
- os.chmod(expectedPath, 0o755)
- log.info("Executable bit set for: %s", expectedPath)
+ expected_path = os.path.join(base_dir, item)
+ if not os.path.exists(expected_path):
+ raise CreateInstallerError(f'Can not set executable bit as path not found: "{expected_path}"')
+ os.chmod(expected_path, 0o755)
+ log.info("Executable bit set for: %s", expected_path)
def handle_set_licheck(task, base_dir, package_finalize_items):
- for licheckFileName in parse_package_finalize_items(package_finalize_items, 'set_licheck'):
- licheckFilePath = os.path.join(base_dir, licheckFileName)
- if not os.path.exists(licheckFilePath):
- raise CreateInstallerError(f'Can not set licheck as path not found: "{licheckFilePath}"')
- patch_qt_edition(base_dir, licheckFileName, task.build_timestamp)
- log.info("Licheck set for: %s", licheckFilePath)
+ for licheck_file_name in parse_package_finalize_items(package_finalize_items, 'set_licheck'):
+ licheck_file_path = os.path.join(base_dir, licheck_file_name)
+ if not os.path.exists(licheck_file_path):
+ raise CreateInstallerError(f'Can not set licheck as path not found: "{licheck_file_path}"')
+ patch_qt_edition(base_dir, licheck_file_name, task.build_timestamp)
+ log.info("Licheck set for: %s", licheck_file_path)
break
@@ -553,11 +553,11 @@ def remove_all_debug_libraries(install_dir):
Path(item).unlink()
# remove macOS debug libraries
elif is_macos():
- for macOS_debug_library_dir in locate_paths(install_dir, ['bin', 'lib', 'qml', 'plugins'], filters=[os.path.isdir]):
- log.info("Removing macOS debug libraries from: %s", macOS_debug_library_dir)
+ for debug_library_dir in locate_paths(install_dir, ['bin', 'lib', 'qml', 'plugins'], filters=[os.path.isdir]):
+ log.info("Removing macOS debug libraries from: %s", debug_library_dir)
debug_library_file_ending = '_debug.*'
- if os.path.exists(macOS_debug_library_dir):
- for item in locate_paths(macOS_debug_library_dir, ['*' + debug_library_file_ending]):
+ if os.path.exists(debug_library_dir):
+ for item in locate_paths(debug_library_dir, ['*' + debug_library_file_ending]):
Path(item).unlink()
else:
log.info("Host was not Windows or macOS. For Linux and others we don\'t do anything at the moment")
@@ -576,7 +576,7 @@ def create_target_components(task):
if not os.path.isfile(os.path.join(task.script_root_dir, "lrelease")):
download(os.environ.get("LRELEASE_TOOL"), task.script_root_dir)
extract_file(os.path.basename(os.environ.get("LRELEASE_TOOL")), task.script_root_dir)
- getComponentDataWork = ThreadedWork("get components data")
+ get_component_data_work = ThreadedWork("get components data")
for sdk_component in task.sdk_component_list:
sdk_component.print_component_data()
# substitute pkg_template dir names and package_name
@@ -615,13 +615,13 @@ def create_target_components(task):
if is_windows():
install_dir = win32api.GetShortPathName(install_dir)
data_dir_dest = win32api.GetShortPathName(data_dir_dest)
- getComponentDataWork.add_task(f"adding {archive.archive_name} to {sdk_component.package_name}",
- get_component_data, task, sdk_component, archive, install_dir, data_dir_dest, compress_content_dir)
+ get_component_data_work.add_task(f"adding {archive.archive_name} to {sdk_component.package_name}",
+ get_component_data, task, sdk_component, archive, install_dir, data_dir_dest, compress_content_dir)
# handle component sha1 uri
if sdk_component.component_sha1_uri:
sha1_file_dest = os.path.normpath(dest_base + 'SHA1')
- getComponentDataWork.add_task(f"getting component sha1 file for {sdk_component.package_name}",
- get_component_sha1_file, sdk_component, sha1_file_dest)
+ get_component_data_work.add_task(f"getting component sha1 file for {sdk_component.package_name}",
+ get_component_sha1_file, sdk_component, sha1_file_dest)
# maybe there is some static data
data_content_source_root = os.path.normpath(sdk_component.pkg_template_dir + os.sep + 'data')
@@ -631,7 +631,7 @@ def create_target_components(task):
if not task.dry_run:
        # start the work threaded; more than 8 parallel downloads are not that useful
- getComponentDataWork.run(min([task.max_cpu_count, cpu_count()]))
+ get_component_data_work.run(min([task.max_cpu_count, cpu_count()]))
for sdk_component in task.sdk_component_list:
# substitute tags
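The pattern above, queue named tasks and then run them with a bounded worker count, looks like this in isolation; a sketch assuming the ThreadedWork API exactly as used in this diff and a hypothetical list of (uri, path) jobs:

```python
from multiprocessing import cpu_count

from threadedwork import ThreadedWork  # local packaging-tools module

work = ThreadedWork("get components data")
for uri, dest in download_jobs:  # download_jobs is a hypothetical job list
    work.add_task(f"downloading {uri}", download, uri, dest)
# as noted above, more than 8 parallel downloads are not that useful
work.run(min(8, cpu_count()))
```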
diff --git a/packaging-tools/installer_utils.py b/packaging-tools/installer_utils.py
index d34e8f159..cfba23685 100644
--- a/packaging-tools/installer_utils.py
+++ b/packaging-tools/installer_utils.py
@@ -67,13 +67,13 @@ def is_valid_url_path(url: str) -> bool:
def download_archive(url: str, dest_dir: str) -> str:
parts = urlparse(url)
- fileName = os.path.basename(parts.path)
- destFile = os.path.join(dest_dir, fileName)
- if os.path.isfile(destFile):
- log.info("Using existing downloaded file: %s", destFile)
+ file_name = os.path.basename(parts.path)
+ dest_file = os.path.join(dest_dir, file_name)
+ if os.path.isfile(dest_file):
+ log.info("Using existing downloaded file: %s", dest_file)
else:
- wget.download(url, destFile)
- return destFile
+ wget.download(url, dest_file)
+ return dest_file
def get_extract_cmd(artifact: str) -> List[str]:
@@ -86,11 +86,11 @@ def get_extract_cmd(artifact: str) -> List[str]:
async def extract_archive(artifact: str, destination_dir: str) -> None:
log.info("Extracting file: %s into: %s", artifact, destination_dir)
- extractCmd = get_extract_cmd(artifact)
+ extract_cmd = get_extract_cmd(artifact)
try:
os.makedirs(destination_dir, exist_ok=True)
with cd(destination_dir):
- await async_exec_cmd(extractCmd)
+ await async_exec_cmd(extract_cmd)
except Exception:
log.exception("Could not extact a file %s to %s", artifact, destination_dir)
raise
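Together the two helpers form a small fetch-and-extract pipeline; a usage sketch with placeholder URL and directory:

```python
import asyncio
import os

async def fetch_and_extract(url: str, dest_dir: str) -> None:
    os.makedirs(dest_dir, exist_ok=True)
    # download_archive() reuses an already-downloaded file when present
    archive = download_archive(url, dest_dir)
    await extract_archive(archive, dest_dir)

asyncio.run(fetch_and_extract("https://example.com/tools.7z", "ifw_tools"))
```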
diff --git a/packaging-tools/libclang_training/libclangtimings2csv.py b/packaging-tools/libclang_training/libclangtimings2csv.py
index 029711c96..d66dcb77b 100644
--- a/packaging-tools/libclang_training/libclangtimings2csv.py
+++ b/packaging-tools/libclang_training/libclangtimings2csv.py
@@ -57,11 +57,11 @@ def construct_time_needed_matcher():
# : 2.5625 (100.0%) 0.1563 (100.0%) 2.7188 (100.0%) 2.7813 (100.0%)
# Note: There is always at least the wall clock time at the utmost right,
# the others in front (up to 3) are optional.
- startIndicator = r'\s*:'
- notRelevantParts = r'(\s*\d+\.\d+ \(\d+\.\d+\%\)){0,3}'
- wallClockTime = r'\s*(\d+\.\d+) \(\d+\.\d+\%\)'
+ start_indicator = r'\s*:'
+ not_relevant_parts = r'(\s*\d+\.\d+ \(\d+\.\d+\%\)){0,3}'
+ wall_clock_time = r'\s*(\d+\.\d+) \(\d+\.\d+\%\)'
- regex = startIndicator + notRelevantParts + wallClockTime
+ regex = start_indicator + not_relevant_parts + wall_clock_time
return re.compile(regex)
@@ -71,27 +71,27 @@ def csv_line(values):
def extract_records(file_content):
- recordMatcher = construct_record_matcher()
- timeNeededMatcher = construct_time_needed_matcher()
+ record_matcher = construct_record_matcher()
+ time_needed_matcher = construct_time_needed_matcher()
records = []
- previousTimeMatchEnd = -1
+ previous_time_match_end = -1
- for recordStartMatch in recordMatcher.finditer(file_content):
- timeNeededInMs = False
- if previousTimeMatchEnd >= recordStartMatch.start():
+ for record_start_match in record_matcher.finditer(file_content):
+ time_needed_in_ms = False
+ if previous_time_match_end >= record_start_match.start():
            # Oops, we've detected a missing time record.
- previousRecord = records[-1]
- records[-1] = [previousRecord[0], '-1']
- timeNeededInMs = previousRecord[1]
+ previous_record = records[-1]
+ records[-1] = [previous_record[0], '-1']
+ time_needed_in_ms = previous_record[1]
- if not timeNeededInMs:
- timeMatch = next(timeNeededMatcher.finditer(file_content, recordStartMatch.end()))
- previousTimeMatchEnd = timeMatch.end()
- timeNeededInMs = timeMatch.group(2)
+ if not time_needed_in_ms:
+ time_match = next(time_needed_matcher.finditer(file_content, record_start_match.end()))
+ previous_time_match_end = time_match.end()
+ time_needed_in_ms = time_match.group(2)
- recordId = recordStartMatch.group().strip()
- record = [recordId, timeNeededInMs]
+ record_id = record_start_match.group().strip()
+ record = [record_id, time_needed_in_ms]
records.append(record)
# for record in records: print record
@@ -109,8 +109,8 @@ def records_to_string(records):
def convert(input_file, column_label=None):
if not column_label:
column_label = os.path.basename(input_file)
- with open(input_file, 'r', encoding="utf-8") as fileContent:
- records = [[column_label, column_label]] + extract_records(fileContent.read())
+ with open(input_file, 'r', encoding="utf-8") as file_content:
+ records = [[column_label, column_label]] + extract_records(file_content.read())
return records_to_string(records)
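A quick sanity check of the matcher against the sample line quoted in the comment above: the optional group absorbs up to three leading times, and group(2) captures the wall clock time on the far right.

```python
# Uses construct_time_needed_matcher() as defined above.
matcher = construct_time_needed_matcher()
line = " : 2.5625 (100.0%) 0.1563 (100.0%) 2.7188 (100.0%) 2.7813 (100.0%)"
match = next(matcher.finditer(line))
assert match.group(2) == "2.7813"  # the rightmost, wall clock, time
```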
diff --git a/packaging-tools/libclang_training/mergeCsvFiles.py b/packaging-tools/libclang_training/mergeCsvFiles.py
index 16f0410bc..3bc2309f0 100644
--- a/packaging-tools/libclang_training/mergeCsvFiles.py
+++ b/packaging-tools/libclang_training/mergeCsvFiles.py
@@ -42,7 +42,7 @@ class Global:
class FileWithValues:
def __init__(self, file_path, tag, values):
- self.filePath = file_path
+ self.file_path = file_path
self.tag = tag
self.values = values
@@ -65,8 +65,8 @@ def read_csv(file_path, delimiter):
def read_csv_files(file_paths):
files = []
- for filePath in file_paths:
- with open(filePath, 'rt', encoding="utf-8") as f:
+ for file_path in file_paths:
+ with open(file_path, 'rt', encoding="utf-8") as f:
reader = csv.reader(f, delimiter=Global.Delimiter, quoting=csv.QUOTE_NONE)
values = []
@@ -76,31 +76,31 @@ def read_csv_files(file_paths):
tag = values[0][0] # remember column label
values = values[1:] # skip header
- myFile = FileWithValues(filePath, tag, values)
- files.append(myFile)
+ my_file = FileWithValues(file_path, tag, values)
+ files.append(my_file)
return files
def check_consistency(files):
- referenceEntry = files[0]
- referenceEntrySize = len(referenceEntry.values)
- referenceEntryIdentifiers = [v[0] for v in referenceEntry.values]
+ reference_entry = files[0]
+ reference_entry_size = len(reference_entry.values)
+ reference_entry_identifiers = [v[0] for v in reference_entry.values]
# Ensure same size of records
for f in files:
- if not len(f.values) == referenceEntrySize:
- print(f"error: number of entries mismatch between '{referenceEntry.filePath}' and '{f.filePath}'.", file=sys.stderr)
+ if not len(f.values) == reference_entry_size:
+ print(f"error: number of entries mismatch between '{reference_entry.file_path}' and '{f.file_path}'.", file=sys.stderr)
sys.exit(1)
# Ensure same identifier on the left
for f in files:
identifiers = [v[0] for v in f.values]
- if not identifiers == referenceEntryIdentifiers:
- print(f"error: mismatch between identifers in first column between '{referenceEntry.filePath}' and '{f.filePath}'.", file=sys.stderr)
+ if not identifiers == reference_entry_identifiers:
+ print(f"error: mismatch between identifers in first column between '{reference_entry.file_path}' and '{f.file_path}'.", file=sys.stderr)
sys.exit(1)
- return referenceEntryIdentifiers
+ return reference_entry_identifiers
def merge_files_helper(output_file_path, reference_identifiers, files):
@@ -120,8 +120,8 @@ def merge_files_helper(output_file_path, reference_identifiers, files):
def merge_files(output_file_path, files_to_merge):
files = read_csv_files(files_to_merge)
- referenceIdentifiers = check_consistency(files)
- merge_files_helper(output_file_path, referenceIdentifiers, files)
+ reference_identifiers = check_consistency(files)
+ merge_files_helper(output_file_path, reference_identifiers, files)
def print_help_and_exit():
@@ -143,9 +143,9 @@ def main():
if len(args) <= 2:
print_help_and_exit()
- outputFile = args[0]
- filesToMerge = args[1:]
- merge_files(outputFile, filesToMerge)
+ output_file = args[0]
+ files_to_merge = args[1:]
+ merge_files(output_file, files_to_merge)
if __name__ == "__main__":
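merge_files_helper() itself is outside this hunk; judging from the structures above (FileWithValues with .tag and .values rows of [identifier, value]), its job is roughly the following, sketched under that assumption:

```python
# Hedged sketch only, not the module's code: one header row of column
# labels, then one row per shared identifier with each file's value.
def merge_rows(reference_identifiers, files):
    rows = [["identifier"] + [f.tag for f in files]]
    for i, identifier in enumerate(reference_identifiers):
        rows.append([identifier] + [f.values[i][1] for f in files])
    return rows
```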
diff --git a/packaging-tools/libclang_training/runBatchFiles.py b/packaging-tools/libclang_training/runBatchFiles.py
index 47adcd56c..c4910fdb6 100644
--- a/packaging-tools/libclang_training/runBatchFiles.py
+++ b/packaging-tools/libclang_training/runBatchFiles.py
@@ -104,14 +104,14 @@ class Config:
Config.TargetLibClangDll = os.environ['QTC_CLANG_BATCH_CONFIG_TARGET_LIBCLANG']
- libClangDlls = os.environ['QTC_CLANG_BATCH_CONFIG_LIBCLANGS']
- Config.LibClangDlls = libClangDlls.split(os.pathsep)
+ libclang_dlls = os.environ['QTC_CLANG_BATCH_CONFIG_LIBCLANGS']
+ Config.LibClangDlls = libclang_dlls.split(os.pathsep)
assert len(Config.LibClangDlls) >= 1
for dll in Config.LibClangDlls:
check_existence_or_die(dll)
- batchFiles = os.environ['QTC_CLANG_BATCH_CONFIG_FILES']
- Config.BatchFiles = batchFiles.split(os.pathsep)
+ batch_files = os.environ['QTC_CLANG_BATCH_CONFIG_FILES']
+ Config.BatchFiles = batch_files.split(os.pathsep)
assert len(Config.BatchFiles) >= 1
for b in Config.BatchFiles:
check_existence_or_die(b)
@@ -135,20 +135,20 @@ class Config:
class RunRecord:
def __init__(self, libclang_id, batch_file_path):
- self.libClangId = libclang_id
+ self.libclang_id = libclang_id
parts = os.path.basename(batch_file_path).split('.')
- self.batchFileId = '.'.join(parts[0:-1]) # Remove suffix
- self.logFilePath = self.batchFileId + '___' + libclang_id + '.log'
- self.csvFilePath = None
+ self.batch_file_id = '.'.join(parts[0:-1]) # Remove suffix
+ self.log_file_path = self.batch_file_id + '___' + libclang_id + '.log'
+ self.csv_file_path = None
class DebugView:
def __init__(self, log_file_path):
- self.logFilePath = log_file_path
+ self.log_file_path = log_file_path
self.executable = 'dbgview.exe'
def start_async(self):
- args = [self.executable, '/accepteula', '/l', self.logFilePath]
+ args = [self.executable, '/accepteula', '/l', self.log_file_path]
verbose_start(args)
self.proc = Popen(args, shell=False) # pylint: disable=R1732
sleep(2)
@@ -175,22 +175,22 @@ def create_environment(batch_file_path):
def run_sync_and_log_output_windows(args, batch_file_path, log_file_path):
- debugView = DebugView(log_file_path)
- debugView.start_async()
+ debug_view = DebugView(log_file_path)
+ debug_view.start_async()
verbose_start(args)
with Popen(args, env=create_environment(batch_file_path)) as p:
p.communicate()
- debugView.stop()
+ debug_view.stop()
check_exit_code_or_die(p.returncode, args)
def run_sync_and_log_output_unix(args, batch_file_path, log_file_path):
- with open(log_file_path, "w", encoding="utf-8") as logFile:
+ with open(log_file_path, "w", encoding="utf-8") as log_file:
verbose_start(args)
- with Popen(args, stdout=logFile, stderr=STDOUT, env=create_environment(batch_file_path)) as p:
+ with Popen(args, stdout=log_file, stderr=STDOUT, env=create_environment(batch_file_path)) as p:
p.communicate()
check_exit_code_or_die(p.returncode, args)
@@ -216,11 +216,11 @@ def run_qtcreator_with_batch_file(batch_file_path, log_file_path):
def convert_log_file_to_csv_file(log_file_path, column_label):
output = libclangtimings2csv.convert(log_file_path, column_label)
- csvFilePath = log_file_path + '.csv'
- with open(csvFilePath, 'w', encoding="utf-8") as f:
+ csv_file_path = log_file_path + '.csv'
+ with open(csv_file_path, 'w', encoding="utf-8") as f:
f.write(output)
- return csvFilePath
+ return csv_file_path
def log_file_from_id(log_file_id):
@@ -236,10 +236,10 @@ def create_dir(dir_path):
def create_backup_file(file_path):
if os.path.exists(file_path):
- backupPath = file_path[:-4] + ".backup_" + str(time()) + ".log"
+ backup_path = file_path[:-4] + ".backup_" + str(time()) + ".log"
if Config.Verbose:
print(f"info: creating backup of already existing '{file_path}'")
- copyfile(file_path, backupPath)
+ copyfile(file_path, backup_path)
def print_duration(s):
@@ -249,34 +249,34 @@ def print_duration(s):
def process_batch_file_timed(libclang_id, batch_file_path):
- timeStarted = time()
+ time_started = time()
print(f"processing {batch_file_path}", end=' ')
- runRecord = process_batch_file(libclang_id, batch_file_path)
+ run_record = process_batch_file(libclang_id, batch_file_path)
- print_duration(time() - timeStarted)
+ print_duration(time() - time_started)
- return runRecord
+ return run_record
def process_batch_file(libclang_id, batch_file_path):
- runRecord = RunRecord(libclang_id, batch_file_path)
- logFilePath = os.path.join(Config.LogDir, runRecord.logFilePath)
+ run_record = RunRecord(libclang_id, batch_file_path)
+ log_file_path = os.path.join(Config.LogDir, run_record.log_file_path)
create_dir(Config.LogDir)
- create_backup_file(logFilePath)
+ create_backup_file(log_file_path)
- run_qtcreator_with_batch_file(batch_file_path, logFilePath)
+ run_qtcreator_with_batch_file(batch_file_path, log_file_path)
- csvFilePath = convert_log_file_to_csv_file(logFilePath, runRecord.libClangId)
- runRecord.csvFilePath = csvFilePath
+ csv_file_path = convert_log_file_to_csv_file(log_file_path, run_record.libclang_id)
+ run_record.csv_file_path = csv_file_path
- return runRecord
+ return run_record
def get_libclang_id(libclang_dll):
- fileName = os.path.basename(libclang_dll)
- parts = fileName.split('.')
+ file_name = os.path.basename(libclang_dll)
+ parts = file_name.split('.')
identifier = '.'.join(parts[0:-1])
return identifier
@@ -290,49 +290,49 @@ def run_qtcreator_with_libclang(libclang_dll):
print("")
switch_libclang(libclang_dll)
- runRecords = []
- libClangId = get_libclang_id(libclang_dll)
- for batchFile in Config.BatchFiles:
- runRecord = process_batch_file_timed(libClangId, batchFile)
- runRecords.append(runRecord)
+ run_records = []
+ libclang_id = get_libclang_id(libclang_dll)
+ for batch_file in Config.BatchFiles:
+ run_record = process_batch_file_timed(libclang_id, batch_file)
+ run_records.append(run_record)
- return runRecords
+ return run_records
def log_id_part_from_libclang_dll(libclang_dll):
- fileName = os.path.basename(libclang_dll)
- parts = fileName.split('.')
- fileName = '.'.join(parts[1:-1])
- return fileName
+ file_name = os.path.basename(libclang_dll)
+ parts = file_name.split('.')
+ file_name = '.'.join(parts[1:-1])
+ return file_name
def merge_generated_csv_files(run_records):
- batchFileId2RunRecord = {}
+ batch_file_id_2_run_record = {}
for rr in run_records:
- newValue = [rr]
- if rr.batchFileId in batchFileId2RunRecord:
- newValue = batchFileId2RunRecord[rr.batchFileId]
- newValue.append(rr)
- batchFileId2RunRecord[rr.batchFileId] = newValue
+ new_value = [rr]
+ if rr.batch_file_id in batch_file_id_2_run_record:
+ new_value = batch_file_id_2_run_record[rr.batch_file_id]
+ new_value.append(rr)
+ batch_file_id_2_run_record[rr.batch_file_id] = new_value
- for batchFileId in batchFileId2RunRecord:
- csvFilePaths = [rr.csvFilePath for rr in batchFileId2RunRecord[batchFileId]]
- mergeFilePath = os.path.join(Config.LogDir, batchFileId + ".csv")
+ for batch_file_id in batch_file_id_2_run_record:
+ csv_file_paths = [rr.csv_file_path for rr in batch_file_id_2_run_record[batch_file_id]]
+ merge_file_path = os.path.join(Config.LogDir, batch_file_id + ".csv")
- mergeCsvFiles.merge_files(mergeFilePath, csvFilePaths)
- print(f"generated: {mergeFilePath}")
+ mergeCsvFiles.merge_files(merge_file_path, csv_file_paths)
+ print(f"generated: {merge_file_path}")
def main():
Config.initialize_from_environment()
Config.dump()
- runRecords = []
- for libClangDll in Config.LibClangDlls:
- runRecords += run_qtcreator_with_libclang(libClangDll)
+ run_records = []
+ for libclang_dll in Config.LibClangDlls:
+ run_records += run_qtcreator_with_libclang(libclang_dll)
print()
- merge_generated_csv_files(runRecords)
+ merge_generated_csv_files(run_records)
if __name__ == "__main__":
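A side note on merge_generated_csv_files() above: the grow-a-list-per-key loop is the textbook case for dict.setdefault. This sketch is behaviorally equivalent to the shown loop:

```python
# Equivalent grouping of run records by batch file id, in two lines.
batch_file_id_2_run_record = {}
for rr in run_records:
    batch_file_id_2_run_record.setdefault(rr.batch_file_id, []).append(rr)
```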
diff --git a/packaging-tools/notarize.py b/packaging-tools/notarize.py
index 8692dcc87..736782293 100755
--- a/packaging-tools/notarize.py
+++ b/packaging-tools/notarize.py
@@ -79,8 +79,8 @@ async def request_cmd(args, cmd):
if attempts:
log.info("Waiting a bit before next attempt..")
await asyncio.sleep(60)
- except CalledProcessError as commandErr:
- log.critical("Failed to run command: %s", str(commandErr))
+ except CalledProcessError as command_err:
+ log.critical("Failed to run command: %s", str(command_err))
raise
except Exception as e:
log.critical("Something failed: %s", str(e))
@@ -95,34 +95,34 @@ async def request_notarization(args):
cmd += ['--primary-bundle-id', args.bundle_id, '-f', args.dmg]
data = await request_cmd(args, cmd)
- requestUUID = parse_value_from_data("RequestUUID", data)
- if not requestUUID:
+ request_uuid = parse_value_from_data("RequestUUID", data)
+ if not request_uuid:
raise NotarizationError(f"Failed to notarize app:\n\n{data}")
- return requestUUID.split("=")[-1].strip()
+ return request_uuid.split("=")[-1].strip()
async def poll_notarization_completed(args, uuid):
cmd = ['xcrun', 'altool', '-u', args.user, '-p', args.passwd, '--notarization-info', uuid]
attempts = 180
- pollInterval = 60 # attempts * pollInterval = 3h
+ poll_interval = 60 # attempts * poll_interval = 3h
while attempts:
data = await request_cmd(args, cmd)
- statusCode = parse_value_from_data("Status Code:", data)
+ status_code = parse_value_from_data("Status Code:", data)
- if statusCode == "0":
+ if status_code == "0":
log.info("Notarization succeeded for: %s", args.dmg)
log.info("%s", data)
return True
- if statusCode == "2":
+ if status_code == "2":
log.info("Notarization failed for: %s", args.dmg)
raise NotarizationError(f"Notarization failed:\n\n{data}")
log.info("Notarization not ready yet for: %s", args.dmg)
log.info("%s", data)
attempts -= 1
- log.info("Sleeping %is before next poll attempt (attempts left: %i)", pollInterval, attempts)
- await asyncio.sleep(pollInterval)
+ log.info("Sleeping %is before next poll attempt (attempts left: %i)", poll_interval, attempts)
+ await asyncio.sleep(poll_interval)
log.warning("Notarization poll timeout..")
return False
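The loop above is a bounded-retry poll: attempts * poll_interval caps the total wait at three hours. Its generic shape, reduced to a sketch where check() stands in for one --notarization-info request:

```python
import asyncio

async def poll_until(check, attempts: int = 180, poll_interval: int = 60) -> bool:
    # attempts * poll_interval bounds the total wait (180 * 60 s = 3 h)
    while attempts:
        if await check():
            return True
        attempts -= 1
        await asyncio.sleep(poll_interval)
    return False
```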
diff --git a/packaging-tools/patch_qt.py b/packaging-tools/patch_qt.py
index b709e73c5..b0294ed44 100755
--- a/packaging-tools/patch_qt.py
+++ b/packaging-tools/patch_qt.py
@@ -37,8 +37,8 @@ from fileinput import FileInput
def _file_iterator(artifacts_dir):
print(f"Patching build time paths from: {artifacts_dir}")
for root, _, files in os.walk(artifacts_dir):
- for fileName in files:
- yield os.path.join(os.path.join(root, fileName))
+ for file_name in files:
+ yield os.path.join(root, file_name)
def _get_patchers(product):
@@ -51,16 +51,16 @@ def _get_patchers(product):
def patch_files(artifacts_dir, product):
print(f"Patching files from: {artifacts_dir}")
patchers = _get_patchers(product)
- for filePath in _file_iterator(artifacts_dir):
+ for file_path in _file_iterator(artifacts_dir):
for patcher in patchers:
- patcher(filePath)
+ patcher(file_path)
def patch_qt_edition(artifacts_dir, licheck_file_name, release_date):
for root, _, files in os.walk(artifacts_dir):
- for fileName in files:
- if fileName == 'qconfig.pri':
- _patch_qt_edition(os.path.join(root, fileName), licheck_file_name, release_date)
+ for file_name in files:
+ if file_name == 'qconfig.pri':
+ _patch_qt_edition(os.path.join(root, file_name), licheck_file_name, release_date)
return
@@ -79,8 +79,8 @@ def _patch_qt_edition(file_path, licheck_file_name, release_date):
def patch_qconfig_pri(file_path):
for line in FileInput(file_path, inplace=True):
- patchedLine = patch_qconfig_pri_from_line(line)
- print(patchedLine.rstrip('\n'))
+ patched_line = patch_qconfig_pri_from_line(line)
+ print(patched_line.rstrip('\n'))
def patch_qconfig_pri_from_line(line):
@@ -94,8 +94,8 @@ def patch_qconfig_pri_from_line(line):
def erase_qmake_prl_build_dir(file_path):
# Erase lines starting with 'QMAKE_PRL_BUILD_DIR' from .prl files
for line in FileInput(file_path, inplace=True):
- patchedLine = patch_qmake_prl_build_dir_from_line(line)
- print(patchedLine.rstrip('\n'))
+ patched_line = patch_qmake_prl_build_dir_from_line(line)
+ print(patched_line.rstrip('\n'))
def patch_qmake_prl_build_dir_from_line(line):
@@ -104,8 +104,8 @@ def patch_qmake_prl_build_dir_from_line(line):
def patch_absolute_lib_paths_from_file(file_path):
for line in FileInput(file_path, inplace=True):
- patchedLine = patch_absolute_lib_paths_from_line(line, file_path.split(".")[-1])
- print(patchedLine.rstrip('\n'))
+ patched_line = patch_absolute_lib_paths_from_line(line, file_path.split(".")[-1])
+ print(patched_line.rstrip('\n'))
def patch_absolute_lib_paths_from_line(line, file_extension):
@@ -121,21 +121,21 @@ def patch_absolute_lib_paths_from_line(line, file_extension):
def _remove_whitespace(line):
"""Remove white space from paths if found inside quoted blocks."""
- eraseEnabled = False
+ erase_enabled = False
result = ""
for char in line:
if char == "\"":
# toggle on/off
- eraseEnabled = not eraseEnabled
- if eraseEnabled and char == " ":
+ erase_enabled = not erase_enabled
+ if erase_enabled and char == " ":
continue
result += char
return result
if file_extension == "cmake":
# from cmake files patch only lines containing "find_extra_libs"
- cmakeFindExtraLibsSearchRegexp = re.compile(r'_*._find_extra_libs\(')
- if not re.search(cmakeFindExtraLibsSearchRegexp, line):
+ cmake_find_extra_libs_search_regexp = re.compile(r'_*._find_extra_libs\(')
+ if not re.search(cmake_find_extra_libs_search_regexp, line):
return line
expressions = [
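All the patchers above share one mechanism worth spelling out: FileInput(..., inplace=True) redirects stdout into the file being read, so a bare print() writes each (possibly patched) line back. A condensed sketch of that shared shape:

```python
from fileinput import FileInput

def patch_file_inplace(file_path: str, patch_line) -> None:
    # inplace=True backs up the original and points sys.stdout at the file,
    # so print() rewrites the file line by line.
    for line in FileInput(file_path, inplace=True):
        print(patch_line(line).rstrip('\n'))
```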
diff --git a/packaging-tools/python_env.py b/packaging-tools/python_env.py
index 288bee1db..fca3ee055 100644
--- a/packaging-tools/python_env.py
+++ b/packaging-tools/python_env.py
@@ -52,21 +52,21 @@ class PythonEnvError(Exception):
def get_env(python_installation: str) -> Dict[str, str]:
env: Dict[str, str] = {}
system = platform.system().lower()
- libDir = os.path.join(python_installation, "lib")
- binDir = os.path.join(python_installation, "bin")
+ lib_dir = os.path.join(python_installation, "lib")
+ bin_dir = os.path.join(python_installation, "bin")
if "windows" in system:
- binDir = os.path.join(python_installation, "PCbuild", "amd64")
- assert os.path.isdir(binDir), f"The python binary directory did not exist: {binDir}"
- env["LIB_PATH"] = binDir
- env["PATH"] = binDir + ";" + os.environ.get("PATH", "")
+ bin_dir = os.path.join(python_installation, "PCbuild", "amd64")
+ assert os.path.isdir(bin_dir), f"The python binary directory did not exist: {bin_dir}"
+ env["LIB_PATH"] = bin_dir
+ env["PATH"] = bin_dir + ";" + os.environ.get("PATH", "")
env["SYSTEMROOT"] = os.environ["SYSTEMROOT"]
env["HOMEPATH"] = os.environ["HOMEPATH"]
elif "darwin" in system:
- env["DYLD_LIBRARY_PATH"] = libDir
- env["PATH"] = binDir + ":" + os.environ.get("PATH", "")
+ env["DYLD_LIBRARY_PATH"] = lib_dir
+ env["PATH"] = bin_dir + ":" + os.environ.get("PATH", "")
else:
- env["LD_LIBRARY_PATH"] = libDir
- env["PATH"] = binDir + ":" + os.environ.get("PATH", "")
+ env["LD_LIBRARY_PATH"] = lib_dir
+ env["PATH"] = bin_dir + ":" + os.environ.get("PATH", "")
return env
@@ -78,17 +78,17 @@ def locate_venv(pipenv: str, env: Dict[str, str]) -> str:
async def install_pip(get_pip_file: str, python_installation: str) -> str:
log.info("Installing pip...")
if is_valid_url_path(get_pip_file):
- pipTmpDir = os.path.join(os.getcwd(), "pip_install_tmp")
- rmtree(pipTmpDir, ignore_errors=True)
- os.makedirs(pipTmpDir)
- get_pip_file = download_archive(get_pip_file, pipTmpDir)
+ pip_tmp_dir = os.path.join(os.getcwd(), "pip_install_tmp")
+ rmtree(pip_tmp_dir, ignore_errors=True)
+ os.makedirs(pip_tmp_dir)
+ get_pip_file = download_archive(get_pip_file, pip_tmp_dir)
elif not (get_pip_file and os.path.isfile(get_pip_file)):
raise PythonEnvError(f"Could not install pip from: {get_pip_file}")
- pythonExe = os.path.join(python_installation, "PCBuild", "amd64", "python.exe")
- assert os.path.isfile(pythonExe), f"The 'python' executable did not exist: {pythonExe}"
- installPipCmd = [pythonExe, get_pip_file]
- await async_exec_cmd(installPipCmd)
+ python_exe = os.path.join(python_installation, "PCBuild", "amd64", "python.exe")
+ assert os.path.isfile(python_exe), f"The 'python' executable did not exist: {python_exe}"
+ install_pip_cmd = [python_exe, get_pip_file]
+ await async_exec_cmd(install_pip_cmd)
return os.path.join(python_installation, "Scripts", "pip3.exe")
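A usage sketch for get_env(): merge the returned entries over the current environment before launching anything from the relocated Python (the install path is a placeholder):

```python
import os
import subprocess

env = {**os.environ, **get_env("/opt/python-3.8")}  # placeholder path
subprocess.run(["python3", "--version"], env=env, check=True)
```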
diff --git a/packaging-tools/release_repo_meta_update.py b/packaging-tools/release_repo_meta_update.py
index bd7e523c9..c227f3c05 100755
--- a/packaging-tools/release_repo_meta_update.py
+++ b/packaging-tools/release_repo_meta_update.py
@@ -58,12 +58,12 @@ async def fetch_repogen(ifw_tools_url: str) -> str:
assert is_valid_url_path(ifw_tools_url)
log.info("Preparing ifw tools: %s", ifw_tools_url)
# fetch the tool first
- currentDir = os.getcwd()
- ifw_tools_dir = os.path.join(currentDir, "ifw_tools")
+ current_dir = os.getcwd()
+ ifw_tools_dir = os.path.join(current_dir, "ifw_tools")
if not os.path.isdir(ifw_tools_dir):
os.makedirs(ifw_tools_dir)
- destFile = download_archive(ifw_tools_url, ifw_tools_dir)
- await extract_archive(destFile, ifw_tools_dir)
+ dest_file = download_archive(ifw_tools_url, ifw_tools_dir)
+ await extract_archive(dest_file, ifw_tools_dir)
tool_name = "repogen"
return locate_path(ifw_tools_dir, [tool_name])
diff --git a/packaging-tools/release_repo_updater.py b/packaging-tools/release_repo_updater.py
index 5771ea0fe..53b045b92 100755
--- a/packaging-tools/release_repo_updater.py
+++ b/packaging-tools/release_repo_updater.py
@@ -165,22 +165,22 @@ def has_connection_error(output: str) -> bool:
def execute_remote_cmd(remote_server: str, remote_server_home: str, cmd: List[str], script_file_name: str, timeout=60 * 60) -> None:
- remoteTmpDir = os.path.join(remote_server_home, "remote_scripts", timestamp)
- create_remote_paths(remote_server, [remoteTmpDir])
- remoteScript = create_remote_script(remote_server, cmd, remoteTmpDir, script_file_name)
- log.info("Created remote script: [%s] with contents: %s", remoteScript, ' '.join(cmd))
- execute_remote_script(remote_server, remoteScript, timeout)
+ remote_tmp_dir = os.path.join(remote_server_home, "remote_scripts", timestamp)
+ create_remote_paths(remote_server, [remote_tmp_dir])
+ remote_script = create_remote_script(remote_server, cmd, remote_tmp_dir, script_file_name)
+ log.info("Created remote script: [%s] with contents: %s", remote_script, ' '.join(cmd))
+ execute_remote_script(remote_server, remote_script, timeout)
def create_remote_script(server: str, cmd: List[str], remote_script_path: str, script_file_name: str) -> str:
- with TemporaryDirectory(dir=os.getcwd()) as tmpBaseDir:
- tempFilePath = os.path.join(tmpBaseDir, script_file_name)
- with open(tempFilePath, 'w+', encoding="utf-8") as f:
+ with TemporaryDirectory(dir=os.getcwd()) as tmp_base_dir:
+ temp_file_path = os.path.join(tmp_base_dir, script_file_name)
+ with open(temp_file_path, 'w+', encoding="utf-8") as f:
f.write("#!/usr/bin/env bash\n")
f.write(' '.join(cmd))
- os.chmod(tempFilePath, 0o755)
+ os.chmod(temp_file_path, 0o755)
create_remote_paths(server, [remote_script_path])
- cmd = ['rsync', '-avzh', tempFilePath, server + ":" + remote_script_path]
+ cmd = ['rsync', '-avzh', temp_file_path, server + ":" + remote_script_path]
exec_cmd(cmd, timeout=60 * 60)
return os.path.join(remote_script_path, script_file_name)
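For reference, the entry point above is used like this; the server and paths are placeholders, the signature is the one shown in this hunk:

```python
# Hypothetical invocation: wraps the command in a bash script, rsyncs it
# into the server's remote_scripts/<timestamp> dir and executes it there.
execute_remote_cmd(
    remote_server="user@staging.example.com",
    remote_server_home="/home/user",
    cmd=["ls", "-la", "/data/online_repositories"],
    script_file_name="list_repos.sh",
    timeout=5 * 60,
)
```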
@@ -205,23 +205,23 @@ async def upload_ifw_to_remote(ifw_tools: str, remote_server: str, remote_server
assert is_valid_url_path(ifw_tools)
log.info("Preparing ifw tools: %s", ifw_tools)
# fetch the tool first
- currentDir = os.getcwd()
- ifwToolsDir = os.path.join(currentDir, "ifw_tools")
- if not os.path.isdir(ifwToolsDir):
- os.makedirs(ifwToolsDir)
- destFile = download_archive(ifw_tools, ifwToolsDir)
- await extract_archive(destFile, ifwToolsDir)
- repogen = locate_path(ifwToolsDir, ["repogen"], filters=[os.path.isfile])
- repogenDir = os.path.dirname(repogen)
+ current_dir = os.getcwd()
+ ifw_tools_dir = os.path.join(current_dir, "ifw_tools")
+ if not os.path.isdir(ifw_tools_dir):
+ os.makedirs(ifw_tools_dir)
+ dest_file = download_archive(ifw_tools, ifw_tools_dir)
+ await extract_archive(dest_file, ifw_tools_dir)
+ repogen = locate_path(ifw_tools_dir, ["repogen"], filters=[os.path.isfile])
+ repogen_dir = os.path.dirname(repogen)
# upload to server
- remoteTmpDir = os.path.join(remote_server_home, "ifw_tools", timestamp)
+ remote_tmp_dir = os.path.join(remote_server_home, "ifw_tools", timestamp)
# create tmp dir at remote
- create_remote_paths(remote_server, [remoteTmpDir])
+ create_remote_paths(remote_server, [remote_tmp_dir])
# upload content
- cmd = ['rsync', '-avzh', repogenDir + "/", remote_server + ":" + remoteTmpDir]
+ cmd = ['rsync', '-avzh', repogen_dir + "/", remote_server + ":" + remote_tmp_dir]
exec_cmd(cmd, timeout=60 * 60)
    # return the path on the remote pointing to repogen
- return os.path.join(remoteTmpDir, "repogen")
+ return os.path.join(remote_tmp_dir, "repogen")
def check_repogen_output(output: str) -> None:
@@ -270,9 +270,9 @@ def is_safe_directory(paths: List[str]) -> None:
path = os.path.abspath(_path)
if path == "/":
raise PackagingError(f"You should not make modifications directly to root: {path}")
- illegalDirectories = ("/bin", "/boot", "/sys", "/sbin", "/root", "/lib", "/dev")
- if path.startswith(illegalDirectories):
- raise PackagingError(f"You should not make modifications ('{path}') under these directories: {illegalDirectories}")
+ illegal_directories = ("/bin", "/boot", "/sys", "/sbin", "/root", "/lib", "/dev")
+ if path.startswith(illegal_directories):
+ raise PackagingError(f"You should not make modifications ('{path}') under these directories: {illegal_directories}")
if path in ["~", os.path.expanduser("~"), "/home"]:
raise PackagingError(f"You should not make modifications directly to home directory: {path}")
@@ -328,20 +328,20 @@ def create_remote_repository_backup(server: str, remote_repo_path: str) -> str:
def sync_production_repositories_to_s3(server: str, s3: str, updated_production_repositories: Dict[str, str],
remote_root_path: str, license_: str) -> None:
- remoteLogsBasePath = os.path.join(remote_root_path, license_, "s3_sync_logs")
- create_remote_paths(server, [remoteLogsBasePath])
+ remote_logs_base_path = os.path.join(remote_root_path, license_, "s3_sync_logs")
+ create_remote_paths(server, [remote_logs_base_path])
- for repo, remoteProductionRepoFullPath in updated_production_repositories.items():
- remoteLogFileBase = os.path.join(remoteLogsBasePath, repo, "log-s3-" + timestamp)
- create_remote_paths(server, [os.path.dirname(remoteLogFileBase)])
+ for repo, remote_production_repo_full_path in updated_production_repositories.items():
+ remote_log_file_base = os.path.join(remote_logs_base_path, repo, "log-s3-" + timestamp)
+ create_remote_paths(server, [os.path.dirname(remote_log_file_base)])
- s3RepoPath = os.path.join(s3, repo)
- tipPrefix = repo.replace("/", "-") + "-"
+ s3_repo_path = os.path.join(s3, repo)
+ tip_prefix = repo.replace("/", "-") + "-"
- remoteLogFile = remoteLogFileBase + "-7z.txt"
- sync_production_7z_to_s3(server, remote_root_path, remoteProductionRepoFullPath, s3RepoPath, remoteLogFile, tipPrefix)
- remoteLogFile = remoteLogFileBase + "-xml.txt"
- sync_production_xml_to_s3(server, remote_root_path, remoteProductionRepoFullPath, s3RepoPath, remoteLogFile, tipPrefix)
+ remote_log_file = remote_log_file_base + "-7z.txt"
+ sync_production_7z_to_s3(server, remote_root_path, remote_production_repo_full_path, s3_repo_path, remote_log_file, tip_prefix)
+ remote_log_file = remote_log_file_base + "-xml.txt"
+ sync_production_xml_to_s3(server, remote_root_path, remote_production_repo_full_path, s3_repo_path, remote_log_file, tip_prefix)
def sync_production_7z_to_s3(server: str, server_home: str, production_repo_path: str, s3_repo_path: str, remote_log_file: str, tip: str) -> None:
@@ -362,29 +362,29 @@ def sync_production_xml_to_s3(server: str, server_home: str, production_repo_pat
async def sync_production_repositories_to_ext(server: str, ext: str, updated_production_repositories: Dict[str, str],
remote_root_path: str, license_: str) -> None:
- remoteLogsBasePath = os.path.join(remote_root_path, license_, "ext_sync_logs")
- create_remote_paths(server, [remoteLogsBasePath])
+ remote_logs_base_path = os.path.join(remote_root_path, license_, "ext_sync_logs")
+ create_remote_paths(server, [remote_logs_base_path])
- extServer, extBasePath = parse_ext(ext)
+ ext_server, ext_base_path = parse_ext(ext)
- for repo, remoteProductionRepoFullPath in updated_production_repositories.items():
- remoteLogFile = os.path.join(remoteLogsBasePath, repo, "log-ext-" + timestamp + ".txt")
- create_remote_paths(server, [os.path.dirname(remoteLogFile)])
+ for repo, remote_production_repo_full_path in updated_production_repositories.items():
+ remote_log_file = os.path.join(remote_logs_base_path, repo, "log-ext-" + timestamp + ".txt")
+ create_remote_paths(server, [os.path.dirname(remote_log_file)])
- extRepoPath = os.path.join(extBasePath, repo)
- tipPrefix = repo.replace("/", "-") + "-"
+ ext_repo_path = os.path.join(ext_base_path, repo)
+ tip_prefix = repo.replace("/", "-") + "-"
- await ensure_ext_repo_paths(server, extServer, extRepoPath) # rsync can not create missing nested directories
- cmd = ["rsync", "-r", "--omit-dir-times", "--delete-delay", "--progress", remoteProductionRepoFullPath + "/", extServer + ":" + extRepoPath]
- spawn_remote_background_task(server, remote_root_path, cmd, remoteLogFile, tip=tipPrefix + "ext")
+ await ensure_ext_repo_paths(server, ext_server, ext_repo_path) # rsync can not create missing nested directories
+ cmd = ["rsync", "-r", "--omit-dir-times", "--delete-delay", "--progress", remote_production_repo_full_path + "/", ext_server + ":" + ext_repo_path]
+ spawn_remote_background_task(server, remote_root_path, cmd, remote_log_file, tip=tip_prefix + "ext")
def spawn_remote_background_task(server: str, server_home: str, remote_cmd: List[str], remote_log_file: str, tip: str) -> None:
if not tip:
tip = ""
cmd = remote_cmd + ["2>&1", "|", "tee", remote_log_file]
- remoteScriptFileName = "sync-production-" + tip + "-" + timestamp + ".sh"
- execute_remote_cmd(server, server_home, cmd, remoteScriptFileName, timeout=60 * 60 * 2) # 2h timeout for uploading data to CDN
+ remote_script_file_name = "sync-production-" + tip + "-" + timestamp + ".sh"
+ execute_remote_cmd(server, server_home, cmd, remote_script_file_name, timeout=60 * 60 * 2) # 2h timeout for uploading data to CDN
async def update_repository(staging_server: str, repo_layout: QtRepositoryLayout, task: ReleaseTask,
@@ -394,25 +394,25 @@ async def update_repository(staging_server: str, repo_layout: QtRepositoryLayout
log.info("Starting repository update: %s", task.get_repo_path())
create_remote_paths(staging_server, repo_layout.get_repo_layout())
- remotePendingPath = os.path.join(repo_layout.get_pending_path(), task.get_repo_path())
- remotePendingPathRepository = os.path.join(remotePendingPath, "repository")
+ remote_pending_path = os.path.join(repo_layout.get_pending_path(), task.get_repo_path())
+ remote_pending_path_repository = os.path.join(remote_pending_path, "repository")
- remoteStagingDestinationRepositoryPath = os.path.join(repo_layout.get_staging_path(), task.get_repo_path())
- remoteProductionDestinationRepositoryPath = os.path.join(repo_layout.get_production_path(), task.get_repo_path())
+ remote_staging_destination_repository_path = os.path.join(repo_layout.get_staging_path(), task.get_repo_path())
+ remote_production_destination_repository_path = os.path.join(repo_layout.get_production_path(), task.get_repo_path())
    # We always replace the existing repository if a previous version exists.
    # The previous version is moved aside as a backup
- upload_pending_repository_content(staging_server, task.get_source_online_repository_path(), remotePendingPathRepository)
+ upload_pending_repository_content(staging_server, task.get_source_online_repository_path(), remote_pending_path_repository)
# Now we can run the updates on the remote
if update_staging:
- reset_new_remote_repository(staging_server, remotePendingPathRepository, remoteStagingDestinationRepositoryPath)
+ reset_new_remote_repository(staging_server, remote_pending_path_repository, remote_staging_destination_repository_path)
if update_production:
- reset_new_remote_repository(staging_server, remotePendingPathRepository, remoteProductionDestinationRepositoryPath)
+ reset_new_remote_repository(staging_server, remote_pending_path_repository, remote_production_destination_repository_path)
log.info("Update done: %s", task.get_repo_path())
# Now we can delete pending content
- delete_remote_paths(staging_server, [remotePendingPathRepository])
+ delete_remote_paths(staging_server, [remote_pending_path_repository])
# trigger RTA cases for the task if specified
if rta:
trigger_rta(rta, task)
@@ -422,33 +422,33 @@ async def build_online_repositories(tasks: List[ReleaseTask], license_: str, ins
ifw_tools: str, build_repositories: bool) -> List[str]:
log.info("Building online repositories: %i", len(tasks))
# create base tmp dir
- tmpBaseDir = os.path.join(os.getcwd(), "_repo_update_jobs")
+ tmp_base_dir = os.path.join(os.getcwd(), "_repo_update_jobs")
if build_repositories:
- shutil.rmtree(tmpBaseDir, ignore_errors=True)
- os.makedirs(tmpBaseDir, exist_ok=True)
+ shutil.rmtree(tmp_base_dir, ignore_errors=True)
+ os.makedirs(tmp_base_dir, exist_ok=True)
assert license_, "The 'license_' must be defined!"
assert artifact_share_base_url, "The 'artifact_share_base_url' must be defined!"
assert ifw_tools, "The 'ifw_tools' must be defined!"
# locate the repo build script
- scriptPath = os.path.abspath(os.path.join(os.path.dirname(__file__), "create_installer.py"))
- assert os.path.isfile(scriptPath), f"Not a valid script path: {scriptPath}"
+ script_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "create_installer.py"))
+ assert os.path.isfile(script_path), f"Not a valid script path: {script_path}"
# build online repositories first
done_repositories = [] # type: List[str]
for task in tasks:
- tmpDir = os.path.join(tmpBaseDir, task.get_repo_path())
- task.source_online_repository_path = os.path.join(tmpDir, "online_repository")
+ tmp_dir = os.path.join(tmp_base_dir, task.get_repo_path())
+ task.source_online_repository_path = os.path.join(tmp_dir, "online_repository")
if not build_repositories:
            # this is usually for testing purposes in an env where repositories are already built; we just update task objects
continue
log.info("Building repository: %s", task.get_repo_path())
- installerConfigFile = os.path.join(installer_config_base_dir, task.get_config_file())
- if not os.path.isfile(installerConfigFile):
- raise PackagingError(f"Invalid 'config_file' path: {installerConfigFile}")
+ installer_config_file = os.path.join(installer_config_base_dir, task.get_config_file())
+ if not os.path.isfile(installer_config_file):
+ raise PackagingError(f"Invalid 'config_file' path: {installer_config_file}")
- cmd = [sys.executable, scriptPath, "-c", installer_config_base_dir, "-f", installerConfigFile]
+ cmd = [sys.executable, script_path, "-c", installer_config_base_dir, "-f", installer_config_file]
cmd += ["--create-repo", "-l", license_, "-u", artifact_share_base_url, "--ifw-tools", ifw_tools]
cmd += ["--force-version-number-increase"]
for substitution in task.get_installer_string_replacement_list():
@@ -460,9 +460,9 @@ async def build_online_repositories(tasks: List[ReleaseTask], license_: str, ins
log.error(str(e))
raise
- onlineRepositoryPath = os.path.abspath(os.path.join(os.path.dirname(__file__), "online_repository"))
- assert os.path.isdir(onlineRepositoryPath), f"Not a valid path: {onlineRepositoryPath}"
- shutil.move(onlineRepositoryPath, task.source_online_repository_path)
+ online_repository_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "online_repository"))
+ assert os.path.isdir(online_repository_path), f"Not a valid path: {online_repository_path}"
+ shutil.move(online_repository_path, task.source_online_repository_path)
log.info("Repository created at: %s", task.source_online_repository_path)
done_repositories.append(task.source_online_repository_path)
return done_repositories
@@ -471,7 +471,7 @@ async def build_online_repositories(tasks: List[ReleaseTask], license_: str, ins
async def update_repositories(tasks: List[ReleaseTask], staging_server: str, staging_server_root: str, repo_layout: QtRepositoryLayout,
update_staging: bool, update_production: bool, rta: str, ifw_tools: str) -> None:
# upload ifw tools to remote
- remoteRepogen = await upload_ifw_to_remote(ifw_tools, staging_server, staging_server_root)
+ remote_repogen = await upload_ifw_to_remote(ifw_tools, staging_server, staging_server_root)
try:
for task in tasks:
await update_repository(staging_server, repo_layout, task, update_staging, update_production, rta)
@@ -480,7 +480,7 @@ async def update_repositories(tasks: List[ReleaseTask], staging_server: str, sta
raise
finally:
# Now we can delete the ifw tools at remote
- delete_remote_paths(staging_server, [os.path.dirname(os.path.dirname(remoteRepogen))])
+ delete_remote_paths(staging_server, [os.path.dirname(os.path.dirname(remote_repogen))])
async def sync_production(tasks: List[ReleaseTask], repo_layout: QtRepositoryLayout, sync_s3: str, sync_ext: str,
@@ -488,21 +488,21 @@ async def sync_production(tasks: List[ReleaseTask], repo_layout: QtRepositoryLay
export_data: Dict[str, str]) -> None:
log.info("triggering production sync..")
# collect production sync jobs
- updatedProductionRepositories = {} # type: Dict[str, str]
+ updated_production_repositories = {} # type: Dict[str, str]
for task in tasks:
key = os.path.join(repo_layout.get_repo_domain(), task.get_repo_path())
- if key in updatedProductionRepositories:
+ if key in updated_production_repositories:
raise PackagingError(f"Duplicate repository path found: {key}")
- updatedProductionRepositories[key] = os.path.join(repo_layout.get_production_path(), task.get_repo_path())
+ updated_production_repositories[key] = os.path.join(repo_layout.get_production_path(), task.get_repo_path())
# if _all_ repository updates to production were successful then we can sync to production
if sync_s3:
async with EventRegister(f"{license_}: repo sync s3", event_injector, export_data):
- sync_production_repositories_to_s3(staging_server, sync_s3, updatedProductionRepositories,
+ sync_production_repositories_to_s3(staging_server, sync_s3, updated_production_repositories,
staging_server_root, license_)
if sync_ext:
async with EventRegister(f"{license_}: repo sync ext", event_injector, export_data):
- await sync_production_repositories_to_ext(staging_server, sync_ext, updatedProductionRepositories,
+ await sync_production_repositories_to_ext(staging_server, sync_ext, updated_production_repositories,
staging_server_root, license_)
log.info("Production sync trigger done!")
@@ -515,7 +515,7 @@ async def handle_update(staging_server: str, staging_server_root: str, license_:
"""Build all online repositories, update those to staging area and sync to production."""
log.info("Starting repository update for %i tasks..", len(tasks))
# get repository layout
- repoLayout = QtRepositoryLayout(staging_server_root, license_, repo_domain)
+ repo_layout = QtRepositoryLayout(staging_server_root, license_, repo_domain)
# this may take a while depending on how big the repositories are
async with EventRegister(f"{license_}: repo build", event_injector, export_data):
ret = await build_online_repositories(tasks, license_, installer_config_base_dir, artifact_share_base_url, ifw_tools,
@@ -523,10 +523,10 @@ async def handle_update(staging_server: str, staging_server_root: str, license_:
if do_update_repositories:
async with EventRegister(f"{license_}: repo update", event_injector, export_data):
- await update_repositories(tasks, staging_server, staging_server_root, repoLayout, update_staging, update_production,
+ await update_repositories(tasks, staging_server, staging_server_root, repo_layout, update_staging, update_production,
rta, ifw_tools)
if sync_repositories:
- await sync_production(tasks, repoLayout, sync_s3, sync_ext, staging_server, staging_server_root, license_,
+ await sync_production(tasks, repo_layout, sync_s3, sync_ext, staging_server, staging_server_root, license_,
event_injector, export_data)
log.info("Repository updates done!")
@@ -627,9 +627,9 @@ def sign_offline_installer(installer_path: str, installer_name: str) -> None:
def notarize_dmg(dmg_path, installer_basename) -> None:
script_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "notarize.py"))
# bundle-id is just a unique identifier without any special meaning, used to track the notarization progress
- bundleId = installer_basename + "-" + strftime('%Y-%m-%d-%H-%M', gmtime())
- bundleId = bundleId.replace('_', '-').replace(' ', '') # replace illegal characters for bundleId
- cmd = [sys.executable, script_path, '--dmg=' + dmg_path, '--bundle-id=' + bundleId]
+ bundle_id = installer_basename + "-" + strftime('%Y-%m-%d-%H-%M', gmtime())
+ bundle_id = bundle_id.replace('_', '-').replace(' ', '') # replace illegal characters for bundle_id
+ cmd = [sys.executable, script_path, '--dmg=' + dmg_path, '--bundle-id=' + bundle_id]
exec_cmd(cmd, timeout=60 * 60 * 3)
@@ -651,18 +651,18 @@ async def _build_offline_tasks(staging_server: str, staging_server_root: str, ta
assert artifact_share_base_url, "The 'artifact_share_base_url' must be defined!"
assert ifw_tools, "The 'ifw_tools' must be defined!"
- scriptPath = os.path.abspath(os.path.join(os.path.dirname(__file__), "create_installer.py"))
- assert os.path.isfile(scriptPath), f"Not a valid script path: {scriptPath}"
+ script_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "create_installer.py"))
+ assert os.path.isfile(script_path), f"Not a valid script path: {script_path}"
installer_output_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "installer_output"))
# build installers
for task in tasks:
log.info("Building offline installer: %s", task.get_installer_name())
- installerConfigFile = os.path.join(installer_config_base_dir, task.get_config_file())
- if not os.path.isfile(installerConfigFile):
- raise PackagingError(f"Invalid 'config_file' path: {installerConfigFile}")
+ installer_config_file = os.path.join(installer_config_base_dir, task.get_config_file())
+ if not os.path.isfile(installer_config_file):
+ raise PackagingError(f"Invalid 'config_file' path: {installer_config_file}")
- cmd = [sys.executable, scriptPath, "-c", installer_config_base_dir, "-f", installerConfigFile]
+ cmd = [sys.executable, script_path, "-c", installer_config_base_dir, "-f", installer_config_file]
cmd += ["--offline", "-l", license_, "-u", artifact_share_base_url, "--ifw-tools", ifw_tools]
cmd += ["--preferred-installer-name", task.get_installer_name()]
cmd += ["--force-version-number-increase"]
@@ -827,8 +827,8 @@ def main() -> None:
# format task string in case full task section string is used
args.task_filters = format_task_filters(args.task_filters)
# installer configuration files are relative to the given top level release description file
- installerConfigBaseDir = os.path.abspath(os.path.join(os.path.dirname(args.config), os.pardir))
- assert os.path.isdir(installerConfigBaseDir), f"Not able to figure out 'configurations/' directory correctly: {installerConfigBaseDir}"
+ installer_config_base_dir = os.path.abspath(os.path.join(os.path.dirname(args.config), os.pardir))
+ assert os.path.isdir(installer_config_base_dir), f"Not able to figure out 'configurations/' directory correctly: {installer_config_base_dir}"
export_data = load_export_summary_data(Path(args.config)) if args.event_injector else {}
@@ -837,7 +837,7 @@ def main() -> None:
# get offline tasks
tasks = parse_config(args.config, task_filters=append_to_task_filters(args.task_filters, "offline"))
loop.run_until_complete(build_offline_tasks(args.staging_server, args.staging_server_root, tasks, args.license_,
- installerConfigBaseDir, args.artifact_share_url, args.ifw_tools,
+ installer_config_base_dir, args.artifact_share_url, args.ifw_tools,
args.offline_installer_id, args.update_staging,
args.enable_oss_snapshots, args.event_injector, export_data))
@@ -845,7 +845,7 @@ def main() -> None:
# get repository tasks
tasks = parse_config(args.config, task_filters=append_to_task_filters(args.task_filters, "repository"))
ret = loop.run_until_complete(handle_update(args.staging_server, args.staging_server_root, args.license_, tasks,
- args.repo_domain, installerConfigBaseDir, args.artifact_share_url,
+ args.repo_domain, installer_config_base_dir, args.artifact_share_url,
args.update_staging, args.update_production, args.sync_s3, args.sync_ext,
args.rta, args.ifw_tools,
args.build_repositories, do_update_repositories, do_sync_repositories,
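The hunks above are mechanical camelCase-to-snake_case renames. For reference, a minimal sketch of the conversion rule they follow (an illustration of the naming convention, not a script used by this change):

import re

def camel_to_snake(name: str) -> str:
    # insert '_' before each upper-case letter that follows a lower-case
    # letter or digit, then lower-case everything:
    # "installerConfigBaseDir" -> "installer_config_base_dir"
    return re.sub(r'(?<=[a-z0-9])([A-Z])', r'_\1', name).lower()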
diff --git a/packaging-tools/release_task_reader.py b/packaging-tools/release_task_reader.py
index ed05af201..5afe91df6 100755
--- a/packaging-tools/release_task_reader.py
+++ b/packaging-tools/release_task_reader.py
@@ -60,8 +60,8 @@ class ReleaseTask:
self.repo_components_to_update = settings.get("repo_components_to_update", "")
self.installer_name = settings.get("installer_name", "")
self.rta_key_list = settings.get("rta_key_list", "")
- tmpList: List[str] = [x.strip() for x in self.substitutions.split(',')]
- self.installer_string_replacement_list = list(filter(None, tmpList))
+ tmp_list: List[str] = [x.strip() for x in self.substitutions.split(',')]
+ self.installer_string_replacement_list = list(filter(None, tmp_list))
self.source_online_repository_path = ""
self.source_pkg_path = ""
@@ -105,8 +105,8 @@ class ReleaseTask:
return self.repo_path
def get_rta_key_list(self) -> List[str]:
- tmpList = self.rta_key_list.strip().replace(' ', '').split(",")
- return list(filter(None, tmpList))
+ tmp_list = self.rta_key_list.strip().replace(' ', '').split(",")
+ return list(filter(None, tmp_list))
def get_source_online_repository_path(self) -> str:
# this points to local repository build path
@@ -140,18 +140,18 @@ def parse_data(settings, task_filters: List[str]) -> List[ReleaseTask]:
parts = section.split(".")
if not parts[0].startswith("task"):
continue
- appendTask = True
+ append_task = True
if section_filters_list:
- appendTask = False
+ append_task = False
for section_filters in section_filters_list:
if set(section_filters).issubset(set(parts)):
- appendTask = True
+ append_task = True
break
- if appendTask:
+ if append_task:
log.info("Parsing Task: %s", section)
- releaseTask = ReleaseTask(section, settings[section])
- releaseTask.add_to_substitutions_list(common_substitution_list)
- tasks.append(releaseTask)
+ release_task = ReleaseTask(section, settings[section])
+ release_task.add_to_substitutions_list(common_substitution_list)
+ tasks.append(release_task)
else:
log.info("Skipping task: [%s] - not included by task filter(s): %s", section, section_filters_list)
return tasks
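The subset check above is the whole filtering rule: a task section passes if every part of at least one filter occurs in the dotted section name. A small illustration with hypothetical section and filter values:

parts = "task.repository.linux.x64".split(".")
# a filter matches when all of its parts appear somewhere in the section name
assert set(["repository", "linux"]).issubset(set(parts))
assert not set(["repository", "windows"]).issubset(set(parts))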
diff --git a/packaging-tools/remote_uploader.py b/packaging-tools/remote_uploader.py
index d2678bada..5317d5c6e 100755
--- a/packaging-tools/remote_uploader.py
+++ b/packaging-tools/remote_uploader.py
@@ -85,15 +85,15 @@ class RemoteUploader:
check_call(cmd, timeout=60) # give it 60s
def _copy_to_remote(self, file_name, dest_dir_name):
- """Copy the given file to destDirName which is relative to remoteBasePath."""
+ """Copy the given file to dest_dirName which is relative to remoteBasePath."""
assert self.init_finished, "RemoteUploader not initialized!"
- remoteDestination = self.remoteLogin + ':' + self.remoteTargetDir
+ remote_destination = self.remoteLogin + ':' + self.remoteTargetDir
if dest_dir_name:
- remoteDestination = remoteDestination + '/' + dest_dir_name + '/'
+ remote_destination = remote_destination + '/' + dest_dir_name + '/'
if "windows" in platform.system().lower():
self.ensure_remote_dir(self.remoteTargetDir + '/' + dest_dir_name + '/')
- print(f"Copying [{file_name}] to [{remoteDestination}]")
- cmd = self.copy_cmd + [file_name, remoteDestination]
+ print(f"Copying [{file_name}] to [{remote_destination}]")
+ cmd = self.copy_cmd + [file_name, remote_destination]
print("Executing: ", ' '.join(cmd))
if not self.dryRun:
check_call(cmd, timeout=60 * 10) # give it 10 mins
diff --git a/packaging-tools/runner.py b/packaging-tools/runner.py
index 675789f89..bb0d39074 100755
--- a/packaging-tools/runner.py
+++ b/packaging-tools/runner.py
@@ -65,8 +65,8 @@ async def async_exec_cmd(cmd: List[str], timeout: int = 60 * 60, env: Dict[str,
except (asyncio.TimeoutError, TimeoutExpired):
log.error("Timeout (%ss) for: %s", str(timeout), cmd)
raise
- except CalledProcessError as commandErr:
- log.error("Failed to run command: %s", str(commandErr))
+ except CalledProcessError as command_err:
+ log.error("Failed to run command: %s", str(command_err))
raise
except Exception as e:
log.error("Something failed: %s", str(e))
diff --git a/packaging-tools/sign_installer.py b/packaging-tools/sign_installer.py
index 1b9d65454..1c134c960 100755
--- a/packaging-tools/sign_installer.py
+++ b/packaging-tools/sign_installer.py
@@ -64,21 +64,21 @@ def create_mac_dmg(app_path: str) -> None:
def sign_windows_executable(file_path: str):
- signTools = ["signtool32.exe", "keys.pfx", "capicom.dll"]
- signToolsTempDir = r'C:\Utils\sign_tools_temp'
- for item in signTools:
- dst = os.path.join(signToolsTempDir, item)
+ sign_tools = ["signtool32.exe", "keys.pfx", "capicom.dll"]
+ sign_tools_temp_dir = r'C:\Utils\sign_tools_temp'
+ for item in sign_tools:
+ dst = os.path.join(sign_tools_temp_dir, item)
curl_cmd_args = ['curl', "--fail", "-L", "--retry", "5", "--retry-delay", "30", "-o", dst,
'--create-dirs', get_pkg_value("SIGN_TOOLS_ADDR") + item]
check_call(curl_cmd_args)
- cmd_args = [os.path.join(signToolsTempDir, 'signtool32.exe'), 'sign', '/v', '/du', get_pkg_value("SIGNING_SERVER"), '/p', get_pkg_value("SIGNING_PASSWORD")]
- cmd_args += ['/tr', get_pkg_value("TIMESTAMP_SERVER"), '/f', os.path.join(signToolsTempDir, 'keys.pfx'), '/td', "sha256", '/fd', "sha256", file_path]
+ cmd_args = [os.path.join(sign_tools_temp_dir, 'signtool32.exe'), 'sign', '/v', '/du', get_pkg_value("SIGNING_SERVER"), '/p', get_pkg_value("SIGNING_PASSWORD")]
+ cmd_args += ['/tr', get_pkg_value("TIMESTAMP_SERVER"), '/f', os.path.join(sign_tools_temp_dir, 'keys.pfx'), '/td', "sha256", '/fd', "sha256", file_path]
log_entry = cmd_args[:]
log_entry[4] = "****"
log_entry[6] = "****"
log.info("Calling: %s", " ".join(log_entry))
check_call(cmd_args, stdout=DEVNULL, stderr=DEVNULL)
- rmtree(signToolsTempDir)
+ rmtree(sign_tools_temp_dir)
log.info("Successfully signed: %s", file_path)
diff --git a/packaging-tools/tests/test_bld_python.py b/packaging-tools/tests/test_bld_python.py
index 8b2866176..89da2d835 100755
--- a/packaging-tools/tests/test_bld_python.py
+++ b/packaging-tools/tests/test_bld_python.py
@@ -41,20 +41,20 @@ class TestBldPython(unittest.TestCase):
@asyncio_test
async def test_locate_source_root(self) -> None:
- with TemporaryDirectory(dir=os.getcwd()) as tmpBaseDir:
- tempDir = os.path.join(tmpBaseDir, "foo", "bar", "test", "dir")
- os.makedirs(tempDir)
- tempFilePath = os.path.join(tempDir, "configure")
- with open(tempFilePath, 'w+', encoding="utf-8") as f:
+ with TemporaryDirectory(dir=os.getcwd()) as tmp_base_dir:
+ temp_dir = os.path.join(tmp_base_dir, "foo", "bar", "test", "dir")
+ os.makedirs(temp_dir)
+ temp_file_path = os.path.join(temp_dir, "configure")
+ with open(temp_file_path, 'w+', encoding="utf-8") as f:
f.write("\n")
- foundDir = locate_source_root(tmpBaseDir)
- self.assertEqual(foundDir, tempDir)
+ found_dir = locate_source_root(tmp_base_dir)
+ self.assertEqual(found_dir, temp_dir)
- invalidDir = os.path.join(tmpBaseDir, "foo2", "bar", "test", "dir")
- os.makedirs(invalidDir)
+ invalid_dir = os.path.join(tmp_base_dir, "foo2", "bar", "test", "dir")
+ os.makedirs(invalid_dir)
with self.assertRaises(BldPythonError):
- locate_source_root(os.path.join(tmpBaseDir, "foo2"))
+ locate_source_root(os.path.join(tmp_base_dir, "foo2"))
if __name__ == '__main__':
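The test above pins down the contract of locate_source_root: return the directory that contains a 'configure' file, and raise when there is none. A simplified, hedged equivalent of that contract (the shipped version lives in bld_python.py and raises BldPythonError):

import os

def find_configure_root(base_dir: str) -> str:
    # walk top-down and return the first directory holding a 'configure' file
    for root, _dirs, files in os.walk(base_dir):
        if "configure" in files:
            return root
    raise FileNotFoundError(f"No 'configure' found under: {base_dir}")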
diff --git a/packaging-tools/tests/test_build_wrapper.py b/packaging-tools/tests/test_build_wrapper.py
index 0de7a84b1..c9a0d047f 100644
--- a/packaging-tools/tests/test_build_wrapper.py
+++ b/packaging-tools/tests/test_build_wrapper.py
@@ -52,18 +52,18 @@ class TestBuildWrapper(unittest.TestCase):
@unpack
def test_init_snapshot_dir_and_upload_files(self, project_name, version_branch, build_number, subdir=""):
temp_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'build_wrapper_test')
- optionDict = {}
- optionDict['WORK_DIR'] = os.getcwd()
- optionDict['SSH_COMMAND'] = 'ssh'
- optionDict['SCP_COMMAND'] = 'scp'
+ option_dict = {}
+ option_dict['WORK_DIR'] = os.getcwd()
+ option_dict['SSH_COMMAND'] = 'ssh'
+ option_dict['SCP_COMMAND'] = 'scp'
user = getuser()
- optionDict['PACKAGE_STORAGE_SERVER_ADDR'] = user + '@127.0.0.1'
- optionDict['PACKAGE_STORAGE_SERVER_BASE_DIR'] = temp_dir
- filesToUpload = [os.path.basename(x) for x in glob('./*.sh')]
+ option_dict['PACKAGE_STORAGE_SERVER_ADDR'] = user + '@127.0.0.1'
+ option_dict['PACKAGE_STORAGE_SERVER_BASE_DIR'] = temp_dir
+ files_to_upload = [os.path.basename(x) for x in glob('./*.sh')]
if subdir:
- init_snapshot_dir_and_upload_files(optionDict, project_name, version_branch, build_number, filesToUpload, subdir)
+ init_snapshot_dir_and_upload_files(option_dict, project_name, version_branch, build_number, files_to_upload, subdir)
else:
- init_snapshot_dir_and_upload_files(optionDict, project_name, version_branch, build_number, filesToUpload)
+ init_snapshot_dir_and_upload_files(option_dict, project_name, version_branch, build_number, files_to_upload)
remote_path_base = os.path.join(temp_dir, project_name, version_branch)
remote_path_snapshot_dir = os.path.join(remote_path_base, build_number)
@@ -73,9 +73,9 @@ class TestBuildWrapper(unittest.TestCase):
self.assertTrue(os.path.isdir(remote_path_snapshot_dir))
self.assertTrue(os.path.islink(remote_path_latest_link))
- searchDir = os.path.join(remote_path_latest_link, subdir, '*.sh')
- uploadedFiles = [os.path.basename(x) for x in glob(searchDir)]
- self.assertListEqual(sorted(filesToUpload), sorted(uploadedFiles))
+ search_dir = os.path.join(remote_path_latest_link, subdir, '*.sh')
+ uploaded_files = [os.path.basename(x) for x in glob(search_dir)]
+ self.assertListEqual(sorted(files_to_upload), sorted(uploaded_files))
rmtree(remote_path_base)
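The assertions above describe the expected remote layout: <base>/<project>/<branch>/<build_number> plus a 'latest' symlink next to the build directories. A local sketch of that layout (hypothetical helper, not the function under test):

import os

def make_snapshot_layout(base, project, branch, build_number):
    snapshot_dir = os.path.join(base, project, branch, build_number)
    os.makedirs(snapshot_dir, exist_ok=True)
    latest_link = os.path.join(base, project, branch, "latest")
    if os.path.islink(latest_link):
        os.unlink(latest_link)
    os.symlink(snapshot_dir, latest_link)  # 'latest' tracks the newest build
    return snapshot_dir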
diff --git a/packaging-tools/tests/test_installer_utils.py b/packaging-tools/tests/test_installer_utils.py
index a18143a41..7c101e28d 100644
--- a/packaging-tools/tests/test_installer_utils.py
+++ b/packaging-tools/tests/test_installer_utils.py
@@ -56,9 +56,9 @@ class TestInstallerUtils(unittest.TestCase):
@asyncio_test
async def test_cd(self) -> None:
cwd = os.getcwd()
- with TemporaryDirectory(dir=cwd) as tmpBaseDir:
- with cd(tmpBaseDir):
- self.assertEqual(tmpBaseDir, os.getcwd())
+ with TemporaryDirectory(dir=cwd) as tmp_base_dir:
+ with cd(tmp_base_dir):
+ self.assertEqual(tmp_base_dir, os.getcwd())
self.assertEqual(cwd, os.getcwd())
@asyncio_test_parallel_data(
@@ -78,8 +78,8 @@ class TestInstallerUtils(unittest.TestCase):
("https://www.qt.io/some/file.tgz", "tar")
)
async def test_valid_extractor(self, archive: str, expected_extractor: str) -> None:
- extractCmd = get_extract_cmd(archive)
- self.assertEqual(extractCmd[0], expected_extractor, f"Not a valid extractor Callable obtained for: {archive}")
+ extract_cmd = get_extract_cmd(archive)
+ self.assertEqual(extract_cmd[0], expected_extractor, f"Not a valid extractor Callable obtained for: {archive}")
@asyncio_test
async def test_invalid_extractor(self) -> None:
@@ -88,38 +88,38 @@ class TestInstallerUtils(unittest.TestCase):
@asyncio_test
async def test_extract_archive(self) -> None:
- with TemporaryDirectory(dir=os.getcwd()) as tmpBaseDir:
+ with TemporaryDirectory(dir=os.getcwd()) as tmp_base_dir:
# create some test paths
- tempPath = os.path.join("foo", "bar")
- absoluteTempPath = os.path.join(tmpBaseDir, tempPath)
- os.makedirs(absoluteTempPath)
+ temp_path = os.path.join("foo", "bar")
+ absolute_temp_path = os.path.join(tmp_base_dir, temp_path)
+ os.makedirs(absolute_temp_path)
# create tmp file
- tempFileName = "foobar.txt"
- tempFilePath = os.path.join(absoluteTempPath, tempFileName)
- with open(tempFilePath, 'w+', encoding="utf-8") as f:
+ temp_file_name = "foobar.txt"
+ temp_file_path = os.path.join(absolute_temp_path, temp_file_name)
+ with open(temp_file_path, 'w+', encoding="utf-8") as f:
f.write("\n")
- self.assertTrue(os.path.isfile(tempFilePath))
+ self.assertTrue(os.path.isfile(temp_file_path))
# create fake tar archive
- tarArchivePath = os.path.join(tmpBaseDir, "foobar.tar")
- with tarfile.open(tarArchivePath, "w") as tarFile:
- with open(tempFilePath, mode='rb') as _tempFile:
- fileData = _tempFile.read()
- tarFile.addfile(tarfile.TarInfo(os.path.join(tempPath, tempFileName)), io.BytesIO(fileData))
+ tar_archive_path = os.path.join(tmp_base_dir, "foobar.tar")
+ with tarfile.open(tar_archive_path, "w") as tar_file:
+ with open(temp_file_path, mode='rb') as _temp_file:
+ file_data = _temp_file.read()
+ tar_file.addfile(tarfile.TarInfo(os.path.join(temp_path, temp_file_name)), io.BytesIO(file_data))
- destDir = os.path.join(tmpBaseDir, "dest_dir")
- await extract_archive(tarArchivePath, destDir)
- self.assertTrue(os.path.isfile(os.path.join(destDir, tempPath, "foobar.txt")))
+ dest_dir = os.path.join(tmp_base_dir, "dest_dir")
+ await extract_archive(tar_archive_path, dest_dir)
+ self.assertTrue(os.path.isfile(os.path.join(dest_dir, temp_path, "foobar.txt")))
@unittest.skipUnless(is_internal_file_server_reachable(),
"Skipping because file server is not accessible")
@asyncio_test
async def test_download_archive(self) -> None:
- with TemporaryDirectory(dir=os.getcwd()) as tmpBaseDir:
+ with TemporaryDirectory(dir=os.getcwd()) as tmp_base_dir:
pkg_srv = get_pkg_value("PACKAGE_STORAGE_SERVER_PATH_HTTP")
test_file_url = pkg_srv + "/archive/packaging/qtsdk_testing.txt"
- downloadedFile = download_archive(test_file_url, tmpBaseDir)
- self.assertTrue(os.path.isfile(downloadedFile))
+ downloaded_file = download_archive(test_file_url, tmp_base_dir)
+ self.assertTrue(os.path.isfile(downloaded_file))
if __name__ == '__main__':
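One subtlety in test_extract_archive above: tarfile.TarInfo defaults to size 0, so the member added from io.BytesIO extracts as an empty file; that still satisfies the isfile() assertion. A sketch that preserves the payload sets the size explicitly:

import io
import tarfile

def add_bytes_member(tar_path: str, member_name: str, data: bytes) -> None:
    info = tarfile.TarInfo(member_name)
    info.size = len(data)  # without this, addfile() records a zero-length member
    with tarfile.open(tar_path, "a") as tar:
        tar.addfile(info, io.BytesIO(data))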
diff --git a/packaging-tools/tests/test_packaging.py b/packaging-tools/tests/test_packaging.py
index 541ff6de4..72babeb79 100755
--- a/packaging-tools/tests/test_packaging.py
+++ b/packaging-tools/tests/test_packaging.py
@@ -50,126 +50,126 @@ from runner import do_execute_sub_process
class TestPackaging(unittest.TestCase):
def test_patch_absolute_lib_paths(self):
- testData = (("QMAKE_LIBS_ZLIB = /opt/android/android-ndk-r18b/platforms/android-21/arch-arm64/usr/lib/libz.so",
- "QMAKE_LIBS_ZLIB = -lz",
- "pri"),
- ("QMAKE_LIBS_GTK3 = /lib64/libgtk-3.so /lib64/libgdk-3.so /lib64/libatk-1.0.so /lib64/libgio-2.0.so /lib64/libpangocairo-1.0.so /lib64/libgdk_pixbuf-2.0.so /lib64/libcairo-gobject.so /lib64/libpango-1.0.so /lib64/libcairo.so /lib64/libgobject-2.0.so /lib64/libglib-2.0.so",
- "QMAKE_LIBS_GTK3 = -lgtk-3 -lgdk-3 -latk-1.0 -lgio-2.0 -lpangocairo-1.0 -lgdk_pixbuf-2.0 -lcairo-gobject -lpango-1.0 -lcairo -lgobject-2.0 -lglib-2.0",
- "pri"),
- ("QMAKE_PRL_LIBS = /opt/android/android-ndk-r18b/sources/cxx-stl/llvm-libc++/libs/arm64-v8a/libc++.so.21 /lib64/libglib-2.0.so",
- "QMAKE_PRL_LIBS = -lc++ -lglib-2.0",
- "prl"),
- ("QMAKE_PRL_LIBS = -LC:\\opensslx86\\lib -L$$[QT_INSTALL_LIBS] $$[QT_INSTALL_LIBS]\\Qt5Cored.lib",
- "QMAKE_PRL_LIBS = -LC:\\opensslx86\\lib -L$$[QT_INSTALL_LIBS] $$[QT_INSTALL_LIBS]\\Qt5Cored.lib",
- "prl"),
- ("QMAKE_PRL_LIBS = -LC:\\openssl\\lib \"C:\\Program Files (x86)\\Windows Kits\\10\\lib\\10.0.16299.0\\um\\x64\\shell32.lib\" $$[QT_INSTALL_LIBS]\\Qt5Cored.lib",
- "QMAKE_PRL_LIBS = -LC:\\openssl\\lib \"-lshell32\" $$[QT_INSTALL_LIBS]\\Qt5Cored.lib",
- "prl"),
- ("QMAKE_PRL_LIBS = -LC:\\openssl\\lib \"C:\\Program Files (x86)\\Windows Kits\\10\\lib\\10.0.16299.0\\um\\x64\\shell32.foobar\" $$[QT_INSTALL_LIBS]\\Qt5Cored.foobar",
- "QMAKE_PRL_LIBS = -LC:\\openssl\\lib \"C:\\Program Files (x86)\\Windows Kits\\10\\lib\\10.0.16299.0\\um\\x64\\shell32.foobar\" $$[QT_INSTALL_LIBS]\\Qt5Cored.foobar",
- "prl"),
- ("QMAKE_PRL_LIBS = $$[QT_INSTALL_LIBS]/qtfreetyped.lib \"C:\\Program Files (x86)\\Windows Kits\\10\\lib\\10.0.16299.0\\um\\x86/dwrite.lib\"",
- "QMAKE_PRL_LIBS = $$[QT_INSTALL_LIBS]/qtfreetyped.lib \"-ldwrite\"",
- "prl"),
- ("_qt5gui_find_extra_libs(EGL \"c:/Utils/Android/android-ndk-r18b/platforms/android-16/arch-x86/usr/lib/libEGL.so\" \"\" \"\")",
- "_qt5gui_find_extra_libs(EGL \"EGL\" \"\" \"\")",
- "cmake"),
- ("_qt5gui_find_extra_libs(EGL \"/opt/android/android-ndk-r18b/platforms/android-21/arch-arm64/usr/lib/libEGL.so\" \"\" \"\")",
- "_qt5gui_find_extra_libs(EGL \"EGL\" \"\" \"\")",
- "cmake"),
- ("get_filename_component(_qt5Gui_install_prefix \"${CMAKE_CURRENT_LIST_DIR}/../../../\" ABSOLUTE)",
- "get_filename_component(_qt5Gui_install_prefix \"${CMAKE_CURRENT_LIST_DIR}/../../../\" ABSOLUTE)",
- "cmake"),
- ("/Applications/Xcode10.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS12.1.sdk/usr/lib/libz.tbd -framework OpenGLES",
- "-lz -framework OpenGLES",
- "prl"))
-
- for data in testData:
+ test_data = (("QMAKE_LIBS_ZLIB = /opt/android/android-ndk-r18b/platforms/android-21/arch-arm64/usr/lib/libz.so",
+ "QMAKE_LIBS_ZLIB = -lz",
+ "pri"),
+ ("QMAKE_LIBS_GTK3 = /lib64/libgtk-3.so /lib64/libgdk-3.so /lib64/libatk-1.0.so /lib64/libgio-2.0.so /lib64/libpangocairo-1.0.so /lib64/libgdk_pixbuf-2.0.so /lib64/libcairo-gobject.so /lib64/libpango-1.0.so /lib64/libcairo.so /lib64/libgobject-2.0.so /lib64/libglib-2.0.so",
+ "QMAKE_LIBS_GTK3 = -lgtk-3 -lgdk-3 -latk-1.0 -lgio-2.0 -lpangocairo-1.0 -lgdk_pixbuf-2.0 -lcairo-gobject -lpango-1.0 -lcairo -lgobject-2.0 -lglib-2.0",
+ "pri"),
+ ("QMAKE_PRL_LIBS = /opt/android/android-ndk-r18b/sources/cxx-stl/llvm-libc++/libs/arm64-v8a/libc++.so.21 /lib64/libglib-2.0.so",
+ "QMAKE_PRL_LIBS = -lc++ -lglib-2.0",
+ "prl"),
+ ("QMAKE_PRL_LIBS = -LC:\\opensslx86\\lib -L$$[QT_INSTALL_LIBS] $$[QT_INSTALL_LIBS]\\Qt5Cored.lib",
+ "QMAKE_PRL_LIBS = -LC:\\opensslx86\\lib -L$$[QT_INSTALL_LIBS] $$[QT_INSTALL_LIBS]\\Qt5Cored.lib",
+ "prl"),
+ ("QMAKE_PRL_LIBS = -LC:\\openssl\\lib \"C:\\Program Files (x86)\\Windows Kits\\10\\lib\\10.0.16299.0\\um\\x64\\shell32.lib\" $$[QT_INSTALL_LIBS]\\Qt5Cored.lib",
+ "QMAKE_PRL_LIBS = -LC:\\openssl\\lib \"-lshell32\" $$[QT_INSTALL_LIBS]\\Qt5Cored.lib",
+ "prl"),
+ ("QMAKE_PRL_LIBS = -LC:\\openssl\\lib \"C:\\Program Files (x86)\\Windows Kits\\10\\lib\\10.0.16299.0\\um\\x64\\shell32.foobar\" $$[QT_INSTALL_LIBS]\\Qt5Cored.foobar",
+ "QMAKE_PRL_LIBS = -LC:\\openssl\\lib \"C:\\Program Files (x86)\\Windows Kits\\10\\lib\\10.0.16299.0\\um\\x64\\shell32.foobar\" $$[QT_INSTALL_LIBS]\\Qt5Cored.foobar",
+ "prl"),
+ ("QMAKE_PRL_LIBS = $$[QT_INSTALL_LIBS]/qtfreetyped.lib \"C:\\Program Files (x86)\\Windows Kits\\10\\lib\\10.0.16299.0\\um\\x86/dwrite.lib\"",
+ "QMAKE_PRL_LIBS = $$[QT_INSTALL_LIBS]/qtfreetyped.lib \"-ldwrite\"",
+ "prl"),
+ ("_qt5gui_find_extra_libs(EGL \"c:/Utils/Android/android-ndk-r18b/platforms/android-16/arch-x86/usr/lib/libEGL.so\" \"\" \"\")",
+ "_qt5gui_find_extra_libs(EGL \"EGL\" \"\" \"\")",
+ "cmake"),
+ ("_qt5gui_find_extra_libs(EGL \"/opt/android/android-ndk-r18b/platforms/android-21/arch-arm64/usr/lib/libEGL.so\" \"\" \"\")",
+ "_qt5gui_find_extra_libs(EGL \"EGL\" \"\" \"\")",
+ "cmake"),
+ ("get_filename_component(_qt5Gui_install_prefix \"${CMAKE_CURRENT_LIST_DIR}/../../../\" ABSOLUTE)",
+ "get_filename_component(_qt5Gui_install_prefix \"${CMAKE_CURRENT_LIST_DIR}/../../../\" ABSOLUTE)",
+ "cmake"),
+ ("/Applications/Xcode10.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS12.1.sdk/usr/lib/libz.tbd -framework OpenGLES",
+ "-lz -framework OpenGLES",
+ "prl"))
+
+ for data in test_data:
result = patch_absolute_lib_paths_from_line(data[0], data[2])
self.assertEqual(result, data[1], f"Failed to patch: [{data[0]}] as: [{data[1]}]")
def test_patch_qmake_prl_build_dir_from_line(self):
- testData = (("QMAKE_PRL_BUILD_DIR = /foo/bar", ""),
- ("QMAKE_PRL_BUILD_DIR= /foo/bar", ""),
- ("foo bar = /foo/bar", "foo bar = /foo/bar"))
+ test_data = (("QMAKE_PRL_BUILD_DIR = /foo/bar", ""),
+ ("QMAKE_PRL_BUILD_DIR= /foo/bar", ""),
+ ("foo bar = /foo/bar", "foo bar = /foo/bar"))
- for data in testData:
+ for data in test_data:
result = patch_qmake_prl_build_dir_from_line(data[0])
self.assertEqual(result, data[1], f"Failed to patch: [{data[0]}] as: [{data[1]}]")
def test_patch_qconfig_pri_from_line(self):
- testData = (("QMAKE_DEFAULT_LIBDIRS = /foo/bar", "QMAKE_DEFAULT_LIBDIRS ="),
- ("QMAKE_DEFAULT_INCDIRS = /foo/bar", "QMAKE_DEFAULT_INCDIRS ="),
- ("foo bar = /foo/bar", "foo bar = /foo/bar"))
+ test_data = (("QMAKE_DEFAULT_LIBDIRS = /foo/bar", "QMAKE_DEFAULT_LIBDIRS ="),
+ ("QMAKE_DEFAULT_INCDIRS = /foo/bar", "QMAKE_DEFAULT_INCDIRS ="),
+ ("foo bar = /foo/bar", "foo bar = /foo/bar"))
- for data in testData:
+ for data in test_data:
result = patch_qconfig_pri_from_line(data[0])
self.assertEqual(result, data[1], f"Failed to patch: [{data[0]}] as: [{data[1]}]. Got: [{result}]")
def test_parse_package_finalize_items(self):
- testData = (("set_executable=licheck64, foo=bar, set_executable=something", "set_executable", ["licheck64", "something"]),
- ("set_executable=licheck64,foo=bar, set_executable = something", "set_executable", ["licheck64", "something"]),
- ("set_executable=licheck64", "set_executable", ["licheck64"]))
+ test_data = (("set_executable=licheck64, foo=bar, set_executable=something", "set_executable", ["licheck64", "something"]),
+ ("set_executable=licheck64,foo=bar, set_executable = something", "set_executable", ["licheck64", "something"]),
+ ("set_executable=licheck64", "set_executable", ["licheck64"]))
- for data in testData:
- matchCount = 0
+ for data in test_data:
+ match_count = 0
for item in parse_package_finalize_items(data[0], data[1]):
self.assertIn(item, data[2])
- matchCount += 1
- self.assertEqual(matchCount, len(data[2]))
+ match_count += 1
+ self.assertEqual(match_count, len(data[2]))
def test_patch_qt_edition(self):
- tempDir = mkdtemp(dir=os.getcwd())
- tempFile = os.path.join(tempDir, "qconfig.pri")
+ temp_dir = mkdtemp(dir=os.getcwd())
+ temp_file = os.path.join(temp_dir, "qconfig.pri")
try:
- with open(tempFile, "a", encoding="utf-8") as f:
+ with open(temp_file, "a", encoding="utf-8") as f:
f.write("something foo\n")
f.write("QT_EDITION = foobar\n")
f.write("nonsense\n")
- licheckName = "licheck_foo"
- releaseTimeStamp = "11223344"
- patch_qt_edition(tempDir, licheckName, releaseTimeStamp)
+ licheck_name = "licheck_foo"
+ release_timestamp = "11223344"
+ patch_qt_edition(temp_dir, licheck_name, release_timestamp)
- expectedData = []
- expectedData.append("something foo")
- expectedData.append("QT_EDITION = Enterprise")
- expectedData.append("QT_LICHECK = " + licheckName)
- expectedData.append("QT_RELEASE_DATE = " + releaseTimeStamp)
- expectedData.append("nonsense")
+ expected_data = []
+ expected_data.append("something foo")
+ expected_data.append("QT_EDITION = Enterprise")
+ expected_data.append("QT_LICHECK = " + licheck_name)
+ expected_data.append("QT_RELEASE_DATE = " + release_timestamp)
+ expected_data.append("nonsense")
idx = 0
- for line in FileInput(tempFile, inplace=False):
- print(f"Received data: [{line.strip()}] expected data: [{expectedData[idx]}]")
- self.assertEqual(line.strip(), expectedData[idx], f"Received data: [{line}] differs from expected data: [{expectedData[idx]}]")
+ for line in FileInput(temp_file, inplace=False):
+ print(f"Received data: [{line.strip()}] expected data: [{expected_data[idx]}]")
+ self.assertEqual(line.strip(), expected_data[idx], f"Received data: [{line}] differs from expected data: [{expected_data[idx]}]")
idx += 1
finally:
- rmtree(tempDir)
+ rmtree(temp_dir)
@unittest.skipUnless(os.environ.get("PKG_TEST_QT_CONFIG_BASE_PATH"), "Skipping because 'PKG_TEST_QT_CONFIG_BASE_PATH' is not set")
@unittest.skipUnless(os.environ.get("PKG_TEST_QT_ARTIFACTS_URL"), "Skipping because 'PKG_TEST_QT_CONFIG_BASE_PATH' is not set")
@unittest.skipUnless(os.environ.get("PKG_TEST_QT_IFW_TOOL_URL"), "Skipping because 'PKG_TEST_QT_IFW_TOOL_URL' is not set")
def test_create_installer(self):
extension = '.run' if platform.system().lower().startswith('linux') else ''
- testsDir = os.path.dirname(os.path.abspath(__file__))
+ tests_dir = os.path.dirname(os.path.abspath(__file__))
path = os.path.join(os.environ.get("PKG_TEST_QT_CONFIG_BASE_PATH"), "offline_installer_jobs", "5.9.3")
- offlineJobs = os.listdir(path)
- for offlineJob in offlineJobs:
- cmd_args = [sys.executable, '-u', os.path.join(testsDir, 'create_installer.py')]
+ offline_jobs = os.listdir(path)
+ for offline_job in offline_jobs:
+ cmd_args = [sys.executable, '-u', os.path.join(tests_dir, 'create_installer.py')]
cmd_args = cmd_args + ['-c', os.environ.get("PKG_TEST_QT_CONFIG_BASE_PATH")]
- cmd_args = cmd_args + ['-f', os.path.join(path, offlineJob)]
+ cmd_args = cmd_args + ['-f', os.path.join(path, offline_job)]
cmd_args = cmd_args + ['--offline']
cmd_args = cmd_args + ['-l', 'enterprise']
cmd_args = cmd_args + ['-u', os.environ.get("PKG_TEST_QT_ARTIFACTS_URL")]
cmd_args = cmd_args + ['--ifw-tools=' + os.environ.get("PKG_TEST_QT_IFW_TOOL_URL")]
- cmd_args = cmd_args + ['--preferred-installer-name=' + offlineJob]
+ cmd_args = cmd_args + ['--preferred-installer-name=' + offline_job]
cmd_args = cmd_args + ['--dry-run']
try:
do_execute_sub_process(cmd_args, os.getcwd())
except Exception as e:
self.fail(f"Failed to execute: [{' '.join(cmd_args)}] -> {str(e)}")
- self.assertTrue(os.path.exists(os.path.join(testsDir, 'installer_output', offlineJob + extension)), "No installers generated")
+ self.assertTrue(os.path.exists(os.path.join(tests_dir, 'installer_output', offline_job + extension)), "No installers generated")
if __name__ == '__main__':
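The parse_package_finalize_items cases above accept comma-separated key=value pairs, tolerate whitespace around '=' and ',', and allow repeated keys. A hedged re-implementation of exactly the behavior the test pins down (not the shipped function):

def finalize_items(line: str, key: str):
    # yield each value whose key matches, ignoring tokens without '='
    for token in line.split(","):
        if "=" not in token:
            continue
        k, v = token.split("=", 1)
        if k.strip() == key:
            yield v.strip()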
diff --git a/packaging-tools/tests/test_release_repo_meta_update.py b/packaging-tools/tests/test_release_repo_meta_update.py
index d99a1ad8c..5a253dda5 100755
--- a/packaging-tools/tests/test_release_repo_meta_update.py
+++ b/packaging-tools/tests/test_release_repo_meta_update.py
@@ -105,46 +105,46 @@ class TestReleaseRepoMetaUpdate(unittest.TestCase):
@asyncio_test
async def test_scan_repositories(self) -> None:
- with TemporaryDirectory(dir=os.getcwd(), prefix="_repo_tmp_") as tmpBaseDir:
- self._write_test_repo(tmpBaseDir, self.paths)
+ with TemporaryDirectory(dir=os.getcwd(), prefix="_repo_tmp_") as tmp_base_dir:
+ self._write_test_repo(tmp_base_dir, self.paths)
- done_repos, pending_repos, unconverted_repos, broken_repos = scan_repositories(tmpBaseDir)
- self.assertListEqual(sorted([repo.split(tmpBaseDir)[-1] for repo in broken_repos]),
+ done_repos, pending_repos, unconverted_repos, broken_repos = scan_repositories(tmp_base_dir)
+ self.assertListEqual(sorted([repo.split(tmp_base_dir)[-1] for repo in broken_repos]),
sorted(["/repo9" + convert_suffix]))
- self.assertListEqual(sorted([repo.split(tmpBaseDir)[-1] for repo in unconverted_repos]),
+ self.assertListEqual(sorted([repo.split(tmp_base_dir)[-1] for repo in unconverted_repos]),
sorted(["/repo1", "/repo2", "/repo3", "/repo4", "/repo6", "/repo8", "/repo9"]))
- self.assertListEqual(sorted([repo.split(tmpBaseDir)[-1] for repo in pending_repos]),
+ self.assertListEqual(sorted([repo.split(tmp_base_dir)[-1] for repo in pending_repos]),
sorted(["/repo2" + convert_suffix, "/repo3" + convert_suffix,
"/repo7" + convert_suffix]))
- self.assertListEqual(sorted([repo.split(tmpBaseDir)[-1] for repo in done_repos]),
+ self.assertListEqual(sorted([repo.split(tmp_base_dir)[-1] for repo in done_repos]),
sorted(["/repo5", "/repo7"]))
@asyncio_test
async def test_check_repos_which_can_be_updated(self) -> None:
- with TemporaryDirectory(dir=os.getcwd(), prefix="_repo_tmp_") as tmpBaseDir:
- self._write_test_repo(tmpBaseDir, self.paths)
- done_repos, pending_repos, unconverted_repos, _ = scan_repositories(tmpBaseDir)
+ with TemporaryDirectory(dir=os.getcwd(), prefix="_repo_tmp_") as tmp_base_dir:
+ self._write_test_repo(tmp_base_dir, self.paths)
+ done_repos, pending_repos, unconverted_repos, _ = scan_repositories(tmp_base_dir)
updatable_repos, existing_pending_repos = check_repos_which_can_be_updated(done_repos + pending_repos + unconverted_repos)
- self.assertListEqual(sorted([repo.split(tmpBaseDir)[-1] for repo in updatable_repos]),
+ self.assertListEqual(sorted([repo.split(tmp_base_dir)[-1] for repo in updatable_repos]),
sorted(["/repo1", "/repo4", "/repo5", "/repo6", "/repo8"]))
- self.assertListEqual(sorted([repo.split(tmpBaseDir)[-1] for repo in existing_pending_repos]),
+ self.assertListEqual(sorted([repo.split(tmp_base_dir)[-1] for repo in existing_pending_repos]),
sorted(["/repo2", "/repo3", "/repo7", "/repo9"]))
@asyncio_test
async def test_swap_repositories_invalid(self) -> None:
- with TemporaryDirectory(dir=os.getcwd(), prefix="_repo_tmp_") as tmpBaseDir:
- self._write_test_repo(tmpBaseDir, self.paths)
- unconverted_repos = scan_repositories(tmpBaseDir)[2]
+ with TemporaryDirectory(dir=os.getcwd(), prefix="_repo_tmp_") as tmp_base_dir:
+ self._write_test_repo(tmp_base_dir, self.paths)
+ unconverted_repos = scan_repositories(tmp_base_dir)[2]
with self.assertRaises(IfwRepoUpdateError):
await create_converted_repositories(repogen="foobar-repogen", repositories_to_migrate=unconverted_repos,
dry_run=True)
@asyncio_test
async def test_swap_repositories_valid(self) -> None:
- with TemporaryDirectory(dir=os.getcwd(), prefix="_repo_tmp_") as tmpBaseDir:
- self._write_test_repo(tmpBaseDir, self.non_migrated_paths)
- unconverted_repos = scan_repositories(tmpBaseDir)[2]
+ with TemporaryDirectory(dir=os.getcwd(), prefix="_repo_tmp_") as tmp_base_dir:
+ self._write_test_repo(tmp_base_dir, self.non_migrated_paths)
+ unconverted_repos = scan_repositories(tmp_base_dir)[2]
successful_conversions, failed_conversions = await create_converted_repositories(repogen="foobar-repogen",
repositories_to_migrate=unconverted_repos,
dry_run=True)
diff --git a/packaging-tools/tests/test_release_repo_updater.py b/packaging-tools/tests/test_release_repo_updater.py
index 520210e17..26aa2244c 100755
--- a/packaging-tools/tests/test_release_repo_updater.py
+++ b/packaging-tools/tests/test_release_repo_updater.py
@@ -104,15 +104,15 @@ def _write_updates_xml(path: str, version: str, release_date: str) -> None:
async def _get_repogen() -> str:
pkgsrv = get_pkg_value("PACKAGE_STORAGE_SERVER_PATH_HTTP")
- ifwTools = (
+ ifw_tools = (
f"{pkgsrv}/archive/ifw/enterprise/unifiedqt/4.3.0/tqtc-installer-framework-Linux-RHEL_7_6-"
"GCC-Linux-RHEL_7_6-X86_64.7z"
)
server = "127.0.0.1"
- serverHome = os.path.expanduser("~")
+ server_home = os.path.expanduser("~")
with TemporaryDirectory(dir=os.getcwd()) as temp_dir:
with cd(temp_dir):
- return await upload_ifw_to_remote(ifwTools, server, serverHome)
+ return await upload_ifw_to_remote(ifw_tools, server, server_home)
@ddt
@@ -140,48 +140,48 @@ class TestReleaseRepoUpdater(unittest.TestCase):
@asyncio_test
async def test_upload_pending_repository_content(self) -> None:
- with TemporaryDirectory(dir=os.getcwd(), prefix="_repo_tmp_") as tmpBaseDir:
- sourceRepo = os.path.join(tmpBaseDir, "repository")
- destinationRepo = os.path.join(tmpBaseDir, "destination_online_repository")
+ with TemporaryDirectory(dir=os.getcwd(), prefix="_repo_tmp_") as tmp_base_dir:
+ source_repo = os.path.join(tmp_base_dir, "repository")
+ destination_repo = os.path.join(tmp_base_dir, "destination_online_repository")
- _write_dummy_file(os.path.join(sourceRepo, "qt.foo.bar1", "meta", "package.xml"))
- _write_dummy_file(os.path.join(sourceRepo, "qt.foo.bar2", "meta", "package.xml"))
- _write_dummy_file(os.path.join(sourceRepo, "Updates.xml"))
+ _write_dummy_file(os.path.join(source_repo, "qt.foo.bar1", "meta", "package.xml"))
+ _write_dummy_file(os.path.join(source_repo, "qt.foo.bar2", "meta", "package.xml"))
+ _write_dummy_file(os.path.join(source_repo, "Updates.xml"))
- upload_pending_repository_content(self.server, sourceRepo, destinationRepo)
- self.assertListEqual(sorted(os.listdir(sourceRepo)), sorted(os.listdir(destinationRepo)))
+ upload_pending_repository_content(self.server, source_repo, destination_repo)
+ self.assertListEqual(sorted(os.listdir(source_repo)), sorted(os.listdir(destination_repo)))
@asyncio_test
async def test_reset_new_remote_repository(self) -> None:
- with TemporaryDirectory(dir=os.getcwd(), prefix="_repo_tmp_") as tmpBaseDir:
- remoteSourceRepoPath = os.path.join(tmpBaseDir, "repository")
- remoteTargetRepoPath = os.path.join(tmpBaseDir, "destination_online_repository")
+ with TemporaryDirectory(dir=os.getcwd(), prefix="_repo_tmp_") as tmp_base_dir:
+ remote_source_repo_path = os.path.join(tmp_base_dir, "repository")
+ remote_target_repo_path = os.path.join(tmp_base_dir, "destination_online_repository")
- _write_dummy_file(os.path.join(remoteSourceRepoPath, "qt.foo.bar1", "meta", "package.xml"))
- _write_dummy_file(os.path.join(remoteSourceRepoPath, "qt.foo.bar2", "meta", "package.xml"))
- _write_dummy_file(os.path.join(remoteSourceRepoPath, "Updates.xml"))
+ _write_dummy_file(os.path.join(remote_source_repo_path, "qt.foo.bar1", "meta", "package.xml"))
+ _write_dummy_file(os.path.join(remote_source_repo_path, "qt.foo.bar2", "meta", "package.xml"))
+ _write_dummy_file(os.path.join(remote_source_repo_path, "Updates.xml"))
- reset_new_remote_repository(self.server, remoteSourceRepoPath, remoteTargetRepoPath)
- self.assertTrue(os.path.isfile(os.path.join(remoteTargetRepoPath, "qt.foo.bar1", "meta", "package.xml")))
- self.assertTrue(os.path.isfile(os.path.join(remoteTargetRepoPath, "qt.foo.bar2", "meta", "package.xml")))
- self.assertTrue(os.path.isfile(os.path.join(remoteTargetRepoPath, "Updates.xml")))
+ reset_new_remote_repository(self.server, remote_source_repo_path, remote_target_repo_path)
+ self.assertTrue(os.path.isfile(os.path.join(remote_target_repo_path, "qt.foo.bar1", "meta", "package.xml")))
+ self.assertTrue(os.path.isfile(os.path.join(remote_target_repo_path, "qt.foo.bar2", "meta", "package.xml")))
+ self.assertTrue(os.path.isfile(os.path.join(remote_target_repo_path, "Updates.xml")))
# existing repository should automatically be moved as backup
- reset_new_remote_repository(self.server, remoteSourceRepoPath, remoteTargetRepoPath)
- self.assertTrue(os.path.exists(remoteTargetRepoPath + "____snapshot_backup"))
+ reset_new_remote_repository(self.server, remote_source_repo_path, remote_target_repo_path)
+ self.assertTrue(os.path.exists(remote_target_repo_path + "____snapshot_backup"))
@asyncio_test
async def test_create_remote_repository_backup(self) -> None:
- with TemporaryDirectory(dir=os.getcwd(), prefix="_repo_tmp_") as tmpBaseDir:
- remoteSourceRepoPath = os.path.join(tmpBaseDir, "repository")
+ with TemporaryDirectory(dir=os.getcwd(), prefix="_repo_tmp_") as tmp_base_dir:
+ remote_source_repo_path = os.path.join(tmp_base_dir, "repository")
- _write_dummy_file(os.path.join(remoteSourceRepoPath, "qt.foo.bar1", "meta", "package.xml"))
- _write_dummy_file(os.path.join(remoteSourceRepoPath, "qt.foo.bar2", "meta", "package.xml"))
- _write_dummy_file(os.path.join(remoteSourceRepoPath, "Updates.xml"))
+ _write_dummy_file(os.path.join(remote_source_repo_path, "qt.foo.bar1", "meta", "package.xml"))
+ _write_dummy_file(os.path.join(remote_source_repo_path, "qt.foo.bar2", "meta", "package.xml"))
+ _write_dummy_file(os.path.join(remote_source_repo_path, "Updates.xml"))
- remoteRepoBackupPath = create_remote_repository_backup(self.server, remoteSourceRepoPath)
- self.assertFalse(os.path.exists(remoteSourceRepoPath))
- self.assertListEqual(sorted(["Updates.xml", "qt.foo.bar1", "qt.foo.bar2"]), sorted(os.listdir(remoteRepoBackupPath)))
+ remote_repo_backup_path = create_remote_repository_backup(self.server, remote_source_repo_path)
+ self.assertFalse(os.path.exists(remote_source_repo_path))
+ self.assertListEqual(sorted(["Updates.xml", "qt.foo.bar1", "qt.foo.bar2"]), sorted(os.listdir(remote_repo_backup_path)))
@asyncio_test_parallel_data((True, True), (False, False), ("yes", True), ("1", True), ("y", True),
("false", False), ("n", False), ("0", False), ("no", False))
@@ -207,10 +207,10 @@ class TestReleaseRepoUpdater(unittest.TestCase):
@asyncio_test
async def test_ensure_ext_repo_paths(self) -> None:
- with TemporaryDirectory(dir=os.getcwd(), prefix="_repo_tmp_") as tmpBaseDir:
- expectedRepo = os.path.join(tmpBaseDir, "some", "test", "path")
- await ensure_ext_repo_paths(self.server, self.server, expectedRepo)
- self.assertTrue(os.path.isdir(expectedRepo))
+ with TemporaryDirectory(dir=os.getcwd(), prefix="_repo_tmp_") as tmp_base_dir:
+ expected_repo = os.path.join(tmp_base_dir, "some", "test", "path")
+ await ensure_ext_repo_paths(self.server, self.server, expected_repo)
+ self.assertTrue(os.path.isdir(expected_repo))
@asyncio_test_parallel_data(
("user@server.com:/foo/bar"),
diff --git a/packaging-tools/tests/test_runCommand.py b/packaging-tools/tests/test_runCommand.py
index b9c490cba..20081f25c 100644
--- a/packaging-tools/tests/test_runCommand.py
+++ b/packaging-tools/tests/test_runCommand.py
@@ -67,8 +67,8 @@ def crash():
def print_lines(count):
- for lineNumber in range(count):
- print(f"{lineNumber} printed line")
+ for line_number in range(count):
+ print(f"{line_number} printed line")
def use_run_command(test_arguments, *arguments):
@@ -80,17 +80,17 @@ class TestRunCommand(unittest.TestCase):
self.assertEqual(use_run_command("--print_lines 10", os.getcwd()), 0)
def test_crash(self):
- with self.assertRaises(Exception) as contextManager:
+ with self.assertRaises(Exception) as context_manager:
use_run_command("--print_lines 10 --crash", os.getcwd())
- self.assertIsNotNone(contextManager)
- self.assertIsNotNone(contextManager.exception)
- expectedMessageStart = "Different exit code then expected"
- messageStart = str(contextManager.exception)[:len(expectedMessageStart)]
- self.assertEqual(expectedMessageStart, messageStart)
+ self.assertIsNotNone(context_manager)
+ self.assertIsNotNone(context_manager.exception)
+ expected_message_start = "Different exit code then expected"
+ message_start = str(context_manager.exception)[:len(expected_message_start)]
+ self.assertEqual(expected_message_start, message_start)
def test_crash_only_error_case_output(self):
- with self.assertRaises(Exception) as contextManager:
+ with self.assertRaises(Exception) as context_manager:
use_run_command(
"--print_lines 10 --crash", os.getcwd(),
# extra_environment=
@@ -100,19 +100,19 @@ class TestRunCommand(unittest.TestCase):
# expected_exit_codes=
[0]
)
- self.assertIsNotNone(contextManager)
- self.assertIsNotNone(contextManager.exception)
- expectedMessageStart = "Different exit code then expected"
- messageStart = str(contextManager.exception)[:len(expectedMessageStart)]
- self.assertEqual(expectedMessageStart, messageStart)
- expectedMessageEnd = "9 printed line"
- messageEnd = str(contextManager.exception).splitlines()[-1]
- self.assertTrue(messageEnd.__contains__(expectedMessageEnd))
+ self.assertIsNotNone(context_manager)
+ self.assertIsNotNone(context_manager.exception)
+ expected_message_start = "Different exit code then expected"
+ message_start = str(context_manager.exception)[:len(expected_message_start)]
+ self.assertEqual(expected_message_start, message_start)
+ expected_message_end = "9 printed line"
+ message_end = str(context_manager.exception).splitlines()[-1]
+ self.assertTrue(message_end.__contains__(expected_message_end))
def test_different_exit_code_only_error_case_output(self):
self.assertEqual(
use_run_command(
- "--print_lines 10 --exitCode 5", os.getcwd(),
+ "--print_lines 10 --exit_code 5", os.getcwd(),
# extra_environment=
None,
# only_error_case_output=
@@ -123,51 +123,51 @@ class TestRunCommand(unittest.TestCase):
)
def test_with_threadedwork(self):
- currentMethodName = sys._getframe().f_code.co_name
- testWork = ThreadedWork(f"{currentMethodName} - run some command threaded")
- taskStringList = []
- taskStringList.append("--sleep 1 --print_lines 10")
- taskStringList.append("--sleep 2 --print_lines 30")
- taskStringList.append("--sleep 1 --print_lines 40")
- taskStringList.append("--sleep 2 --print_lines 50")
- taskStringList.append("--sleep 1 --print_lines 100")
- for taskString in taskStringList:
- testWork.add_task(taskString, use_run_command, taskString, os.getcwd())
- testWork.run()
+ current_method_name = sys._getframe().f_code.co_name
+ test_work = ThreadedWork(f"{current_method_name} - run some command threaded")
+ task_string_list = []
+ task_string_list.append("--sleep 1 --print_lines 10")
+ task_string_list.append("--sleep 2 --print_lines 30")
+ task_string_list.append("--sleep 1 --print_lines 40")
+ task_string_list.append("--sleep 2 --print_lines 50")
+ task_string_list.append("--sleep 1 --print_lines 100")
+ for task_string in task_string_list:
+ test_work.add_task(task_string, use_run_command, task_string, os.getcwd())
+ test_work.run()
def test_with_threadedwork_unexpected_exit_code(self):
- currentMethodName = sys._getframe().f_code.co_name
- testWork = ThreadedWork(f"{currentMethodName} - run some command threaded")
+ current_method_name = sys._getframe().f_code.co_name
+ test_work = ThreadedWork(f"{current_method_name} - run some command threaded")
# this exchanges the current os._exit(-1) implementation only for this test case
- separatorLine = f"{os.linesep}>>>>>>>>>>>>>>>>>>>>>>>>>>>>>{os.linesep}"
- separatorText = "the complete python application would stop here"
- testWork.set_exit_fail_function(sys.__stdout__.write, ''.join([separatorLine, separatorText, separatorLine]))
- taskStringList = []
- taskStringList.append("--sleep 1 --print_lines 10")
- taskStringList.append("--sleep 2 --print_lines 30")
- taskStringList.append("--sleep 1 --print_lines 40")
- taskStringList.append("--sleep 2 --print_lines 3 --exitCode 5")
- taskStringList.append("--sleep 1 --print_lines 100")
- for taskString in taskStringList:
- testWork.add_task(taskString, use_run_command, taskString, os.getcwd())
- testWork.run()
+ separator_line = f"{os.linesep}>>>>>>>>>>>>>>>>>>>>>>>>>>>>>{os.linesep}"
+ separator_text = "the complete python application would stop here"
+ test_work.set_exit_fail_function(sys.__stdout__.write, ''.join([separator_line, separator_text, separator_line]))
+ task_string_list = []
+ task_string_list.append("--sleep 1 --print_lines 10")
+ task_string_list.append("--sleep 2 --print_lines 30")
+ task_string_list.append("--sleep 1 --print_lines 40")
+ task_string_list.append("--sleep 2 --print_lines 3 --exit_code 5")
+ task_string_list.append("--sleep 1 --print_lines 100")
+ for task_string in task_string_list:
+ test_work.add_task(task_string, use_run_command, task_string, os.getcwd())
+ test_work.run()
def test_with_threadedwork_crash(self):
- currentMethodName = sys._getframe().f_code.co_name
- testWork = ThreadedWork(f"{currentMethodName} - run some command threaded")
+ current_method_name = sys._getframe().f_code.co_name
+ test_work = ThreadedWork(f"{current_method_name} - run some command threaded")
# this exchanges the current os._exit(-1) implementation only for this test case
- separatorLine = f"{os.linesep}>>>>>>>>>>>>>>>>>>>>>>>>>>>>>{os.linesep}"
- separatorText = "the complete python application would stop here"
- testWork.set_exit_fail_function(sys.__stdout__.write, ''.join([separatorLine, separatorText, separatorLine]))
- taskStringList = []
- taskStringList.append("--sleep 1 --print_lines 10")
- taskStringList.append("--sleep 2 --print_lines 30")
- taskStringList.append("--sleep 1 --print_lines 40")
- taskStringList.append("--sleep 2 --print_lines 3 --crash")
- taskStringList.append("--sleep 1 --print_lines 100")
- for taskString in taskStringList:
- testWork.add_task(taskString, use_run_command, taskString, os.getcwd())
- testWork.run()
+ separator_line = f"{os.linesep}>>>>>>>>>>>>>>>>>>>>>>>>>>>>>{os.linesep}"
+ separator_text = "the complete python application would stop here"
+ test_work.set_exit_fail_function(sys.__stdout__.write, ''.join([separator_line, separator_text, separator_line]))
+ task_string_list = []
+ task_string_list.append("--sleep 1 --print_lines 10")
+ task_string_list.append("--sleep 2 --print_lines 30")
+ task_string_list.append("--sleep 1 --print_lines 40")
+ task_string_list.append("--sleep 2 --print_lines 3 --crash")
+ task_string_list.append("--sleep 1 --print_lines 100")
+ for task_string in task_string_list:
+ test_work.add_task(task_string, use_run_command, task_string, os.getcwd())
+ test_work.run()
if __name__ == '__main__':
@@ -179,7 +179,7 @@ if __name__ == '__main__':
parser.add_argument('--sleep', type=int)
parser.add_argument('--print_lines', type=int)
parser.add_argument('--crash', action='store_true', default=False)
- parser.add_argument('--exitCode', type=int)
+ parser.add_argument('--exit_code', type=int)
parser.add_argument('--testMethod')
caller_arguments = parser.parse_args()
if caller_arguments.sleep:
@@ -190,8 +190,8 @@ if __name__ == '__main__':
sys.__stdout__.flush()
sys.__stderr__.flush()
crash()
- if caller_arguments.exitCode:
- os._exit(caller_arguments.exitCode)
+ if caller_arguments.exit_code:
+ os._exit(caller_arguments.exit_code)
if caller_arguments.testMethod:
# python test_runCommand.py --testMethod test_crash_only_error_case_output
TestRunCommand(methodName=caller_arguments.testMethod).debug()
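Note that renaming --exitCode to --exit_code above changes the script's command-line interface, not just the attribute name. Had the old spelling needed to keep working, argparse's dest parameter could decouple the flag from the attribute (a hedged alternative, not what this change does):

# both spellings accepted, attribute stays snake_case
parser.add_argument('--exitCode', '--exit_code', dest='exit_code', type=int)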
diff --git a/packaging-tools/tests/testhelpers.py b/packaging-tools/tests/testhelpers.py
index 438325ac1..e9f0765a8 100644
--- a/packaging-tools/tests/testhelpers.py
+++ b/packaging-tools/tests/testhelpers.py
@@ -65,9 +65,9 @@ def asyncio_test_parallel_data(*data_args, unpack=True):
def is_internal_file_server_reachable() -> bool:
try:
- packageServer = get_pkg_value("PACKAGE_STORAGE_SERVER")
+ package_server = get_pkg_value("PACKAGE_STORAGE_SERVER")
ping = sh.which("ping")
- ret = subprocess.run(args=[ping, "-c", "1", packageServer], timeout=5, stdout=PIPE, stderr=PIPE)
+ ret = subprocess.run(args=[ping, "-c", "1", package_server], timeout=5, stdout=PIPE, stderr=PIPE)
return ret.returncode == 0
except Exception:
pass
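A compact variant of the reachability probe above, returning an explicit False on every failure path and guarding the which() lookup; note that ping's '-c' count flag is the Unix spelling (Windows uses '-n'). Sketch only:

import subprocess
from shutil import which
from subprocess import PIPE

def host_reachable(host: str, timeout: int = 5) -> bool:
    try:
        ping = which("ping")
        if not ping:
            return False
        ret = subprocess.run([ping, "-c", "1", host], timeout=timeout, stdout=PIPE, stderr=PIPE)
        return ret.returncode == 0
    except Exception:
        return False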
diff --git a/packaging-tools/threadedwork.py b/packaging-tools/threadedwork.py
index ed2c78c29..f5116851e 100644
--- a/packaging-tools/threadedwork.py
+++ b/packaging-tools/threadedwork.py
@@ -48,29 +48,29 @@ output_format_string = ''
class StdOutHook:
def write(self, text):
# the general print method sends a line break; just ignore that
- strippedText = text.strip()
- if strippedText == "":
+ stripped_text = text.strip()
+ if stripped_text == "":
return
global output_states
global output_format_string
- localProgressIndicator = None
- if len(strippedText) > 6:
- localProgressIndicator = next_progress_indicator()
+ local_progress_indicator = None
+ if len(stripped_text) > 6:
+ local_progress_indicator = next_progress_indicator()
else:
- localProgressIndicator = strippedText
+ local_progress_indicator = stripped_text
- newValue = f"{thread_data.taskNumber}: {localProgressIndicator}"
+ new_value = f"{thread_data.taskNumber}: {local_progress_indicator}"
with output_lock:
- if newValue != output_states[thread_data.workerThreadId]:
- oldOutput = "\r" + output_format_string.format(*output_states).strip()
- output_states[thread_data.workerThreadId] = newValue
- newOutput = "\r" + output_format_string.format(*output_states).strip()
+ if new_value != output_states[thread_data.workerThreadId]:
+ old_output = "\r" + output_format_string.format(*output_states).strip()
+ output_states[thread_data.workerThreadId] = new_value
+ new_output = "\r" + output_format_string.format(*output_states).strip()
# cleanup old output if the new line is shorter
- cleanerString = ""
- if len(oldOutput) > len(newOutput):
- cleanerString = " " * (len(oldOutput) - len(newOutput))
+ cleaner_string = ""
+ if len(old_output) > len(new_output):
+ cleaner_string = " " * (len(old_output) - len(new_output))
- sys.__stdout__.write(newOutput + cleanerString)
+ sys.__stdout__.write(new_output + cleaner_string)
def flush(self):
sys.__stdout__.flush()
@@ -138,27 +138,27 @@ class Task():
self.description = description
self.listOfFunctions = []
if function:
- firstFunction = TaskFunction(function, *arguments)
- self.listOfFunctions.append(firstFunction)
+ first_function = TaskFunction(function, *arguments)
+ self.listOfFunctions.append(first_function)
# exit the whole program with code -1; sys.exit would only end the current thread
self.exitFunction = os._exit
self.exitFunctionArguments = [-1]
def add_function(self, function, *arguments):
- aFunction = TaskFunction(function, *arguments)
- self.listOfFunctions.append(aFunction)
+ a_function = TaskFunction(function, *arguments)
+ self.listOfFunctions.append(a_function)
def do(self):
try:
- for taskFunction in self.listOfFunctions:
- taskFunction.function(*(taskFunction.arguments))
+ for task_function in self.listOfFunctions:
+ task_function.function(*(task_function.arguments))
except Exception:
print("FAIL")
with output_lock:
# there is no clean exit, so we add a linesep here
sys.__stdout__.write(os.linesep)
sys.__stdout__.flush()
- sys.__stderr__.write(format(taskFunction))
+ sys.__stderr__.write(format(task_function))
sys.__stderr__.write(os.linesep)
sys.__stderr__.write(format_exc())
sys.__stderr__.flush()
@@ -199,17 +199,17 @@ class ThreadedWork():
if max_threads > 1:
enable_threaded_print(True, max_threads)
- listOfConsumers = []
+ list_of_consumers = []
for i in range(max_threads):
# every Consumer needs a stop/none item
self.queue.put(None)
- newConsumer = Consumer(self.queue, i)
- listOfConsumers.append(newConsumer)
- newConsumer.daemon = True
- newConsumer.start()
+ new_consumer = Consumer(self.queue, i)
+ list_of_consumers.append(new_consumer)
+ new_consumer.daemon = True
+ new_consumer.start()
# block until everything is done
- for consumer in listOfConsumers:
+ for consumer in list_of_consumers:
while consumer.is_alive():
try:
# wait 1 second, then go back and ask if thread is still alive