From 2afcf8e7545c681e412e5b6d04ab8867e5edd5c7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cristi=C3=A1n=20Maureira-Fredes?= Date: Thu, 13 Oct 2022 12:36:23 +0200 Subject: pathlib: migrate build_scripts away from os.path There is a usage of os.path.relpath that cannot be migrated to pathlib, which remains the only usage of os.path Task-number: PYSIDE-2080 Change-Id: Iac781e9c9324fb8b9d3559b4225912d56782072a Reviewed-by: Christian Tismer --- build_scripts/build_info_collector.py | 90 +++++------ build_scripts/config.py | 8 +- build_scripts/main.py | 186 ++++++++++++----------- build_scripts/options.py | 34 +++-- build_scripts/platforms/linux.py | 7 +- build_scripts/platforms/macos.py | 26 ++-- build_scripts/platforms/unix.py | 2 +- build_scripts/platforms/windows_desktop.py | 29 ++-- build_scripts/qtinfo.py | 2 +- build_scripts/setup_runner.py | 3 +- build_scripts/utils.py | 170 ++++++++++++--------- build_scripts/wheel_override.py | 3 +- build_scripts/wheel_utils.py | 5 +- coin/instructions/execute_test_instructions.yaml | 2 +- coin_build_instructions.py | 6 +- coin_test_instructions.py | 6 +- 16 files changed, 314 insertions(+), 265 deletions(-) diff --git a/build_scripts/build_info_collector.py b/build_scripts/build_info_collector.py index 17e307e2f..4f2df94b6 100644 --- a/build_scripts/build_info_collector.py +++ b/build_scripts/build_info_collector.py @@ -5,6 +5,7 @@ import os import platform import sys import sysconfig +from pathlib import Path from sysconfig import get_config_var from setuptools.errors import SetupError @@ -21,7 +22,7 @@ def prefix(): virtual_env_name = os.environ.get('VIRTUAL_ENV', None) has_virtual_env = False if virtual_env_name is not None: - name = os.path.basename(virtual_env_name) + name = Path(virtual_env_name).name has_virtual_env = True else: name = "qfp" @@ -31,7 +32,7 @@ def prefix(): name += "p" if OPTION["LIMITED_API"] == "yes": name += "a" - return name, has_virtual_env + return Path(name), has_virtual_env def is_debug_python(): @@ -41,33 +42,33 @@ def _get_py_library_win(build_type, py_version, py_prefix, py_libdir, py_include_dir): """Helper for finding the Python library on Windows""" - if py_include_dir is None or not os.path.exists(py_include_dir): - py_include_dir = os.path.join(py_prefix, "include") - if py_libdir is None or not os.path.exists(py_libdir): + if py_include_dir is None or not Path(py_include_dir).exists(): + py_include_dir = Path(py_prefix) / "include" + if py_libdir is None or not Path(py_libdir).exists(): # For virtual environments on Windows, the py_prefix will contain a # path pointing to it, instead of the system Python installation path. # Since INCLUDEPY contains a path to the system location, we use the # same base directory to define the py_libdir variable.
- py_libdir = os.path.join(os.path.dirname(py_include_dir), "libs") - if not os.path.isdir(py_libdir): + py_libdir = Path(py_include_dir).parent / "libs" + if not py_libdir.is_dir(): raise SetupError("Failed to locate the 'libs' directory") dbg_postfix = "_d" if build_type == "Debug" else "" if OPTION["MAKESPEC"] == "mingw": static_lib_name = f"libpython{py_version.replace('.', '')}{dbg_postfix}.a" - return os.path.join(py_libdir, static_lib_name) + return Path(py_libdir) / static_lib_name v = py_version.replace(".", "") python_lib_name = f"python{v}{dbg_postfix}.lib" - return os.path.join(py_libdir, python_lib_name) + return Path(py_libdir) / python_lib_name def _get_py_library_unix(build_type, py_version, py_prefix, py_libdir, py_include_dir): """Helper for finding the Python library on UNIX""" - if py_libdir is None or not os.path.exists(py_libdir): - py_libdir = os.path.join(py_prefix, "lib") - if py_include_dir is None or not os.path.exists(py_include_dir): + if py_libdir is None or not Path(py_libdir).exists(): + py_libdir = Path(py_prefix) / "lib" + if py_include_dir is None or not Path(py_include_dir).exists(): directory = f"include/python{py_version}" - py_include_dir = os.path.join(py_prefix, directory) + py_include_dir = Path(py_prefix) / directory lib_exts = ['.so'] if sys.platform == 'darwin': lib_exts.append('.dylib') @@ -81,8 +82,8 @@ def _get_py_library_unix(build_type, py_version, py_prefix, py_libdir, libs_tried = [] for lib_ext in lib_exts: lib_name = f"libpython{py_version}{lib_suff}{lib_ext}" - py_library = os.path.join(py_libdir, lib_name) - if os.path.exists(py_library): + py_library = Path(py_libdir) / lib_name + if py_library.exists(): return py_library libs_tried.append(py_library) @@ -90,12 +91,12 @@ def _get_py_library_unix(build_type, py_version, py_prefix, py_libdir, # suffix. 
py_multiarch = get_config_var("MULTIARCH") if py_multiarch: - try_py_libdir = os.path.join(py_libdir, py_multiarch) + try_py_libdir = Path(py_libdir) / py_multiarch libs_tried = [] for lib_ext in lib_exts: lib_name = f"libpython{py_version}{lib_suff}{lib_ext}" - py_library = os.path.join(try_py_libdir, lib_name) - if os.path.exists(py_library): + py_library = try_py_libdir / lib_name + if py_library.exists(): return py_library libs_tried.append(py_library) @@ -103,11 +104,11 @@ def _get_py_library_unix(build_type, py_version, py_prefix, py_libdir, if hasattr(sys, "pypy_version_info"): vi = sys.version_info[:2] version_quirk = ".".join(map(str, vi)) if vi >= (3, 9) else "3" - pypy_libdir = os.path.join(os.path.dirname(py_libdir), "bin") + pypy_libdir = Path(py_libdir).parent / "bin" for lib_ext in lib_exts: lib_name = f"libpypy{version_quirk}-c{lib_ext}" - pypy_library = os.path.join(pypy_libdir, lib_name) - if os.path.exists(pypy_library): + pypy_library = pypy_libdir / lib_name + if pypy_library.exists(): return pypy_library libs_tried.append(pypy_library) _libs_tried = ', '.join(libs_tried) @@ -122,7 +123,7 @@ def get_py_library(build_type, py_version, py_prefix, py_libdir, py_include_dir) else: py_library = _get_py_library_unix(build_type, py_version, py_prefix, py_libdir, py_include_dir) - if py_library.endswith('.a'): + if str(py_library).endswith('.a'): # Python was compiled as a static library log.error(f"Failed to locate a dynamic Python library, using {py_library}") return py_library @@ -142,7 +143,7 @@ class BuildInfoCollectorMixin(object): pass def collect_and_assign(self): - script_dir = os.getcwd() + script_dir = Path.cwd() # build_base is not set during install command, so we default to # the 'build command's build_base value ourselves. @@ -151,12 +152,12 @@ class BuildInfoCollectorMixin(object): self.build_base = "build" build_base = self.build_base - sources_dir = os.path.join(script_dir, "sources") + sources_dir = script_dir / "sources" if self.is_cross_compile: - config_tests_dir = os.path.join(script_dir, build_base, "config.tests") - python_target_info_dir = os.path.join(sources_dir, "shiboken6", "config.tests", - "target_python_info") + config_tests_dir = script_dir / build_base / "config.tests" + python_target_info_dir = (sources_dir / "shiboken6" / "config.tests" + / "target_python_info") cmake_cache_args = [] if self.python_target_path: @@ -192,13 +193,14 @@ class BuildInfoCollectorMixin(object): # We use 'base' instead (although, platbase points to the # same location) py_prefix = get_config_var("base") - if not py_prefix or not os.path.exists(py_prefix): + if not py_prefix or not Path(py_prefix).exists(): py_prefix = sys.prefix self.py_prefix = py_prefix + py_prefix = Path(py_prefix) if sys.platform == "win32": - py_scripts_dir = os.path.join(py_prefix, "Scripts") + py_scripts_dir = py_prefix / "Scripts" else: - py_scripts_dir = os.path.join(py_prefix, "bin") + py_scripts_dir = py_prefix / "bin" self.py_scripts_dir = py_scripts_dir else: # We don't look for an interpreter when cross-compiling. 
@@ -221,8 +223,8 @@ class BuildInfoCollectorMixin(object): py_prefix = python_info['prefix'] self.py_prefix = py_prefix - py_scripts_dir = os.path.join(py_prefix, 'bin') - if os.path.exists(py_scripts_dir): + py_scripts_dir = py_prefix / 'bin' + if py_scripts_dir.exists(): self.py_scripts_dir = py_scripts_dir else: self.py_scripts_dir = None @@ -264,9 +266,9 @@ class BuildInfoCollectorMixin(object): elif not has_virtual_env: build_name += f"-{self.build_classifiers}" - common_prefix_dir = os.path.join(script_dir, build_base) - build_dir = os.path.join(common_prefix_dir, build_name, "build") - install_dir = os.path.join(common_prefix_dir, build_name, "install") + common_prefix_dir = script_dir / build_base + build_dir = common_prefix_dir / build_name / "build" + install_dir = common_prefix_dir / build_name / "install" # Change the setuptools build_lib dir to be under the same # directory where the cmake build and install dirs are so @@ -275,15 +277,15 @@ class BuildInfoCollectorMixin(object): # Replaces # build/lib.macosx-10.14-x86_64-3.7' with # build/{venv_prefix}/package' - setup_tools_build_lib_dir = os.path.join(common_prefix_dir, build_name, "package") + setup_tools_build_lib_dir = common_prefix_dir / build_name / "package" self.build_lib = setup_tools_build_lib_dir - self.script_dir = script_dir - self.sources_dir = sources_dir - self.build_dir = build_dir - self.install_dir = install_dir - self.py_executable = py_executable - self.py_include_dir = py_include_dir + self.script_dir = Path(script_dir) + self.sources_dir = Path(sources_dir) + self.build_dir = Path(build_dir) + self.install_dir = Path(install_dir) + self.py_executable = Path(py_executable) + self.py_include_dir = Path(py_include_dir) if not self.is_cross_compile: self.py_library = get_py_library(build_type, py_version, py_prefix, @@ -293,7 +295,7 @@ class BuildInfoCollectorMixin(object): if self.is_cross_compile: site_packages_no_prefix = self.python_target_info['python_info']['site_packages_dir'] - self.site_packages_dir = os.path.join(install_dir, site_packages_no_prefix) + self.site_packages_dir = install_dir / site_packages_no_prefix else: # Setuptools doesn't have an equivalent of a get_python_lib with a # prefix, so we build the path manually: @@ -306,4 +308,4 @@ class BuildInfoCollectorMixin(object): def post_collect_and_assign(self): # self.build_lib is only available after the base class # finalize_options is called. - self.st_build_dir = os.path.join(self.script_dir, self.build_lib) + self.st_build_dir = self.script_dir / self.build_lib diff --git a/build_scripts/config.py b/build_scripts/config.py index 285739456..11967ba06 100644 --- a/build_scripts/config.py +++ b/build_scripts/config.py @@ -4,6 +4,8 @@ import os from .log import log +from pathlib import Path + from .versions import PYSIDE, PYSIDE_MODULE, SHIBOKEN @@ -97,7 +99,7 @@ class Config(object): else: self.build_type = self._build_type_all - self.setup_script_dir = setup_script_dir + self.setup_script_dir = Path(setup_script_dir) self.cmake_toolchain_file = cmake_toolchain_file @@ -222,7 +224,7 @@ class Config(object): content = '' changes = '' try: - with open(os.path.join(self.setup_script_dir, readme_filename)) as f: + with open(self.setup_script_dir / readme_filename) as f: readme = f.read() except Exception as e: log.error(f"Couldn't read contents of {readme_filename}. 
{e}") @@ -233,7 +235,7 @@ class Config(object): include_changes = False if include_changes: try: - with open(os.path.join(self.setup_script_dir, changes_filename)) as f: + with open(self.setup_script_dir / changes_filename) as f: changes = f.read() except Exception as e: log.error(f"Couldn't read contents of {changes_filename}. {e}") diff --git a/build_scripts/main.py b/build_scripts/main.py index f2bc13241..f6fc73a24 100644 --- a/build_scripts/main.py +++ b/build_scripts/main.py @@ -10,7 +10,7 @@ import sysconfig import time from packaging.version import parse as parse_version from pathlib import Path -from shutil import which, copytree +from shutil import copytree from textwrap import dedent # PYSIDE-1760: Pre-load setuptools modules early to avoid racing conditions. @@ -42,15 +42,15 @@ from .utils import (copydir, copyfile, detect_clang, filter_match, get_numpy_location, get_python_dict, init_msvc_env, linux_fix_rpaths_for_library, macos_fix_rpaths_for_library, platform_cmake_options, remove_tree, run_process, - run_process_output, update_env_path) + run_process_output, update_env_path, which) from .versions import PYSIDE, PYSIDE_MODULE, SHIBOKEN from .wheel_override import get_bdist_wheel_override, wheel_module_exists from .wheel_utils import (get_package_timestamp, get_package_version, macos_plat_name, macos_pyside_min_deployment_target) -setup_script_dir = os.getcwd() -build_scripts_dir = os.path.join(setup_script_dir, 'build_scripts') -setup_py_path = os.path.join(setup_script_dir, "setup.py") +setup_script_dir = Path.cwd() +build_scripts_dir = setup_script_dir / 'build_scripts' +setup_py_path = setup_script_dir / "setup.py" start_time = int(time.time()) @@ -78,15 +78,15 @@ def _get_make(platform_arch, build_type): if makespec == "make": return ("make", "Unix Makefiles") if makespec == "msvc": - nmake_path = which("nmake") - if nmake_path is None or not os.path.exists(nmake_path): + nmake_path = Path(which("nmake")) + if nmake_path is None or not nmake_path.exists(): log.info("nmake not found. 
Trying to initialize the MSVC env...") init_msvc_env(platform_arch, build_type) - nmake_path = which("nmake") - if not nmake_path or not os.path.exists(nmake_path): + nmake_path = Path(which("nmake")) + if not nmake_path or not nmake_path.exists(): raise SetupError('"nmake" could not be found.') if not OPTION["NO_JOM"]: - jom_path = which("jom") + jom_path = Path(which("jom")) if jom_path: log.info(f"jom was found in {jom_path}") return (jom_path, "NMake Makefiles JOM") @@ -96,18 +96,18 @@ def _get_make(platform_arch, build_type): raise SetupError(msg) return (nmake_path, "NMake Makefiles") if makespec == "mingw": - return ("mingw32-make", "mingw32-make") + return (Path("mingw32-make"), "mingw32-make") if makespec == "ninja": - return ("ninja", "Ninja") + return (Path("ninja"), "Ninja") raise SetupError(f'Invalid option --make-spec "{makespec}".') def get_make(platform_arch, build_type): """Retrieve the make command and CMake generator name""" (make_path, make_generator) = _get_make(platform_arch, build_type) - if not os.path.isabs(make_path): - found_path = which(make_path) - if not found_path or not os.path.exists(found_path): + if not make_path.is_absolute(): + found_path = Path(which(make_path)) + if not found_path or not found_path.exists(): m = (f"You need the program '{make_path}' on your system path to " f"compile {PYSIDE_MODULE}.") raise SetupError(m) @@ -161,8 +161,8 @@ def prepare_build(): if install_prefix.endswith("qtbase"): qt_src_dir = install_prefix else: # SDK: Use 'Src' directory - maybe_qt_src_dir = os.path.join(os.path.dirname(install_prefix), 'Src', 'qtbase') - if os.path.exists(maybe_qt_src_dir): + maybe_qt_src_dir = Path(install_prefix).parent / 'Src' / 'qtbase' + if maybe_qt_src_dir.exists(): qt_src_dir = maybe_qt_src_dir @@ -255,9 +255,9 @@ class PysideInstallLib(_install_lib): or into build/wheel when command is 'bdist_wheel'. """ - if os.path.isdir(self.build_dir): + if self.build_dir.is_dir(): # Using our own copydir makes sure to preserve symlinks. - outfiles = copydir(os.path.abspath(self.build_dir), os.path.abspath(self.install_dir)) + outfiles = copydir(Path(self.build_dir).resolve(), Path(self.install_dir).resolve()) else: self.warn(f"'{self.build_dir}' does not exist -- no Python modules to install") return @@ -351,8 +351,8 @@ class PysideBuild(_build, CommandMixin, BuildInfoCollectorMixin): and parse_version(self.qtinfo.version) >= parse_version("5.7.0")): clang_dir, clang_source = detect_clang() if clang_dir: - clangBinDir = os.path.join(clang_dir, 'bin') - if clangBinDir not in os.environ.get('PATH'): + clangBinDir = clang_dir / 'bin' + if str(clangBinDir) not in os.environ.get('PATH'): log.info(f"Adding {clangBinDir} as detected by {clang_source} to PATH") additional_paths.append(clangBinDir) else: @@ -368,18 +368,18 @@ class PysideBuild(_build, CommandMixin, BuildInfoCollectorMixin): # Save the shiboken build dir path for clang deployment # purposes. 
- self.shiboken_build_dir = os.path.join(self.build_dir, SHIBOKEN) + self.shiboken_build_dir = self.build_dir / SHIBOKEN self.log_pre_build_info() # Prepare folders - if not os.path.exists(self.sources_dir): + if not self.sources_dir.exists(): log.info(f"Creating sources folder {self.sources_dir}...") os.makedirs(self.sources_dir) - if not os.path.exists(self.build_dir): + if not self.build_dir.exists(): log.info(f"Creating build folder {self.build_dir}...") os.makedirs(self.build_dir) - if not os.path.exists(self.install_dir): + if not self.install_dir.exists(): log.info(f"Creating install folder {self.install_dir}...") os.makedirs(self.install_dir) @@ -400,10 +400,10 @@ class PysideBuild(_build, CommandMixin, BuildInfoCollectorMixin): # we record the latest successful build and note the # build directory for supporting the tests. timestamp = time.strftime('%Y-%m-%d_%H%M%S') - build_history = os.path.join(setup_script_dir, 'build_history') - unique_dir = os.path.join(build_history, timestamp) - os.makedirs(unique_dir) - fpath = os.path.join(unique_dir, 'build_dir.txt') + build_history = setup_script_dir / 'build_history' + unique_dir = build_history / timestamp + unique_dir.mkdir(parents=True) + fpath = unique_dir / 'build_dir.txt' with open(fpath, 'w') as f: print(self.build_dir, file=f) print(self.build_classifiers, file=f) @@ -437,7 +437,7 @@ class PysideBuild(_build, CommandMixin, BuildInfoCollectorMixin): if _project is not None: if not _wheel_path.exists(): - _wheel_path.mkdir() + _wheel_path.mkdir(parents=True) _src = Path(_path / _project) _dst = Path(_wheel_path / _project) # Remove the directory in case it exists. @@ -524,12 +524,13 @@ class PysideBuild(_build, CommandMixin, BuildInfoCollectorMixin): return self._patchelf_path = which('patchelf') if self._patchelf_path: - if not os.path.isabs(self._patchelf_path): - self._patchelf_path = os.path.join(os.getcwd(), self._patchelf_path) + self._patchelf_path = Path(self._patchelf_path) + if not self._patchelf_path.is_absolute(): + self._patchelf_path = Path.cwd() / self._patchelf_path log.info(f"Using {self._patchelf_path} ...") return else: - raise DistutilsSetupError("patchelf not found") + raise SetupError("patchelf not found") def _enable_numpy(self): if OPTION["ENABLE_NUMPY_SUPPORT"] or OPTION["PYSIDE_NUMPY_SUPPORT"]: @@ -549,13 +550,13 @@ class PysideBuild(_build, CommandMixin, BuildInfoCollectorMixin): # Prepare folders os.chdir(self.build_dir) - module_build_dir = os.path.join(self.build_dir, extension) - skipflag_file = f"{module_build_dir} -skip" - if os.path.exists(skipflag_file): + module_build_dir = self.build_dir / extension + skipflag_file = Path(f"{module_build_dir}-skip") + if skipflag_file.exists(): log.info(f"Skipping {extension} because {skipflag_file} exists") return - module_build_exists = os.path.exists(module_build_dir) + module_build_exists = module_build_dir.exists() if module_build_exists: if not OPTION["REUSE_BUILD"]: log.info(f"Deleting module build folder {module_build_dir}...") @@ -566,15 +567,15 @@ class PysideBuild(_build, CommandMixin, BuildInfoCollectorMixin): log.error(f'ignored error: {e}') else: log.info(f"Reusing module build folder {module_build_dir}...") - if not os.path.exists(module_build_dir): + if not module_build_dir.exists(): log.info(f"Creating module build folder {module_build_dir}...") os.makedirs(module_build_dir) os.chdir(module_build_dir) - module_src_dir = os.path.join(self.sources_dir, extension) + module_src_dir = self.sources_dir / extension # Build module - cmake_cmd = 
[OPTION["CMAKE"]] + cmake_cmd = [str(OPTION["CMAKE"])] if OPTION["QUIET"]: # Pass a special custom option, to allow printing a lot less information when doing # a quiet build. @@ -593,7 +594,7 @@ class PysideBuild(_build, CommandMixin, BuildInfoCollectorMixin): # Record the minimum/maximum Python version for later use in Shiboken.__init__ f"-DMINIMUM_PYTHON_VERSION={get_allowed_python_versions()[0]}", f"-DMAXIMUM_PYTHON_VERSION={get_allowed_python_versions()[-1]}", - module_src_dir + str(module_src_dir) ] # When cross-compiling we set Python_ROOT_DIR to tell @@ -613,7 +614,7 @@ class PysideBuild(_build, CommandMixin, BuildInfoCollectorMixin): # If a custom shiboken cmake config directory path was provided, pass it to CMake. if OPTION["SHIBOKEN_CONFIG_DIR"] and config.is_internal_pyside_build(): config_dir = OPTION["SHIBOKEN_CONFIG_DIR"] - if os.path.exists(config_dir): + if config_dir.exists(): log.info(f"Using custom provided {SHIBOKEN} installation: {config_dir}") cmake_cmd.append(f"-DShiboken6_DIR={config_dir}") else: @@ -797,9 +798,9 @@ class PysideBuild(_build, CommandMixin, BuildInfoCollectorMixin): cmake_cmd.append(f"-DQFP_QT_HOST_PATH={self.qt_host_path}") if self.is_cross_compile and (not OPTION["SHIBOKEN_HOST_PATH"] - or not os.path.exists(OPTION["SHIBOKEN_HOST_PATH"])): - raise SetupError( - "Please specify the location of host shiboken tools via --shiboken-host-path=") + or not OPTION["SHIBOKEN_HOST_PATH"].exists()): + raise SetupError("Please specify the location of host shiboken tools via " + "--shiboken-host-path=") if self.shiboken_host_path: cmake_cmd.append(f"-DQFP_SHIBOKEN_HOST_PATH={self.shiboken_host_path}") @@ -822,7 +823,7 @@ class PysideBuild(_build, CommandMixin, BuildInfoCollectorMixin): log.info(f"Reusing old configuration for module {extension} ({module_src_dir})...") log.info(f"-- Compiling module {extension}...") - cmd_make = [self.make_path] + cmd_make = [str(self.make_path)] if OPTION["JOBS"]: cmd_make.append(OPTION["JOBS"]) if OPTION["VERBOSE_BUILD"] and self.make_generator == "Ninja": @@ -842,7 +843,7 @@ class PysideBuild(_build, CommandMixin, BuildInfoCollectorMixin): found = importlib.util.find_spec("sphinx") if found: log.info("Generating Shiboken documentation") - make_doc_cmd = [self.make_path, "doc"] + make_doc_cmd = [str(self.make_path), "doc"] if OPTION["VERBOSE_BUILD"] and self.make_generator == "Ninja": make_doc_cmd.append("-v") if run_process(make_doc_cmd) != 0: @@ -865,7 +866,7 @@ class PysideBuild(_build, CommandMixin, BuildInfoCollectorMixin): time.sleep(1) # ninja: error: unknown target 'install/fast' target = 'install/fast' if self.make_generator != 'Ninja' else 'install' - if run_process([self.make_path, target]) != 0: + if run_process([str(self.make_path), target]) != 0: raise SetupError(f"Error pseudo installing {extension}") else: log.info(f"Skipped installing module {extension}") @@ -923,7 +924,7 @@ class PysideBuild(_build, CommandMixin, BuildInfoCollectorMixin): # when copying the files for each of the sub-projects and # we don't want to accidentally install shiboken files # as part of pyside-tools package. 
- if os.path.isdir(self.st_build_dir): + if self.st_build_dir.is_dir(): log.info(f"Removing {self.st_build_dir}") try: remove_tree(self.st_build_dir) @@ -941,15 +942,13 @@ class PysideBuild(_build, CommandMixin, BuildInfoCollectorMixin): raise def qt_is_framework_build(self): - if os.path.isdir(f"{self.qtinfo.headers_dir}/../lib/QtCore.framework"): - return True - return False + return Path(f"{self.qtinfo.headers_dir}/../lib/QtCore.framework").is_dir() def get_built_pyside_config(self, _vars): # Get config that contains list of built modules, and # SOVERSIONs of the built libraries. - st_build_dir = _vars['st_build_dir'] - config_path = os.path.join(st_build_dir, config.package_name(), "_config.py") + st_build_dir = Path(_vars['st_build_dir']) + config_path = st_build_dir / config.package_name() / "_config.py" temp_config = get_python_dict(config_path) if 'built_modules' not in temp_config: temp_config['built_modules'] = [] @@ -967,11 +966,11 @@ class PysideBuild(_build, CommandMixin, BuildInfoCollectorMixin): """ log.info('Finding path to the libclang shared library.') cmake_cmd = [ - OPTION["CMAKE"], + str(OPTION["CMAKE"]), "-L", # Lists variables "-N", # Just inspects the cache (faster) "-B", # Specifies the build dir - self.shiboken_build_dir + str(self.shiboken_build_dir) ] out = run_process_output(cmake_cmd) lines = [s.strip() for s in out] @@ -992,10 +991,11 @@ class PysideBuild(_build, CommandMixin, BuildInfoCollectorMixin): # clang_lib_path points to the static import library # (lib/libclang.lib), whereas we want to copy the shared # library (bin/libclang.dll). - clang_lib_path = re.sub(r'lib/libclang.lib$', + clang_lib_path = Path(re.sub(r'lib/libclang.lib$', 'bin/libclang.dll', - clang_lib_path) + clang_lib_path)) else: + clang_lib_path = Path(clang_lib_path) # shiboken6 links against libclang.so.6 or a similarly # named library. # If the linked against library is a symlink, resolve @@ -1007,26 +1007,26 @@ class PysideBuild(_build, CommandMixin, BuildInfoCollectorMixin): # E.g. On Linux libclang.so -> libclang.so.6 -> # libclang.so.6.0. # "libclang.so.6" is the name we want for the copied file. - if os.path.islink(clang_lib_path): - link_target = os.readlink(clang_lib_path) - if os.path.isabs(link_target): + if clang_lib_path.is_symlink(): + link_target = Path(os.readlink(clang_lib_path)) + if link_target.is_absolute(): clang_lib_path = link_target else: # link_target is relative, transform to absolute. - clang_lib_path = os.path.join(os.path.dirname(clang_lib_path), link_target) - clang_lib_path = os.path.abspath(clang_lib_path) + clang_lib_path = clang_lib_path.parent / link_target + clang_lib_path = clang_lib_path.resolve() # The destination will be the shiboken package folder. _vars = {} _vars['st_build_dir'] = self.st_build_dir _vars['st_package_name'] = config.package_name() - destination_dir = "{st_build_dir}/{st_package_name}".format(**_vars) + destination_dir = Path("{st_build_dir}/{st_package_name}".format(**_vars)) - if os.path.exists(clang_lib_path): - basename = os.path.basename(clang_lib_path) + if clang_lib_path.exists(): + basename = clang_lib_path.name log.info(f"Copying libclang shared library {clang_lib_path} to the package " f"folder as {basename}.") - destination_path = os.path.join(destination_dir, basename) + destination_path = destination_dir / basename # Need to modify permissions in case file is not writable # (a reinstall would cause a permission denied error). 
@@ -1071,7 +1071,7 @@ class PysideBuild(_build, CommandMixin, BuildInfoCollectorMixin): for dir_path, dir_names, file_names in os.walk(initial_path): for name in file_names: if filter_match(name, filters): - library_path = os.path.join(dir_path, name) + library_path = Path(dir_path) / name libraries.append(library_path) return libraries @@ -1115,10 +1115,10 @@ class PysideBuild(_build, CommandMixin, BuildInfoCollectorMixin): # Update rpath for srcname in executables: - srcpath = os.path.join(package_path, srcname) - if os.path.isdir(srcpath) or os.path.islink(srcpath): + srcpath = Path(package_path) / srcname + if srcpath.is_dir() or srcpath.is_symlink(): continue - if not os.path.exists(srcpath): + if not srcpath.exists(): continue rpath_cmd(srcpath) log.info("Patched rpath to '$ORIGIN/' (Linux) or " @@ -1129,6 +1129,7 @@ class PysideBuild(_build, CommandMixin, BuildInfoCollectorMixin): plugin_paths, qt_lib_dir=None, is_qml_plugin=False): + # If the linux sysroot (where the plugins are copied from) # is from a mainline distribution, it might have a different # directory layout than the one we expect to have in the @@ -1141,15 +1142,19 @@ class PysideBuild(_build, CommandMixin, BuildInfoCollectorMixin): log.info("Patching rpath for Qt and QML plugins.") for plugin in plugin_paths: - if os.path.isdir(plugin) or os.path.islink(plugin): + plugin = Path(plugin) + if plugin.is_dir() or plugin.is_symlink(): continue - if not os.path.exists(plugin): + if not plugin.exists(): continue if is_qml_plugin: - plugin_dir = os.path.dirname(plugin) + plugin_dir = plugin.parent + # FIXME: there is no os.path.relpath equivalent on pathlib. + # The Path.relative_to is not equivalent and raises ValueError when the paths + # are not subpaths, so it doesn't generate "../../something". rel_path_from_qml_plugin_qt_lib_dir = os.path.relpath(qt_lib_dir, plugin_dir) - rpath_value = os.path.join("$ORIGIN", rel_path_from_qml_plugin_qt_lib_dir) + rpath_value = Path("$ORIGIN") / rel_path_from_qml_plugin_qt_lib_dir else: rpath_value = "$ORIGIN/../../lib" @@ -1164,14 +1169,15 @@ class PysideBuild(_build, CommandMixin, BuildInfoCollectorMixin): if not (self.is_cross_compile and sys.platform.startswith('linux') and self.standalone): return + qt_lib_dir = Path(qt_lib_dir) rpath_value = "$ORIGIN" log.info(f"Patching rpath for Qt and ICU libraries in {qt_lib_dir}.") libs = self.package_libraries(qt_lib_dir) - lib_paths = [os.path.join(qt_lib_dir, lib) for lib in libs] + lib_paths = [qt_lib_dir / lib for lib in libs] for library in lib_paths: - if os.path.isdir(library) or os.path.islink(library): + if library.is_dir() or library.is_symlink(): continue - if not os.path.exists(library): + if not library.exists(): continue linux_fix_rpaths_for_library(self._patchelf_path, library, rpath_value, override=True) @@ -1195,9 +1201,7 @@ class PysideRstDocs(Command, CommandMixin): self.skip = True if not self.skip: self.name = config.package_name().lower() - self.doc_dir = os.path.join(config.setup_script_dir, "sources") - self.doc_dir = os.path.join(self.doc_dir, self.name) - self.doc_dir = os.path.join(self.doc_dir, "doc") + self.doc_dir = config.setup_script_dir / "sources" / self.name / "doc" # Check if sphinx is installed to proceed.
found = importlib.util.find_spec("sphinx") if found: @@ -1213,20 +1217,20 @@ class PysideRstDocs(Command, CommandMixin): # creating directories html/pyside6/shiboken6 try: - if not os.path.isdir(self.html_dir): - os.mkdir(self.html_dir) + if not self.html_dir.is_dir(): + self.html_dir.mkdir(parents=True) if self.name == SHIBOKEN: - out_pyside = os.path.join(self.html_dir, PYSIDE) - if not os.path.isdir(out_pyside): - os.mkdir(out_pyside) - out_shiboken = os.path.join(out_pyside, SHIBOKEN) - if not os.path.isdir(out_shiboken): - os.mkdir(out_shiboken) + out_pyside = self.html_dir / PYSIDE + if not out_pyside.is_dir(): + out_pyside.mkdir(parents=True) + out_shiboken = out_pyside / SHIBOKEN + if not out_shiboken.is_dir(): + out_shiboken.mkdir(parents=True) self.out_dir = out_shiboken # We know that on the shiboken step, we already created the # 'pyside6' directory elif self.name == PYSIDE: - self.out_dir = os.path.join(self.html_dir, PYSIDE) + self.out_dir = self.html_dir / PYSIDE except (PermissionError, FileExistsError): raise SetupError(f"Error while creating directories for {self.doc_dir}") @@ -1245,7 +1249,7 @@ class PysideRstDocs(Command, CommandMixin): raise SetupError(f"Error running CMake for {self.doc_dir}") if self.name == PYSIDE: - self.sphinx_src = os.path.join(self.out_dir, "rst") + self.sphinx_src = self.out_dir / "rst" elif self.name == SHIBOKEN: self.sphinx_src = self.out_dir diff --git a/build_scripts/options.py b/build_scripts/options.py index c019a1061..9f1cb1f5e 100644 --- a/build_scripts/options.py +++ b/build_scripts/options.py @@ -14,11 +14,10 @@ import sys import warnings import logging from pathlib import Path -from shutil import which from .log import log from .qtinfo import QtInfo -from .utils import memoize +from .utils import memoize, which _AVAILABLE_MKSPECS = ["ninja", "msvc", "mingw"] if sys.platform == "win32" else ["ninja", "make"] @@ -363,13 +362,13 @@ class CommandMixin(object): qtpaths_abs_path = None if self.qtpaths: - qtpaths_abs_path = os.path.abspath(self.qtpaths) + qtpaths_abs_path = self.qtpaths.resolve() OPTION['QTPATHS'] = qtpaths_abs_path # FIXME PYSIDE7: Remove qmake handling # make qtinfo.py independent of relative paths. qmake_abs_path = None if self.qmake: - qmake_abs_path = os.path.abspath(self.qmake) + qmake_abs_path = Path(self.qmake).resolve() OPTION['QMAKE'] = qmake_abs_path OPTION['HAS_QMAKE_OPTION'] = self.has_qmake_option OPTION['QT_VERSION'] = self.qt @@ -378,14 +377,15 @@ class CommandMixin(object): qt_target_path = None if self.qt_target_path: + self.qt_target_path = Path(self.qt_target_path) qt_target_path = self.qt_target_path # We use the CMake project to find host Qt if neither qmake or # qtpaths is available. This happens when building the host # tools in the overall cross-building process. 
use_cmake = False - if using_cmake_toolchain_file or \ - (not self.qmake and not self.qtpaths and self.qt_target_path): + if (using_cmake_toolchain_file or + (not self.qmake and not self.qtpaths and self.qt_target_path)): use_cmake = True QtInfo().setup(qtpaths_abs_path, self.cmake, qmake_abs_path, @@ -407,7 +407,7 @@ class CommandMixin(object): "Error was:\n\n\n") raise e - OPTION['CMAKE'] = os.path.abspath(self.cmake) + OPTION['CMAKE'] = self.cmake.resolve() OPTION['OPENSSL'] = self.openssl OPTION['SHIBOKEN_CONFIG_DIR'] = self.shiboken_config_dir if self.shiboken_config_dir: @@ -455,20 +455,26 @@ class CommandMixin(object): def _find_qtpaths_in_path(self): if not self.qtpaths: - self.qtpaths = which("qtpaths") + self.qtpaths = Path(which("qtpaths")) if not self.qtpaths: - self.qtpaths = which("qtpaths6") + self.qtpaths = Path(which("qtpaths6")) def _determine_defaults_and_check(self): if not self.cmake: - self.cmake = which("cmake") + self.cmake = Path(which("cmake")) if not self.cmake: log.error("cmake could not be found.") return False - if not os.path.exists(self.cmake): + if not self.cmake.exists(): log.error(f"'{self.cmake}' does not exist.") return False + # Setting up the Paths when passing via command line + if isinstance(self.qtpaths, str): + self.qtpaths = Path(self.qtpaths) + if isinstance(self.qmake, str): + self.qmake = Path(self.qmake) + # When cross-compiling, we only accept the qt-target-path # option and don't rely on auto-searching in PATH or the other # qtpaths / qmake options. @@ -493,17 +499,17 @@ class CommandMixin(object): return False # Validate that the given tool path exists. - if self.qtpaths and not os.path.exists(self.qtpaths): + if self.qtpaths and not self.qtpaths.exists(): log.error(f"The specified qtpaths path '{self.qtpaths}' does not exist.") return False - if self.qmake and not os.path.exists(self.qmake): + if self.qmake and not self.qmake.exists(): log.error(f"The specified qmake path '{self.qmake}' does not exist.") return False else: # Check for existence, but don't require if it's not set. A # check later will be done to see if it's needed. - if self.qt_target_path and not os.path.exists(self.qt_target_path): + if self.qt_target_path and not self.qt_target_path.exists(): log.error(f"Provided --qt-target-path='{self.qt_target_path}' " "path does not exist.") return False diff --git a/build_scripts/platforms/linux.py b/build_scripts/platforms/linux.py index a75259698..4629d456c 100644 --- a/build_scripts/platforms/linux.py +++ b/build_scripts/platforms/linux.py @@ -2,6 +2,7 @@ # SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only import os +from pathlib import Path from ..config import config from ..options import OPTION @@ -121,8 +122,8 @@ def prepare_standalone_package_linux(self, _vars, cross_build=False): if copy_qt_conf: # Copy the qt.conf file to libexec. 
- qt_libexec_path = "{st_build_dir}/{st_package_name}/Qt/libexec".format(**_vars) - if not os.path.isdir(qt_libexec_path): + qt_libexec_path = Path("{st_build_dir}/{st_package_name}/Qt/libexec".format(**_vars)) + if not qt_libexec_path.is_dir(): os.makedirs(qt_libexec_path) copyfile(f"{{build_dir}}/{PYSIDE}/{{st_package_name}}/qt.conf", - qt_libexec_path, _vars=_vars) + str(qt_libexec_path), _vars=_vars) diff --git a/build_scripts/platforms/macos.py b/build_scripts/platforms/macos.py index 5ae27103e..ffb2070b7 100644 --- a/build_scripts/platforms/macos.py +++ b/build_scripts/platforms/macos.py @@ -3,6 +3,7 @@ import fnmatch import os +from pathlib import Path from ..config import config from ..options import OPTION @@ -72,11 +73,11 @@ def prepare_standalone_package_macos(self, _vars): if dir_name in ['Headers', 'fonts']: return False - if dir_full_path.endswith('Versions/Current'): + if str(dir_full_path).endswith('Versions/Current'): return False - if dir_full_path.endswith('Versions/5/Resources'): + if str(dir_full_path).endswith('Versions/5/Resources'): return False - if dir_full_path.endswith('Versions/5/Helpers'): + if str(dir_full_path).endswith('Versions/5/Helpers'): return False return general_dir_filter(dir_name, parent_full_path, dir_full_path) @@ -87,7 +88,7 @@ def prepare_standalone_package_macos(self, _vars): def framework_variant_filter(file_name, file_full_path): if self.qtinfo.build_type != 'debug_and_release': return True - dir_path = os.path.dirname(file_full_path) + dir_path = Path(file_full_path).parent in_framework = dir_path.endswith("Versions/5") if file_name.endswith('_debug') and in_framework and no_copy_debug: return False @@ -104,12 +105,11 @@ def prepare_standalone_package_macos(self, _vars): # from Versions/5/Helpers, thus adding two more levels of # directory hierarchy. if self.is_webengine_built(built_modules): - qt_lib_path = "{st_build_dir}/{st_package_name}/Qt/lib".format(**_vars) - bundle = "QtWebEngineCore.framework/Helpers/" - bundle += "QtWebEngineProcess.app" + qt_lib_path = Path("{st_build_dir}/{st_package_name}/Qt/lib".format(**_vars)) + bundle = Path("QtWebEngineCore.framework/Helpers/") / "QtWebEngineProcess.app" binary = "Contents/MacOS/QtWebEngineProcess" - webengine_process_path = os.path.join(bundle, binary) - final_path = os.path.join(qt_lib_path, webengine_process_path) + webengine_process_path = bundle / binary + final_path = qt_lib_path / webengine_process_path rpath = "@loader_path/../../../../../" macos_fix_rpaths_for_library(final_path, rpath) else: @@ -135,16 +135,16 @@ def prepare_standalone_package_macos(self, _vars): _vars=_vars) # Fix rpath for WebEngine process executable. - qt_libexec_path = "{st_build_dir}/{st_package_name}/Qt/libexec".format(**_vars) + qt_libexec_path = Path("{st_build_dir}/{st_package_name}/Qt/libexec".format(**_vars)) binary = "QtWebEngineProcess" - final_path = os.path.join(qt_libexec_path, binary) + final_path = qt_libexec_path / binary rpath = "@loader_path/../lib" macos_fix_rpaths_for_library(final_path, rpath) if copy_qt_conf: # Copy the qt.conf file to libexec. 
- if not os.path.isdir(qt_libexec_path): - os.makedirs(qt_libexec_path) + if not qt_libexec_path.is_dir(): + qt_libexec_path.mkdir(parents=True) copyfile( f"{{build_dir}}/{PYSIDE}/{{st_package_name}}/qt.conf", qt_libexec_path, _vars=_vars) diff --git a/build_scripts/platforms/unix.py b/build_scripts/platforms/unix.py index 39fac5530..c2804a6b2 100644 --- a/build_scripts/platforms/unix.py +++ b/build_scripts/platforms/unix.py @@ -205,7 +205,7 @@ def prepare_packages_posix(self, _vars, cross_build=False): return False return True # examples/* -> /{st_package_name}/examples - copydir(os.path.join(self.script_dir, "examples"), + copydir(str(self.script_dir / "examples"), "{st_build_dir}/{st_package_name}/examples", force=False, _vars=_vars, dir_filter_function=pycache_dir_filter) diff --git a/build_scripts/platforms/windows_desktop.py b/build_scripts/platforms/windows_desktop.py index 013eadb20..eb229aad5 100644 --- a/build_scripts/platforms/windows_desktop.py +++ b/build_scripts/platforms/windows_desktop.py @@ -5,6 +5,8 @@ import fnmatch import functools import os +from pathlib import Path + from ..config import config from ..options import OPTION from ..utils import (copydir, copyfile, download_and_extract_7z, filter_match, @@ -194,7 +196,7 @@ def prepare_packages_win32(self, _vars): return False return True # examples/* -> /{st_package_name}/examples - copydir(os.path.join(self.script_dir, "examples"), + copydir(self.script_dir / "examples", "{st_build_dir}/{st_package_name}/examples", force=False, _vars=_vars, dir_filter_function=pycache_dir_filter) @@ -209,11 +211,11 @@ def prepare_packages_win32(self, _vars): if config.is_internal_shiboken_module_build(): # The C++ std library dlls need to be packaged with the # shiboken module, because libshiboken uses C++ code. - copy_msvc_redist_files(_vars, "{build_dir}/msvc_redist".format(**_vars)) + copy_msvc_redist_files(_vars, Path("{build_dir}/msvc_redist".format(**_vars))) if config.is_internal_pyside_build() or config.is_internal_shiboken_generator_build(): copy_qt_artifacts(self, copy_pdbs, _vars) - copy_msvc_redist_files(_vars, "{build_dir}/msvc_redist".format(**_vars)) + copy_msvc_redist_files(_vars, Path("{build_dir}/msvc_redist".format(**_vars))) def copy_msvc_redist_files(_vars, redist_target_path): @@ -233,8 +235,8 @@ def copy_msvc_redist_files(_vars, redist_target_path): ] # Make a directory where the files should be extracted. - if not os.path.exists(redist_target_path): - os.makedirs(redist_target_path) + if not redist_target_path.exists(): + redist_target_path.mkdir(parents=True) # Extract Qt dependency dlls when building on Qt CI. in_coin = os.environ.get('COIN_LAUNCH_PARAMETERS', None) @@ -315,7 +317,7 @@ def copy_qt_artifacts(self, copy_pdbs, _vars): # because the extracted archive also contains the opengl32sw # and the d3dcompiler dlls, which are copied not by this # function, but by the copydir below. - copy_msvc_redist_files(_vars, "{qt_bin_dir}".format(**_vars)) + copy_msvc_redist_files(_vars, Path("{qt_bin_dir}".format(**_vars))) if artifacts: copydir("{qt_bin_dir}", @@ -345,6 +347,10 @@ def copy_qt_artifacts(self, copy_pdbs, _vars): return True return False + # Setup Paths + file_name = Path(file_name) + file_full_path = Path(file_full_path) + # In debug_and_release case, choosing which files to copy # is more difficult. We want to copy only the files that # match the PySide6 build type. So if PySide6 is built in @@ -359,23 +365,24 @@ def copy_qt_artifacts(self, copy_pdbs, _vars): # file is a debug or release file. # e.g. 
["Qt6Cored", ".dll"] - file_base_name, file_ext = os.path.splitext(file_name) + file_base_name = file_name.stem + file_ext = file_name.suffix # e.g. "/home/work/qt/qtbase/bin" - file_path_dir_name = os.path.dirname(file_full_path) + file_path_dir_name = file_full_path.parent # e.g. "Qt6Coredd" maybe_debug_name = f"{file_base_name}d" if self.debug: _filter = debug def predicate(path): - return not os.path.exists(path) + return not path.exists() else: _filter = release def predicate(path): - return os.path.exists(path) + return path.exists() # e.g. "/home/work/qt/qtbase/bin/Qt6Coredd.dll" - other_config_path = os.path.join(file_path_dir_name, maybe_debug_name + file_ext) + other_config_path = file_path_dir_name / (maybe_debug_name + file_ext) if (filter_match(file_name, _filter) and predicate(other_config_path)): return True diff --git a/build_scripts/qtinfo.py b/build_scripts/qtinfo.py index b9271d0c6..c63fc77f2 100644 --- a/build_scripts/qtinfo.py +++ b/build_scripts/qtinfo.py @@ -130,7 +130,7 @@ class QtInfo(object): if args_list is None: args_list = [] assert self._qtpaths_command - cmd = [self._qtpaths_command] + cmd = [str(self._qtpaths_command)] cmd.extend(args_list) proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=False, cwd=cwd, universal_newlines=True) diff --git a/build_scripts/setup_runner.py b/build_scripts/setup_runner.py index 4d5607a6b..ae5005f4a 100644 --- a/build_scripts/setup_runner.py +++ b/build_scripts/setup_runner.py @@ -7,6 +7,7 @@ import tempfile import textwrap import logging +from pathlib import Path from setuptools import setup from build_scripts.config import config @@ -26,7 +27,7 @@ class SetupRunner(object): self.orig_argv = orig_argv self.sub_argv = list(orig_argv) - self.setup_script_dir = os.getcwd() + self.setup_script_dir = Path.cwd() @staticmethod def cmd_line_argument_is_in_args(argument, args): diff --git a/build_scripts/utils.py b/build_scripts/utils.py index 9422afccd..30dfe2672 100644 --- a/build_scripts/utils.py +++ b/build_scripts/utils.py @@ -34,6 +34,21 @@ try: except NameError: WindowsError = None +def which(name): + """ + Like shutil.which, but accepts a string or a PathLike and returns a Path + """ + path = None + try: + if isinstance(name, Path): + name = str(name) + path = shutil.which(name) + if path is None: + raise TypeError("None was returned") + path = Path(path) + except TypeError as e: + log.error(f"{name} was not found in PATH: {e}") + return path def is_64bit(): return sys.maxsize > 2147483647 @@ -51,7 +66,7 @@ def filter_match(name, patterns): def update_env_path(newpaths): paths = os.environ['PATH'].lower().split(os.pathsep) for path in newpaths: - if not path.lower() in paths: + if not str(path).lower() in paths: log.info(f"Inserting path '{path}' to environment") paths.insert(0, path) os.environ['PATH'] = f"{path}{os.pathsep}{os.environ['PATH']}" @@ -83,13 +98,13 @@ def winsdk_setenv(platform_arch, build_type): sdk_versions = msvc9.Reg.read_keys(base, msvc9.WINSDK_BASE) if sdk_versions: for sdk_version in sdk_versions: - installationfolder = msvc9.Reg.get_value(f"{msvc9.WINSDK_BASE}\\{sdk_version}", - "installationfolder") + installationfolder = Path(msvc9.Reg.get_value(f"{msvc9.WINSDK_BASE}\\{sdk_version}", + "installationfolder")) # productversion = msvc9.Reg.get_value( # "{}\\{}".format(msvc9.WINSDK_BASE, sdk_version), # "productversion") - setenv_path = os.path.join(installationfolder, os.path.join('bin', 'SetEnv.cmd')) - if not os.path.exists(setenv_path): + setenv_path = installationfolder / 'bin' / 'SetEnv.cmd' 
+            if not setenv_path.exists(): continue if sdk_version not in sdk_version_map: continue @@ -131,7 +146,7 @@ def find_vcdir(version): from setuptools._distutils import msvc9compiler as msvc9 vsbase = msvc9.VS_BASE % version try: - productdir = msvc9.Reg.get_value(rf"{vsbase}\Setup\VC", "productdir") + productdir = Path(msvc9.Reg.get_value(rf"{vsbase}\Setup\VC", "productdir")) except KeyError: productdir = None @@ -149,14 +164,14 @@ def find_vcdir(version): productdir = None log.debug("Unable to find productdir in registry") - if not productdir or not os.path.isdir(productdir): + if not productdir or not productdir.is_dir(): toolskey = f"VS{version:0.0f}0COMNTOOLS" - toolsdir = os.environ.get(toolskey, None) + toolsdir = Path(os.environ.get(toolskey, None)) - if toolsdir and os.path.isdir(toolsdir): - productdir = os.path.join(toolsdir, os.pardir, os.pardir, "VC") - productdir = os.path.abspath(productdir) - if not os.path.isdir(productdir): + if toolsdir and toolsdir.is_dir(): + productdir = toolsdir / os.pardir / os.pardir / "VC" + productdir = productdir.resolve() + if not productdir.is_dir(): log.debug(f"{productdir} is not a valid directory") return None else: @@ -171,7 +186,7 @@ def init_msvc_env(platform_arch, build_type): from setuptools._distutils import msvc9compiler as msvc9 log.info(f"Searching MSVC compiler version {msvc9.VERSION}") - vcdir_path = find_vcdir(msvc9.VERSION) + vcdir_path = Path(find_vcdir(msvc9.VERSION)) if not vcdir_path: raise SetupError(f"Failed to find the MSVC compiler version {msvc9.VERSION} on " "your system.") @@ -180,15 +195,15 @@ def init_msvc_env(platform_arch, build_type): log.info(f"Searching MSVC compiler {msvc9.VERSION} environment init script") if platform_arch.startswith("32"): - vcvars_path = os.path.join(vcdir_path, "bin", "vcvars32.bat") + vcvars_path = vcdir_path / "bin" / "vcvars32.bat" else: - vcvars_path = os.path.join(vcdir_path, "bin", "vcvars64.bat") - if not os.path.exists(vcvars_path): - vcvars_path = os.path.join(vcdir_path, "bin", "amd64", "vcvars64.bat") - if not os.path.exists(vcvars_path): - vcvars_path = os.path.join(vcdir_path, "bin", "amd64", "vcvarsamd64.bat") + vcvars_path = vcdir_path / "bin" / "vcvars64.bat" + if not vcvars_path.exists(): + vcvars_path = vcdir_path / "bin" / "amd64" / "vcvars64.bat" + if not vcvars_path.exists(): + vcvars_path = vcdir_path / "bin" / "amd64" / "vcvarsamd64.bat" - if not os.path.exists(vcvars_path): + if not vcvars_path.exists(): # MSVC init script not found, try to find and init Windows SDK env log.error("Failed to find the MSVC compiler environment init script " "(vcvars.bat) on your system.") @@ -233,15 +248,18 @@ def platform_cmake_options(as_tuple_list=False): def copyfile(src, dst, force=True, _vars=None, force_copy_symlink=False, make_writable_by_owner=False): if _vars is not None: - src = src.format(**_vars) - dst = dst.format(**_vars) + src = Path(str(src).format(**_vars)) + dst = Path(str(dst).format(**_vars)) + else: + src = Path(src) + dst = Path(dst) - if not os.path.exists(src) and not force: + if not src.exists() and not force: log.info(f"**Skipping copy file\n {src} to\n {dst}\n Source does not exist") return - if not os.path.islink(src) or force_copy_symlink: - if os.path.isfile(dst): + if not src.is_symlink() or force_copy_symlink: + if dst.is_file(): src_stat = os.stat(src) dst_stat = os.stat(dst) if (src_stat.st_size == dst_stat.st_size @@ -256,16 +274,18 @@ def copyfile(src, dst, force=True, _vars=None, force_copy_symlink=False, return dst - link_target_path =
os.path.realpath(src) - if os.path.dirname(link_target_path) == os.path.dirname(src): - link_target = os.path.basename(link_target_path) - link_name = os.path.basename(src) - current_directory = os.getcwd() + # We use 'strict=False' to mimic os.path.realpath in case + # the directory doesn't exist. + link_target_path = src.resolve(strict=False) + if link_target_path.parent == src.parent: + link_target = link_target_path.name + link_name = src.name + current_directory = Path.cwd() try: - target_dir = dst if os.path.isdir(dst) else os.path.dirname(dst) + target_dir = dst if dst.is_dir() else dst.parent os.chdir(target_dir) - if os.path.exists(link_name): - if (os.path.islink(link_name) + if link_name.exists(): + if (link_name.is_symlink() and os.readlink(link_name) == link_target): log.info(f"Symlink already exists\n {link_name} ->\n {link_target}") return dst @@ -287,13 +307,13 @@ def makefile(dst, content=None, _vars=None): if _vars is not None: if content is not None: content = content.format(**_vars) - dst = dst.format(**_vars) + dst = Path(dst.format(**_vars)) log.info(f"Making file {dst}.") - dstdir = os.path.dirname(dst) - if not os.path.exists(dstdir): - os.makedirs(dstdir) + dstdir = dst.parent + if not dstdir.exists(): + dstdir.mkdir(parents=True) with open(dst, "wt") as f: if content is not None: @@ -304,14 +324,14 @@ def copydir(src, dst, _filter=None, ignore=None, force=True, recursive=True, _va dir_filter_function=None, file_filter_function=None, force_copy_symlinks=False): if _vars is not None: - src = src.format(**_vars) - dst = dst.format(**_vars) + src = Path(str(src).format(**_vars)) + dst = Path(str(dst).format(**_vars)) if _filter is not None: _filter = [i.format(**_vars) for i in _filter] if ignore is not None: ignore = [i.format(**_vars) for i in ignore] - if not os.path.exists(src) and not force: + if not src.exists() and not force: log.info(f"**Skipping copy tree\n {src} to\n {dst}\n Source does not exist. " f"filter={_filter}. ignore={ignore}.") return [] @@ -323,10 +343,10 @@ def copydir(src, dst, _filter=None, ignore=None, force=True, recursive=True, _va results = [] copy_errors = [] for name in names: - srcname = os.path.join(src, name) - dstname = os.path.join(dst, name) + srcname = src / name + dstname = dst / name try: - if os.path.isdir(srcname): + if srcname.is_dir(): if (dir_filter_function and not dir_filter_function(name, src, srcname)): continue if recursive: @@ -338,8 +358,8 @@ def copydir(src, dst, _filter=None, ignore=None, force=True, recursive=True, _va or (_filter is not None and not filter_match(name, _filter)) or (ignore is not None and filter_match(name, ignore))): continue - if not os.path.exists(dst): - os.makedirs(dst) + if not dst.is_dir(): + dst.mkdir(parents=True) results.append(copyfile(srcname, dstname, True, _vars, force_copy_symlinks)) # catch the Error from the recursive copytree so that we can # continue with other files @@ -348,8 +368,8 @@ def copydir(src, dst, _filter=None, ignore=None, force=True, recursive=True, _va except EnvironmentError as why: copy_errors.append((srcname, dstname, str(why))) try: - if os.path.exists(dst): - shutil.copystat(src, dst) + if dst.exists(): + shutil.copystat(str(src), str(dst)) except OSError as why: if WindowsError is not None and isinstance(why, WindowsError): # Copying file access times may fail on Windows @@ -396,7 +416,7 @@ def run_process(args, initial_env=None): No output is captured. 
""" command = " ".join([(" " in x and f'"{x}"' or x) for x in args]) - log.info(f"In directory {os.getcwd()}:\n\tRunning command: {command}") + log.info(f"In directory {Path.cwd()}:\n\tRunning command: {command}") if initial_env is None: initial_env = os.environ @@ -659,7 +679,7 @@ def find_glob_in_path(pattern): pattern += '.exe' for path in os.environ.get('PATH', '').split(os.pathsep): - for match in glob.glob(os.path.join(path, pattern)): + for match in glob.glob(str(Path(path) / pattern)): result.append(match) return result @@ -684,7 +704,7 @@ def detect_clang(): clang_dir = os.environ.get(source, None) if not clang_dir: raise OSError("clang not found") - return (clang_dir, source) + return (Path(clang_dir), source) _7z_binary = None @@ -710,8 +730,8 @@ def download_and_extract_7z(fileurl, target): outputDir = f"-o{target}" if not _7z_binary: if sys.platform == "win32": - candidate = "c:\\Program Files\\7-Zip\\7z.exe" - if os.path.exists(candidate): + candidate = Path("c:\\Program Files\\7-Zip\\7z.exe") + if candidate.exists(): _7z_binary = candidate if not _7z_binary: _7z_binary = '7z' @@ -841,7 +861,8 @@ def _ldd_ldso(executable_path): # Choose appropriate runtime dynamic linker. for rtld in rtld_list: - if os.path.isfile(rtld) and os.access(rtld, os.X_OK): + rtld = Path(rtld) + if rtld.is_file() and os.access(rtld, os.X_OK): (_, _, code) = back_tick(rtld, True) # Code 127 is returned by ld.so when called without any # arguments (some kind of sanity check I guess). @@ -895,8 +916,8 @@ def ldd(executable_path): def find_files_using_glob(path, pattern): """ Returns list of files that matched glob `pattern` in `path`. """ - final_pattern = os.path.join(path, pattern) - maybe_files = glob.glob(final_pattern) + final_pattern = Path(path) / pattern + maybe_files = glob.glob(str(final_pattern)) return maybe_files @@ -920,9 +941,9 @@ def copy_icu_libs(patchelf, destination_lib_dir): Copy ICU libraries that QtCore depends on, to given `destination_lib_dir`. """ - qt_core_library_path = find_qt_core_library_glob(destination_lib_dir) + qt_core_library_path = Path(find_qt_core_library_glob(destination_lib_dir)) - if not qt_core_library_path or not os.path.exists(qt_core_library_path): + if not qt_core_library_path or not qt_core_library_path.exists(): raise RuntimeError(f"QtCore library does not exist at path: {qt_core_library_path}. " "Failed to copy ICU libraries.") @@ -943,12 +964,13 @@ def copy_icu_libs(patchelf, destination_lib_dir): raise RuntimeError("Failed to find the necessary ICU libraries required by QtCore.") log.info('Copying the detected ICU libraries required by QtCore.') - if not os.path.exists(destination_lib_dir): - os.makedirs(destination_lib_dir) + destination_lib_dir = Path(destination_lib_dir) + if not destination_lib_dir.exists(): + destination_lib_dir.mkdir(parents=True) for path in paths: - basename = os.path.basename(path) - destination = os.path.join(destination_lib_dir, basename) + basename = Path(path).name + destination = destination_lib_dir / basename copyfile(path, destination, force_copy_symlink=True) # Patch the ICU libraries to contain the $ORIGIN rpath # value, so that only the local package libraries are used. @@ -973,7 +995,7 @@ def linux_run_read_elf(executable_path): def linux_set_rpaths(patchelf, executable_path, rpath_string): """ Patches the `executable_path` with a new rpath string. 
""" - cmd = [patchelf, '--set-rpath', rpath_string, executable_path] + cmd = [str(patchelf), '--set-rpath', str(rpath_string), str(executable_path)] if run_process(cmd) != 0: raise RuntimeError(f"Error patching rpath in {executable_path}") @@ -1146,22 +1168,22 @@ def get_qtci_virtualEnv(python_ver, host, hostArch, targetArch): if python_ver.startswith("3"): var = f"PYTHON{python_ver}-32_PATH" log.info(f"Try to find python from {var} env variable") - _path = os.getenv(var, "") - _pExe = os.path.join(_path, "python.exe") - if not os.path.isfile(_pExe): - log.warning(f"Can't find python.exe from {_pExe}, using default python3") - _pExe = os.path.join(os.getenv("PYTHON3_32_PATH"), "python.exe") + _path = Path(os.getenv(var, "")) + _pExe = _path / "python.exe" + if not _pExe.is_file(): + log.warn(f"Can't find python.exe from {_pExe}, using default python3") + _pExe = Path(os.getenv("PYTHON3_32_PATH")) / "python.exe" else: - _pExe = os.path.join(os.getenv("PYTHON2_32_PATH"), "python.exe") + _pExe = Path(os.getenv("PYTHON2_32_PATH")) / "python.exe" else: if python_ver.startswith("3"): var = f"PYTHON{python_ver}-64_PATH" log.info(f"Try to find python from {var} env variable") - _path = os.getenv(var, "") - _pExe = os.path.join(_path, "python.exe") - if not os.path.isfile(_pExe): - log.warning(f"Can't find python.exe from {_pExe}, using default python3") - _pExe = os.path.join(os.getenv("PYTHON3_PATH"), "python.exe") + _path = Path(os.getenv(var, "")) + _pExe = _path / "python.exe" + if not _pExe.is_file(): + log.warn(f"Can't find python.exe from {_pExe}, using default python3") + _pExe = Path(os.getenv("PYTHON3_PATH")) / "python.exe" env_python = f"{_env}\\Scripts\\python.exe" env_pip = f"{_env}\\Scripts\\pip.exe" else: @@ -1295,6 +1317,8 @@ def configure_cmake_project(project_path, for arg, value in cmake_cache_args: cmd.extend([f'-D{arg}={value}']) + cmd = [str(i) for i in cmd] + proc = subprocess.run(cmd, shell=False, cwd=build_path, capture_output=True, universal_newlines=True) return_code = proc.returncode diff --git a/build_scripts/wheel_override.py b/build_scripts/wheel_override.py index 292c049f1..84d8083e2 100644 --- a/build_scripts/wheel_override.py +++ b/build_scripts/wheel_override.py @@ -5,6 +5,7 @@ import os import platform import sys +from pathlib import Path from email.generator import Generator from .log import log @@ -253,7 +254,7 @@ class PysideBuildWheel(_bdist_wheel, CommandMixin): for impl in impl_tag.split('.'): writeTag(impl) - wheelfile_path = os.path.join(wheelfile_base, 'WHEEL') + wheelfile_path = Path(wheelfile_base) / 'WHEEL' log.info(f'creating {wheelfile_path}') with open(wheelfile_path, 'w') as f: Generator(f, maxheaderlen=0).flatten(msg) diff --git a/build_scripts/wheel_utils.py b/build_scripts/wheel_utils.py index 31d8c7bd0..a6ccebcdc 100644 --- a/build_scripts/wheel_utils.py +++ b/build_scripts/wheel_utils.py @@ -3,6 +3,7 @@ import os import time +from pathlib import Path from sysconfig import get_config_var, get_platform from packaging.version import parse as parse_version @@ -40,8 +41,8 @@ def get_qt_version(): @memoize def get_package_version(): """ Returns the version string for the PySide6 package. 
""" - setup_script_dir = os.getcwd() - pyside_project_dir = os.path.join(setup_script_dir, "sources", PYSIDE) + setup_script_dir = Path.cwd() + pyside_project_dir = setup_script_dir / "sources" / PYSIDE d = parse_cmake_conf_assignments_by_key(pyside_project_dir) major_version = d['pyside_MAJOR_VERSION'] minor_version = d['pyside_MINOR_VERSION'] diff --git a/coin/instructions/execute_test_instructions.yaml b/coin/instructions/execute_test_instructions.yaml index fec4b279d..b40bec126 100644 --- a/coin/instructions/execute_test_instructions.yaml +++ b/coin/instructions/execute_test_instructions.yaml @@ -55,7 +55,7 @@ instructions: userMessageOnFailure: > Failed to execute test instructions on Linux - type: ExecuteCommand - command: "c:\\users\\qt\\MSVC.bat {{.Env.PYTHON3_PATH}}\\python.exe -u coin_test_instructions.py --os={{.Env.CI_OS}} {{.Env.CI_PACKAGING_FEATURE}} --instdir=\\Users\\qt\\work\\install --targetOs={{.Env.CI_OS}} --hostArch=X86_64 --targetArch={{.Env.CI_TARGET_ARCHITECTURE}}" + command: "c:\\users\\qt\\MSVC.bat {{.Env.PYTHON3_PATH}}\\python.exe -u coin_test_instructions.py --os={{.Env.CI_OS}} {{.Env.CI_PACKAGING_FEATURE}} --instdir=c:\\Users\\qt\\work\\install --targetOs={{.Env.CI_OS}} --hostArch=X86_64 --targetArch={{.Env.CI_TARGET_ARCHITECTURE}}" maxTimeInSeconds: 14400 maxTimeBetweenOutput: 600 enable_if: diff --git a/coin_build_instructions.py b/coin_build_instructions.py index f81e8455c..8a8182234 100644 --- a/coin_build_instructions.py +++ b/coin_build_instructions.py @@ -92,15 +92,15 @@ def call_setup(python_ver, phase): env_path = os.path.join(site.USER_BASE, "Scripts") v_env = os.path.join(env_path, "virtualenv.exe") try: - run_instruction([v_env, "--version"], "Using default virtualenv") + run_instruction([str(v_env), "--version"], "Using default virtualenv") except Exception as e: log.info("Failed to use the default virtualenv") log.info(f"{type(e).__name__}: {e}") v_env = "virtualenv" - run_instruction([v_env, "-p", _pExe, _env], "Failed to create virtualenv") + run_instruction([str(v_env), "-p", str(_pExe), str(_env)], "Failed to create virtualenv") # When the 'python_ver' variable is empty, we are using Python 2 # Pip is always upgraded when CI template is provisioned, upgrading it in later phase may cause perm issue - run_instruction([env_pip, "install", "-r", "requirements.txt"], "Failed to install dependencies") + run_instruction([str(env_pip), "install", "-r", "requirements.txt"], "Failed to install dependencies") cmd = [env_python, "-u", "setup.py"] if phase in ["BUILD"]: diff --git a/coin_test_instructions.py b/coin_test_instructions.py index cd521f044..3261599e4 100644 --- a/coin_test_instructions.py +++ b/coin_test_instructions.py @@ -41,7 +41,7 @@ def call_testrunner(python_ver, buildnro): # we shouldn't install anything to m1, while it is not virtualized if CI_HOST_OS == "MacOS" and CI_HOST_ARCH == "ARM64": v_env = "virtualenv" - run_instruction([v_env, "-p", _pExe, _env], "Failed to create virtualenv") + run_instruction([str(v_env), "-p", str(_pExe), str(_env)], "Failed to create virtualenv") else: run_instruction([python3, "-m", "pip", "install", "--user", "virtualenv==20.7.2"], "Failed to pin virtualenv") # installing to user base might not be in PATH by default. 
@@ -51,12 +51,12 @@ def call_testrunner(python_ver, buildnro): env_path = os.path.join(site.USER_BASE, "Scripts") v_env = os.path.join(env_path, "virtualenv.exe") try: - run_instruction([v_env, "--version"], "Using default virtualenv") + run_instruction([str(v_env), "--version"], "Using default virtualenv") except Exception as e: log.info("Failed to use the default virtualenv") log.info(f"{type(e).__name__}: {e}") v_env = "virtualenv" - run_instruction([v_env, "-p", _pExe, _env], "Failed to create virtualenv") + run_instruction([str(v_env), "-p", str(_pExe), str(_env)], "Failed to create virtualenv") # When the 'python_ver' variable is empty, we are using Python 2 # Pip is always upgraded when CI template is provisioned, upgrading it in later phase may cause perm issue run_instruction([env_pip, "install", "-r", "requirements.txt"], "Failed to install dependencies") -- cgit v1.2.3
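Editorial note, not part of the patch above: a minimal sketch of the os.path.relpath limitation that the commit message and the FIXME comment in update_rpath_for_linux_plugins refer to. The directory values below are hypothetical; the point is only the difference in behaviour between os.path.relpath and pathlib.Path.relative_to when the target is not a subpath of the starting directory.

    from pathlib import Path
    import os.path

    qt_lib_dir = Path("/opt/qt/lib")                    # hypothetical Qt lib dir
    plugin_dir = Path("/opt/qt/plugins/imageformats")   # hypothetical plugin dir

    # os.path.relpath can walk up the tree, producing ".." components:
    print(os.path.relpath(qt_lib_dir, plugin_dir))      # ../../lib

    # Path.relative_to only strips a leading subpath and otherwise raises,
    # which is why the rpath computation keeps the single os.path.relpath call:
    try:
        qt_lib_dir.relative_to(plugin_dir)
    except ValueError as exc:
        print(f"relative_to() refused: {exc}")

Newer interpreters (Python 3.12+) add a walk_up argument to Path.relative_to that covers this case, but it was not available at the time of this change.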