aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorPatrik Teivonen <patrik.teivonen@qt.io>2022-11-17 11:22:29 +0200
committerPatrik Teivonen <patrik.teivonen@qt.io>2023-03-13 13:58:07 +0000
commit11499642c11e24cecde84f1c58a9053deec1bbbd (patch)
tree2e38fc7ed89d348da6663918e6a799a76dffd2f7
parentd8c59f75f53dd70b22b4850c42906fb797518c53 (diff)
Add option to sign and notarize installer payloadv6.5.0-rc1-packaging
Offline installer/Online repo content should be notarized to avoid Gatekeeper issues on latest versions of macOS. Before compressing the final payload component archives, (re-)sign, notarize, and finally staple the relevant types of nested install content before distribution. Steps will be performed if '--notarize-payload' argument is passed onto create_installer.py script or when 'notarize_payload' is specified and set to yes|true|1 in the IFW task config. Change-Id: Icb5c482674af9421fa08e58f4e27043a6766e5c7 Reviewed-by: Iikka Eklund <iikka.eklund@qt.io>
-rw-r--r--Pipfile1
-rw-r--r--packaging-tools/create_installer.py120
-rwxr-xr-xpackaging-tools/release_repo_updater.py2
-rwxr-xr-xpackaging-tools/release_task_reader.py4
-rw-r--r--packaging-tools/sdkcomponent.py61
-rwxr-xr-xpackaging-tools/sign_installer.py167
-rw-r--r--packaging-tools/tests/test_bldinstallercommon.py4
-rw-r--r--packaging-tools/tests/test_create_installer.py6
-rw-r--r--packaging-tools/tests/test_sdkcomponent.py3
9 files changed, 304 insertions, 64 deletions
diff --git a/Pipfile b/Pipfile
index d5a4509de..e72668477 100644
--- a/Pipfile
+++ b/Pipfile
@@ -25,6 +25,7 @@ validators = "==0.20.0"
aiofiles = "==0.8.0"
aiohttp = "==3.8.4"
typing_extensions = "==4.1.1"
+macholib = "==1.16.2"
[dev-packages]
ddt = "==1.6.0"
diff --git a/packaging-tools/create_installer.py b/packaging-tools/create_installer.py
index 682369825..8f663a9da 100644
--- a/packaging-tools/create_installer.py
+++ b/packaging-tools/create_installer.py
@@ -68,6 +68,7 @@ from patch_qt import patch_files, patch_qt_edition
from pkg_constants import INSTALLER_OUTPUT_DIR_NAME, PKG_TEMPLATE_BASE_DIR_NAME
from runner import run_cmd
from sdkcomponent import IfwPayloadItem, IfwSdkComponent, parse_ifw_sdk_comp
+from sign_installer import recursive_sign_notarize
from threadedwork import ThreadedWork
log = init_logger(__name__, debug_mode=False)
@@ -289,6 +290,8 @@ def parse_component_data(
pkg_template_search_dirs=task.packages_dir_name_list,
substitutions=task.substitutions,
file_share_base_url=task.archive_base_url,
+ base_work_dir=Path(task.packages_full_path_dst),
+ notarize_payload=task.notarize_payload,
)
# validate the component
# - payload URIs are always checked when not in dry_run or when mode is 'payload'
@@ -581,9 +584,7 @@ def get_component_data(
task: QtInstallerTaskType,
sdk_comp: IfwSdkComponent,
archive: IfwPayloadItem,
- install_dir: Path,
data_dir_dest: Path,
- compress_dir: Path,
) -> None:
"""
Download and create data for a component's payload item including patching operations
@@ -592,10 +593,10 @@ def get_component_data(
task: An instance of QtInstallerTask
sdk_comp: An instance of IfwSdkComponent, specifies the component that the data is part of
archive: An instance of IfwPayloadItem, specifies the payload item to process
- install_dir: A directory resembling the final installation directory structure
data_dir_dest: A directory location for the final component data
- compress_dir: A directory containing the items to compress to the final archive
"""
+ install_dir = sdk_comp.work_dir_temp / archive.archive_name / archive.get_archive_install_dir()
+ install_dir.mkdir(parents=True, exist_ok=True)
# Handle pattern match payload URIs for IfwPayloadItem
if archive.payload_base_uri:
for payload_uri in archive.payload_uris:
@@ -621,7 +622,7 @@ def get_component_data(
log.info("[%s] Download: %s", archive.package_name, str(install_dir / dl_name))
download(payload_uri, str(install_dir / dl_name))
# For payload already in IFW compatible format, use the raw artifact and continue
- elif not archive.requires_extraction and Path(dl_name).suffix in archive.ifw_arch_formats:
+ elif archive.is_raw_artifact is True:
# Save to data dir as archive_name
log.info(
"[%s] Rename raw artifact to final archive name: %s -> %s",
@@ -651,9 +652,6 @@ def get_component_data(
# Add SHA1 from payload to component if specified
if archive.component_sha1:
read_component_sha(sdk_comp, install_dir / archive.component_sha1)
- # Finally compress the content of the component to the final package
- log.info("[%s] Compress: %s", archive.package_name, archive.archive_name)
- recompress_component(task, archive, data_dir_dest, compress_dir)
def handle_set_executable(base_dir: str, package_finalize_items: str) -> None:
@@ -792,54 +790,38 @@ def create_target_components(task: QtInstallerTaskType) -> None:
for sdk_comp in task.sdk_component_list:
log.info(sdk_comp)
if sdk_comp.archive_skip:
- break
+ continue
# substitute pkg_template dir names and package_name
package_name = substitute_package_name(task, sdk_comp.ifw_sdk_comp_name)
sdk_comp.ifw_sdk_comp_name = package_name
dest_base = Path(task.packages_full_path_dst) / package_name
- meta_dir_dest = dest_base / "meta"
- data_dir_dest = dest_base / "data"
- temp_data_dir = dest_base / "tmp"
- # save path for later substitute_component_tags call
- sdk_comp.meta_dir_dest = meta_dir_dest
- # create meta destination folder
- meta_dir_dest.mkdir(parents=True, exist_ok=True)
+ # Create work dirs
+ sdk_comp.init_work_dirs()
# Copy Meta data
metadata_content_source_root = os.path.join(sdk_comp.pkg_template_folder, "meta")
- copy_tree(metadata_content_source_root, str(meta_dir_dest))
+ copy_tree(metadata_content_source_root, str(sdk_comp.meta_dir_dest))
if os.path.isfile(os.path.join(task.script_root_dir, "lrelease")):
# create translation binaries if translation source files exist for component
update_script = os.path.join(task.script_root_dir, "update_component_translations.sh")
lrelease_tool = os.path.join(task.script_root_dir, "lrelease")
run_cmd(cmd=[update_script, "-r", lrelease_tool, str(dest_base)])
# add files into tag substitution
- task.directories_for_substitutions.append(str(meta_dir_dest))
+ task.directories_for_substitutions.append(str(sdk_comp.meta_dir_dest))
# handle archives
- if sdk_comp.downloadable_archives:
- # save path for later substitute_component_tags call
- sdk_comp.temp_data_dir = Path(temp_data_dir)
- # Copy archives into temporary build directory if exists
- for archive in sdk_comp.downloadable_archives:
- # fetch packages only if offline installer or repo creation,
- # for online installer just handle the metadata
- if task.offline_installer or task.create_repository:
- # Create needed data dirs
- compress_dir = temp_data_dir / archive.archive_name
- install_dir = compress_dir / archive.get_archive_install_dir()
- # adding get_component_data task to our work queue
- # Create needed data dirs before the threads start to work
- install_dir.mkdir(parents=True, exist_ok=True)
- data_dir_dest.mkdir(parents=True, exist_ok=True)
- get_component_data_work.add_task(
- f"adding {archive.archive_name} to {sdk_comp.ifw_sdk_comp_name}",
- get_component_data,
- task,
- sdk_comp,
- archive,
- install_dir,
- data_dir_dest,
- compress_dir,
- )
+ for archive in sdk_comp.downloadable_archives:
+ # fetch packages only if offline installer or repo creation,
+ # for online installer just handle the metadata
+ if task.offline_installer or task.create_repository:
+ # adding get_component_data task to our work queue
+ get_component_data_work.add_task(
+ f"adding {archive.archive_name} to {sdk_comp.ifw_sdk_comp_name}",
+ get_component_data,
+ task,
+ sdk_comp,
+ archive,
+ sdk_comp.data_dir_dest,
+ )
+
# handle component sha1 uri
if sdk_comp.comp_sha1_uri:
sha1_file_dest = (dest_base / "SHA1")
@@ -850,28 +832,48 @@ def create_target_components(task: QtInstallerTaskType) -> None:
sha1_file_dest,
)
- # maybe there is some static data
+ # maybe there is some bundled payload in config templates
data_content_source_root = os.path.normpath(sdk_comp.pkg_template_folder + os.sep + "data")
if os.path.exists(data_content_source_root):
- data_dir_dest.mkdir(parents=True, exist_ok=True)
- log.info("Adding static data from %s", data_content_source_root)
- copy_tree(data_content_source_root, str(data_dir_dest))
+ sdk_comp.data_dir_dest.mkdir(parents=True, exist_ok=True)
+ log.info("Adding payload data from %s", data_content_source_root)
+ copy_tree(data_content_source_root, str(sdk_comp.data_dir_dest))
if not task.dry_run:
# start the work threaded, more than 8 parallel downloads are not so useful
get_component_data_work.run(min([task.max_cpu_count, cpu_count()]))
+ # Sign, notarize, staple macOS content
+ if is_macos() and task.notarize_payload is True:
+ recursive_sign_notarize(Path(task.packages_full_path_dst))
+ compress_component_data_work = ThreadedWork("compress final components data")
+ # Compress to final components
+ for sdk_comp in task.sdk_component_list:
+ if sdk_comp.archive_skip:
+ continue
+ for archive in sdk_comp.downloadable_archives:
+ if archive.is_raw_artifact is False:
+ compress_dir = sdk_comp.work_dir_temp / archive.archive_name
+ compress_component_data_work.add_task(
+ f"[{archive.package_name}] Compress: {archive.archive_name}",
+ recompress_component,
+ task,
+ archive,
+ sdk_comp.data_dir_dest,
+ compress_dir
+ )
+ # threaded compress
+ compress_component_data_work.run(min([task.max_cpu_count, cpu_count()]))
for sdk_comp in task.sdk_component_list:
# substitute tags
substitute_component_tags(create_metadata_map(sdk_comp), str(sdk_comp.meta_dir_dest))
- if sdk_comp.temp_data_dir is None or not sdk_comp.temp_data_dir.exists():
- continue
# lastly remove temp dir after all data is prepared
- if not remove_tree(str(sdk_comp.temp_data_dir)):
- raise CreateInstallerError(f"Unable to remove dir: {sdk_comp.temp_data_dir}")
- # substitute downloadable archive names in installscript.qs
- archive_list = sdk_comp.generate_downloadable_archive_list()
- substitute_component_tags(archive_list, str(sdk_comp.meta_dir_dest))
+ if not remove_tree(str(sdk_comp.work_dir_temp)):
+ raise CreateInstallerError(f"Unable to remove dir: {sdk_comp.work_dir_temp}")
+ if sdk_comp.downloadable_archives:
+ # substitute downloadable archive names in installscript.qs
+ archive_list = sdk_comp.generate_downloadable_archive_list()
+ substitute_component_tags(archive_list, str(sdk_comp.meta_dir_dest))
##############################################################
@@ -1193,6 +1195,7 @@ class QtInstallerTask(Generic[QtInstallerTaskType]):
offline_installer: bool = False
online_installer: bool = False
create_repository: bool = False
+ notarize_payload: bool = False
partial_installer: bool = False # Invalid IfwSdkComponents will be excluded from the installer
dry_run: Optional[DryRunMode] = None
errors: List[str] = field(default_factory=list)
@@ -1244,6 +1247,7 @@ class QtInstallerTask(Generic[QtInstallerTaskType]):
Version number auto increase value: {self.version_number_auto_increase_value}
Mac cpu count: {self.max_cpu_count}
Long paths supported: {is_long_path_supported()}
+ Notarize payload (macOS): {self.notarize_payload}
To reproduce build task with the above configuration, run the following command:
{get_reproduce_args(self)}"""
@@ -1403,6 +1407,13 @@ def main() -> None:
dest="require_long_path_support",
action="store_false",
)
+ if is_macos():
+ parser.add_argument(
+ "--notarize-payload",
+ dest="notarize_payload",
+ action="store_true",
+ default=False
+ )
args = parser.parse_args(sys.argv[1:])
@@ -1420,6 +1431,7 @@ def main() -> None:
online_installer=args.online_installer,
create_repository=args.create_repository,
partial_installer=args.partial_installer,
+ notarize_payload=args.notarize_payload if is_macos() else False,
dry_run=DryRunMode[args.dry_run] if args.dry_run else None,
archive_base_url=args.archive_base_url,
ifw_tools_uri=args.ifw_tools_uri,
diff --git a/packaging-tools/release_repo_updater.py b/packaging-tools/release_repo_updater.py
index a5f35b038..4e4dc33dc 100755
--- a/packaging-tools/release_repo_updater.py
+++ b/packaging-tools/release_repo_updater.py
@@ -722,6 +722,7 @@ async def build_online_repositories(
force_version_number_increase=True,
substitution_list=task.substitutions,
build_timestamp=job_timestamp,
+ notarize_payload=task.notarize_payload,
dry_run=dry_run,
)
try:
@@ -1091,6 +1092,7 @@ async def _build_offline_tasks(
force_version_number_increase=True,
substitution_list=task.substitutions,
build_timestamp=job_timestamp,
+ notarize_payload=task.notarize_payload,
dry_run=dry_run,
)
try:
diff --git a/packaging-tools/release_task_reader.py b/packaging-tools/release_task_reader.py
index d13855168..53f48ee02 100755
--- a/packaging-tools/release_task_reader.py
+++ b/packaging-tools/release_task_reader.py
@@ -211,6 +211,10 @@ class IFWReleaseTask(IFWBaseReleaseTask):
def prerelease_version(self) -> str:
return self._get("prerelease_version")
+ @property
+ def notarize_payload(self) -> bool:
+        return (self._get("notarize_payload") or "").lower() in ("yes", "true", "1")
+
def validate(self) -> None:
validate_list = ["config_file", "repo_path"]
if "ifw.offline" in self.name:
diff --git a/packaging-tools/sdkcomponent.py b/packaging-tools/sdkcomponent.py
index 1f493a4ee..648c842bf 100644
--- a/packaging-tools/sdkcomponent.py
+++ b/packaging-tools/sdkcomponent.py
@@ -42,6 +42,7 @@ from typing import Any, Dict, List, Optional, Tuple
import htmllistparse # type: ignore
from urlpath import URL # type: ignore
+from bld_utils import is_macos
from bldinstallercommon import uri_exists
from logging_util import init_logger
@@ -76,12 +77,14 @@ class IfwPayloadItem:
archive_name: str
payload_base_uri: str = field(default_factory=str)
errors: List[str] = field(default_factory=list)
+ notarize_payload: bool = False
# List of archive formats supported by Installer Framework:
ifw_arch_formats: Tuple[str, ...] = (".7z", ".tar", ".gz", ".zip", ".xz", ".bz2")
# List of payload archive formats supported by scripts for extraction:
supported_arch_formats: Tuple[str, ...] = (".7z", ".tar", ".gz", ".zip", ".xz", ".bz2")
_requires_extraction: Optional[bool] = None
_requires_patching: Optional[bool] = None
+    _raw_artifact: Optional[bool] = None
def __post_init__(self) -> None:
"""Post init: run sanity checks"""
@@ -183,6 +186,22 @@ class IfwPayloadItem:
return self._requires_patching
@property
+ def is_raw_artifact(self) -> bool:
+ """
+ Determine if raw artifact is already supported by IFW and doesn't require any operations
+
+ Returns:
+ A boolean for whether archive can be used as is
+ """
+ if self._raw_artifact is None:
+ self._raw_artifact = (
+ len(self.payload_uris) == 1
+ and not self.requires_extraction
+ and Path(self.payload_uris[0]).suffix in self.ifw_arch_formats
+ )
+ return self._raw_artifact
+
+ @property
def requires_extraction(self) -> bool:
"""
A property to determine whether the archive needs to be extracted.
@@ -199,6 +218,9 @@ class IfwPayloadItem:
# Extract IFW supported archives if patching required or archive has a sha1 file
# Otherwise, use the raw CI artifact
self._requires_extraction = bool(self.component_sha1) or self.requires_patching
+ # For macOS, we can't use raw artifacts since they need to be signed and notarized
+ if is_macos() and self.notarize_payload is True:
+ self._requires_extraction = True
# If archive extraction is disabled, compress as-is (disable_extract_archive=False)
if self.disable_extract_archive:
self._requires_extraction = False
@@ -340,19 +362,33 @@ class IfwSdkComponent:
package_default: str
comp_sha1_uri: str
include_filter: str
+ base_work_dir: Path
component_sha1: Optional[str] = None
- temp_data_dir: Optional[Path] = None
- meta_dir_dest: Optional[Path] = None
archive_skip: bool = False
errors: List[str] = field(default_factory=list)
def __post_init__(self) -> None:
- """Post init: resolve component sha1 uri if it exists"""
+ """Post init: resolve component sha1 uri if it exists, set work dirs"""
if self.comp_sha1_uri:
_, match_uris = self.archive_resolver.resolve_payload_uri(self.comp_sha1_uri)
assert len(match_uris) == 1, f"More than one match for component sha: {match_uris}"
self.comp_sha1_uri = match_uris.pop()
+ @property
+ def work_dir_temp(self) -> Path:
+ """Return temporary work dir for IfwSdkComponent"""
+ return self.base_work_dir / self.ifw_sdk_comp_name / "temp"
+
+ @property
+ def data_dir_dest(self) -> Path:
+ """Return destination data dir for IfwSdkComponent"""
+ return self.base_work_dir / self.ifw_sdk_comp_name / "data"
+
+ @property
+ def meta_dir_dest(self) -> Path:
+ """Return destination meta dir for IfwSdkComponent"""
+ return self.base_work_dir / self.ifw_sdk_comp_name / "meta"
+
def validate(self, uri_check: bool = True, ignore_errors: bool = False) -> bool:
"""
Perform validation on IfwSdkComponent, raise error if component not valid
@@ -392,6 +428,19 @@ class IfwSdkComponent:
log.exception("[%s] Ignored error in component: %s", self.ifw_sdk_comp_name, err)
return False
+ def init_work_dirs(self) -> None:
+ """Create the required work directories for the payload data"""
+ try:
+ self.work_dir_temp.mkdir(parents=True)
+ self.data_dir_dest.mkdir(parents=True)
+ self.meta_dir_dest.mkdir(parents=True)
+ except FileExistsError as err:
+ log.warning(
+ "Work directories already initialized for the component (duplicate name?): %s",
+ self.ifw_sdk_comp_name,
+ exc_info=err,
+ )
+
def generate_downloadable_archive_list(self) -> List[List[str]]:
"""
Generate list that is embedded into package.xml
@@ -482,6 +531,8 @@ def parse_ifw_sdk_comp(
pkg_template_search_dirs: List[str],
substitutions: Dict[str, str],
file_share_base_url: str,
+ base_work_dir: Path,
+ notarize_payload: bool = False,
) -> IfwSdkComponent:
"""
Parse IfwSdkComponent from the given config
@@ -515,6 +566,7 @@ def parse_ifw_sdk_comp(
archive_resolver=archive_resolver,
parent_target_install_base=target_install_base,
substitutions=substitutions,
+ notarize_payload=notarize_payload,
)
return IfwSdkComponent(
ifw_sdk_comp_name=section,
@@ -528,6 +580,7 @@ def parse_ifw_sdk_comp(
package_default=package_default,
comp_sha1_uri=comp_sha1_uri,
include_filter=include_filter,
+ base_work_dir=base_work_dir,
)
@@ -537,6 +590,7 @@ def parse_ifw_sdk_archives(
archive_resolver: ArchiveResolver,
parent_target_install_base: str,
substitutions: Dict[str, str],
+ notarize_payload: bool = False,
) -> List[IfwPayloadItem]:
"""
Parsed IfwPayloadItems for the given payload sections in config
@@ -590,6 +644,7 @@ def parse_ifw_sdk_archives(
component_sha1=component_sha1_file,
archive_name=archive_name,
payload_base_uri=base_uri,
+ notarize_payload=notarize_payload,
)
payload.validate()
parsed_archives.append(payload)
diff --git a/packaging-tools/sign_installer.py b/packaging-tools/sign_installer.py
index 46ef57371..62fb77adb 100755
--- a/packaging-tools/sign_installer.py
+++ b/packaging-tools/sign_installer.py
@@ -3,7 +3,7 @@
#############################################################################
#
-# Copyright (C) 2022 The Qt Company Ltd.
+# Copyright (C) 2023 The Qt Company Ltd.
# Contact: https://www.qt.io/licensing/
#
# This file is part of the release tools of the Qt Toolkit.
@@ -32,15 +32,178 @@
import argparse
import os
import sys
-from shutil import rmtree
+from contextlib import suppress
+from pathlib import Path
+from shutil import copy2, rmtree
+from subprocess import CalledProcessError
+from typing import List, Tuple
+from macholib import MachO # type: ignore
+from temppathlib import TemporaryDirectory
+
+from bldinstallercommon import locate_paths
from logging_util import init_logger
+from notarize import embed_notarization, notarize
from read_remote_config import get_pkg_value
from runner import run_cmd, run_cmd_silent
log = init_logger(__name__, debug_mode=False)
+def _is_app_bundle(path: Path) -> bool:
+ """
+ Determine whether a folder contains .app bundle structure based on contents
+
+ Args:
+ path: A file system path to .app directory
+
+ Returns:
+ True if 'Info.plist' file found, otherwise False
+ """
+ if path.suffix != ".app":
+ return False
+ return path.joinpath("Contents", "Info.plist").exists()
+
+
+def _is_mach_o_executable(path: Path) -> bool:
+ """
+ Determine whether a file is a Mach-O executable
+
+ Args:
+ path: A file system path to a file
+
+ Returns:
+ True if Mach-O header found successfully, otherwise False
+ """
+ try:
+ headers = MachO.MachO(path.resolve(strict=True)).headers
+ return bool(headers)
+ except Exception:
+ return False
+
+
+def _is_framework_version(path: Path) -> bool:
+ """
+ Determine whether a folder is part of a macOS multi-versioned framework
+
+ Args:
+ path: A file system path to a folder
+
+ Returns:
+ True if directory is a framework version bundle, otherwise False
+ """
+ path = path.resolve(strict=True)
+ with suppress(IndexError):
+ if path.parent.name == "Versions" and path.parents[1].suffix == ".framework":
+ return True
+ return False
+
+
+def _find_signable_content(pkg_dir: Path) -> Tuple[List[Path], List[Path]]:
+ """
+ Find all content to be signed, and that supports stapling:
+ .app bundles, frameworks, packages, disk images, binaries (e.g. executables, dylib)
+
+ Args:
+ pkg_dir: A file system path to a directory to search recursively from
+
+ Returns:
+ Lists of paths sorted for codesign and staple operations
+ """
+ sign_list: List[Path] = []
+ staple_list: List[Path] = []
+ for path in sorted(
+ set(Path(p).resolve(strict=True) for p in locate_paths(pkg_dir, patterns=["*"])),
+ key=lambda path: len(path.parts), # Sort by path part length
+ reverse=True, # Nested items first to ensure signing order (important)
+ ):
+ if path.is_symlink():
+ continue # ignore symlinks
+ # App bundles and frameworks
+ if path.is_dir():
+ if _is_app_bundle(path):
+ sign_list.append(path)
+ staple_list.append(path)
+ elif _is_framework_version(path):
+ sign_list.append(path)
+ # Containers, Mach-O shared libraries and dynamically loaded modules, Mach-O executables
+ elif path.is_file():
+ # Known suffixes for containers
+ if path.suffix in (".pkg", ".dmg"):
+ sign_list.append(path)
+ staple_list.append(path)
+ # Known suffixes for libs, modules, ...
+ elif path.suffix in (".dylib", ".so", ".bundle"):
+ sign_list.append(path)
+ # Mach-O files by header, exec bit
+ elif os.access(path, os.X_OK) and _is_mach_o_executable(path):
+ sign_list.append(path)
+ return sign_list, staple_list
+
+
+def recursive_sign_notarize(pkg_dir: Path) -> None:
+ """
+ Sign, notarize, and staple content from a directory recursively
+
+ Args:
+ pkg_dir: A file system path to the directory with content
+ """
+ sign_items, staple_items = _find_signable_content(pkg_dir=pkg_dir)
+ # Run codesign for items
+ sign_mac_content(sign_items)
+ # Copy only the notarizable (codesigned) content to a temporary dir
+ # (ditto does not support multiple source items for archive generation)
+ # Exclude other content from the notarization request to reduce file size
+ with TemporaryDirectory() as notarize_dir:
+ for path in reversed(sign_items):
+ # Skip if parent directory already in list
+ if not any(p for p in path.parents if p in sign_items):
+ create_dir = notarize_dir.path / path.relative_to(pkg_dir).parent
+ create_dir.mkdir(parents=True, exist_ok=True)
+ if path.is_dir():
+ # use ditto here to copy, preserves the directory hierarchy properly
+ run_cmd(["ditto", str(path), str(create_dir / path.name)])
+ else:
+ copy2(path, create_dir, follow_symlinks=False)
+ # Notarize
+ notarize(notarize_dir.path)
+ # Staple original files
+ count = len(staple_items)
+ log.info("Stapling ticket to %s payload items", count)
+    for idx, path in enumerate(staple_items, start=1):
+ log.info("[%s/%s] Staple: %s", idx, count, str(path))
+ embed_notarization(path)
+
+
+def sign_mac_content(paths: List[Path]) -> None:
+ """
+ Run codesign for the given paths
+
+ Args:
+ paths: List of signable content
+
+ Raises:
+ CalledProcessError: On code signing failure
+ """
+ run_cmd(cmd=["/Users/qt/unlock-keychain.sh"]) # unlock the keychain first
+ count = len(paths)
+ log.info("Codesigning %s payload items", count)
+    for idx, path in enumerate(paths, start=1):
+ log.info("[%s/%s] Codesign: %s", idx, count, str(path))
+ cmd_args = [
+ 'codesign', '--verbose=3', str(path),
+ '-r', get_pkg_value("SIGNING_FLAGS"), # signing requirements
+ '-s', get_pkg_value("SIGNING_IDENTITY"), # developer id identity
+ '-o', 'runtime', # enable hardened runtime, required for notarization
+ "--timestamp", # contact apple servers for time validation
+ "--force" # resign all the code with different signature
+ ]
+ try:
+ run_cmd_silent(cmd=cmd_args)
+ except CalledProcessError as err:
+ raise Exception(f"Failed to codesign: {str(path)}") from err
+
+
def sign_mac_app(app_path: str, signing_identity: str) -> None:
assert app_path.endswith(".app"), f"Not a valid path to .app bundle: {app_path}"
# we need to unlock the keychain first
diff --git a/packaging-tools/tests/test_bldinstallercommon.py b/packaging-tools/tests/test_bldinstallercommon.py
index 303e7fd13..e1451d1f7 100644
--- a/packaging-tools/tests/test_bldinstallercommon.py
+++ b/packaging-tools/tests/test_bldinstallercommon.py
@@ -54,8 +54,8 @@ from bldinstallercommon import (
from installer_utils import PackagingError
-@ddt
-class TestCommon(unittest.TestCase): # pylint: disable=R0904
+@ddt # pylint: disable=R0904
+class TestCommon(unittest.TestCase):
@data( # type: ignore
(
"%TAG_VERSION%%TAG_EDITION%",
diff --git a/packaging-tools/tests/test_create_installer.py b/packaging-tools/tests/test_create_installer.py
index e13dfa756..87a563907 100644
--- a/packaging-tools/tests/test_create_installer.py
+++ b/packaging-tools/tests/test_create_installer.py
@@ -80,7 +80,7 @@ class TestCommon(unittest.TestCase):
)
def test_read_component_sha(self, test_data: Tuple[str, Optional[str]]) -> None:
sha, exp = test_data
- sdk_comp = IfwSdkComponent("", "", "", "", "", "", "", "", "", "", "") # type: ignore
+ sdk_comp = IfwSdkComponent("", "", "", "", "", "", "", "", "", "", "", "") # type: ignore
with TemporaryDirectory() as tmpdir:
test_sha = tmpdir.path / "test"
test_sha.write_text(sha, encoding="utf-8")
@@ -94,7 +94,7 @@ class TestCommon(unittest.TestCase):
(""),
)
def test_read_component_sha_invalid_content(self, test_sha1: str) -> None:
- sdk_comp = IfwSdkComponent("", "", "", "", "", "", "", "", "", "", "") # type: ignore
+ sdk_comp = IfwSdkComponent("", "", "", "", "", "", "", "", "", "", "", "") # type: ignore
with TemporaryDirectory() as tmpdir:
test_sha = tmpdir.path / "test"
test_sha.write_text(test_sha1, encoding="utf-8")
@@ -102,7 +102,7 @@ class TestCommon(unittest.TestCase):
read_component_sha(sdk_comp, test_sha)
def test_read_component_sha_invalid_path(self) -> None:
- sdk_comp = IfwSdkComponent("", "", "", "", "", "", "", "", "", "", "") # type: ignore
+ sdk_comp = IfwSdkComponent("", "", "", "", "", "", "", "", "", "", "", "") # type: ignore
with TemporaryDirectory() as tmpdir:
with self.assertRaises(CreateInstallerError):
read_component_sha(sdk_comp, tmpdir.path / "invalid")
diff --git a/packaging-tools/tests/test_sdkcomponent.py b/packaging-tools/tests/test_sdkcomponent.py
index e349198d8..0173c1b9e 100644
--- a/packaging-tools/tests/test_sdkcomponent.py
+++ b/packaging-tools/tests/test_sdkcomponent.py
@@ -154,6 +154,8 @@ class TestRunner(unittest.TestCase):
pkg_template_search_dirs,
key_value_subst_dict(),
file_share_base_url,
+ base_work_dir=Path("."),
+ notarize_payload=False,
)
self.assertEqual(len(comp.downloadable_archives), 3)
@@ -307,6 +309,7 @@ class TestRunner(unittest.TestCase):
package_default="package_default",
comp_sha1_uri="comp_sha1_uri",
include_filter="include_filter",
+ base_work_dir=Path(".")
)
with self.assertRaises(IfwSdkError):
component.validate(uri_check=False, ignore_errors=False)