author     Iikka Eklund <iikka.eklund@qt.io>        2022-05-19 22:18:36 +0300
committer  Patrik Teivonen <patrik.teivonen@qt.io>  2022-11-25 10:47:25 +0000
commit     af10f08f8d9ccc3a68b58aa701cb820c581c27c7 (patch)
tree       f70d1063d07e2905642cd5bf9f386edec4c4e448
parent     7195aa18d9639f7259a6630a1b98ef7578dabf42 (diff)
Refactor sdkcomponent.py
- Remove archiveresolver.py in favor of simpler ArchiveResolver class
- Clean up sdkcomponent.py, use dataclasses, add better validations
- Use raw CI artifacts without extracting and repackaging when possible
  (archive format supported by IFW, and patching not required)
- Add unit tests
- Adapt create_installer.py to required changes

Follow up: create_installer.get_component_data()

Change-Id: If4068dfd0842c5d5c343fa99ddabac86450adbb1
Reviewed-by: Antti Kokko <antti.kokko@qt.io>
-rw-r--r--  packaging-tools/archiveresolver.py          173
-rw-r--r--  packaging-tools/create_installer.py         303
-rw-r--r--  packaging-tools/sdkcomponent.py             663
-rw-r--r--  packaging-tools/tests/test_sdkcomponent.py  256
4 files changed, 836 insertions(+), 559 deletions(-)
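
The key behavioural change is that payload archives already in an IFW-supported format, and not requiring any patching, are downloaded straight into the data directory instead of being extracted and repacked. A minimal sketch of that decision using the new IfwPayloadItem introduced below; the package name and URL are made up, and the packaging-tools directory is assumed to be on sys.path:

from sdkcomponent import IfwPayloadItem  # class added by this patch

payload = IfwPayloadItem(
    package_name="qt.qtbase.payload",                      # hypothetical section name
    archive_uri="https://ci.example.com/qtbase-linux.7z",  # hypothetical CI artifact URL
    archive_action=None,
    extract_archive=True,
    package_strip_dirs=0,
    package_finalize_items="",
    parent_target_install_base="/",
    arch_target_install_base="",
    arch_target_install_dir="",
    rpath_target="",
    component_sha1="",
    arch_name="",
)
payload.validate()                  # logs OK/NOK, raises IfwSdkError on config errors
print(payload.requires_patching)    # False: no strip dirs, finalize items, rpath or action script
print(payload.requires_extraction)  # False: the raw .7z CI artifact is used as-is
# Setting e.g. package_strip_dirs=1 or rpath_target would flip both properties to True,
# and create_installer.py would then extract, patch and repack the payload.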
diff --git a/packaging-tools/archiveresolver.py b/packaging-tools/archiveresolver.py
deleted file mode 100644
index e7a47f8c1..000000000
--- a/packaging-tools/archiveresolver.py
+++ /dev/null
@@ -1,173 +0,0 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-
-#############################################################################
-#
-# Copyright (C) 2022 The Qt Company Ltd.
-# Contact: https://www.qt.io/licensing/
-#
-# This file is part of the release tools of the Qt Toolkit.
-#
-# $QT_BEGIN_LICENSE:GPL-EXCEPT$
-# Commercial License Usage
-# Licensees holding valid commercial Qt licenses may use this file in
-# accordance with the commercial license agreement provided with the
-# Software or, alternatively, in accordance with the terms contained in
-# a written agreement between you and The Qt Company. For licensing terms
-# and conditions see https://www.qt.io/terms-conditions. For further
-# information use the contact form at https://www.qt.io/contact-us.
-#
-# GNU General Public License Usage
-# Alternatively, this file may be used under the terms of the GNU
-# General Public License version 3 as published by the Free Software
-# Foundation with exceptions as appearing in the file LICENSE.GPL3-EXCEPT
-# included in the packaging of this file. Please review the following
-# information to ensure the GNU General Public License requirements will
-# be met: https://www.gnu.org/licenses/gpl-3.0.html.
-#
-# $QT_END_LICENSE$
-#
-#############################################################################
-
-import os
-from configparser import ConfigParser
-from typing import List
-from urllib.parse import urlparse
-
-from bldinstallercommon import config_section_map, is_content_url_valid, safe_config_key_fetch
-from logging_util import init_logger
-from pkg_constants import PKG_TEMPLATE_BASE_DIR_NAME
-
-SERVER_NAMESPACE = 'ArchiveRemoteLocation'
-PACKAGE_REMOTE_LOCATION_RELEASE = 'release'
-PACKAGE_ARCHIVE_TAG = 'ARCHIVE_TAG'
-log = init_logger(__name__, debug_mode=False)
-
-
-###############################
-# class ArchiveLocationResolver
-###############################
-class ArchiveLocationResolver:
- """Helper class to resolve full URI for archive"""
-
- ######################################
- # inner class ArchiveRemoteLocation
- ######################################
- class ArchiveRemoteLocation:
- """Container class for server URL data"""
-
- ###############################
- # Constructor
- ###############################
- def __init__(self, server_name: str, server_base_url: str, server_base_path: str) -> None:
- self.server_name = server_name
- temp = server_base_url
- if not temp.endswith('/') and not server_base_path.startswith('/'):
- temp = temp + '/'
- temp = temp + server_base_path
- self.server_url = temp
-
- ###############################
- # Constructor
- ###############################
- def __init__(
- self,
- target_config: ConfigParser,
- server_base_url_override: str,
- configurations_root_dir: str,
- key_substitution_list: List[List[str]],
- ) -> None:
- """Init data based on the target configuration"""
- self.server_list = []
- self.pkg_templates_dir_list: List[str] = []
- self.default_server = None
- self.configurations_root_dir = configurations_root_dir
- self.key_substitution_list = key_substitution_list
- # get packages tempalates src dir first
- pkg_templates_dir = os.path.normpath(config_section_map(target_config, 'PackageTemplates')['template_dirs'])
- self.pkg_templates_dir_list = pkg_templates_dir.replace(' ', '').rstrip(',\n').split(',')
- # next read server list
- if server_base_url_override:
- server_obj = ArchiveLocationResolver.ArchiveRemoteLocation('default_server_name', server_base_url_override, '')
- self.server_list.append(server_obj)
- else:
- for section in target_config.sections():
- if section.startswith(SERVER_NAMESPACE):
- server_name = section.split('.')[-1]
- base_url = safe_config_key_fetch(target_config, section, 'base_url')
- base_path = safe_config_key_fetch(target_config, section, 'base_path')
- base_path.replace(' ', '')
- # if base path is defined, then the following logic applies:
- # if script is used in testclient mode fetch the packages from "RnD" location
- # otherwise fetch packages from "release" location.
- # If the base_path is not defined, use the address as-is
- if base_path:
- base_path = base_path + PACKAGE_REMOTE_LOCATION_RELEASE
- server_obj = ArchiveLocationResolver.ArchiveRemoteLocation(server_name, base_url, base_path)
- self.server_list.append(server_obj)
- if len(self.server_list) == 1:
- self.default_server = self.server_list[0]
-
- ###############################
- # Get full server URL by name
- ###############################
- def server_url_by_name(self, server_name: str) -> str:
- """Get server URL by name. If empty name given, return the default server (may be null)."""
- if not server_name:
- return self.default_server.server_url if self.default_server else ""
- for server in self.server_list:
- if server.server_name == server_name:
- return server.server_url
- raise RuntimeError(f"*** Error! Unable to find server by name: {server_name}")
-
- ###############################
- # Get full server URI
- ###############################
- def resolve_full_uri(self, package_name: str, server_name: str, archive_uri: str) -> str:
- """Resolve the full URI in the following order
- 1. is archive_uri a valid URI as such
- 2. check if given archive_uri denotes a package under package templates directory
- 3. check if given URI is valid full URL
- 4. try to compose full URL
- return the resolved URI
- """
- # substitute key value pairs if any
- for item in self.key_substitution_list:
- temp = archive_uri.replace(item[0], item[1])
- if temp != archive_uri:
- archive_uri = temp
- # 1. check if given archive_uri denotes a package under package templates directory
- base_path = os.path.join(self.configurations_root_dir, PKG_TEMPLATE_BASE_DIR_NAME)
- package_path = package_name + os.sep + 'data' + os.sep + archive_uri
- # find the correct template subdirectory
- for subdir in self.pkg_templates_dir_list:
- path_temp = os.path.join(base_path, subdir)
- if not os.path.isdir(path_temp):
- path_temp = path_temp.replace(os.sep + PKG_TEMPLATE_BASE_DIR_NAME, '')
- if os.path.isdir(path_temp):
- temp = os.path.join(path_temp, package_path)
- if os.path.isfile(temp):
- return temp
- # 2. check if given URI is valid full URL
- res = is_content_url_valid(archive_uri)
- if res:
- return archive_uri
- parts = urlparse(archive_uri)
- if parts.scheme and parts.netloc:
- raise RuntimeError(f"Url: [{archive_uri}] points to valid location but it is inaccessible.")
- # 3. try to compose full URL
- temp = self.server_url_by_name(server_name)
- if not temp.endswith('/') and not archive_uri.startswith('/'):
- temp = temp + '/'
- return temp + archive_uri
-
- ###############################
- # Print out server list
- ###############################
- def print_server_list(self) -> None:
- log.info("--------------------------------------------------")
- log.info(" Server list:")
- for server in self.server_list:
- log.info(" ---------------------------------------------")
- log.info(" Server name: %s", server.server_name)
- log.info(" Server url: %s", server.server_url)
diff --git a/packaging-tools/create_installer.py b/packaging-tools/create_installer.py
index 413ad173d..a2e1150fc 100644
--- a/packaging-tools/create_installer.py
+++ b/packaging-tools/create_installer.py
@@ -40,10 +40,8 @@ from dataclasses import dataclass, field
from multiprocessing import cpu_count
from pathlib import Path
from time import gmtime, strftime
-from typing import Any, Generator, List, Optional
+from typing import Any, Dict, Generator, List, Optional
-import pkg_constants
-from archiveresolver import ArchiveLocationResolver
from bld_utils import download, is_linux, is_macos, is_windows
from bldinstallercommon import (
copy_tree,
@@ -62,9 +60,9 @@ from bldinstallercommon import (
from installer_utils import PackagingError
from logging_util import init_logger
from patch_qt import patch_files, patch_qt_edition
-from pkg_constants import INSTALLER_OUTPUT_DIR_NAME
+from pkg_constants import INSTALLER_OUTPUT_DIR_NAME, PKG_TEMPLATE_BASE_DIR_NAME
from runner import run_cmd
-from sdkcomponent import SdkComponent
+from sdkcomponent import IfwPayloadItem, IfwSdkComponent, IfwSdkError, parse_ifw_sdk_comp
from threadedwork import ThreadedWork
if is_windows():
@@ -154,8 +152,8 @@ def set_config_xml(task: Any) -> Any:
fileslist = [config_template_dest]
replace_in_files(fileslist, UPDATE_REPOSITORY_URL_TAG, update_repository_url)
# substitute values also from global substitution list
- for item in task.substitutions:
- replace_in_files(fileslist, item[0], item[1])
+ for key, value in task.substitutions.items():
+ replace_in_files(fileslist, key, value)
return config_template_dest
@@ -167,8 +165,8 @@ def substitute_global_tags(task: Any) -> None:
log.info("Substituting global tags:")
log.info("%%PACKAGE_CREATION_DATE%% = %s", task.build_timestamp)
log.info("%%VERSION_NUMBER_AUTO_INCREASE%% = %s", task.version_number_auto_increase_value)
- for item in task.substitutions:
- log.info("%s = %s", item[0], item[1])
+ for key, value in task.substitutions.items():
+ log.info("%s = %s", key, value)
# initialize the file list
fileslist = []
@@ -183,8 +181,8 @@ def substitute_global_tags(task: Any) -> None:
replace_in_files(fileslist, PACKAGE_CREATION_DATE_TAG, task.build_timestamp)
if task.force_version_number_increase:
replace_in_files(fileslist, VERSION_NUMBER_AUTO_INCREASE_TAG, task.version_number_auto_increase_value)
- for item in task.substitutions:
- replace_in_files(fileslist, item[0], item[1])
+ for key, value in task.substitutions.items():
+ replace_in_files(fileslist, key, value)
##############################################################
@@ -245,30 +243,36 @@ def parse_component_data(task: Any, configuration_file: str, configurations_base
section_namespace = section.split(".")[0]
if section_namespace in task.package_namespace:
if section not in task.sdk_component_ignore_list:
- sdk_component = SdkComponent(
- section_name=section,
- target_config=configuration,
- packages_full_path_list=task.packages_dir_name_list,
- archive_location_resolver=task.archive_location_resolver,
- key_value_substitution_list=task.substitutions,
+ sdk_comp = parse_ifw_sdk_comp(
+ config=configuration,
+ section=section,
+ pkg_template_search_dirs=task.packages_dir_name_list,
+ substitutions=task.substitutions,
+ file_share_base_url=task.archive_base_url,
)
- if task.dry_run:
- sdk_component.set_archive_skip(True)
- # validate component
- sdk_component.validate()
- if sdk_component.is_valid():
- # if include filter defined for component it is included only if LICENSE_TYPE matches to include_filter
- # same configuration file can contain components that are included only to either edition
- if sdk_component.include_filter and sdk_component.include_filter in task.license_type:
- task.sdk_component_list.append(sdk_component)
- # components without include_filter definition are added by default
- elif not sdk_component.include_filter:
- task.sdk_component_list.append(sdk_component)
- else:
- if task.strict_mode:
- raise CreateInstallerError(f"{sdk_component.error_msg()}")
- log.warning("Ignore invalid component (missing payload/metadata?): %s", section)
- task.sdk_component_list_skipped.append(sdk_component)
+ try:
+ # Validate component
+ sdk_comp.validate()
+ # Skip archive download if dry run
+ if task.dry_run:
+ sdk_comp.archive_skip = True
+ except IfwSdkError as err:
+ if not task.strict_mode:
+ raise CreateInstallerError from err
+ log.warning(
+ "Skip invalid component (missing payload/metadata?): [%s]",
+ sdk_comp.ifw_sdk_comp_name
+ )
+ sdk_comp.archive_skip = True
+ # if include filter defined for component it is included only if LICENSE_TYPE
+ # matches to include_filter
+ # same configuration file can contain components that are included only to
+ # either edition
+ if sdk_comp.include_filter and sdk_comp.include_filter in task.license_type:
+ task.sdk_component_list.append(sdk_comp)
+ # components without include_filter definition are added by default
+ elif not sdk_comp.include_filter:
+ task.sdk_component_list.append(sdk_comp)
# check for extra configuration files if defined
extra_conf_list = safe_config_key_fetch(configuration, 'PackageConfigurationFiles', 'file_list')
if extra_conf_list:
@@ -291,7 +295,7 @@ def parse_components(task: Any) -> None:
parse_component_data(task, main_conf_file, conf_base_path)
-def create_metadata_map(sdk_component: SdkComponent) -> List[List[str]]:
+def create_metadata_map(sdk_component: IfwSdkComponent) -> List[List[str]]:
"""create lists for component specific tag substitutions"""
component_metadata_tag_pair_list = []
# version tag substitution if exists
@@ -300,12 +304,6 @@ def create_metadata_map(sdk_component: SdkComponent) -> List[List[str]]:
# default package info substitution if exists
if sdk_component.package_default:
component_metadata_tag_pair_list.append([PACKAGE_DEFAULT_TAG, sdk_component.package_default])
- # install priority info substitution if exists
- if sdk_component.install_priority:
- component_metadata_tag_pair_list.append([INSTALL_PRIORITY_TAG, sdk_component.install_priority])
- # install priority info substitution if exists
- if sdk_component.sorting_priority:
- component_metadata_tag_pair_list.append([SORTING_PRIORITY_TAG, sdk_component.sorting_priority])
# target install dir substitution
if sdk_component.target_install_base:
component_metadata_tag_pair_list.append([TARGET_INSTALL_DIR_NAME_TAG, sdk_component.target_install_base])
@@ -319,9 +317,9 @@ def create_metadata_map(sdk_component: SdkComponent) -> List[List[str]]:
return component_metadata_tag_pair_list
-def get_component_sha1_file(sdk_component: SdkComponent, sha1_file_dest: str) -> None:
+def get_component_sha1_file(sdk_component: IfwSdkComponent, sha1_file_dest: str) -> None:
"""download component sha1 file"""
- download(sdk_component.component_sha1_uri, sha1_file_dest)
+ download(sdk_component.comp_sha1_uri, sha1_file_dest)
# read sha1 from the file
with open(sha1_file_dest, "r", encoding="utf-8") as sha1_file:
@@ -330,61 +328,45 @@ def get_component_sha1_file(sdk_component: SdkComponent, sha1_file_dest: str) ->
def get_component_data(
task: Any,
- sdk_component: SdkComponent,
- archive: SdkComponent.DownloadableArchive,
+ sdk_component: IfwSdkComponent,
+ archive: IfwPayloadItem,
install_dir: str,
data_dir_dest: str,
compress_content_dir: str,
) -> None:
- """download and create data for a component"""
- package_raw_name = os.path.basename(archive.archive_uri)
-
- # if no data to be installed, then just continue
- if not package_raw_name:
+ """Download and create data for a component"""
+ # Continue if payload item has no data
+ if not os.path.basename(archive.archive_uri):
return
- if not archive.package_strip_dirs:
- archive.package_strip_dirs = '0'
-
- if package_raw_name.endswith(('.7z', '.tar.xz')) \
- and archive.package_strip_dirs == '0' \
- and not archive.package_finalize_items \
- and not archive.archive_action \
- and not archive.rpath_target \
- and sdk_component.target_install_base == '/' \
- and not archive.target_install_dir:
- log.info("No repackaging actions required for the package, just download it directly to data directory")
- downloaded_archive = os.path.normpath(data_dir_dest + os.sep + archive.archive_name)
- # start download
- download(archive.archive_uri, downloaded_archive)
+ # Download payload to data_dir_dest
+ downloaded_file = Path(data_dir_dest, archive.arch_name)
+ download(archive.archive_uri, str(downloaded_file))
+ # For non-archive payload, move to install_dir for packing
+ if not archive.archive_uri.endswith(archive.supported_arch_formats):
+ shutil.move(str(downloaded_file), install_dir)
+ # For payload already in IFW compatible format, use the raw artifact and continue
+ elif not archive.requires_extraction and archive.archive_uri.endswith(archive.ifw_arch_formats):
return
-
- downloaded_archive = os.path.normpath(install_dir + os.sep + package_raw_name)
- # start download
- download(archive.archive_uri, downloaded_archive)
-
- # repackage content so that correct dir structure will get into the package
-
- if not archive.extract_archive:
- archive.extract_archive = 'yes'
-
- # extract contents
- if archive.extract_archive == 'yes':
- extracted = extract_file(downloaded_archive, install_dir)
- # remove old package if extraction was successful, else keep it
- if extracted:
- os.remove(downloaded_archive)
-
+ # Extract payload archive if it requires to be patched or recompressed to a compatible format
+ else:
+ if not extract_file(str(downloaded_file), install_dir):
+ # Raise error on unsuccessful extraction
+ raise CreateInstallerError(f"Couldn't extract archive: {downloaded_file}")
+ # Remove original archive after extraction complete
+ os.remove(downloaded_file)
+ # If patching items are specified, execute them here
+ if archive.requires_patching:
# perform custom action script for the extracted archive
if archive.archive_action:
- script_file, script_args = archive.archive_action.split(",")
+ script_file, script_args = archive.archive_action
script_args = script_args or ""
- script_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), script_file)
- if not os.path.exists(script_path):
+ script_path = Path(__file__).parent.resolve() / script_file
+ if not script_path.exists():
raise CreateInstallerError(f"Custom archive action script missing: {script_path}")
- cmd = [script_path, "--input-dir=" + install_dir, script_args.strip()]
- if script_path.endswith(".py"):
+ cmd = [str(script_path), "--input-dir=" + install_dir, script_args.strip()]
+ if script_path.suffix == ".py":
cmd.insert(0, sys.executable)
- run_cmd(cmd)
+ run_cmd(cmd=cmd)
# strip out unnecessary folder structure based on the configuration
count = 0
@@ -409,47 +391,51 @@ def get_component_data(
except PackagingError:
pass
if 'patch_qt' in archive.package_finalize_items:
- patch_files(install_dir, product='qt_framework')
+ patch_files(install_dir, product="qt_framework")
if 'set_executable' in archive.package_finalize_items:
handle_set_executable(install_dir, archive.package_finalize_items)
if 'set_licheck' in archive.package_finalize_items:
handle_set_licheck(task, install_dir, archive.package_finalize_items)
- # remove debug information files when explicitly defined so
- if not task.remove_pdb_files or not task.remove_debug_information_files:
- # don't remove debug information files from debug information archives
- if not archive.archive_name.endswith('debug-symbols.7z'):
- # Check if debug information file types are defined
- if task.remove_pdb_files or task.remove_debug_information_files:
- # Remove debug information files according to host platform defaults
- remove_all_debug_information_files(install_dir)
-
- # remove debug libraries
- if task.remove_debug_libraries:
- remove_all_debug_libraries(install_dir)
-
- if archive.rpath_target:
- if not archive.rpath_target.startswith(os.sep):
- archive.rpath_target = os.sep + archive.rpath_target
- if is_linux():
- handle_component_rpath(install_dir, archive.rpath_target)
-
- if archive.component_sha1_file:
+ # remove debug information files when explicitly defined so
+ if not task.remove_pdb_files or not task.remove_debug_information_files:
+ # don't remove debug information files from debug information archives
+ if not archive.arch_name.endswith("debug-symbols.7z"):
+ # Check if debug information file types are defined
+ if task.remove_pdb_files or task.remove_debug_information_files:
+ # Remove debug information files according to host platform defaults
+ remove_all_debug_information_files(install_dir)
+
+ # remove debug libraries
+ if task.remove_debug_libraries:
+ remove_all_debug_libraries(install_dir)
+
+ if archive.rpath_target:
+ if not archive.rpath_target.startswith(os.sep):
+ archive.rpath_target = os.sep + archive.rpath_target
+ if is_linux():
+ handle_component_rpath(install_dir, archive.rpath_target)
+
+ if archive.component_sha1:
# read sha1 from the file
- sha1_file_path = install_dir + os.sep + archive.component_sha1_file
+ sha1_file_path = install_dir + os.sep + archive.component_sha1
if os.path.exists(sha1_file_path):
with open(sha1_file_path, "r", encoding="utf-8") as sha1_file:
sdk_component.component_sha1 = sha1_file.read().strip()
else:
- raise CreateInstallerError(f"Component SHA1 file '{archive.component_sha1_file}' not found")
-
- # lastly compress the component back to .7z archive
+ raise CreateInstallerError(
+ f"Component SHA1 file '{archive.component_sha1}' not found"
+ )
+ # Lastly, compress the component back to a 7z archive
+ if not archive.arch_name.endswith(".7z"): # Remove old archive suffix
+ while Path(archive.arch_name).suffix in archive.supported_arch_formats:
+ archive.arch_name = Path(archive.arch_name).stem
+ archive.arch_name = Path(archive.arch_name + ".7z").name
content_list = os.listdir(compress_content_dir)
- # adding compress_content_dir in front of every item
+ # Add compress_content_dir in front of every item
content_list = [(compress_content_dir + os.sep + x) for x in content_list]
-
- saveas = os.path.normpath(data_dir_dest + os.sep + archive.archive_name)
- run_cmd(cmd=[task.archivegen_tool, saveas] + content_list, cwd=data_dir_dest)
+ save_as = os.path.normpath(os.path.join(data_dir_dest, archive.arch_name))
+ run_cmd(cmd=[task.archivegen_tool, save_as] + content_list, cwd=data_dir_dest)
def handle_set_executable(base_dir: str, package_finalize_items: str) -> None:
@@ -483,8 +469,8 @@ def parse_package_finalize_items(package_finalize_items: str, item_category: str
# Substitute pkg template directory names
##############################################################
def substitute_package_name(task: Any, package_name: str) -> str:
- for item in task.substitutions:
- package_name = package_name.replace(item[0], item[1])
+ for key, value in task.substitutions.items():
+ package_name = package_name.replace(key, value)
return package_name
@@ -579,22 +565,27 @@ def create_target_components(task: Any) -> None:
if task.create_repository and os.environ.get("LRELEASE_TOOL"):
if not os.path.isfile(os.path.join(task.script_root_dir, "lrelease")):
download(os.environ.get("LRELEASE_TOOL", ""), task.script_root_dir)
- extract_file(os.path.basename(os.environ.get("LRELEASE_TOOL", "")), task.script_root_dir)
+ extract_file(
+ os.path.basename(os.environ.get("LRELEASE_TOOL", "")), task.script_root_dir
+ )
get_component_data_work = ThreadedWork("get components data")
- for sdk_component in task.sdk_component_list:
- sdk_component.print_component_data()
+ for sdk_comp in task.sdk_component_list:
+ log.info(sdk_comp)
+ if sdk_comp.archive_skip:
+ break
# substitute pkg_template dir names and package_name
- package_name = substitute_package_name(task, sdk_component.package_name)
+ package_name = substitute_package_name(task, sdk_comp.ifw_sdk_comp_name)
+ sdk_comp.ifw_sdk_comp_name = package_name
dest_base = task.packages_full_path_dst + os.sep + package_name + os.sep
meta_dir_dest = os.path.normpath(dest_base + 'meta')
data_dir_dest = os.path.normpath(dest_base + 'data')
temp_data_dir = os.path.normpath(dest_base + 'tmp')
# save path for later substitute_component_tags call
- sdk_component.meta_dir_dest = meta_dir_dest
+ sdk_comp.meta_dir_dest = Path(meta_dir_dest)
# create meta destination folder
- Path(meta_dir_dest).mkdir(parents=True, exist_ok=True)
+ sdk_comp.meta_dir_dest.mkdir(parents=True, exist_ok=True)
# Copy Meta data
- metadata_content_source_root = os.path.normpath(sdk_component.pkg_template_dir + os.sep + 'meta')
+ metadata_content_source_root = os.path.join(sdk_comp.pkg_template_folder, "meta")
copy_tree(metadata_content_source_root, meta_dir_dest)
if os.path.isfile(os.path.join(task.script_root_dir, "lrelease")):
# create translation binaries if translation source files exist for component
@@ -604,16 +595,21 @@ def create_target_components(task: Any) -> None:
# add files into tag substitution
task.directories_for_substitutions.append(meta_dir_dest)
# handle archives
- if sdk_component.downloadable_archive_list:
+ if sdk_comp.downloadable_archives:
# save path for later substitute_component_tags call
- sdk_component.temp_data_dir = temp_data_dir
+ sdk_comp.temp_data_dir = Path(temp_data_dir)
# Copy archives into temporary build directory if exists
- for archive in sdk_component.downloadable_archive_list:
- # fetch packages only if offline installer or repo creation, for online installer just handle the metadata
+ for archive in sdk_comp.downloadable_archives:
+ # fetch packages only if offline installer or repo creation,
+ # for online installer just handle the metadata
if task.offline_installer or task.create_repository:
# Create needed data dirs
- compress_content_dir = os.path.normpath(temp_data_dir + os.sep + archive.archive_name)
- install_dir = os.path.normpath(compress_content_dir + archive.get_archive_installation_directory())
+ compress_content_dir = os.path.normpath(
+ temp_data_dir + os.sep + archive.arch_name
+ )
+ install_dir = os.path.normpath(
+ compress_content_dir + archive.get_archive_install_dir()
+ )
# adding get_component_data task to our work queue
# Create needed data dirs before the threads start to work
Path(install_dir).mkdir(parents=True, exist_ok=True)
@@ -621,16 +617,28 @@ def create_target_components(task: Any) -> None:
if is_windows():
install_dir = win32api.GetShortPathName(install_dir)
data_dir_dest = win32api.GetShortPathName(data_dir_dest)
- get_component_data_work.add_task(f"adding {archive.archive_name} to {sdk_component.package_name}",
- get_component_data, task, sdk_component, archive, install_dir, data_dir_dest, compress_content_dir)
+ get_component_data_work.add_task(
+ f"adding {archive.arch_name} to {sdk_comp.ifw_sdk_comp_name}",
+ get_component_data,
+ task,
+ sdk_comp,
+ archive,
+ install_dir,
+ data_dir_dest,
+ compress_content_dir,
+ )
# handle component sha1 uri
- if sdk_component.component_sha1_uri:
+ if sdk_comp.comp_sha1_uri:
sha1_file_dest = os.path.normpath(dest_base + 'SHA1')
- get_component_data_work.add_task(f"getting component sha1 file for {sdk_component.package_name}",
- get_component_sha1_file, sdk_component, sha1_file_dest)
+ get_component_data_work.add_task(
+ f"getting component sha1 file for {sdk_comp.ifw_sdk_comp_name}",
+ get_component_sha1_file,
+ sdk_comp,
+ sha1_file_dest,
+ )
# maybe there is some static data
- data_content_source_root = os.path.normpath(sdk_component.pkg_template_dir + os.sep + 'data')
+ data_content_source_root = os.path.normpath(sdk_comp.pkg_template_folder + os.sep + "data")
if os.path.exists(data_content_source_root):
Path(data_dir_dest).mkdir(parents=True, exist_ok=True)
copy_tree(data_content_source_root, data_dir_dest)
@@ -642,9 +650,9 @@ def create_target_components(task: Any) -> None:
for sdk_component in task.sdk_component_list:
# substitute tags
substitute_component_tags(create_metadata_map(sdk_component), sdk_component.meta_dir_dest)
- if hasattr(sdk_component, 'temp_data_dir') and os.path.exists(sdk_component.temp_data_dir):
+ if sdk_component.temp_data_dir and os.path.exists(sdk_component.temp_data_dir):
# lastly remove temp dir after all data is prepared
- if not remove_tree(sdk_component.temp_data_dir):
+ if not remove_tree(str(sdk_component.temp_data_dir)):
raise CreateInstallerError(f"Unable to remove directory: {sdk_component.temp_data_dir}")
# substitute downloadable archive names in installscript.qs
substitute_component_tags(sdk_component.generate_downloadable_archive_list(), sdk_component.meta_dir_dest)
@@ -941,12 +949,11 @@ class QtInstallerTask:
platform_identifier: str = ""
installer_name: str = ""
packages_dir_name_list: List[str] = field(default_factory=list)
- substitutions: List[List[str]] = field(default_factory=list)
+ substitutions: Dict[str, str] = field(default_factory=dict)
directories_for_substitutions: List[str] = field(default_factory=list)
- sdk_component_list: List[SdkComponent] = field(default_factory=list)
- sdk_component_list_skipped: List[SdkComponent] = field(default_factory=list)
+ sdk_component_list: List[IfwSdkComponent] = field(default_factory=list)
+ sdk_component_list_skipped: List[IfwSdkComponent] = field(default_factory=list)
sdk_component_ignore_list: List[str] = field(default_factory=list)
- archive_location_resolver: Optional[ArchiveLocationResolver] = None
archive_base_url: str = ""
remove_debug_information_files: bool = False
remove_debug_libraries: bool = False
@@ -977,10 +984,6 @@ class QtInstallerTask:
self.config.get("PackageTemplates", "template_dirs"), self.configurations_dir
)
self._parse_substitutions()
- if self.archive_location_resolver is None:
- self.archive_location_resolver = ArchiveLocationResolver(
- self.config, self.archive_base_url, self.configurations_dir, self.substitutions
- )
def __str__(self) -> str:
return f"""Installer task:
@@ -1014,8 +1017,8 @@ class QtInstallerTask:
key, value = item.split("=", maxsplit=1)
if not value:
log.warning("Empty value for substitution string given, substituting anyway: %s", item)
- self.substitutions.append([key, value]) # pylint: disable=no-member
- self.substitutions.append(['%LICENSE%', self.license_type]) # pylint: disable=no-member
+ self.substitutions[key] = value # pylint: disable=unsupported-assignment-operation
+ self.substitutions["%LICENSE%"] = self.license_type # pylint: disable=E1137
def parse_ifw_pkg_template_dirs(self, template_list: str, configurations_dir: str) -> List[str]:
ret = []
@@ -1029,7 +1032,7 @@ class QtInstallerTask:
ret.append(package_template_dir)
else:
# first check if the pkg templates are under assumed "/configurations/pkg_templates" directory
- pkg_template_dir = os.path.join(configurations_dir, pkg_constants.PKG_TEMPLATE_BASE_DIR_NAME,
+ pkg_template_dir = os.path.join(configurations_dir, PKG_TEMPLATE_BASE_DIR_NAME,
package_template_dir)
if os.path.exists(pkg_template_dir):
ret.append(pkg_template_dir)
diff --git a/packaging-tools/sdkcomponent.py b/packaging-tools/sdkcomponent.py
index 231921331..8b4ffed96 100644
--- a/packaging-tools/sdkcomponent.py
+++ b/packaging-tools/sdkcomponent.py
@@ -29,250 +29,441 @@
#
#############################################################################
-import ntpath
import os
from configparser import ConfigParser
-from typing import Any, List
+from dataclasses import dataclass, field
+from pathlib import Path
+from typing import Dict, List, Optional, Tuple
+from urllib.parse import urlparse
-from archiveresolver import ArchiveLocationResolver
-from bldinstallercommon import config_section_map, is_content_url_valid, safe_config_key_fetch
from logging_util import init_logger
log = init_logger(__name__, debug_mode=False)
-ONLINE_ARCHIVE_LIST_TAG = '<!--ONLINE_ARCHIVE_LIST-->'
-
-
-class SdkComponent:
- """SdkComponent class contains all required info for one installable SDK component"""
- class DownloadableArchive:
- """DownloadableArchive subclass contains all required info about data packages for one SDK component"""
- def __init__(
- self,
- archive: str,
- package_name: str,
- parent_target_install_base: str,
- archive_server_name: str,
- target_config: ConfigParser,
- archive_location_resolver: ArchiveLocationResolver,
- key_value_substitution_list: List[str],
- ) -> None:
- self.archive_uri = config_section_map(target_config, archive)['archive_uri']
- self.archive_action = safe_config_key_fetch(target_config, archive, 'archive_action')
- self.extract_archive = safe_config_key_fetch(target_config, archive, 'extract_archive')
- self.package_strip_dirs = safe_config_key_fetch(target_config, archive, 'package_strip_dirs')
- self.package_finalize_items = safe_config_key_fetch(target_config, archive, 'package_finalize_items')
- # parent's 'target_install_base'
- self.parent_target_install_base = parent_target_install_base
- # in case the individual archive needs to be installed outside the root dir specified by the parent component
- self.target_install_base: str = safe_config_key_fetch(target_config, archive, 'target_install_base')
- # this is relative to 1) current archive's 'target_install_base' 2) parent components 'target_install_base'. (1) takes priority
- self.target_install_dir: str = safe_config_key_fetch(target_config, archive, 'target_install_dir').lstrip(os.path.sep)
- self.rpath_target = safe_config_key_fetch(target_config, archive, 'rpath_target')
- self.component_sha1_file = safe_config_key_fetch(target_config, archive, 'component_sha1_file')
- self.nomalize_archive_uri(package_name, archive_server_name, archive_location_resolver)
- self.archive_name = safe_config_key_fetch(target_config, archive, 'archive_name')
- if not self.archive_name:
- self.archive_name = self.path_leaf(self.archive_uri)
- # Parse unnecessary extensions away from filename (QTBUG-39219)
- known_archive_types = ['.tar.gz', '.tar', '.zip', '.tar.xz', '.tar.bz2']
- for item in known_archive_types:
- if self.archive_name.endswith(item):
- self.archive_name = self.archive_name.replace(item, '')
- if not self.archive_name.endswith('.7z'):
- self.archive_name += '.7z'
- # substitute key-value pairs if any
- for item in key_value_substitution_list:
- self.target_install_base = self.target_install_base.replace(item[0], item[1])
- self.target_install_dir = self.target_install_dir.replace(item[0], item[1])
- self.archive_name = self.archive_name.replace(item[0], item[1])
-
- def nomalize_archive_uri(
- self, package_name: str, archive_server_name: str, archive_location_resolver: ArchiveLocationResolver
- ) -> None:
- self.archive_uri = archive_location_resolver.resolve_full_uri(package_name, archive_server_name, self.archive_uri)
-
- def check_archive_data(self) -> Any:
- if self.archive_uri.startswith('http'):
- res = is_content_url_valid(self.archive_uri)
- if not res:
- return '*** Archive check fail! ***\n*** Unable to locate archive: ' + self.archive_uri
- elif not os.path.isfile(self.archive_uri):
- return '*** Archive check fail! ***\n*** Unable to locate archive: ' + self.archive_uri
- return None
-
- def path_leaf(self, path: str) -> str:
- head, tail = ntpath.split(path)
- return tail or ntpath.basename(head)
-
- def get_archive_installation_directory(self) -> str:
- if self.target_install_base:
- return self.target_install_base + os.path.sep + self.target_install_dir
- return self.parent_target_install_base + os.path.sep + self.target_install_dir
-
- def __init__(
- self,
- section_name: str,
- target_config: ConfigParser,
- packages_full_path_list: List[str],
- archive_location_resolver: ArchiveLocationResolver,
- key_value_substitution_list: List[str],
- ):
- self.static_component = safe_config_key_fetch(target_config, section_name, 'static_component')
- self.root_component = safe_config_key_fetch(target_config, section_name, 'root_component')
- self.package_name = section_name
- self.package_subst_name = section_name
- self.packages_full_path_list = packages_full_path_list
- self.archives = safe_config_key_fetch(target_config, section_name, 'archives')
- self.archives = self.archives.replace(' ', '').replace('\n', '')
- self.archives_extract_dir = safe_config_key_fetch(target_config, section_name, 'archives_extract_dir')
- self.archive_server_name = safe_config_key_fetch(target_config, section_name, 'archive_server_name')
- self.downloadable_archive_list: List[SdkComponent.DownloadableArchive] = [] # pylint: disable=E0601
- self.target_install_base = safe_config_key_fetch(target_config, section_name, 'target_install_base')
- self.version = safe_config_key_fetch(target_config, section_name, 'version')
- self.version_tag = safe_config_key_fetch(target_config, section_name, 'version_tag')
- self.package_default = safe_config_key_fetch(target_config, section_name, 'package_default')
- self.install_priority = safe_config_key_fetch(target_config, section_name, 'install_priority')
- self.sorting_priority = safe_config_key_fetch(target_config, section_name, 'sorting_priority')
- self.component_sha1 = ""
- self.component_sha1_uri = safe_config_key_fetch(target_config, section_name, 'component_sha1_uri')
- if self.component_sha1_uri:
- self.component_sha1_uri = archive_location_resolver.resolve_full_uri(self.package_name, self.archive_server_name, self.component_sha1_uri)
- self.key_value_substitution_list = key_value_substitution_list
- self.archive_skip = False
- self.include_filter = safe_config_key_fetch(target_config, section_name, 'include_filter')
- self.downloadable_arch_list_qs: List[Any] = []
- self.pkg_template_dir = ''
- self.sanity_check_error_msg = ''
- self.target_config = target_config
- self.archive_location_resolver = archive_location_resolver
- self.meta_dir_dest: str = ""
- self.temp_data_dir: str = ""
- # substitute key-value pairs if any
- for item in self.key_value_substitution_list:
- self.target_install_base = self.target_install_base.replace(item[0], item[1])
- self.version = self.version.replace(item[0], item[1])
-
- def is_root_component(self) -> bool:
- if self.root_component in ('yes', 'true'):
- return True
- return False
-
- def set_archive_skip(self, do_skip: bool) -> None:
- self.archive_skip = do_skip
+
+class IfwSdkError(Exception):
+ """Exception class for IfwSdkComponent errors"""
+
+
+@dataclass
+class IfwPayloadItem:
+ """Payload item class for IfwSdkComponent's archives"""
+
+ package_name: str
+ archive_uri: str
+ archive_action: Optional[Tuple[Path, str]]
+ extract_archive: bool
+ package_strip_dirs: int
+ package_finalize_items: str
+ parent_target_install_base: str
+ arch_target_install_base: str
+ arch_target_install_dir: str
+ rpath_target: str
+ component_sha1: str
+ arch_name: str
+ errors: List[str] = field(default_factory=list)
+ # List of archive formats supported by Installer Framework:
+ ifw_arch_formats: Tuple[str, ...] = (".7z", ".tar", ".gz", ".zip", ".xz", ".bz2")
+ # List of payload archive formats supported by scripts for extraction:
+ supported_arch_formats: Tuple[str, ...] = (".7z", ".tar", ".gz", ".zip", ".xz", ".bz2")
+ _requires_extraction: Optional[bool] = None
+ _requires_patching: Optional[bool] = None
+
+ def __post_init__(self) -> None:
+ """Post init: run sanity checks"""
+ assert self.package_name, "The 'package_name' was not set?"
+ self.arch_name = self._ensure_ifw_arch_name()
+ self._sanity_check()
+
+ def _sanity_check(self) -> None:
+ """Perform a sanity check on the payload archive configuration and append the errors"""
+ if self.archive_action:
+ script_path, _ = self.archive_action
+ if not script_path.exists() and script_path.is_file():
+ self.errors += [f"Unable to locate custom archive action script: {script_path}"]
+ if not self.archive_uri:
+ self.errors += [f"[{self.package_name}] is missing 'archive_uri'"]
+ if self.package_strip_dirs is None:
+ self.errors += [f"[{self.package_name}] is missing 'package_strip_dirs'"]
+ if not self.get_archive_install_dir():
+ self.errors += [f"[{self.package_name}] is missing payload installation directory"]
+ if not self.extract_archive and self.requires_patching:
+ self.errors += [f"[{self.package_name}] patching specified with extract_archive=no"]
+ if not self.archive_uri.endswith(self.supported_arch_formats) and self.requires_patching:
+ if self.package_strip_dirs != 0:
+ self.errors += [f"[{self.package_name}] package_strip_dirs!=0 for a non-archive"]
+ if self.package_finalize_items:
+ self.errors += [f"[{self.package_name}] package_finalize_items for a non-archive"]
def validate(self) -> None:
- # look up correct package template directory from list
- found = False
- for item in self.key_value_substitution_list:
- self.package_name = self.package_name.replace(item[0], item[1])
- for item in self.packages_full_path_list:
- template_full_path = os.path.normpath(item + os.sep + self.package_subst_name)
- if os.path.exists(template_full_path):
- if not found:
- # take the first match
- self.pkg_template_dir = template_full_path
- found = True
- else:
- # sanity check, duplicate template should not exist to avoid
- # problems!
- log.warning("Found duplicate template for: %s", self.package_name)
- log.warning("Ignoring: %s", template_full_path)
- log.warning("Using: %s", self.pkg_template_dir)
- self.parse_archives(self.target_config, self.archive_location_resolver)
- self.check_component_data()
-
- def check_component_data(self) -> None:
- if self.static_component:
- if not os.path.isfile(self.static_component):
- self.sanity_check_fail(self.package_name, 'Unable to locate given static package: ' + self.static_component)
- return
- # no more checks needed for static component
- return
- if not self.package_name:
- self.sanity_check_fail(self.package_name, 'Undefined package name?')
- return
- if self.archives and not self.target_install_base:
- self.sanity_check_fail(self.package_name, 'Undefined target_install_base?')
- return
- if self.version and not self.version_tag:
- self.sanity_check_fail(self.package_name, 'Undefined version_tag?')
- return
- if self.version_tag and not self.version:
- self.sanity_check_fail(self.package_name, 'Undefined version?')
- return
- if self.package_default not in ['true', 'false', 'script']:
- self.package_default = 'false'
- # check that package template exists
- if not os.path.exists(self.pkg_template_dir):
- self.sanity_check_fail(self.package_name, 'Package template dir does not exist: ' + self.pkg_template_dir)
- return
- if not self.archive_skip:
- # next check that archive locations exist
- for archive in self.downloadable_archive_list:
- error_msg = archive.check_archive_data()
- if error_msg:
- self.sanity_check_fail(self.package_name, error_msg)
- return
-
- def sanity_check_fail(self, component_name: str, message: str) -> None:
- self.sanity_check_error_msg = '*** Sanity check fail! ***\n*** Component: [' + component_name + ']\n*** ' + message
-
- def is_valid(self) -> bool:
- if self.sanity_check_error_msg:
- return False
- return True
-
- def error_msg(self) -> str:
- return self.sanity_check_error_msg
-
- def parse_archives(self, target_config: ConfigParser, archive_location_resolver: ArchiveLocationResolver) -> None:
- if self.archives:
- archives_list = self.archives.split(',')
- for archive in archives_list:
- if not archive:
- log.warning("[%s]: Archive list in config has ',' issues", self.package_name)
- continue
- # check that archive template exists
- if not target_config.has_section(archive):
- raise RuntimeError(f'*** Error! Given archive section does not exist in configuration file: {archive}')
- archive_obj = SdkComponent.DownloadableArchive(archive, self.package_name, self.target_install_base, self.archive_server_name,
- target_config, archive_location_resolver,
- self.key_value_substitution_list)
- self.downloadable_archive_list.append(archive_obj)
+ """
+ Validate IfwPayloadItem, log the errors
- def generate_downloadable_archive_list(self) -> List[List[str]]:
- """Generate list that is embedded into package.xml"""
- output = ''
- for item in self.downloadable_archive_list:
- if not output:
- output = item.archive_name
+ Raises:
+ IfwSdkError: If there are errors in the payload item
+ """
+ log.info("[[%s]] - %s", self.package_name, "NOK" if self.errors else "OK")
+ if self.errors:
+ for err in self.errors:
+ log.error(err)
+ log.debug(self) # Log also the details of the payload item with errors
+ raise IfwSdkError(
+ f"[[{self.package_name}]] Invalid payload configuration - check your configs!"
+ )
+
+ def _ensure_ifw_arch_name(self) -> str:
+ """
+ Get the archive name by splitting from its uri if a name doesn't already exist
+
+ Returns:
+ Name for the payload item
+ """
+ arch_name: str = self.arch_name or Path(self.archive_uri).name
+ return arch_name
+
+ def get_archive_install_dir(self) -> str:
+ """
+ Resolve archive install directory based on config
+
+ Returns:
+ Resolved install directory for the archive
+ """
+ ret = os.path.join(
+ self.arch_target_install_base or self.parent_target_install_base,
+ self.arch_target_install_dir.lstrip(os.path.sep),
+ )
+ return ret.rstrip(os.path.sep) or ret
+
+ @property
+ def requires_patching(self) -> bool:
+ """
+ A property to determine whether the payload content needs to be patched.
+ The value is calculated once and saved to _requires_patching.
+
+ Returns:
+ A boolean for whether patching the payload is needed.
+ """
+ if self._requires_patching is None:
+ self._requires_patching = not (
+ self.package_strip_dirs == 0
+ and not self.package_finalize_items
+ and not self.archive_action
+ and not self.rpath_target
+ and self.parent_target_install_base == "/"
+ and not self.arch_target_install_dir
+ )
+ return self._requires_patching
+
+ @property
+ def requires_extraction(self) -> bool:
+ """
+ A property to determine whether the archive needs to be extracted.
+ The value is calculated once and saved to _requires_extraction.
+
+ Returns:
+ A boolean for whether extracting the payload is needed.
+ """
+ if self._requires_extraction is None:
+ if self.archive_uri.endswith(self.ifw_arch_formats):
+ # Extract IFW supported archives if patching required or archive has a sha1 file
+ # Otherwise, use the raw CI artifact
+ self._requires_extraction = bool(self.component_sha1) or self.requires_patching
+ # It is also possible to disable the extraction in config (extract_archive=False)
+ if not self.extract_archive:
+ self._requires_extraction = False
+ elif self.archive_uri.endswith(self.supported_arch_formats):
+ # Repack supported archives to IFW friendly archive format
+ self._requires_extraction = True
else:
- output = output + ', ' + item.archive_name
-
- temp_list = []
- temp_list.append([ONLINE_ARCHIVE_LIST_TAG, output])
- return temp_list
-
- def print_component_data(self) -> None:
- log.info("=============================================================")
- log.info("[%s]", self.package_name)
- if self.static_component:
- log.info("Static component: %s", self.static_component)
- return
- if self.root_component:
- log.info("Root component: %s", self.root_component)
- log.info("Include filter: %s", self.include_filter)
- log.info("Target install base: %s", self.target_install_base)
- log.info("Version: %s", self.version)
- log.info("Version tag: %s", self.version_tag)
- log.info("Package default: %s", self.package_default)
- if self.downloadable_archive_list:
- log.info(" Archives:")
- for archive in self.downloadable_archive_list:
- log.info("---------------------------------------------------------------")
- log.info("Downloadable archive name: %s", archive.archive_name)
- log.info("Strip dirs: %s", archive.package_strip_dirs)
- log.info("Target install dir: %s", archive.get_archive_installation_directory())
- log.info("RPath target: %s", archive.rpath_target)
- log.info("URI: %s", archive.archive_uri)
+ # Payload not a supported archive type, use as-is
+ self._requires_extraction = False
+ return self._requires_extraction
+
+ def __str__(self) -> str:
+ return f"""
+- Downloadable payload name: {self.arch_name}
+ Payload URI: {self.archive_uri}
+ Extract archive: {self.requires_extraction}
+ Patch payload: {self.requires_patching}""" + (
+ f""", config:
+ Strip package dirs: {self.package_strip_dirs}
+ Finalize items: {self.package_finalize_items}
+ Action script: {self.archive_action}
+ RPath target: {self.rpath_target}
+ Target install dir: {self.get_archive_install_dir()}"""
+ if self.requires_patching else ""
+ )
+
+
+class ArchiveResolver:
+ """Resolver class for archive payload uris"""
+
+ def __init__(self, file_share_base_url: str, pkg_template_folder: str) -> None:
+ self.file_share_base_url = file_share_base_url
+ self.pkg_template_folder = pkg_template_folder
+
+ def resolve_payload_uri(self, unresolved_archive_uri: str) -> str:
+ """
+        Resolve the given archive URI based on the type of URI given
+ Available URI types:
+ - file system string paths, file system URIs
+ - network locations e.g. HTTP URLs
+ - file system string paths relative to data folder under package template root
+
+ Args:
+ unresolved_archive_uri: Original URI to resolve
+
+ Returns:
+ A resolved URI location for the payload
+ """
+ # is it a file system path or an absolute URL which can be downloaded
+ if os.path.exists(unresolved_archive_uri) or urlparse(unresolved_archive_uri).netloc:
+ return unresolved_archive_uri
+ # is it relative to pkg template root dir, under the 'data' directory
+ pkg_data_dir = os.path.join(self.pkg_template_folder, "data", unresolved_archive_uri)
+ if os.path.exists(pkg_data_dir):
+ return pkg_data_dir
+ # ok, we assume this is a URL which can be downloaded
+ return self.file_share_base_url.rstrip("/") + "/" + unresolved_archive_uri.lstrip("/")
+
+
+@dataclass
+class IfwSdkComponent:
+ """Installer framework sdk component class"""
+
+ ifw_sdk_comp_name: str
+ pkg_template_folder: str
+ archive_resolver: ArchiveResolver
+ downloadable_archives: List[IfwPayloadItem]
+ archives_extract_dir: str
+ target_install_base: str
+ version: str
+ version_tag: str
+ package_default: str
+ comp_sha1_uri: str
+ include_filter: str
+ component_sha1: Optional[str] = None
+ temp_data_dir: Optional[Path] = None
+ meta_dir_dest: Optional[Path] = None
+ archive_skip: bool = False
+
+ def __post_init__(self) -> None:
+ """Post init: convert component sha1 uri to resolved uri if it exists"""
+ if self.comp_sha1_uri:
+ self.comp_sha1_uri = self.archive_resolver.resolve_payload_uri(self.comp_sha1_uri)
+
+ def validate(self) -> None:
+ """
+ Perform validation on IfwSdkComponent, raise error if component not valid
+
+ Raises:
+ AssertionError: When the component's package name doesn't exist
+ IfwSdkError: When component with payload doesn't have target install base configured
+ """
+ assert self.ifw_sdk_comp_name, "Undefined package name?"
+ if self.downloadable_archives and not self.target_install_base:
+ raise IfwSdkError(f"[{self.ifw_sdk_comp_name}] is missing 'target_install_base'")
+
+ def generate_downloadable_archive_list(self) -> List[List[str]]:
+ """
+ Generate list that is embedded into package.xml
+
+ Returns:
+ Generated downloaded archive list
+ """
+ archive_list: List[str] = [a.arch_name for a in self.downloadable_archives]
+ return [["<!--ONLINE_ARCHIVE_LIST-->", ", ".join(archive_list)]]
+
+ def __str__(self) -> str:
+ print_data = f"""
+[{self.ifw_sdk_comp_name}]
+Include filter: {self.include_filter}
+Target install base: {self.target_install_base}
+Version: {self.version}
+Version tag: {self.version_tag}
+Package default: {self.package_default}
+Archives:"""
+ for archive in self.downloadable_archives:
+ print_data += str(archive)
+ return print_data
+
+
+class ConfigSubst:
+ """Configuration file key substitutor and resolver"""
+
+ def __init__(self, config: ConfigParser, section: str, substitutions: Dict[str, str]) -> None:
+ if not config.has_section(section):
+ raise IfwSdkError(f"Missing section in configuration file: {section}")
+ self.config = config
+ self.section = section
+ self.substitutions: Dict[str, str] = substitutions
+ self.resolved: Dict[str, str] = {}
+
+ def get(self, key: str, default: str = "") -> str:
+ """
+ Perform substitutions for the given key and return resolved key value.
+ The values are saved to self.resolved for future lookups.
+
+ Args:
+ key: The key to look up from already resolved dict or to resolve
+ default: This value is used when key not found from config section
+
+ Returns:
+ A string value for the key or the given default (default=empty string)
+
+ Raises:
+ KeyError: When value for given key doesn't exist yet, handled
+ """
+ try:
+ return self.resolved[key]
+ except KeyError:
+ tmp = self.config[self.section].get(key, default)
+ for subst_key, subst_value in self.substitutions.items():
+ tmp = tmp.replace(subst_key, subst_value)
+ self.resolved[key] = tmp
+ return self.resolved[key]
+
+
+def locate_pkg_templ_dir(search_dirs: List[str], component_name: str) -> str:
+ """
+ Return one result for given component name from given search directories or fail
+
+ Args:
+ search_dirs: The list of string file system paths for the directories to look from
+ component_name: The component's directory name to match for
+
+ Returns:
+ A matching file system string path to a component's template folder
+
+ Raises:
+        IfwSdkError: When no match is found for the component name
+ """
+ # look up correct package template directory from list
+ log.info("Searching pkg template '%s' folder from: %s", component_name, search_dirs)
+ matches: List[str] = []
+ for item in search_dirs:
+ matches.extend([str(p) for p in Path(item).resolve(strict=True).rglob(component_name)])
+ if len(matches) < 1:
+ raise IfwSdkError(f"Expected to find one result for '{component_name}' from {search_dirs}")
+ return matches.pop()
+
+
+def parse_ifw_sdk_comp(
+ config: ConfigParser,
+ section: str,
+ pkg_template_search_dirs: List[str],
+ substitutions: Dict[str, str],
+ file_share_base_url: str,
+) -> IfwSdkComponent:
+ """
+ Parse IfwSdkComponent from the given config
+
+ Args:
+ config: The given config to parse via ConfigParser
+ section: The section name for the component
+ pkg_template_search_dirs: Paths that should contain the template folder for the component
+ substitutions: String substitutions to apply for the config/template while parsing
+ file_share_base_url: URL to the file share server containing the payload content
+
+ Returns:
+ An instance of the parsed IfwSdkComponent
+ """
+ log.info("Parsing section: %s", section)
+ config_subst = ConfigSubst(config, section, substitutions)
+ pkg_template_folder = locate_pkg_templ_dir(pkg_template_search_dirs, component_name=section)
+ archive_resolver = ArchiveResolver(file_share_base_url, pkg_template_folder)
+ archives = config[section].get("archives", "")
+ archive_sections = [s.strip() for s in archives.split(",") if s.strip() != ""]
+ archives_extract_dir = config_subst.get("archives_extract_dir")
+ target_install_base = config_subst.get("target_install_base")
+ version = config_subst.get("version")
+ version_tag = config_subst.get("version_tag")
+ package_default = config_subst.get("package_default", "false")
+ comp_sha1_uri = config_subst.get("component_sha1_uri", "")
+ include_filter = config_subst.get("include_filter")
+ parsed_archives = parse_ifw_sdk_archives(
+ config=config,
+ archive_sections=archive_sections,
+ archive_resolver=archive_resolver,
+ parent_target_install_base=target_install_base,
+ substitutions=substitutions,
+ )
+ return IfwSdkComponent(
+ ifw_sdk_comp_name=section,
+ pkg_template_folder=pkg_template_folder,
+ archive_resolver=archive_resolver,
+ downloadable_archives=parsed_archives,
+ archives_extract_dir=archives_extract_dir,
+ target_install_base=target_install_base,
+ version=version,
+ version_tag=version_tag,
+ package_default=package_default,
+ comp_sha1_uri=comp_sha1_uri,
+ include_filter=include_filter,
+ )
+
+
+def parse_ifw_sdk_archives(
+ config: ConfigParser,
+ archive_sections: List[str],
+ archive_resolver: ArchiveResolver,
+ parent_target_install_base: str,
+ substitutions: Dict[str, str],
+) -> List[IfwPayloadItem]:
+ """
+    Parse IfwPayloadItems for the given payload sections in config
+
+ Args:
+ config: The config containing the payload sections via ConfigParser
+ archive_sections: The payload sections for the component
+ archive_resolver: The resolver to use for payload URIs
+ parent_target_install_base: The parent component's root install folder
+ substitutions: The string substitutions to apply while parsing config/templates
+
+ Returns:
+ A list of parsed IfwPayloadItems for the component
+ """
+ parsed_archives = []
+ for arch_section_name in archive_sections:
+ config_subst = ConfigSubst(config, arch_section_name, substitutions)
+ unresolved_archive_uri = config_subst.get("archive_uri")
+ resolved_archive_uri = archive_resolver.resolve_payload_uri(unresolved_archive_uri)
+ archive_action_string = config_subst.get("archive_action", "")
+ archive_action: Optional[Tuple[Path, str]] = None
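+ # archive_action, when set, is expected as "<script>, <arguments>", where the
+ # script path is resolved relative to this module's directory,
+ # e.g. "scripts/post_patch.py, --verbose" (illustrative value, not from a real config)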
+ if archive_action_string:
+ script_path, script_args = archive_action_string.split(",", maxsplit=1)
+ archive_action = (Path(__file__).parent / script_path.strip(), script_args.strip())
+ extract_archive = (
+ config_subst.get("extract_archive", "yes").lower() in ["yes", "true", "1"]
+ )
+ package_strip_dirs = int(config_subst.get("package_strip_dirs") or 0)
+ package_finalize_items = config_subst.get("package_finalize_items")
+ # in case the individual archive needs to be installed outside the root dir specified by
+ # the parent component
+ target_install_base = config_subst.get("target_install_base", "")
+ # this is relative to either:
+ # 1) the current archive's 'target_install_base', or
+ # 2) the parent component's 'target_install_base'; (1) takes priority
+ target_install_dir = config_subst.get("target_install_dir")
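+ # e.g. a parent target_install_base of "/5.10.0/gcc_64" with a target_install_dir of
+ # "plugins" would place this payload under "/5.10.0/gcc_64/plugins" (illustrative values)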
+ rpath_target = config_subst.get("rpath_target")
+ if rpath_target and not rpath_target.startswith(os.sep):
+ rpath_target = os.sep + rpath_target
+ component_sha1_file = config_subst.get("component_sha1_file")
+ archive_name = config_subst.get("archive_name")
+ payload = IfwPayloadItem(
+ package_name=arch_section_name,
+ archive_uri=resolved_archive_uri,
+ archive_action=archive_action,
+ extract_archive=extract_archive,
+ package_strip_dirs=package_strip_dirs,
+ package_finalize_items=package_finalize_items,
+ parent_target_install_base=parent_target_install_base,
+ arch_target_install_base=target_install_base,
+ arch_target_install_dir=target_install_dir,
+ rpath_target=rpath_target,
+ component_sha1=component_sha1_file,
+ arch_name=archive_name,
+ )
+ payload.validate()
+ parsed_archives.append(payload)
+ return parsed_archives
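+
+# For reference, a payload section consumed by parse_ifw_sdk_archives could look
+# like the following (a hedged, hypothetical example; the option names match the
+# ConfigSubst lookups above, the values are made up):
+#
+#   [sub.arch1]
+#   archive_uri: /qt/dev/release_content/qtbase/qtbase-%LINUX_GCC64_TARGET%.7z
+#   package_strip_dirs: 1
+#   target_install_dir: plugins
+#   rpath_target: /lib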
diff --git a/packaging-tools/tests/test_sdkcomponent.py b/packaging-tools/tests/test_sdkcomponent.py
new file mode 100644
index 000000000..ce5523a7f
--- /dev/null
+++ b/packaging-tools/tests/test_sdkcomponent.py
@@ -0,0 +1,256 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+#############################################################################
+#
+# Copyright (C) 2022 The Qt Company Ltd.
+# Contact: https://www.qt.io/licensing/
+#
+# This file is part of the release tools of the Qt Toolkit.
+#
+# $QT_BEGIN_LICENSE:GPL-EXCEPT$
+# Commercial License Usage
+# Licensees holding valid commercial Qt licenses may use this file in
+# accordance with the commercial license agreement provided with the
+# Software or, alternatively, in accordance with the terms contained in
+# a written agreement between you and The Qt Company. For licensing terms
+# and conditions see https://www.qt.io/terms-conditions. For further
+# information use the contact form at https://www.qt.io/contact-us.
+#
+# GNU General Public License Usage
+# Alternatively, this file may be used under the terms of the GNU
+# General Public License version 3 as published by the Free Software
+# Foundation with exceptions as appearing in the file LICENSE.GPL3-EXCEPT
+# included in the packaging of this file. Please review the following
+# information to ensure the GNU General Public License requirements will
+# be met: https://www.gnu.org/licenses/gpl-3.0.html.
+#
+# $QT_END_LICENSE$
+#
+#############################################################################
+
+import os
+import tempfile
+import unittest
+from configparser import ConfigParser, ExtendedInterpolation
+from pathlib import Path
+from typing import Dict, List, Optional, Tuple
+
+from ddt import data, ddt, unpack # type: ignore
+
+from sdkcomponent import (
+ ArchiveResolver,
+ IfwPayloadItem,
+ IfwSdkError,
+ locate_pkg_templ_dir,
+ parse_ifw_sdk_comp,
+)
+
+
+def ifw_sdk_config_valid(section_name: str) -> ConfigParser:
+ conf = ConfigParser(interpolation=ExtendedInterpolation())
+ conf.add_section(section_name)
+ conf.set(section_name, "archives", "sub.arch1, sub.arch2, sub.sub3.arch3")
+ conf.set(section_name, "target_install_base", "/%QT_VERSION%/gcc_64")
+ conf.add_section("sub.arch1")
+ conf.set(
+ "sub.arch1", "archive_uri", "/qt/dev/release_content/qtbase/qtbase-%LINUX_GCC64_TARGET%.7z"
+ )
+ conf.add_section("sub.arch2")
+ conf.set(
+ "sub.arch2", "archive_uri", "/qt/dev/release_content/qtsvg/qtsvg-%LINUX_GCC64_TARGET%.7z"
+ )
+ conf.add_section("sub.sub3.arch3")
+ conf.set(
+ "sub.sub3.arch3",
+ "archive_uri",
+ "/%LICENSE%/%QT_VERSION%/foo/qtdeclarative-%LINUX_GCC64_TARGET%.7z",
+ )
+ return conf
+
+
+def key_value_subst_dict() -> Dict[str, str]:
+ return {
+ "%LICENSE%": "opensource",
+ "%QT_VERSION%": "5.10.0",
+ "%LINUX_GCC64_TARGET%": "RHEL_7_4",
+ }
+
+
+def ifw_pkg_templ_dirs(ifw_pkg_names: List[str]) -> List[str]:
+ ret: List[str] = []
+ for ifw_pkg_name in ifw_pkg_names:
+ ret.extend(
+ [
+ f"pkg_templates/product1/{ifw_pkg_name}/meta/package.xml",
+ f"pkg_templates/product1/{ifw_pkg_name}/meta/installscript.qs",
+ f"pkg_templates/product1/{ifw_pkg_name}/data/readme.txt",
+ ]
+ )
+ return ret
+
+
+def create_paths(root_folder: str, paths: List[str]) -> List[str]:
+ ret: List[str] = []
+ for item in paths:
+ full_path = os.path.join(root_folder, item)
+ ret.append(full_path)
+ head, tail = os.path.split(full_path)
+ os.makedirs(head, exist_ok=True)
+ if tail:
+ with open(full_path, "a", encoding="utf-8"):
+ pass
+ return ret
+
+
+@ddt
+class TestRunner(unittest.TestCase):
+ def test_parse_ifw_sdk_archives(self) -> None:
+ section = "qt.qt5.5100.gcc_64"
+ pkg_template_paths = ifw_pkg_templ_dirs([section])
+ ifw_sdk_config = ifw_sdk_config_valid(section)
+
+ with tempfile.TemporaryDirectory(dir=os.getcwd()) as tmp_base_dir:
+ pkg_template_search_dirs: List[str] = []
+ create_paths(tmp_base_dir, pkg_template_paths)
+ pkg_template_search_dirs.append(os.path.join(tmp_base_dir, "pkg_templates"))
+
+ file_share_base_url = "http://fileshare.intra/base/path/"
+ comp = parse_ifw_sdk_comp(
+ ifw_sdk_config,
+ section,
+ pkg_template_search_dirs,
+ key_value_subst_dict(),
+ file_share_base_url,
+ )
+
+ self.assertEqual(len(comp.downloadable_archives), 3)
+ self.assertListEqual(
+ comp.generate_downloadable_archive_list(),
+ [
+ [
+ "<!--ONLINE_ARCHIVE_LIST-->",
+ "qtbase-RHEL_7_4.7z, qtsvg-RHEL_7_4.7z, qtdeclarative-RHEL_7_4.7z",
+ ]
+ ],
+ )
+ self.assertEqual(
+ {a.archive_uri for a in comp.downloadable_archives},
+ {
+ file_share_base_url + "qt/dev/release_content/qtbase/qtbase-RHEL_7_4.7z",
+ file_share_base_url + "qt/dev/release_content/qtsvg/qtsvg-RHEL_7_4.7z",
+ file_share_base_url + "opensource/5.10.0/foo/qtdeclarative-RHEL_7_4.7z",
+ },
+ )
+ self.assertEqual(
+ {a.arch_name for a in comp.downloadable_archives},
+ {"qtbase-RHEL_7_4.7z", "qtsvg-RHEL_7_4.7z", "qtdeclarative-RHEL_7_4.7z"},
+ )
+ for downloadable_archive in comp.downloadable_archives:
+ self.assertFalse(downloadable_archive.errors)
+ self.assertEqual(downloadable_archive.get_archive_install_dir(), "/5.10.0/gcc_64")
+
+ def test_ifw_payload_item_invalid(self) -> None:
+ with self.assertRaises(AssertionError):
+ IfwPayloadItem(
+ package_name="",
+ archive_uri="http://foo.com/readme.7z",
+ archive_action=None,
+ extract_archive=True,
+ package_strip_dirs=0,
+ package_finalize_items="",
+ parent_target_install_base="/base/install/dir",
+ arch_target_install_base="/foo/bar",
+ arch_target_install_dir="/bar/foo",
+ rpath_target="",
+ component_sha1="",
+ arch_name="readme.7z",
+ )
+
+ def test_ifw_payload_item_valid(self) -> None:
+ item = IfwPayloadItem(
+ package_name="foobar",
+ archive_uri="http://foo.com/readme.7z",
+ archive_action=None,
+ extract_archive=True,
+ package_strip_dirs=0,
+ package_finalize_items="",
+ parent_target_install_base="",
+ arch_target_install_base="/foo/bar",
+ arch_target_install_dir="",
+ rpath_target="",
+ component_sha1="",
+ arch_name="readme.7z",
+ )
+ self.assertFalse(item.errors)
+
+ @data( # type: ignore
+ ("foo.7z", None, True, 0, "", "/", "", "", "", False, False),
+ ("foo.7z", None, False, 0, "", "/", "", "", "", False, False),
+ ("foo.7z", (Path("foo.sh"), ""), True, 0, "", "/", "", "", "", True, True),
+ ("foo.7z", None, True, 1, "", "/", "", "", "", True, True),
+ ("foo.7z", None, True, 0, "foo", "/", "", "", "", True, True),
+ ("foo.7z", None, True, 0, "", "/foo", "", "", "", True, True),
+ ("foo.7z", None, True, 0, "", "/", "foo", "", "", True, True),
+ ("foo.7z", None, True, 0, "", "/", "", "foo", "", True, True),
+ ("foo.7z", None, True, 0, "", "/", "", "", "foo", True, False),
+ ("foo.txt", None, True, 0, "", "/", "", "", "foo", False, False),
+ ("foo.txt", None, True, 0, "", "/", "", "", "", False, False),
+ ("foo.txt", None, True, 0, "", "/", "foo/bar", "", "", False, True),
+ )
+ @unpack # type: ignore
+ def test_ifw_payload_item_requires_packaging_operations(
+ self,
+ archive_uri: str,
+ archive_action: Optional[Tuple[Path, str]],
+ extract_archive: bool,
+ package_strip_dirs: int,
+ package_finalize_items: str,
+ parent_target_install_base: str,
+ arch_target_install_dir: str,
+ rpath_target: str,
+ component_sha1: str,
+ expected_requires_extraction: bool,
+ expected_requires_patching: bool,
+ ) -> None:
+ item = IfwPayloadItem(
+ package_name="foobar",
+ archive_uri=archive_uri,
+ archive_action=archive_action,
+ extract_archive=extract_archive,
+ package_strip_dirs=package_strip_dirs,
+ package_finalize_items=package_finalize_items,
+ parent_target_install_base=parent_target_install_base,
+ arch_target_install_base="/foo/bar",
+ arch_target_install_dir=arch_target_install_dir,
+ rpath_target=rpath_target,
+ component_sha1=component_sha1,
+ arch_name="readme.7z",
+ )
+ self.assertEqual(item.requires_extraction, expected_requires_extraction)
+ self.assertEqual(item.requires_patching, expected_requires_patching)
+
+ def test_archive_resolver(self) -> None:
+ with tempfile.TemporaryDirectory(dir=os.getcwd()) as tmp_base_dir:
+ template_folder = os.path.join(tmp_base_dir, "qt.tools.foo")
+ data_folder = os.path.join(template_folder, "data")
+ payload_file = os.path.join(data_folder, "readme.txt")
+ os.makedirs(data_folder, exist_ok=True)
+ with open(payload_file, "a", encoding="utf-8"):
+ pass
+
+ resolver = ArchiveResolver("http://intranet.local.it/artifacts", template_folder)
+ self.assertEqual(resolver.resolve_payload_uri("readme.txt"), payload_file)
+ self.assertEqual(
+ resolver.resolve_payload_uri("qt/qtsvg/qtsvg-RHEL_7_4.7z"),
+ "http://intranet.local.it/artifacts/qt/qtsvg/qtsvg-RHEL_7_4.7z",
+ )
+ self.assertEqual(resolver.resolve_payload_uri(__file__), __file__)
+
+ def test_locate_pkg_templ_dir_invalid(self) -> None:
+ with tempfile.TemporaryDirectory(dir=os.getcwd()) as tmp_base_dir:
+ with self.assertRaises(IfwSdkError):
+ locate_pkg_templ_dir([tmp_base_dir], "qt.foo")
+
+
+if __name__ == "__main__":
+ unittest.main()