aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorIikka Eklund <iikka.eklund@qt.io>2022-10-31 11:59:56 +0200
committerAntti Kokko <antti.kokko@qt.io>2023-01-30 06:59:12 +0000
commitf0fde869f638bfbc6bdd79b8bd945aeea25be678 (patch)
tree1d5c9bb1895e60cab7aa707414f75ec169546b9b
parentd139575a56526190d9ab5dd4cb0be171a197675d (diff)
Support Debian task type in packaging configurations
Refactor the ReleaseTask class as a base class for two specialized cases: - IFWReleaseTask for tasks that use Qt Installer Framework to produce installers or repositories - DebReleaseTask for tasks that produce Debian repositories Support key-value substitution also in the configuration files that are parsed by ReleaseTask classes. Remove unused functionality from ReleaseTask class and change the various getter methods into @property getters: better data hiding and more Pythonic access to container attributes. Construct the task types via a factory: ReleaseTaskFactory To make the task parsing more type specific and straightforward introduce a breaking change in the task specification syntax: [task.foo.bar.abc...] -> [task.ifw.foo.bar.abc...] or [task.deb.foo.bar.abc...] Use "ifw" specifier for tasks that should use Qt Installer Framework. Use "deb" specifier for tasks that should produce Debian repositories. The release configuration files need to be updated at the same time as this change. Task-number: QTIFW-2813 Change-Id: I5c12ae3705a9e89a16b536ceef5382130860e0b0 Reviewed-by: Patrik Teivonen <patrik.teivonen@qt.io> Reviewed-by: Antti Kokko <antti.kokko@qt.io>
-rwxr-xr-xpackaging-tools/release_repo_updater.py98
-rwxr-xr-xpackaging-tools/release_task_reader.py282
-rwxr-xr-xpackaging-tools/tests/test_release_repo_updater.py26
-rwxr-xr-xpackaging-tools/tests/test_release_task_reader.py97
4 files changed, 344 insertions, 159 deletions
diff --git a/packaging-tools/release_repo_updater.py b/packaging-tools/release_repo_updater.py
index 6ad82b057..ab8c84d80 100755
--- a/packaging-tools/release_repo_updater.py
+++ b/packaging-tools/release_repo_updater.py
@@ -58,7 +58,7 @@ from installer_utils import PackagingError, download_archive, extract_archive, i
from logging_util import init_logger
from notarize import notarize
from read_remote_config import get_pkg_value
-from release_task_reader import ReleaseTask, parse_config
+from release_task_reader import IFWReleaseTask, TaskType, parse_config
from runner import run_cmd, run_cmd_async
from sign_installer import create_mac_dmg, sign_mac_app
from sign_windows_installer import sign_executable
@@ -200,13 +200,13 @@ class RepoUpdateStrategy:
return RepoUpdateStrategy(build_repositories, repo_layout, remote_repo_update_source,
repo_update_destinations)
- def get_remote_source_repo_path(self, task: ReleaseTask) -> str:
+ def get_remote_source_repo_path(self, task: IFWReleaseTask) -> str:
if self.remote_repo_update_source == RepoSource.PENDING:
return os.path.join(str(self.remote_repo_layout.get_pending_path()),
- task.get_repo_path(), "repository")
+ task.repo_path, "repository")
if self.remote_repo_update_source == RepoSource.STAGING:
return os.path.join(str(self.remote_repo_layout.get_staging_path()),
- task.get_repo_path())
+ task.repo_path)
raise PackagingError("Invalid remote source repo defined: "
f"{self.remote_repo_update_source}")
@@ -306,8 +306,8 @@ def get_remote_login_cmd(server: str) -> List[str]:
return ['ssh', '-t', '-t', server]
-def trigger_rta(rta_server_url: str, task: ReleaseTask) -> None:
- for key in task.get_rta_key_list():
+def trigger_rta(rta_server_url: str, task: IFWReleaseTask) -> None:
+ for key in task.rta_key_list:
url = rta_server_url + key + '/build?token=RTA_JENKINS'
log.info("Triggering RTA case: %s", url)
try:
@@ -463,9 +463,9 @@ def spawn_remote_background_task(server: str, server_home: str, remote_cmd: List
async def update_repository(staging_server: str, update_strategy: RepoUpdateStrategy,
- task: ReleaseTask, rta: str) -> None:
+ task: IFWReleaseTask, rta: str) -> None:
# ensure the repository paths exists at server
- log.info("Starting repository update: %s", task.get_repo_path())
+ log.info("Starting repository update: %s", task.repo_path)
create_remote_paths(staging_server, update_strategy.remote_repo_layout.get_repo_layout())
# this is the repo path on remote which will act as the 'source' for remote updates
@@ -475,7 +475,7 @@ async def update_repository(staging_server: str, update_strategy: RepoUpdateStra
if update_strategy.requires_local_source_repo_upload():
# this is the repo path from local repo build which will act as the 'source' which to
# upload to the remote
- local_repo_source_path = task.get_source_online_repository_path()
+ local_repo_source_path = task.source_online_repository_path
# We always replace existing repository if previous version should exist.
# Previous version is moved as backup
upload_pending_repository_content(staging_server, local_repo_source_path,
@@ -483,10 +483,10 @@ async def update_repository(staging_server: str, update_strategy: RepoUpdateStra
# Now we can run the updates on the remote
for update_destination in update_strategy.remote_repo_update_destinations:
- remote_repo_destination_path = os.path.join(update_destination, task.get_repo_path())
+ remote_repo_destination_path = os.path.join(update_destination, task.repo_path)
reset_new_remote_repository(staging_server, remote_repo_source_path,
remote_repo_destination_path)
- log.info("Update done: %s", task.get_repo_path())
+ log.info("Update done: %s", task.repo_path)
# Delete pending content
if update_strategy.purge_remote_source_repo():
@@ -497,7 +497,7 @@ async def update_repository(staging_server: str, update_strategy: RepoUpdateStra
async def build_online_repositories(
- tasks: List[ReleaseTask],
+ tasks: List[IFWReleaseTask],
license_: str,
installer_config_base_dir: str,
artifact_share_base_url: str,
@@ -525,14 +525,14 @@ async def build_online_repositories(
# use same timestamp for all built repos
job_timestamp = strftime("%Y-%m-%d", gmtime())
for task in tasks:
- tmp_dir = os.path.join(tmp_base_dir, task.get_repo_path())
+ tmp_dir = os.path.join(tmp_base_dir, task.repo_path)
task.source_online_repository_path = os.path.join(tmp_dir, "online_repository")
if not build_repositories:
# this is usually for testing purposes in env where repositories are already built, we just update task objects
continue
- log.info("Building repository: %s", task.get_repo_path())
- installer_config_file = os.path.join(installer_config_base_dir, task.get_config_file())
+ log.info("Building repository: %s", task.repo_path)
+ installer_config_file = os.path.join(installer_config_base_dir, task.config_file)
if not os.path.isfile(installer_config_file):
raise PackagingError(f"Invalid 'config_file' path: {installer_config_file}")
@@ -544,7 +544,7 @@ async def build_online_repositories(
archive_base_url=artifact_share_base_url,
ifw_tools_uri=ifw_tools,
force_version_number_increase=True,
- substitution_list=task.get_installer_string_replacement_list(),
+ substitution_list=task.substitutions,
build_timestamp=job_timestamp,
dry_run=dry_run,
)
@@ -568,7 +568,7 @@ async def build_online_repositories(
async def update_repositories(
- tasks: List[ReleaseTask],
+ tasks: List[IFWReleaseTask],
staging_server: str,
update_strategy: RepoUpdateStrategy,
rta: str,
@@ -581,17 +581,17 @@ async def update_repositories(
raise
-async def sync_production(tasks: List[ReleaseTask], repo_layout: QtRepositoryLayout, sync_s3: str, sync_ext: str,
+async def sync_production(tasks: List[IFWReleaseTask], repo_layout: QtRepositoryLayout, sync_s3: str, sync_ext: str,
staging_server: str, staging_server_root: str, license_: str, event_injector: str,
export_data: Dict[str, str]) -> None:
log.info("triggering production sync..")
# collect production sync jobs
updated_production_repositories = {} # type: Dict[str, str]
for task in tasks:
- key = os.path.join(repo_layout.get_repo_domain(), task.get_repo_path())
+ key = os.path.join(repo_layout.get_repo_domain(), task.repo_path)
if key in updated_production_repositories:
raise PackagingError(f"Duplicate repository path found: {key}")
- updated_production_repositories[key] = os.path.join(repo_layout.get_production_path(), task.get_repo_path())
+ updated_production_repositories[key] = os.path.join(repo_layout.get_production_path(), task.repo_path)
# if _all_ repository updates to production were successful then we can sync to production
if sync_s3:
@@ -609,7 +609,7 @@ async def handle_update(
staging_server: str,
staging_server_root: str,
license_: str,
- tasks: List[ReleaseTask],
+ tasks: List[IFWReleaseTask],
installer_config_base_dir: str,
artifact_share_base_url: str,
update_strategy: RepoUpdateStrategy,
@@ -673,8 +673,8 @@ def format_task_filters(task_filters: List[str]) -> List[str]:
return [char.replace('.', ',') for char in task_filters]
-def create_offline_remote_dirs(task: ReleaseTask, staging_server: str, staging_server_root: str, installer_build_id: str) -> str:
- remote_base_dir = staging_server_root + '/' + task.get_project_name() + '/' + task.get_version() + '/' + 'installers'
+def create_offline_remote_dirs(task: IFWReleaseTask, staging_server: str, staging_server_root: str, installer_build_id: str) -> str:
+ remote_base_dir = staging_server_root + '/' + task.project_name + '/' + task.version + '/' + 'installers'
remote_dir = remote_base_dir + '/' + installer_build_id + '/'
remote_latest_available_dir = remote_base_dir + '/' + 'latest_available' + '/'
if not remote_path_exists(staging_server, remote_dir):
@@ -684,9 +684,9 @@ def create_offline_remote_dirs(task: ReleaseTask, staging_server: str, staging_s
return remote_dir
-def update_remote_latest_available_dir(new_installer: str, remote_upload_path: str, task: ReleaseTask, staging_server_root: str, installer_build_id: str) -> None:
+def update_remote_latest_available_dir(new_installer: str, remote_upload_path: str, task: IFWReleaseTask, staging_server_root: str, installer_build_id: str) -> None:
log.info("Update latest available installer directory: %s", remote_upload_path)
- regex = re.compile('.*' + task.get_version())
+ regex = re.compile('.*' + task.version)
new_installer_base_path = "".join(regex.findall(new_installer))
name = Path(new_installer_base_path).name
@@ -704,7 +704,7 @@ def update_remote_latest_available_dir(new_installer: str, remote_upload_path: s
run_cmd(cmd=cmd_cp, timeout=60 * 60) # 1h
-def upload_offline_to_remote(installer_path: str, remote_upload_path: str, staging_server: str, task: ReleaseTask,
+def upload_offline_to_remote(installer_path: str, remote_upload_path: str, staging_server: str, task: IFWReleaseTask,
installer_build_id: str, enable_oss_snapshots: bool, license_: str) -> None:
for file in os.listdir(installer_path):
file_path = Path(file)
@@ -740,11 +740,19 @@ async def sign_offline_installer(installer_path: str, installer_name: str) -> No
async def build_offline_tasks(
- staging_server: str, staging_server_root: str, tasks: List[ReleaseTask], license_: str,
- installer_config_base_dir: str, artifact_share_base_url: str,
- ifw_tools: str, installer_build_id: str, update_staging: bool,
- enable_oss_snapshots: bool, event_injector: str, export_data: Dict[str, str],
- dry_run: Optional[DryRunMode] = None
+ staging_server: str,
+ staging_server_root: str,
+ tasks: List[IFWReleaseTask],
+ license_: str,
+ installer_config_base_dir: str,
+ artifact_share_base_url: str,
+ ifw_tools: str,
+ installer_build_id: str,
+ update_staging: bool,
+ enable_oss_snapshots: bool,
+ event_injector: str,
+ export_data: Dict[str, str],
+ dry_run: Optional[DryRunMode] = None,
) -> None:
async with EventRegister(f"{license_}: offline", event_injector, export_data):
await _build_offline_tasks(
@@ -765,7 +773,7 @@ async def build_offline_tasks(
async def _build_offline_tasks(
staging_server: str,
staging_server_root: str,
- tasks: List[ReleaseTask],
+ tasks: List[IFWReleaseTask],
license_: str,
installer_config_base_dir: str,
artifact_share_base_url: str,
@@ -792,8 +800,8 @@ async def _build_offline_tasks(
# use same timestamp for all installer tasks
job_timestamp = strftime("%Y-%m-%d", gmtime())
for task in tasks:
- log.info("Building offline installer: %s", task.get_installer_name())
- installer_config_file = os.path.join(installer_config_base_dir, task.get_config_file())
+ log.info("Building offline installer: %s", task.installer_name)
+ installer_config_file = os.path.join(installer_config_base_dir, task.config_file)
if not os.path.isfile(installer_config_file):
raise PackagingError(f"Invalid 'config_file' path: {installer_config_file}")
@@ -804,9 +812,9 @@ async def _build_offline_tasks(
license_type=license_,
archive_base_url=artifact_share_base_url,
ifw_tools_uri=ifw_tools,
- installer_name=task.get_installer_name(),
+ installer_name=task.installer_name,
force_version_number_increase=True,
- substitution_list=task.get_installer_string_replacement_list(),
+ substitution_list=task.substitutions,
build_timestamp=job_timestamp,
dry_run=dry_run,
)
@@ -820,15 +828,15 @@ async def _build_offline_tasks(
log.exception("Installer build failed!")
raise PackagingError from exc
- await sign_offline_installer(installer_output_dir, task.get_installer_name())
+ await sign_offline_installer(installer_output_dir, task.installer_name)
if update_staging:
remote_upload_path = create_offline_remote_dirs(task, staging_server, staging_server_root, installer_build_id)
upload_offline_to_remote(installer_output_dir, remote_upload_path, staging_server, task, installer_build_id, enable_oss_snapshots, license_)
-def upload_snapshots_to_remote(staging_server: str, remote_upload_path: str, task: ReleaseTask, installer_build_id: str, installer_filename: str) -> None:
- project_name = task.get_project_name()
- version_full = task.get_version()
+def upload_snapshots_to_remote(staging_server: str, remote_upload_path: str, task: IFWReleaseTask, installer_build_id: str, installer_filename: str) -> None:
+ project_name = task.project_name
+ version_full = task.version
version_minor_match = re.match(r"\d+\.\d+", version_full)
if version_minor_match:
version_minor = version_minor_match[0]
@@ -837,7 +845,7 @@ def upload_snapshots_to_remote(staging_server: str, remote_upload_path: str, tas
snapshot_path = Path(get_pkg_value("SNAPSHOT_PATH"))
if snapshot_path.name != project_name:
snapshot_path = snapshot_path.with_name(project_name)
- snapshot_upload_path = os.path.join(snapshot_path, version_minor, version_full + task.get_prerelease_version(), installer_build_id)
+ snapshot_upload_path = os.path.join(snapshot_path, version_minor, version_full + task.prerelease_version, installer_build_id)
remote_installer_path = os.path.join(remote_upload_path, installer_filename)
if platform.system() == "Windows":
# commands are run in Linux, adjust the upload paths
@@ -1003,13 +1011,14 @@ def main() -> None:
# get offline tasks
tasks = parse_config(
config_file=args.config,
+ task_type=TaskType.IFW_TASK_TYPE,
task_filters=append_to_task_filters(args.task_filters, "offline"),
)
asyncio_run(
build_offline_tasks(
staging_server=args.staging_server,
staging_server_root=args.staging_server_root,
- tasks=tasks,
+ tasks=tasks, # type: ignore
license_=args.license_,
installer_config_base_dir=installer_config_base_dir,
artifact_share_base_url=args.artifact_share_url,
@@ -1027,7 +1036,8 @@ def main() -> None:
# get repository tasks
tasks = parse_config(
config_file=args.config,
- task_filters=append_to_task_filters(args.task_filters, "repository")
+ task_type=TaskType.IFW_TASK_TYPE,
+ task_filters=append_to_task_filters(args.task_filters, "repository"),
)
update_strategy = RepoUpdateStrategy.get_strategy(
staging_server_root=args.staging_server_root,
@@ -1043,7 +1053,7 @@ def main() -> None:
staging_server=args.staging_server,
staging_server_root=args.staging_server_root,
license_=args.license_,
- tasks=tasks,
+ tasks=tasks, # type: ignore
installer_config_base_dir=installer_config_base_dir,
artifact_share_base_url=args.artifact_share_url,
update_strategy=update_strategy,
diff --git a/packaging-tools/release_task_reader.py b/packaging-tools/release_task_reader.py
index 3ba70cc62..99fa8cecb 100755
--- a/packaging-tools/release_task_reader.py
+++ b/packaging-tools/release_task_reader.py
@@ -33,9 +33,12 @@ import argparse
import os
import re
import sys
+from abc import ABC
from configparser import ConfigParser, ExtendedInterpolation, SectionProxy
-from typing import List
+from enum import Enum
+from typing import Any, List, Union
+from installer_utils import PackagingError
from logging_util import init_logger
log = init_logger(__name__, debug_mode=False)
@@ -45,123 +48,242 @@ class ReleaseTaskError(Exception):
pass
-class ReleaseTask:
- def __init__(self, name: str, settings: SectionProxy):
- if not len(name.split(".")) >= 3:
- raise ReleaseTaskError(f"The '[{name}]' has too few dot separated elements!")
- self.name = name
- self.config_file = settings["config_file"]
- self.project_name = settings.get("project_name", "")
- self.version = settings.get("version", "")
- self.prerelease_version = settings.get("prerelease_version", "")
- self.substitutions = settings.get("substitutions", "")
- self.repo_path = settings.get("repo_path", "")
- self.repo_components_to_update = settings.get("repo_components_to_update", "")
- self.installer_name = settings.get("installer_name", "")
- self.rta_key_list = settings.get("rta_key_list", "")
- tmp_list: List[str] = [x.strip() for x in self.substitutions.split(',')]
- self.installer_string_replacement_list = list(filter(None, tmp_list))
- self.source_online_repository_path = ""
- self.source_pkg_path = ""
+class TaskType(Enum):
+ IFW_TASK_TYPE = "ifw"
+ DEB_TASK_TYPE = "deb"
- def add_to_substitutions_list(self, substitutions: List[str]) -> None:
- self.installer_string_replacement_list += substitutions
+ @classmethod
+ def from_value(cls, value: str) -> 'TaskType':
+ _values = {
+ TaskType.IFW_TASK_TYPE.value: TaskType.IFW_TASK_TYPE,
+ TaskType.DEB_TASK_TYPE.value: TaskType.DEB_TASK_TYPE,
+ }
+ return _values[value]
- def is_repository_task(self) -> bool:
- return self.name.split(".")[1] == "repository"
- def is_offline_installer_task(self) -> bool:
- return self.name.split(".")[1] == "offline"
+class ReleaseTask(ABC):
+ """Abstraction for attributes for online repo/offline installer build jobs."""
- def is_online_installer_task(self) -> bool:
- return self.name.split(".")[1] == "online"
+ def __init__(self, name: str, settings: SectionProxy, common_substitutions: str):
+ self._name = name
+ self._settings = settings
+ self._substitutions = self._parse_substitutions(common_substitutions)
+ self._substitutions += self._parse_substitutions(self._settings.get("substitutions", ""))
+ self._subst_map = dict(i.split("=", maxsplit=1) for i in self._substitutions if "=" in i)
+ self._subst_pattern = re.compile("|".join(self._subst_map.keys()))
- def get_config_file(self) -> str:
- return self.config_file
+ @property
+ def name(self) -> str:
+ return self._name
- def get_substitutions(self) -> str:
- return self.substitutions
+ @property
+ def config_file(self) -> str:
+ return self._get("config_file")
- def get_installer_string_replacement_list(self) -> List[str]:
- return self.installer_string_replacement_list
+ @property
+ def repo_path(self) -> str:
+ return self._get("repo_path")
- def get_repo_components_to_update(self) -> str:
- return self.repo_components_to_update
+ @property
+ def rta_key_list(self) -> List[str]:
+ return self._parse_key_list(self._get("rta_key_list"))
- def get_installer_name(self) -> str:
- return self.installer_name
+ @property
+ def substitutions(self) -> List[str]:
+ return self._substitutions
- def get_project_name(self) -> str:
- return self.project_name
+ def _parse_substitutions(self, substitutions: str) -> List[str]:
+ return list(filter(None, [x.strip() for x in substitutions.split(',')]))
- def get_version(self) -> str:
- return self.version
+ def _parse_key_list(self, keys: str) -> List[str]:
+ return list(filter(None, keys.strip().replace(' ', '').split(",")))
- def get_prerelease_version(self) -> str:
- return self.prerelease_version.strip()
+ def _multireplace(self, text: str) -> str:
+ """Replace all substitutions in the given 'text'.
- def get_repo_path(self) -> str:
- return self.repo_path
+ Args:
+ text: The string for which to run the substitutions.
- def get_rta_key_list(self) -> List[str]:
- tmp_list = self.rta_key_list.strip().replace(' ', '').split(",")
- return list(filter(None, tmp_list))
-
- def get_source_online_repository_path(self) -> str:
- # this points to local repository build path
- return self.source_online_repository_path
-
- def get_source_pkg_path(self) -> str:
- # this points to local repository build path
- return self.source_pkg_path
-
-
-def parse_substitutions_list(parser: ConfigParser, section: str) -> List[str]:
- try:
- args = parser[section]['substitutions']
- return [x.strip() for x in args.split(',')]
- except KeyError:
- # it's ok, the 'substitutions' is not mandatory
- pass
- return []
+ Returns:
+ The converted text if there were matches for the substitutions.
+ """
+ if not self._subst_map:
+ return text
+ return self._subst_pattern.sub(lambda m: self._subst_map[re.escape(m.group(0))], text)
+
+ def _get(self, key: str) -> str:
+ return self._multireplace(self._settings.get(key, ""))
+
+
+class DebReleaseTask(ReleaseTask):
+ """Attributes specific to Debian repository build jobs."""
+
+ @property
+ def distribution(self) -> str:
+ return self._get("distribution")
+
+ @property
+ def component(self) -> str:
+ return self._get("component")
+
+ @property
+ def architectures(self) -> List[str]:
+ return self._parse_key_list(self._get("architectures"))
+
+ @property
+ def snapshot_name(self) -> str:
+ return self._multireplace(self.repo_path) + "_snapshot"
+
+ @property
+ def content_sources(self) -> List[str]:
+ return self._parse_key_list(self._get("content_sources"))
+
+ @property
+ def endpoint_type(self) -> str:
+ return self._get("endpoint_type")
+
+ @property
+ def endpoint_name(self) -> str:
+ return self._get("endpoint_name")
+
+
+class IFWReleaseTask(ReleaseTask):
+ """Attributes specific to IFW online repository build jobs."""
+
+ def __init__(self, name: str, settings: SectionProxy, common_substitutions: str):
+ super().__init__(name, settings, common_substitutions)
+ self._source_online_repository_path: str = ""
+
+ @property
+ def installer_name(self) -> str:
+ return self._get("installer_name")
+
+ @property
+ def project_name(self) -> str:
+ return self._get("project_name")
+
+ @property
+ def version(self) -> str:
+ return self._get("version")
+
+ @property
+ def prerelease_version(self) -> str:
+ return self._get("prerelease_version")
+
+ @property
+ def source_online_repository_path(self) -> str:
+ if not self._source_online_repository_path:
+ raise PackagingError("Something is wrong, 'source_online_repository_path' isn't set!")
+ return self._source_online_repository_path
+
+ @source_online_repository_path.setter
+ def source_online_repository_path(self, value: str) -> None:
+ self._source_online_repository_path = value
+
+
+class ReleaseTaskFactory:
+ """A factory to create a specific ReleaseTask object based on the given configuration data."""
+
+ task_types = {
+ TaskType.IFW_TASK_TYPE.value: IFWReleaseTask,
+ TaskType.DEB_TASK_TYPE.value: DebReleaseTask,
+ }
+
+ @classmethod
+ def task_from_spec(
+ cls,
+ task_spec: str,
+ requested_task_type: TaskType,
+ *args: Any,
+ **kwargs: Any,
+ ) -> Union[None, IFWReleaseTask, DebReleaseTask]:
+ """Instantiate a specific ReleaseTask object based on the given configuration.
+
+ Args:
+ task_spec: The task specifier i.e. the section name in the .ini file.
+            requested_task_type: The type of the ReleaseTask requested. If the 'task_spec' does
+                not match the requested task type, None is returned.
+ *args: positional arguments passed to the constructor of the instantiated ReleaseTask
+ object.
+ **kwargs: keyword arguments passed to the constructor of the instantiated ReleaseTask
+ object.
+ Returns:
+ IFWReleaseTask: if 'requested_task_type' was of type TaskType.IFW_TASK_TYPE and the
+ 'task_spec' contained matching configuration.
+ DebReleaseTask: if 'requested_task_type' was of type TaskType.DEB_TASK_TYPE and the
+ 'task_spec' contained matching configuration.
+ None: if 'requested_task_type' could not be parsed from the 'task_spec'.
+ Raises:
+ ReleaseTaskError: if 'task_spec' is unsuitable for ReleaseTask constructing.
+ """
+ try:
+ parsed_type = cls.check_type(task_spec)
+ if parsed_type == requested_task_type.value:
+ return cls.task_types[parsed_type](*args, **kwargs) # type: ignore
+ return None
+ except KeyError as kerr:
+ raise ReleaseTaskError(f"Unsupported task type in: {task_spec}") from kerr
+
+ @classmethod
+ def check_type(cls, spec: str) -> str:
+ parts = spec.split(".")
+ if not len(parts) >= 4:
+ raise ReleaseTaskError(f"'[{spec}]' should have at least 4 dot separated elements!")
+ if not parts[0] == "task":
+ raise PackagingError(f"'[{spec}]' should start with 'task.'")
+ if not parts[1] in cls.task_types:
+ raise PackagingError(f"Invalid: '[{spec}]'. Supported types: {cls.task_types.keys()}")
+ return parts[1]
def get_filter_parts(section_filters: str) -> List[str]:
return list(filter(None, re.split("[, ;:]+", section_filters)))
-def parse_data(settings: ConfigParser, task_filters: List[str]) -> List[ReleaseTask]:
- tasks = [] # type: List[ReleaseTask]
- common_substitution_list = parse_substitutions_list(settings, 'common.substitutions')
- section_filters_list = [get_filter_parts(x) for x in task_filters]
+def parse_data(
+ settings: ConfigParser,
+ task_type: TaskType,
+ task_filters: List[str],
+) -> List[Union[IFWReleaseTask, DebReleaseTask]]:
+ tasks: List[Union[IFWReleaseTask, DebReleaseTask]] = []
+ sec_filters_list = [get_filter_parts(x) for x in task_filters]
+ common_substs = settings.get("common.substitutions", "substitutions", fallback="")
for section in settings.sections():
parts = section.split(".")
if not parts[0].startswith("task"):
continue
append_task = True
- if section_filters_list:
+ if sec_filters_list:
append_task = False
- for section_filters in section_filters_list:
+ for section_filters in sec_filters_list:
if set(section_filters).issubset(set(parts)):
append_task = True
break
if append_task:
log.info("Parsing Task: %s", section)
- release_task = ReleaseTask(section, settings[section])
- release_task.add_to_substitutions_list(common_substitution_list)
- tasks.append(release_task)
+ task = ReleaseTaskFactory.task_from_spec(task_spec=section,
+ requested_task_type=task_type,
+ name=section,
+ settings=settings[section],
+ common_substitutions=common_substs)
+ if task is not None:
+ tasks.append(task)
else:
- log.info("Skipping task: [%s] - not included by task filter(s): %s", section, section_filters_list)
+ log.info("Skipping task: [%s] - excluded by filter(s): %s", section, sec_filters_list)
return tasks
-def parse_config(config_file: str, task_filters: List[str]) -> List[ReleaseTask]:
+def parse_config(
+ config_file: str,
+ task_type: TaskType,
+ task_filters: List[str],
+) -> List[Union[IFWReleaseTask, DebReleaseTask]]:
if not os.path.isfile(config_file):
raise ReleaseTaskError(f"Not such file: {config_file}")
settings = ConfigParser(interpolation=ExtendedInterpolation())
settings.read(config_file)
- return parse_data(settings, task_filters)
+ return parse_data(settings, task_type, task_filters)
def main() -> None:
@@ -172,10 +294,12 @@ def main() -> None:
parser.add_argument("--task-filter", dest="task_filters", action='append',
help="Task include filters per section name in the --config file to match with "
"the section name, e.g. 'offline', 'repository', ...")
+    parser.add_argument("--task-type", dest="task_type", choices=[e.value for e in TaskType],
+ help=f"Define the task type: {[e.value for e in TaskType]}")
args = parser.parse_args(sys.argv[1:])
assert os.path.isfile(args.config), f"Not a valid file: {args.config}"
- parse_config(args.config, args.task_filters)
+ parse_config(args.config, TaskType.from_value(args.task_type), args.task_filters)
if __name__ == "__main__":
diff --git a/packaging-tools/tests/test_release_repo_updater.py b/packaging-tools/tests/test_release_repo_updater.py
index 8052d4b39..8b526d896 100755
--- a/packaging-tools/tests/test_release_repo_updater.py
+++ b/packaging-tools/tests/test_release_repo_updater.py
@@ -34,7 +34,7 @@ import unittest
from configparser import ConfigParser
from pathlib import Path
from shutil import rmtree
-from typing import List
+from typing import List, cast
from ddt import ddt # type: ignore
from temppathlib import TemporaryDirectory
@@ -56,7 +56,7 @@ from release_repo_updater import (
upload_ifw_to_remote,
upload_pending_repository_content,
)
-from release_task_reader import parse_data
+from release_task_reader import IFWReleaseTask, TaskType, parse_data
from tests.testhelpers import (
asyncio_test,
asyncio_test_parallel_data,
@@ -215,7 +215,7 @@ class TestReleaseRepoUpdater(unittest.TestCase):
@asyncio_test
async def test_build_online_repositories_dryrun(self) -> None:
sample_config = """
- [task.repository.linux.x86_64.repo1]
+ [task.ifw.repository.linux.x86_64.repo1]
config_file: foobar_config_file
repo_path: foo/bar/path_1
"""
@@ -223,10 +223,16 @@ class TestReleaseRepoUpdater(unittest.TestCase):
config.read_string(sample_config)
# parse all tasks i.e. no filters
- tasks = parse_data(config, task_filters=[])
- await build_online_repositories(tasks=tasks, license_="opensource", installer_config_base_dir="foo", artifact_share_base_url="foo",
- ifw_tools="foo", build_repositories=False)
- task = tasks.pop()
+ tasks = parse_data(config, task_type=TaskType.IFW_TASK_TYPE, task_filters=[])
+ await build_online_repositories(
+ tasks=cast(List[IFWReleaseTask], tasks),
+ license_="opensource",
+ installer_config_base_dir="foo",
+ artifact_share_base_url="foo",
+ ifw_tools="foo",
+ build_repositories=False,
+ )
+ task = cast(IFWReleaseTask, tasks.pop())
self.assertTrue(task.source_online_repository_path.endswith("foo/bar/path_1/online_repository"))
@asyncio_test
@@ -279,9 +285,9 @@ class TestReleaseRepoUpdater(unittest.TestCase):
self.assertEqual(append_to_task_filters(task_filters, "repository"), exp_result)
@asyncio_test_parallel_data( # type: ignore
- (["task.repository.linux.x64.feature1"], ["task,repository,linux,x64,feature1"]),
- (["task.repository.linux.x64.feature1", "windows.x64,feature2"],
- ["task,repository,linux,x64,feature1", "windows,x64,feature2"]),
+ (["task.ifw.repository.linux.x64.feature1"], ["task,ifw,repository,linux,x64,feature1"]),
+ (["task.ifw.repository.linux.x64.feature1", "windows.x64,feature2"],
+ ["task,ifw,repository,linux,x64,feature1", "windows,x64,feature2"]),
(["offline,linux.x64,feature1"], ["offline,linux,x64,feature1"]),
(["linux"], ["linux"]),
([""], [""])
diff --git a/packaging-tools/tests/test_release_task_reader.py b/packaging-tools/tests/test_release_task_reader.py
index 09cb5ecaa..cf97f26eb 100755
--- a/packaging-tools/tests/test_release_task_reader.py
+++ b/packaging-tools/tests/test_release_task_reader.py
@@ -35,7 +35,7 @@ from typing import List
from ddt import ddt # type: ignore
-from release_task_reader import ReleaseTaskError, get_filter_parts, parse_data
+from release_task_reader import ReleaseTaskError, TaskType, get_filter_parts, parse_data
from tests.testhelpers import asyncio_test, asyncio_test_parallel_data
@@ -51,33 +51,31 @@ class TestReleaseTaskReader(unittest.TestCase):
self.assertEqual(get_filter_parts(task_filters), expected_result)
@asyncio_test
- async def test_release_task_reader(self) -> None:
+ async def test_release_task_reader_ifw(self) -> None:
sample_config = """
- [task.repository.linux.x86_64]
+ [task.ifw.repository.linux.x86_64]
config_file: foobar-file-repository
substitutions: arg1, arg2, arg3
- repo_components_to_update: *
repo_path: foo/bar/path
rta_key_list: key1, key2,key3 , key4
- [task.offline.linux.x86_64.foobar]
+ [task.ifw.offline.linux.x86_64.foobar]
config_file: foobar-file-repository-2
substitutions: arg1, arg2, arg3
- repo_components_to_update: *
repo_path: foo/bar/path2
rta_key_list: key1, key2,key3 , key4
- [task.offline.linux.x86_64]
+ [task.ifw.offline.linux.x86_64]
config_file: foobar-file-offline
substitutions: arg11, arg21, arg31
rta_key_list: keyA, keyB
- [task.online.linux.x86_64]
+ [task.ifw.online.linux.x86_64]
config_file: foobar-file-online
substitutions: arg12, arg22, arg32
rta_key_list: key12, key22
- [foo.online.linux.x86_64]
+ [foo.ifw.online.linux.x86_64]
config_file: foobar-file-online
substitutions: arg13, arg23, arg33
rta_key_list: key13, key23
@@ -86,46 +84,93 @@ class TestReleaseTaskReader(unittest.TestCase):
config.read_string(sample_config)
# parse all tasks i.e. no filters
- tasks = parse_data(config, task_filters=[])
+ tasks = parse_data(config, task_type=TaskType.IFW_TASK_TYPE, task_filters=[])
self.assertTrue(len(tasks) == 4, "Did not parse all tasks from sample config")
# parse only "repository" tasks
- tasks = parse_data(config, task_filters=["repository"])
+ tasks = parse_data(config, task_type=TaskType.IFW_TASK_TYPE, task_filters=["repository"])
self.assertTrue(len(tasks) == 1)
- self.assertEqual(tasks[0].is_repository_task(), True)
- self.assertEqual(tasks[0].is_offline_installer_task(), False)
- self.assertEqual(tasks[0].is_online_installer_task(), False)
- self.assertEqual(tasks[0].get_config_file(), "foobar-file-repository")
- self.assertEqual(tasks[0].get_substitutions(), "arg1, arg2, arg3")
- self.assertEqual(tasks[0].get_installer_string_replacement_list(), ["arg1", "arg2", "arg3"])
- self.assertEqual(tasks[0].get_repo_components_to_update(), "*")
- self.assertEqual(tasks[0].get_repo_path(), "foo/bar/path")
- self.assertEqual(sorted(tasks[0].get_rta_key_list()), sorted(["key1", "key2", "key3", "key4"]))
+ self.assertEqual(tasks[0].config_file, "foobar-file-repository")
+ self.assertEqual(tasks[0].substitutions, ["arg1", "arg2", "arg3"])
+ self.assertEqual(tasks[0].rta_key_list, ["key1", "key2", "key3", "key4"])
+ self.assertEqual(tasks[0].repo_path, "foo/bar/path")
+ self.assertEqual(sorted(tasks[0].rta_key_list), sorted(["key1", "key2", "key3", "key4"]))
# parse only "offline" tasks with multiple filters
- tasks = parse_data(config, task_filters=["offline,linux,x86_64"])
+ tasks = parse_data(
+ config,
+ task_type=TaskType.IFW_TASK_TYPE,
+ task_filters=["offline,linux,x86_64"],
+ )
self.assertTrue(len(tasks) == 2)
- tasks = parse_data(config, task_filters=["offline,linux,x86_64,foobar"])
+ tasks = parse_data(
+ config,
+ task_type=TaskType.IFW_TASK_TYPE,
+ task_filters=["offline,linux,x86_64,foobar"],
+ )
self.assertTrue(len(tasks) == 1)
# parse "offline" tasks with multiple filters and "online" tasks
- tasks = parse_data(config, task_filters=["offline,linux,x86_64", "online,linux,x86_64"])
+ tasks = parse_data(
+ config,
+ task_type=TaskType.IFW_TASK_TYPE,
+ task_filters=["offline,linux,x86_64", "online,linux,x86_64"],
+ )
self.assertTrue(len(tasks) == 3)
@asyncio_test
+ async def test_release_task_reader_deb(self) -> None:
+ sample_config = """
+ [task.deb.repository.linux.amd64]
+ repo_path: test_repo/amd64
+ distribution: foobar-file-repository-amd64
+ component: main
+ architectures: amd64
+ content_sources: http://foo.com/content1, http://foo.com/content2
+ substitutions: foo=bar, aaa=bbb
+ rta_key_list: key1, key2
+
+ [task.deb.repository.linux.arm64]
+ repo_path: test_repo/arm64
+ distribution: foobar-file-repository-arm64
+ component: main
+ architectures: arm64
+ content_sources: http://foo.com/content3, http://foo.com/content4
+ substitutions: foo=bar, aaa=bbb
+ rta_key_list: key3, key4
+ """
+ config = ConfigParser()
+ config.read_string(sample_config)
+
+ # parse all tasks i.e. no filters
+ tasks = parse_data(config, task_type=TaskType.DEB_TASK_TYPE, task_filters=[])
+ self.assertTrue(len(tasks) == 2, "Did not parse all tasks from sample config")
+
+ # parse only "repository" tasks
+ tasks = parse_data(config, task_type=TaskType.DEB_TASK_TYPE, task_filters=["amd64"])
+ self.assertTrue(len(tasks) == 1)
+ self.assertEqual(tasks[0].substitutions, ["foo=bar", "aaa=bbb"])
+ self.assertEqual(tasks[0].repo_path, "test_repo/amd64")
+ self.assertEqual(tasks[0].distribution, "barbar-file-repository-amd64") # type: ignore
+ self.assertEqual(tasks[0].component, "main") # type: ignore
+ self.assertEqual(tasks[0].architectures, ["amd64"]) # type: ignore
+ self.assertEqual(tasks[0].content_sources, ["http://bar.com/content1", # type: ignore
+ "http://bar.com/content2"])
+ self.assertEqual(sorted(tasks[0].rta_key_list), sorted(["key1", "key2"]))
+
+ @asyncio_test
async def test_release_task_reader_invalid_config(self) -> None:
sample_config = """
- [task.repository]
+ [task.ifw.repository]
config_file: foobar-file-repository
substitutions: arg1, arg2, arg3
- repo_components_to_update: *
repo_path: foo/bar/path
rta_key_list: key1, key2
"""
config = ConfigParser()
config.read_string(sample_config)
with self.assertRaises(ReleaseTaskError):
- parse_data(config, task_filters=[])
+ parse_data(config, task_type=TaskType.IFW_TASK_TYPE, task_filters=[])
if __name__ == '__main__':