Diffstat (limited to 'util/cmake/condition_simplifier_cache.py')
-rw-r--r--  util/cmake/condition_simplifier_cache.py  75
1 file changed, 69 insertions(+), 6 deletions(-)
diff --git a/util/cmake/condition_simplifier_cache.py b/util/cmake/condition_simplifier_cache.py
index 947b18af81..6303ff828e 100644
--- a/util/cmake/condition_simplifier_cache.py
+++ b/util/cmake/condition_simplifier_cache.py
@@ -35,7 +35,7 @@ import os
import sys
import time
-from typing import Any, Callable, Dict, Union
+from typing import Any, Callable, Dict
condition_simplifier_cache_enabled = True
@@ -86,16 +86,49 @@ def init_cache_dict():
}
+def merge_dicts_recursive(a: Dict[str, Any], other: Dict[str, Any]) -> Dict[str, Any]:
+ """Merges values of "other" into "a", mutates a."""
+ for key in other:
+ if key in a:
+ if isinstance(a[key], dict) and isinstance(other[key], dict):
+ merge_dicts_recursive(a[key], other[key])
+ elif a[key] == other[key]:
+ pass
+ else:
+ a[key] = other[key]
+ return a
+
+
+def open_file_safe(file_path: str, mode: str = "r+"):
+ # Use portalocker package for file locking if available,
+ # otherwise print a message to install the package.
+ try:
+ import portalocker # type: ignore
+
+ file_open_func = portalocker.Lock
+ file_open_args = [file_path]
+ file_open_kwargs = {"mode": mode, "flags": portalocker.LOCK_EX}
+ file_handle = file_open_func(*file_open_args, **file_open_kwargs)
+ return file_handle
+ except ImportError:
+ print(
+ "The conversion script is missing a required package: portalocker. Please run "
+ "python -m pip install requirements.txt to install the missing dependency."
+ )
+ exit(1)
+
+
def simplify_condition_memoize(f: Callable[[str], str]):
cache_path = get_cache_location()
cache_file_content: Dict[str, Any] = {}
if os.path.exists(cache_path):
try:
- with open(cache_path, "r") as cache_file:
+ with open_file_safe(cache_path, mode="r") as cache_file:
cache_file_content = json.load(cache_file)
except (IOError, ValueError):
- pass
+ print(f"Invalid pro2cmake cache file found at: {cache_path}. Removing it.")
+ os.remove(cache_path)
if not cache_file_content:
cache_file_content = init_cache_dict()
@@ -107,13 +140,43 @@ def simplify_condition_memoize(f: Callable[[str], str]):
def update_cache_file():
if not os.path.exists(cache_path):
os.makedirs(os.path.dirname(cache_path), exist_ok=True)
- with open(cache_path, "w") as cache_file_write_handle:
- json.dump(cache_file_content, cache_file_write_handle, indent=4)
+            # Create the file if it doesn't exist, but don't overwrite it.
+ with open(cache_path, "a") as temp_file_handle:
+ pass
+
+ updated_cache = cache_file_content
+
+ with open_file_safe(cache_path, "r+") as cache_file_write_handle:
+ # Read any existing cache content, and truncate the file.
+ cache_file_existing_content = cache_file_write_handle.read()
+ cache_file_write_handle.seek(0)
+ cache_file_write_handle.truncate()
+
+ # Merge the new cache into the old cache if it exists.
+ if cache_file_existing_content:
+ possible_cache = json.loads(cache_file_existing_content)
+ if (
+ "checksum" in possible_cache
+ and "schema_version" in possible_cache
+ and possible_cache["checksum"] == cache_file_content["checksum"]
+ and possible_cache["schema_version"] == cache_file_content["schema_version"]
+ ):
+ updated_cache = merge_dicts_recursive(dict(possible_cache), updated_cache)
+
+ json.dump(updated_cache, cache_file_write_handle, indent=4)
+
+ # Flush any buffered writes.
+ cache_file_write_handle.flush()
+ os.fsync(cache_file_write_handle.fileno())
atexit.register(update_cache_file)
def helper(condition: str) -> str:
- if condition not in cache_file_content["cache"]["conditions"] or not condition_simplifier_cache_enabled:
+ if (
+ condition not in cache_file_content["cache"]["conditions"]
+ or not condition_simplifier_cache_enabled
+ ):
cache_file_content["cache"]["conditions"][condition] = f(condition)
return cache_file_content["cache"]["conditions"][condition]
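For reference, a minimal sketch of how two cache snapshots combine under the merge_dicts_recursive() added in this patch. The dict shape (checksum, schema_version, and the nested cache/conditions table) mirrors init_cache_dict() as used in the diff, but the concrete keys and values below are made-up examples, and the import assumes util/cmake is on the Python path:

    # Hypothetical usage sketch; values are illustrative only.
    from condition_simplifier_cache import merge_dicts_recursive

    on_disk = {
        "checksum": "abc",
        "schema_version": "1",
        "cache": {"conditions": {"QT_FEATURE_foo": "FOO"}},
    }
    in_memory = {
        "checksum": "abc",
        "schema_version": "1",
        "cache": {"conditions": {"QT_FEATURE_bar AND QT_FEATURE_bar": "BAR"}},
    }

    merged = merge_dicts_recursive(dict(on_disk), in_memory)
    # merged["cache"]["conditions"] now holds both entries, so simplifications
    # written by a concurrent pro2cmake process are kept rather than overwritten.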