summaryrefslogtreecommitdiffstats
path: root/util/cmake/condition_simplifier_cache.py
blob: 58cd5b88c5f43635f0f30c64176c3c63d2e56612 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
#!/usr/bin/env python3
#############################################################################
##
## Copyright (C) 2018 The Qt Company Ltd.
## Contact: https://www.qt.io/licensing/
##
## This file is part of the plugins of the Qt Toolkit.
##
## $QT_BEGIN_LICENSE:GPL-EXCEPT$
## Commercial License Usage
## Licensees holding valid commercial Qt licenses may use this file in
## accordance with the commercial license agreement provided with the
## Software or, alternatively, in accordance with the terms contained in
## a written agreement between you and The Qt Company. For licensing terms
## and conditions see https://www.qt.io/terms-conditions. For further
## information use the contact form at https://www.qt.io/contact-us.
##
## GNU General Public License Usage
## Alternatively, this file may be used under the terms of the GNU
## General Public License version 3 as published by the Free Software
## Foundation with exceptions as appearing in the file LICENSE.GPL3-EXCEPT
## included in the packaging of this file. Please review the following
## information to ensure the GNU General Public License requirements will
## be met: https://www.gnu.org/licenses/gpl-3.0.html.
##
## $QT_END_LICENSE$
##
#############################################################################


import atexit
import hashlib
import json
import os
import sys
import time

from typing import Any, Callable, Dict

# Module-wide switch: when False, memoized simplifier results are always
# recomputed instead of being served from the cache (the cache file is still
# written back at interpreter exit).
condition_simplifier_cache_enabled = True


def set_condition_simplified_cache_enabled(value: bool):
    """Turn the on-disk condition-simplifier cache on or off globally."""
    global condition_simplifier_cache_enabled
    condition_simplifier_cache_enabled = value


def get_current_file_path() -> str:
    """Return the absolute path of the currently running script.

    Falls back to ``sys.argv[0]`` when ``__file__`` is not defined (for
    example in an interactive interpreter session).
    """
    try:
        script = __file__
    except NameError:
        script = sys.argv[0]
    return os.path.abspath(script)


def get_cache_location() -> str:
    """Return the path of the persistent cache file, which lives in a
    ``.pro2cmake_cache`` directory next to this script."""
    script_dir = os.path.dirname(get_current_file_path())
    return os.path.join(script_dir, ".pro2cmake_cache", "cache.json")


def get_file_checksum(file_path: str) -> str:
    """Return the MD5 hex digest of the file's contents.

    When the file cannot be read, a digest of the current time is returned
    instead, so callers always get a value; such a pseudo-checksum never
    matches a real one, which forces a cache rebuild.
    """
    try:
        # Read raw bytes: the original text-mode read could raise an
        # uncaught UnicodeDecodeError on undecodable content (only IOError
        # was handled), and the decode/encode round-trip was redundant.
        with open(file_path, "rb") as content_file:
            content = content_file.read()
    except OSError:
        content = str(time.time()).encode("utf-8")
    return hashlib.md5(content).hexdigest()


def get_condition_simplifier_checksum() -> str:
    """Return the checksum of the sibling ``condition_simplifier.py`` file,
    used to invalidate cached results whenever the simplifier changes."""
    simplifier_file = os.path.join(
        os.path.dirname(get_current_file_path()), "condition_simplifier.py"
    )
    return get_file_checksum(simplifier_file)


def init_cache_dict():
    """Return a fresh, empty cache structure tagged with the current
    simplifier checksum and the cache schema version."""
    fresh_cache: Dict[str, Any] = {
        "checksum": get_condition_simplifier_checksum(),
        "schema_version": "1",
        "cache": {"conditions": {}},
    }
    return fresh_cache


def merge_dicts_recursive(a: Dict[str, Any], other: Dict[str, Any]) -> Dict[str, Any]:
    """Merge the entries of "other" into "a" in place and return "a".

    Nested dicts are merged recursively.  When a key exists in both and the
    values are not both dicts, the value already stored in "a" is kept.
    """
    for key, incoming in other.items():
        if key not in a:
            a[key] = incoming
        elif isinstance(a[key], dict) and isinstance(incoming, dict):
            merge_dicts_recursive(a[key], incoming)
        # Otherwise: conflicting non-dict values — keep a's existing value.
    return a


def open_file_safe(file_path: str, mode: str = "r+"):
    """Open ``file_path`` with an exclusive advisory lock.

    Returns a ``portalocker.Lock`` handle, which is used as a context
    manager by the callers.  Locking needs the third-party portalocker
    package; when it is missing, print install instructions and abort.
    """
    try:
        import portalocker  # type: ignore

        return portalocker.Lock(file_path, mode=mode, flags=portalocker.LOCK_EX)
    except ImportError:
        print(
            "The conversion script is missing a required package: portalocker. Please run "
            "python -m pip install -r requirements.txt to install the missing dependency."
        )
        # sys.exit, not the built-in exit(): the latter is injected by the
        # site module and is not guaranteed to exist in every run mode.
        sys.exit(1)


def simplify_condition_memoize(f: Callable[[str], str]):
    """Decorator that memoizes a condition-simplifying function on disk.

    The cache is a JSON file (see get_cache_location()) loaded once when the
    decorator is applied and written back at interpreter exit via atexit.
    It is discarded when it is unreadable/corrupt or when the checksum of
    condition_simplifier.py no longer matches, i.e. when the simplifier
    implementation has changed and cached results may be stale.
    """
    cache_path = get_cache_location()
    cache_file_content: Dict[str, Any] = {}

    # Load any existing cache under an exclusive lock; a corrupt file
    # (unreadable, or not valid JSON) is removed so it gets rebuilt.
    if os.path.exists(cache_path):
        try:
            with open_file_safe(cache_path, mode="r") as cache_file:
                cache_file_content = json.load(cache_file)
        except (IOError, ValueError):
            print(f"Invalid pro2cmake cache file found at: {cache_path}. Removing it.")
            os.remove(cache_path)

    if not cache_file_content:
        cache_file_content = init_cache_dict()

    # Start from an empty cache when the simplifier implementation changed.
    current_checksum = get_condition_simplifier_checksum()
    if cache_file_content["checksum"] != current_checksum:
        cache_file_content = init_cache_dict()

    def update_cache_file():
        # Persist the in-memory cache to disk, merging with whatever a
        # concurrently running conversion may have written in the meantime.
        if not os.path.exists(cache_path):
            os.makedirs(os.path.dirname(cache_path), exist_ok=True)
            # Create the file if it doesn't exist, but don't override
            # it.
            with open(cache_path, "a"):
                pass

        updated_cache = cache_file_content

        # Hold the exclusive lock across the whole read-merge-write cycle so
        # two processes cannot interleave and lose each other's entries.
        with open_file_safe(cache_path, "r+") as cache_file_write_handle:
            # Read any existing cache content, and truncate the file.
            cache_file_existing_content = cache_file_write_handle.read()
            cache_file_write_handle.seek(0)
            cache_file_write_handle.truncate()

            # Merge the new cache into the old cache if it exists.
            if cache_file_existing_content:
                possible_cache = json.loads(cache_file_existing_content)
                # Only merge caches produced by the same simplifier version
                # and the same schema; anything else is simply overwritten.
                if (
                    "checksum" in possible_cache
                    and "schema_version" in possible_cache
                    and possible_cache["checksum"] == cache_file_content["checksum"]
                    and possible_cache["schema_version"] == cache_file_content["schema_version"]
                ):
                    updated_cache = merge_dicts_recursive(dict(possible_cache), updated_cache)

            json.dump(updated_cache, cache_file_write_handle, indent=4)

            # Flush any buffered writes.
            cache_file_write_handle.flush()
            os.fsync(cache_file_write_handle.fileno())

    # Write the cache back exactly once, at interpreter shutdown.
    atexit.register(update_cache_file)

    def helper(condition: str) -> str:
        # Look up the memoized result; recompute when missing or when
        # caching has been disabled via the module-global flag (the fresh
        # result is still stored, so it gets persisted at exit).
        if (
            condition not in cache_file_content["cache"]["conditions"]
            or not condition_simplifier_cache_enabled
        ):
            cache_file_content["cache"]["conditions"][condition] = f(condition)
        return cache_file_content["cache"]["conditions"][condition]

    return helper