author    Allan Sandfeld Jensen <allan.jensen@qt.io>  2022-05-17 17:24:03 +0200
committer Allan Sandfeld Jensen <allan.jensen@qt.io>  2022-06-22 07:51:41 +0000
commit 774f54339e5db91f785733232d3950366db65d07 (patch)
tree 068e1b47bd1af94d77094ed12b604a6b83d9c22a /chromium/build/android/gyp
parent f7eaed5286974984ba5f9e3189d8f49d03e99f81 (diff)
BASELINE: Update Chromium to 102.0.5005.57
Change-Id: I885f714bb40ee724c28f94ca6bd8dbdb39915158
Reviewed-by: Allan Sandfeld Jensen <allan.jensen@qt.io>
Diffstat (limited to 'chromium/build/android/gyp')
-rwxr-xr-x  chromium/build/android/gyp/compile_java.py                  2
-rwxr-xr-x  chromium/build/android/gyp/compile_resources.py            73
-rwxr-xr-x  chromium/build/android/gyp/create_unwind_table.py          63
-rwxr-xr-x  chromium/build/android/gyp/create_unwind_table_tests.py   106
-rwxr-xr-x  chromium/build/android/gyp/dex.py                          246
-rw-r--r--  chromium/build/android/gyp/dex.pydeps                        1
-rwxr-xr-x  chromium/build/android/gyp/javac_output_processor.py       11
-rwxr-xr-x  chromium/build/android/gyp/proguard.py                     141
-rw-r--r--  chromium/build/android/gyp/proguard.pydeps                   1
-rw-r--r--  chromium/build/android/gyp/util/resource_utils.py            6
-rwxr-xr-x  chromium/build/android/gyp/write_build_config.py           270
-rwxr-xr-x  chromium/build/android/gyp/write_native_libraries_java.py  20
12 files changed, 473 insertions, 467 deletions
diff --git a/chromium/build/android/gyp/compile_java.py b/chromium/build/android/gyp/compile_java.py
index 6be0eed4ad9..d3fca59058d 100755
--- a/chromium/build/android/gyp/compile_java.py
+++ b/chromium/build/android/gyp/compile_java.py
@@ -187,6 +187,8 @@ ERRORPRONE_WARNINGS_TO_DISABLE = [
# The only time we trigger this is when it is better to be explicit in a
# list of unicode characters, e.g. FindAddress.java
'UnicodeEscape',
+ # Nice to have.
+ 'AlreadyChecked',
]
# Full list of checks: https://errorprone.info/bugpatterns
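
[Editor's aside: Error Prone checks such as the two above are conventionally disabled by passing '-Xep:<CheckName>:OFF' to the compiler (flag syntax per the Error Prone docs; the exact wiring inside compile_java.py is not shown in this hunk). A minimal sketch of turning the list into flags:

ERRORPRONE_WARNINGS_TO_DISABLE = ['UnicodeEscape', 'AlreadyChecked']
errorprone_flags = ['-Xep:%s:OFF' % name for name in ERRORPRONE_WARNINGS_TO_DISABLE]
print(errorprone_flags)  # ['-Xep:UnicodeEscape:OFF', '-Xep:AlreadyChecked:OFF']
]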
diff --git a/chromium/build/android/gyp/compile_resources.py b/chromium/build/android/gyp/compile_resources.py
index 28595bf74cc..e8613a9f2d1 100755
--- a/chromium/build/android/gyp/compile_resources.py
+++ b/chromium/build/android/gyp/compile_resources.py
@@ -18,6 +18,7 @@ import filecmp
import hashlib
import logging
import os
+import pathlib
import re
import shutil
import subprocess
@@ -177,7 +178,6 @@ def _ParseArgs(args):
output_opts.add_argument('--arsc-path', help='Apk output for arsc format.')
output_opts.add_argument('--proto-path', help='Apk output for proto format.')
- group = input_opts.add_mutually_exclusive_group()
output_opts.add_argument(
'--info-path', help='Path to output info file for the partial apk.')
@@ -796,7 +796,7 @@ def _PackageApk(options, build):
if options.package_id:
link_command += [
'--package-id',
- hex(options.package_id),
+ '0x%02x' % options.package_id,
'--allow-reserved-package-id',
]
@@ -810,12 +810,15 @@ def _PackageApk(options, build):
desired_manifest_package_name
]
- # Creates a .zip with AndroidManifest.xml, resources.arsc, res/*
- # Also creates R.txt
- if options.use_resource_ids_path:
- _CreateStableIdsFile(options.use_resource_ids_path, build.stable_ids_path,
- fixed_manifest_package)
- link_command += ['--stable-ids', build.stable_ids_path]
+ if options.package_id is not None:
+ package_id = options.package_id
+ elif options.shared_resources:
+ package_id = 0
+ else:
+ package_id = 0x7f
+ _CreateStableIdsFile(options.use_resource_ids_path, build.stable_ids_path,
+ fixed_manifest_package, package_id)
+ link_command += ['--stable-ids', build.stable_ids_path]
link_command += partials
@@ -876,29 +879,38 @@ def _PackageApk(options, build):
build.arsc_path, build.proto_path
])
+ # Sanity check that the created resources have the expected package ID.
+ logging.debug('Performing sanity check')
+ _, actual_package_id = resource_utils.ExtractArscPackage(
+ options.aapt2_path,
+ build.arsc_path if options.arsc_path else build.proto_path)
+ # When there are no resources, ExtractArscPackage returns (None, None), in
+ # this case there is no need to check for matching package ID.
+ if actual_package_id is not None and actual_package_id != package_id:
+ raise Exception('Invalid package ID 0x%x (expected 0x%x)' %
+ (actual_package_id, package_id))
+
return desired_manifest_package_name
-@contextlib.contextmanager
-def _CreateStableIdsFile(in_path, out_path, package_name):
+def _CreateStableIdsFile(in_path, out_path, package_name, package_id):
"""Transforms a file generated by --emit-ids from another package.
--stable-ids is generally meant to be used by different versions of the same
package. To make it work for other packages, we need to transform the package
name references to match the package that resources are being generated for.
-
- Note: This will fail if the package ID of the resources in
- |options.use_resource_ids_path| does not match the package ID of the
- resources being linked.
"""
- with open(in_path) as stable_ids_file:
- with open(out_path, 'w') as output_ids_file:
- output_stable_ids = re.sub(
- r'^.*?:',
- package_name + ':',
- stable_ids_file.read(),
- flags=re.MULTILINE)
- output_ids_file.write(output_stable_ids)
+ if in_path:
+ data = pathlib.Path(in_path).read_text()
+ else:
+ # Force IDs to use 0x01 for the type byte in order to ensure they are
+ # different from IDs generated by other apps. https://crbug.com/1293336
+ data = 'pkg:id/fake_resource_id = 0x7f010000\n'
+ # Replace "pkg:" with correct package name.
+ data = re.sub(r'^.*?:', package_name + ':', data, flags=re.MULTILINE)
+ # Replace "0x7f" with correct package id.
+ data = re.sub(r'0x..', '0x%02x' % package_id, data)
+ pathlib.Path(out_path).write_text(data)
def _WriteOutputs(options, build):
@@ -1006,23 +1018,6 @@ def main(args):
options.extra_main_r_text_files)
build_utils.ZipDir(build.srcjar_path, build.srcjar_dir)
- # Sanity check that the created resources have the expected package ID.
- logging.debug('Performing sanity check')
- if options.package_id:
- expected_id = options.package_id
- elif options.shared_resources:
- expected_id = 0
- else:
- expected_id = 127 # == '0x7f'.
- _, package_id = resource_utils.ExtractArscPackage(
- options.aapt2_path,
- build.arsc_path if options.arsc_path else build.proto_path)
- # When there are no resources, ExtractArscPackage returns (None, None), in
- # this case there is no need to check for matching package ID.
- if package_id is not None and package_id != expected_id:
- raise Exception(
- 'Invalid package ID 0x%x (expected 0x%x)' % (package_id, expected_id))
-
logging.debug('Copying outputs')
_WriteOutputs(options, build)
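
[Editor's aside: a self-contained sketch of the rewrite _CreateStableIdsFile performs on each --emit-ids line, mirroring the two re.sub() calls in the hunk above; the sample input line and names here are illustrative, not from the tree:

import re

data = 'com.example.other:id/title = 0x7f0b0042\n'
package_name = 'org.chromium.chrome'
package_id = 0x80

# Replace the package-name prefix, then the package-id byte, exactly as above.
data = re.sub(r'^.*?:', package_name + ':', data, flags=re.MULTILINE)
data = re.sub(r'0x..', '0x%02x' % package_id, data)
print(data)  # org.chromium.chrome:id/title = 0x800b0042
]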
diff --git a/chromium/build/android/gyp/create_unwind_table.py b/chromium/build/android/gyp/create_unwind_table.py
index 8c9c347f36b..12a54e0928e 100755
--- a/chromium/build/android/gyp/create_unwind_table.py
+++ b/chromium/build/android/gyp/create_unwind_table.py
@@ -8,8 +8,8 @@ import abc
import argparse
import collections
import enum
+import json
import logging
-import os
import re
import struct
import subprocess
@@ -178,7 +178,7 @@ def EncodeStackPointerUpdate(offset: int) -> bytes:
instruction_code | ((min(abs_offset, 0x100) - 4) >> 2)
]
# For vsp increments of 0x104-0x200 we use 00xxxxxx twice.
- if abs_offset > 0x104:
+ if abs_offset >= 0x104:
instructions.append(instruction_code | ((abs_offset - 0x100 - 4) >> 2))
try:
return EncodeAsBytes(*instructions)
@@ -614,22 +614,21 @@ REFUSE_TO_UNWIND: Tuple[EncodedAddressUnwind, ...] = (EncodedAddressUnwind(
complete_instruction_sequence=bytes([0b10000000, 0b00000000])), )
-def EncodeFunctionUnwinds(function_unwinds: Iterable[FunctionUnwind]
+def EncodeFunctionUnwinds(function_unwinds: Iterable[FunctionUnwind],
+ text_section_start_address: int
) -> Iterable[EncodedFunctionUnwind]:
"""Encodes the unwind state for all functions defined in the binary.
This function
- sorts the collection of `FunctionUnwind`s by address.
- fills in gaps between functions with trivial unwind.
- - fills the space in the space in last page after last function with refuse
+ - fills the space in the last page after the last function with refuse to unwind.
+ - fills the space in the first page before the first function with refuse
to unwind.
- Note:
- This function assumes that min function start address is the text section
- start address.
-
Args:
function_unwinds: An iterable of function unwind states.
+ text_section_start_address: The address of the .text section in the ELF file.
Returns:
The encoded function unwind states with no gaps between functions, ordered
@@ -656,7 +655,11 @@ def EncodeFunctionUnwinds(function_unwinds: Iterable[FunctionUnwind]
sorted_function_unwinds: List[FunctionUnwind] = sorted(
function_unwinds, key=lambda function_unwind: function_unwind.address)
- text_section_start_address: int = sorted_function_unwinds[0].address
+ if sorted_function_unwinds[0].address > text_section_start_address:
+ yield EncodedFunctionUnwind(page_number=0,
+ page_offset=0,
+ address_unwinds=REFUSE_TO_UNWIND)
+
prev_func_end_address: int = sorted_function_unwinds[0].address
gaps = 0
@@ -695,7 +698,7 @@ def EncodeFunctionOffsetTable(
) -> Tuple[bytes, Dict[Tuple[EncodedAddressUnwind, ...], int]]:
"""Encodes the function offset table.
- The function offset table maps local address_offset from function
+ The function offset table maps local instruction offset from function
start to the location in the unwind instruction table.
Args:
@@ -720,8 +723,13 @@ def EncodeFunctionOffsetTable(
offsets[sequence] = len(function_offset_table)
for address_offset, complete_instruction_sequence in sequence:
- function_offset_table += Uleb128Encode(address_offset) + Uleb128Encode(
- unwind_instruction_table_offsets[complete_instruction_sequence])
+ # Note: address_offset is the number of bytes from one address to another,
+ # while the instruction_offset is the number of 2-byte instructions
+ # from one address to another.
+ instruction_offset = address_offset >> 1
+ function_offset_table += (
+ Uleb128Encode(instruction_offset) + Uleb128Encode(
+ unwind_instruction_table_offsets[complete_instruction_sequence]))
return bytes(function_offset_table), offsets
@@ -1009,6 +1017,29 @@ def GenerateUnwindTables(
unwind_instruction_table)
+def ReadTextSectionStartAddress(readobj_path: str, libchrome_path: str) -> int:
+ """Reads the .text section start address of libchrome ELF.
+
+ Arguments:
+ readobj_path: Path to llvm-readobj binary.
+ libchrome_path: Path to libchrome binary.
+
+ Returns:
+ The text section start address as a number.
+ """
+ proc = subprocess.Popen(
+ [readobj_path, '--sections', '--elf-output-style=JSON', libchrome_path],
+ stdout=subprocess.PIPE,
+ encoding='ascii')
+
+ elfs = json.loads(proc.stdout.read())[0]
+ assert len(elfs) == 1
+ sections = list(elfs.values())[0]['Sections']
+
+ return next(s['Section']['Address'] for s in sections
+ if s['Section']['Name']['Value'] == '.text')
+
+
def main():
build_utils.InitLogging('CREATE_UNWIND_TABLE_DEBUG')
parser = argparse.ArgumentParser(description=__doc__)
@@ -1024,6 +1055,10 @@ def main():
required=True,
help='The path of the dump_syms binary.',
metavar='FILE')
+ parser.add_argument('--readobj_path',
+ required=True,
+ help='The path of the llvm-readobj binary.',
+ metavar='FILE')
args = parser.parse_args()
proc = subprocess.Popen(['./' + args.dump_syms_path, args.input_path, '-v'],
@@ -1032,7 +1067,9 @@ def main():
function_cfis = ReadFunctionCfi(proc.stdout)
function_unwinds = GenerateUnwinds(function_cfis, parsers=ALL_PARSERS)
- encoded_function_unwinds = EncodeFunctionUnwinds(function_unwinds)
+ encoded_function_unwinds = EncodeFunctionUnwinds(
+ function_unwinds,
+ ReadTextSectionStartAddress(args.readobj_path, args.input_path))
(page_table, function_table, function_offset_table,
unwind_instruction_table) = GenerateUnwindTables(encoded_function_unwinds)
unwind_info: bytes = EncodeUnwindInfo(page_table, function_table,
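
[Editor's aside: the '>' to '>=' fix above matters for a vsp increment of exactly 0x104. Under the ARM EHABI encoding this script assumes (0b00xxxxxx means vsp += (xxxxxx << 2) + 4, so one byte covers increments of 4..0x100), a sketch with a simplified, hypothetical encoder that reproduces the test expectations in the next file:

def encode_sp_increment(offset):
    # One byte covers increments 4..0x100; 0x104..0x200 needs a second byte.
    assert 4 <= offset <= 0x200 and offset % 4 == 0
    out = [0b00000000 | ((min(offset, 0x100) - 4) >> 2)]
    if offset >= 0x104:  # with '>', exactly 0x104 lost its second byte
        out.append(0b00000000 | ((offset - 0x100 - 4) >> 2))
    return bytes(out)

assert encode_sp_increment(0x100) == bytes([0b00111111])
assert encode_sp_increment(0x104) == bytes([0b00111111, 0b00000000])
assert encode_sp_increment(0x200) == bytes([0b00111111, 0b00111111])
]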
diff --git a/chromium/build/android/gyp/create_unwind_table_tests.py b/chromium/build/android/gyp/create_unwind_table_tests.py
index 81e9b7d7f3d..2a6abd2507e 100755
--- a/chromium/build/android/gyp/create_unwind_table_tests.py
+++ b/chromium/build/android/gyp/create_unwind_table_tests.py
@@ -142,6 +142,8 @@ class _TestEncodeStackPointerUpdate(unittest.TestCase):
self.assertEqual(bytes([0b00000000 | 3]), EncodeStackPointerUpdate(16))
self.assertEqual(bytes([0b01000000 | 3]), EncodeStackPointerUpdate(-16))
+ self.assertEqual(bytes([0b00111111]), EncodeStackPointerUpdate(0x100))
+
# 10110010 uleb128
# vsp = vsp + 0x204 + (uleb128 << 2)
self.assertEqual(bytes([0b10110010, 0b00000000]),
@@ -150,6 +152,8 @@ class _TestEncodeStackPointerUpdate(unittest.TestCase):
EncodeStackPointerUpdate(0x208))
# For vsp increments of 0x104-0x200, use 00xxxxxx twice.
+ self.assertEqual(bytes([0b00111111, 0b00000000]),
+ EncodeStackPointerUpdate(0x104))
self.assertEqual(bytes([0b00111111, 0b00111111]),
EncodeStackPointerUpdate(0x200))
self.assertEqual(bytes([0b01111111, 0b01111111]),
@@ -311,10 +315,10 @@ class _TestEncodeFunctionUnwinds(unittest.TestCase):
FunctionUnwind(address=100,
size=PAGE_SIZE - 100,
address_unwinds=()),
- FunctionUnwind(address=0,
- size=100,
- address_unwinds=()),
- ])))
+ FunctionUnwind(
+ address=0, size=100, address_unwinds=()),
+ ],
+ text_section_start_address=0)))
@unittest.mock.patch('create_unwind_table.EncodeAddressUnwinds')
def testFillingGaps(self, MockEncodeAddressUnwinds):
@@ -332,38 +336,65 @@ class _TestEncodeFunctionUnwinds(unittest.TestCase):
],
list(
EncodeFunctionUnwinds([
- FunctionUnwind(address=0,
- size=50,
- address_unwinds=()),
+ FunctionUnwind(
+ address=0, size=50, address_unwinds=()),
FunctionUnwind(address=100,
size=PAGE_SIZE - 100,
address_unwinds=()),
- ])))
+ ],
+ text_section_start_address=0)))
@unittest.mock.patch('create_unwind_table.EncodeAddressUnwinds')
def testFillingLastPage(self, MockEncodeAddressUnwinds):
MockEncodeAddressUnwinds.return_value = EncodedAddressUnwind(0, b'\x00')
- self.assertEqual([
- EncodedFunctionUnwind(page_number=0,
- page_offset=0,
- address_unwinds=EncodedAddressUnwind(0, b'\x00')),
- EncodedFunctionUnwind(page_number=0,
- page_offset=100 >> 1,
- address_unwinds=EncodedAddressUnwind(0, b'\x00')),
- EncodedFunctionUnwind(page_number=0,
- page_offset=200 >> 1,
- address_unwinds=REFUSE_TO_UNWIND),
- ],
- list(
- EncodeFunctionUnwinds([
- FunctionUnwind(address=1100,
- size=100,
- address_unwinds=()),
- FunctionUnwind(address=1200,
- size=100,
- address_unwinds=()),
- ])))
+ self.assertEqual(
+ [
+ EncodedFunctionUnwind(page_number=0,
+ page_offset=0,
+ address_unwinds=EncodedAddressUnwind(
+ 0, b'\x00')),
+ EncodedFunctionUnwind(page_number=0,
+ page_offset=100 >> 1,
+ address_unwinds=EncodedAddressUnwind(
+ 0, b'\x00')),
+ EncodedFunctionUnwind(page_number=0,
+ page_offset=200 >> 1,
+ address_unwinds=REFUSE_TO_UNWIND),
+ ],
+ list(
+ EncodeFunctionUnwinds([
+ FunctionUnwind(address=1100, size=100, address_unwinds=()),
+ FunctionUnwind(address=1200, size=100, address_unwinds=()),
+ ],
+ text_section_start_address=1100)))
+
+ @unittest.mock.patch('create_unwind_table.EncodeAddressUnwinds')
+ def testFillingFirstPage(self, MockEncodeAddressUnwinds):
+ MockEncodeAddressUnwinds.return_value = EncodedAddressUnwind(0, b'\x00')
+
+ self.assertEqual(
+ [
+ EncodedFunctionUnwind(
+ page_number=0, page_offset=0, address_unwinds=REFUSE_TO_UNWIND),
+ EncodedFunctionUnwind(page_number=0,
+ page_offset=100 >> 1,
+ address_unwinds=EncodedAddressUnwind(
+ 0, b'\x00')),
+ EncodedFunctionUnwind(page_number=0,
+ page_offset=200 >> 1,
+ address_unwinds=EncodedAddressUnwind(
+ 0, b'\x00')),
+ EncodedFunctionUnwind(page_number=0,
+ page_offset=300 >> 1,
+ address_unwinds=REFUSE_TO_UNWIND),
+ ],
+ list(
+ EncodeFunctionUnwinds([
+ FunctionUnwind(address=1100, size=100, address_unwinds=()),
+ FunctionUnwind(address=1200, size=100, address_unwinds=()),
+ ],
+ text_section_start_address=1000)))
@unittest.mock.patch('create_unwind_table.EncodeAddressUnwinds')
def testOverlappedFunctions(self, _):
@@ -374,7 +405,8 @@ class _TestEncodeFunctionUnwinds(unittest.TestCase):
EncodeFunctionUnwinds([
FunctionUnwind(address=0, size=100, address_unwinds=()),
FunctionUnwind(address=50, size=100, address_unwinds=()),
- ])))
+ ],
+ text_section_start_address=0)))
class _TestNullParser(unittest.TestCase):
@@ -662,7 +694,7 @@ class _TestFunctionOffsetTable(unittest.TestCase):
complete_instruction_sequence1 = bytes([1, 3])
sequence1 = (
- EncodedAddressUnwind(0x200, complete_instruction_sequence1),
+ EncodedAddressUnwind(0x400, complete_instruction_sequence1),
EncodedAddressUnwind(0x0, complete_instruction_sequence0),
)
@@ -697,11 +729,11 @@ class _TestFunctionOffsetTable(unittest.TestCase):
complete_instruction_sequence2 = bytes([2, 3])
sequence1 = (
- EncodedAddressUnwind(0x10, complete_instruction_sequence1),
+ EncodedAddressUnwind(0x20, complete_instruction_sequence1),
EncodedAddressUnwind(0x0, complete_instruction_sequence0),
)
sequence2 = (
- EncodedAddressUnwind(0x200, complete_instruction_sequence2),
+ EncodedAddressUnwind(0x400, complete_instruction_sequence2),
EncodedAddressUnwind(0x0, complete_instruction_sequence0),
)
address_unwind_sequences = [sequence1, sequence2]
@@ -743,11 +775,11 @@ class _TestFunctionOffsetTable(unittest.TestCase):
complete_instruction_sequence2 = bytes([2, 3])
sequence1 = (
- EncodedAddressUnwind(0x10, complete_instruction_sequence1),
+ EncodedAddressUnwind(0x20, complete_instruction_sequence1),
EncodedAddressUnwind(0x0, complete_instruction_sequence0),
)
sequence2 = (
- EncodedAddressUnwind(0x200, complete_instruction_sequence2),
+ EncodedAddressUnwind(0x400, complete_instruction_sequence2),
EncodedAddressUnwind(0x0, complete_instruction_sequence0),
)
sequence3 = sequence1
@@ -1082,15 +1114,15 @@ class _TestGenerateUnwindTables(unittest.TestCase):
def testGenerateUnwindTables(self):
"""This is an integration test that hooks everything together. """
address_unwind_sequence0 = (
- EncodedAddressUnwind(0x10, bytes([0, 0xb0])),
+ EncodedAddressUnwind(0x20, bytes([0, 0xb0])),
EncodedAddressUnwind(0x0, bytes([0xb0])),
)
address_unwind_sequence1 = (
- EncodedAddressUnwind(0x10, bytes([1, 0xb0])),
+ EncodedAddressUnwind(0x20, bytes([1, 0xb0])),
EncodedAddressUnwind(0x0, bytes([0xb0])),
)
address_unwind_sequence2 = (
- EncodedAddressUnwind(0x100, bytes([2, 0xb0])),
+ EncodedAddressUnwind(0x200, bytes([2, 0xb0])),
EncodedAddressUnwind(0x0, bytes([0xb0])),
)
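
[Editor's aside: the doubled offsets in the tests above (0x10 -> 0x20, 0x200 -> 0x400) follow from the function offset table now storing counts of 2-byte instructions, i.e. byte offsets >> 1. Uleb128Encode itself is not shown in this commit; a standard ULEB128 encoder for reference, under a hypothetical local name:

def uleb128_encode(value):
    # 7 payload bits per byte; the high bit marks a continuation byte.
    assert value >= 0
    out = bytearray()
    while True:
        byte = value & 0x7f
        value >>= 7
        if value:
            out.append(byte | 0x80)
        else:
            out.append(byte)
            return bytes(out)

assert uleb128_encode(0x3f) == bytes([0x3f])
assert uleb128_encode(0x400 >> 1) == bytes([0x80, 0x04])  # byte offset 0x400 -> instruction offset 0x200
]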
diff --git a/chromium/build/android/gyp/dex.py b/chromium/build/android/gyp/dex.py
index 49deee4a8ca..883e0faac64 100755
--- a/chromium/build/android/gyp/dex.py
+++ b/chromium/build/android/gyp/dex.py
@@ -18,10 +18,6 @@ from util import build_utils
from util import md5_check
from util import zipalign
-sys.path.insert(1, os.path.join(os.path.dirname(__file__), os.path.pardir))
-
-import convert_dex_profile
-
_DEX_XMX = '2G' # Increase this when __final_dex OOMs.
@@ -46,6 +42,11 @@ _IGNORE_WARNINGS = (
# desugar doesn't preserve interfaces in the same way. This should be
# removed when D8 is used for desugaring.
r'Warning: Cannot emulate interface ',
+ # Desugaring configs may occasionally not match types in our program. This
+ # may happen temporarily until we move over to the new desugared library
+ # json flags. See crbug.com/1302088 - this should be removed when this bug
+ # is fixed.
+ r'Warning: Specification conversion: The following prefixes do not match any type:', # pylint: disable=line-too-long
# Only relevant for R8 when optimizing an app that doesn't use proto.
r'Ignoring -shrinkunusedprotofields since the protobuf-lite runtime is',
)
@@ -131,37 +132,8 @@ def _ParseArgs(args):
action='store_true',
help='Use when filing D8 bugs to capture inputs.'
' Stores inputs to d8inputs.zip')
-
- group = parser.add_argument_group('Dexlayout')
- group.add_argument(
- '--dexlayout-profile',
- help=('Text profile for dexlayout. If present, a dexlayout '
- 'pass will happen'))
- group.add_argument(
- '--profman-path',
- help=('Path to ART profman binary. There should be a lib/ directory at '
- 'the same path with shared libraries (shared with dexlayout).'))
- group.add_argument(
- '--dexlayout-path',
- help=('Path to ART dexlayout binary. There should be a lib/ directory at '
- 'the same path with shared libraries (shared with dexlayout).'))
- group.add_argument('--dexdump-path', help='Path to dexdump binary.')
- group.add_argument(
- '--proguard-mapping-path',
- help=('Path to proguard map from obfuscated symbols in the jar to '
- 'unobfuscated symbols present in the code. If not present, the jar '
- 'is assumed not to be obfuscated.'))
-
options = parser.parse_args(args)
- if options.dexlayout_profile:
- build_utils.CheckOptions(
- options,
- parser,
- required=('profman_path', 'dexlayout_path', 'dexdump_path'))
- elif options.proguard_mapping_path is not None:
- parser.error('Unexpected proguard mapping without dexlayout')
-
if options.main_dex_rules_path and not options.multi_dex:
parser.error('--main-dex-rules-path is unused if multidex is not enabled')
@@ -223,14 +195,23 @@ def _RunD8(dex_cmd, input_paths, output_path, warnings_as_errors,
stderr_filter = CreateStderrFilter(show_desugar_default_interface_warnings)
- with tempfile.NamedTemporaryFile(mode='w') as flag_file:
+ is_debug = logging.getLogger().isEnabledFor(logging.DEBUG)
+
+ # Avoid deleting the flag file when DEX_DEBUG is set in case the flag file
+ # needs to be examined after the build.
+ with tempfile.NamedTemporaryFile(mode='w', delete=not is_debug) as flag_file:
# Chosen arbitrarily. Needed to avoid command-line length limits.
MAX_ARGS = 50
if len(dex_cmd) > MAX_ARGS:
- flag_file.write('\n'.join(dex_cmd[MAX_ARGS:]))
- flag_file.flush()
- dex_cmd = dex_cmd[:MAX_ARGS]
- dex_cmd.append('@' + flag_file.name)
+ # Add all flags to D8 (anything after the first --) as well as all
+ # positional args at the end to the flag file.
+ for idx, cmd in enumerate(dex_cmd):
+ if cmd.startswith('--'):
+ flag_file.write('\n'.join(dex_cmd[idx:]))
+ flag_file.flush()
+ dex_cmd = dex_cmd[:idx]
+ dex_cmd.append('@' + flag_file.name)
+ break
# stdout sometimes spams with things like:
# Stripped invalid locals information from 1 method.
@@ -239,128 +220,6 @@ def _RunD8(dex_cmd, input_paths, output_path, warnings_as_errors,
fail_on_output=warnings_as_errors)
-def _EnvWithArtLibPath(binary_path):
- """Return an environment dictionary for ART host shared libraries.
-
- Args:
- binary_path: the path to an ART host binary.
-
- Returns:
- An environment dictionary where LD_LIBRARY_PATH has been augmented with the
- shared library path for the binary. This assumes that there is a lib/
- directory in the same location as the binary.
- """
- lib_path = os.path.join(os.path.dirname(binary_path), 'lib')
- env = os.environ.copy()
- libraries = [l for l in env.get('LD_LIBRARY_PATH', '').split(':') if l]
- libraries.append(lib_path)
- env['LD_LIBRARY_PATH'] = ':'.join(libraries)
- return env
-
-
-def _CreateBinaryProfile(text_profile, input_dex, profman_path, temp_dir):
- """Create a binary profile for dexlayout.
-
- Args:
- text_profile: The ART text profile that will be converted to a binary
- profile.
- input_dex: The input dex file to layout.
- profman_path: Path to the profman binary.
- temp_dir: Directory to work in.
-
- Returns:
- The name of the binary profile, which will live in temp_dir.
- """
- binary_profile = os.path.join(
- temp_dir, 'binary_profile-for-' + os.path.basename(text_profile))
- open(binary_profile, 'w').close() # Touch binary_profile.
- profman_cmd = [profman_path,
- '--apk=' + input_dex,
- '--dex-location=' + input_dex,
- '--create-profile-from=' + text_profile,
- '--reference-profile-file=' + binary_profile]
- build_utils.CheckOutput(
- profman_cmd,
- env=_EnvWithArtLibPath(profman_path),
- stderr_filter=lambda output:
- build_utils.FilterLines(output, '|'.join(
- [r'Could not find (method_id|proto_id|name):',
- r'Could not create type list'])))
- return binary_profile
-
-
-def _LayoutDex(binary_profile, input_dex, dexlayout_path, temp_dir):
- """Layout a dexfile using a profile.
-
- Args:
- binary_profile: An ART binary profile, eg output from _CreateBinaryProfile.
- input_dex: The dex file used to create the binary profile.
- dexlayout_path: Path to the dexlayout binary.
- temp_dir: Directory to work in.
-
- Returns:
- List of output files produced by dexlayout. This will be one if the input
- was a single dexfile, or multiple files if the input was a multidex
- zip. These output files are located in temp_dir.
- """
- dexlayout_output_dir = os.path.join(temp_dir, 'dexlayout_output')
- os.mkdir(dexlayout_output_dir)
- dexlayout_cmd = [ dexlayout_path,
- '-u', # Update checksum
- '-p', binary_profile,
- '-w', dexlayout_output_dir,
- input_dex ]
- build_utils.CheckOutput(
- dexlayout_cmd,
- env=_EnvWithArtLibPath(dexlayout_path),
- stderr_filter=lambda output:
- build_utils.FilterLines(output,
- r'Can.t mmap dex file.*please zipalign'))
- output_files = os.listdir(dexlayout_output_dir)
- if not output_files:
- raise Exception('dexlayout unexpectedly produced no output')
- return sorted([os.path.join(dexlayout_output_dir, f) for f in output_files])
-
-
-def _ZipMultidex(file_dir, dex_files):
- """Zip dex files into a multidex.
-
- Args:
- file_dir: The directory into which to write the output.
- dex_files: The dexfiles forming the multizip. Their names must end with
- classes.dex, classes2.dex, ...
-
- Returns:
- The name of the multidex file, which will live in file_dir.
- """
- ordered_files = [] # List of (archive name, file name)
- for f in dex_files:
- if f.endswith('dex.jar'):
- ordered_files.append(('classes.dex', f))
- break
- if not ordered_files:
- raise Exception('Could not find classes.dex multidex file in %s' %
- dex_files)
- for dex_idx in range(2, len(dex_files) + 1):
- archive_name = 'classes%d.dex' % dex_idx
- for f in dex_files:
- if f.endswith(archive_name):
- ordered_files.append((archive_name, f))
- break
- else:
- raise Exception('Could not find classes%d.dex multidex file in %s' %
- dex_files)
- if len(set(f[1] for f in ordered_files)) != len(ordered_files):
- raise Exception('Unexpected clashing filenames for multidex in %s' %
- dex_files)
-
- zip_name = os.path.join(file_dir, 'multidex_classes.zip')
- build_utils.DoZip(((archive_name, os.path.join(file_dir, file_name))
- for archive_name, file_name in ordered_files),
- zip_name)
- return zip_name
-
-
def _ZipAligned(dex_files, output_path):
"""Creates a .dex.jar with 4-byte aligned files.
@@ -374,30 +233,6 @@ def _ZipAligned(dex_files, output_path):
zipalign.AddToZipHermetic(z, name, src_path=dex_file, alignment=4)
-def _PerformDexlayout(tmp_dir, tmp_dex_output, options):
- if options.proguard_mapping_path is not None:
- matching_profile = os.path.join(tmp_dir, 'obfuscated_profile')
- convert_dex_profile.ObfuscateProfile(
- options.dexlayout_profile, tmp_dex_output,
- options.proguard_mapping_path, options.dexdump_path, matching_profile)
- else:
- logging.warning('No obfuscation for %s', options.dexlayout_profile)
- matching_profile = options.dexlayout_profile
- binary_profile = _CreateBinaryProfile(matching_profile, tmp_dex_output,
- options.profman_path, tmp_dir)
- output_files = _LayoutDex(binary_profile, tmp_dex_output,
- options.dexlayout_path, tmp_dir)
- if len(output_files) > 1:
- return _ZipMultidex(tmp_dir, output_files)
-
- if zipfile.is_zipfile(output_files[0]):
- return output_files[0]
-
- final_output = os.path.join(tmp_dir, 'dex_classes.zip')
- _ZipAligned(output_files, final_output)
- return final_output
-
-
def _CreateFinalDex(d8_inputs, output, tmp_dir, dex_cmd, options=None):
tmp_dex_output = os.path.join(tmp_dir, 'tmp_dex_output.zip')
needs_dexing = not all(f.endswith('.dex') for f in d8_inputs)
@@ -428,9 +263,6 @@ def _CreateFinalDex(d8_inputs, output, tmp_dir, dex_cmd, options=None):
_ZipAligned(sorted(d8_inputs), tmp_dex_output)
logging.debug('Quick-zipped %d files', len(d8_inputs))
- if options and options.dexlayout_profile:
- tmp_dex_output = _PerformDexlayout(tmp_dir, tmp_dex_output, options)
-
# The dex file is complete and can be moved out of tmp_dir.
shutil.move(tmp_dex_output, output)
@@ -457,12 +289,34 @@ def _DeleteStaleIncrementalDexFiles(dex_dir, dex_files):
def _ParseDesugarDeps(desugar_dependencies_file):
+ # pylint: disable=line-too-long
+ """Returns a dict of dependent/dependency mapping parsed from the file.
+
+ Example file format:
+ $ tail out/Debug/gen/base/base_java__dex.desugardeps
+ org/chromium/base/task/SingleThreadTaskRunnerImpl.class
+ <- org/chromium/base/task/SingleThreadTaskRunner.class
+ <- org/chromium/base/task/TaskRunnerImpl.class
+ org/chromium/base/task/TaskRunnerImpl.class
+ <- org/chromium/base/task/TaskRunner.class
+ org/chromium/base/task/TaskRunnerImplJni$1.class
+ <- obj/base/jni_java.turbine.jar:org/chromium/base/JniStaticTestMocker.class
+ org/chromium/base/task/TaskRunnerImplJni.class
+ <- org/chromium/base/task/TaskRunnerImpl$Natives.class
+ """
+ # pylint: enable=line-too-long
dependents_from_dependency = collections.defaultdict(set)
if desugar_dependencies_file and os.path.exists(desugar_dependencies_file):
with open(desugar_dependencies_file, 'r') as f:
+ dependent = None
for line in f:
- dependent, dependency = line.rstrip().split(' -> ')
- dependents_from_dependency[dependency].add(dependent)
+ line = line.rstrip()
+ if line.startswith(' <- '):
+ dependency = line[len(' <- '):]
+ # Note that this is a reversed mapping from the one in CustomD8.java.
+ dependents_from_dependency[dependency].add(dependent)
+ else:
+ dependent = line
return dependents_from_dependency
@@ -522,14 +376,14 @@ def _CreateIntermediateDexFiles(changes, options, tmp_dir, dex_cmd):
strings_changed, non_direct_input_changed)
changes = None
- if changes:
+ if changes is None:
+ required_desugar_classes_set = set()
+ else:
required_desugar_classes_set = _ComputeRequiredDesugarClasses(
changes, options.desugar_dependencies, options.class_inputs,
options.classpath)
logging.debug('Class files needing re-desugar: %d',
len(required_desugar_classes_set))
- else:
- required_desugar_classes_set = set()
class_files = _ExtractClassFiles(changes, tmp_extract_dir,
options.class_inputs,
required_desugar_classes_set)
@@ -540,7 +394,13 @@ def _CreateIntermediateDexFiles(changes, options, tmp_dir, dex_cmd):
# Dex necessary classes into intermediate dex files.
dex_cmd = dex_cmd + ['--intermediate', '--file-per-class-file']
if options.desugar_dependencies and not options.skip_custom_d8:
- dex_cmd += ['--file-tmp-prefix', tmp_extract_dir]
+ # Adding os.sep to remove the entire prefix.
+ dex_cmd += ['--file-tmp-prefix', tmp_extract_dir + os.sep]
+ if changes is None and os.path.exists(options.desugar_dependencies):
+ # Since incremental dexing only ever adds to the desugar_dependencies
+ # file, whenever full dexes are required the .desugardeps files need to
+ # be manually removed.
+ os.unlink(options.desugar_dependencies)
_RunD8(dex_cmd, class_files, options.incremental_dir,
options.warnings_as_errors,
options.show_desugar_default_interface_warnings)
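
[Editor's aside: a usage sketch of the new .desugardeps format parsed by _ParseDesugarDeps, fed a fragment of the example from the docstring above; io.StringIO stands in for the real file:

import collections
import io

def parse_desugar_deps(f):
    dependents_from_dependency = collections.defaultdict(set)
    dependent = None
    for line in f:
        line = line.rstrip()
        if line.startswith(' <- '):
            # Reversed mapping: dependency -> set of dependents.
            dependents_from_dependency[line[len(' <- '):]].add(dependent)
        else:
            dependent = line
    return dependents_from_dependency

text = ('org/chromium/base/task/TaskRunnerImpl.class\n'
        ' <- org/chromium/base/task/TaskRunner.class\n')
deps = parse_desugar_deps(io.StringIO(text))
print(deps['org/chromium/base/task/TaskRunner.class'])
# {'org/chromium/base/task/TaskRunnerImpl.class'}
]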
diff --git a/chromium/build/android/gyp/dex.pydeps b/chromium/build/android/gyp/dex.pydeps
index 23856f3c847..f41626430bc 100644
--- a/chromium/build/android/gyp/dex.pydeps
+++ b/chromium/build/android/gyp/dex.pydeps
@@ -2,7 +2,6 @@
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/dex.pydeps build/android/gyp/dex.py
../../gn_helpers.py
../../print_python_deps.py
-../convert_dex_profile.py
dex.py
util/__init__.py
util/build_utils.py
diff --git a/chromium/build/android/gyp/javac_output_processor.py b/chromium/build/android/gyp/javac_output_processor.py
index 225f9479259..f0a6cd16b57 100755
--- a/chromium/build/android/gyp/javac_output_processor.py
+++ b/chromium/build/android/gyp/javac_output_processor.py
@@ -9,6 +9,7 @@ import os
import pathlib
import re
import sys
+import traceback
from util import build_utils
@@ -106,8 +107,14 @@ class JavacOutputProcessor:
previous_line = next(lines, None)
line = next(lines, None)
while previous_line != None:
- elaborated_lines = self._ElaborateLineForUnknownSymbol(
- previous_line, line)
+ try:
+ elaborated_lines = self._ElaborateLineForUnknownSymbol(
+ previous_line, line)
+ except Exception:
+ elaborated_lines = ["Error in _ElaborateLineForUnknownSymbol ---"]
+ elaborated_lines += traceback.format_exc().splitlines()
+ elaborated_lines += ["--- end _ElaborateLineForUnknownSymbol error"]
+ elaborated_lines += [previous_line]
for elaborated_line in elaborated_lines:
yield elaborated_line
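
[Editor's aside: the change above is a fail-soft wrapper: if elaboration itself throws, the traceback is folded into the javac output rather than aborting the build. The pattern in isolation, where elaborate() is a stand-in that always fails:

import traceback

def elaborate(line):
    raise ValueError('symbol lookup failed')  # simulate an elaboration bug

def safe_elaborate(line):
    try:
        return [elaborate(line)]
    except Exception:
        return (['Error in elaboration ---'] +
                traceback.format_exc().splitlines() +
                ['--- end elaboration error', line])

for out in safe_elaborate('error: cannot find symbol Foo'):
    print(out)
]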
diff --git a/chromium/build/android/gyp/proguard.py b/chromium/build/android/gyp/proguard.py
index 7260d8fae00..76f7d846268 100755
--- a/chromium/build/android/gyp/proguard.py
+++ b/chromium/build/android/gyp/proguard.py
@@ -16,24 +16,11 @@ import zipfile
import dex
import dex_jdk_libs
-from pylib.dex import dex_parser
from util import build_utils
from util import diff_utils
-_API_LEVEL_VERSION_CODE = [
- (21, 'L'),
- (22, 'LollipopMR1'),
- (23, 'M'),
- (24, 'N'),
- (25, 'NMR1'),
- (26, 'O'),
- (27, 'OMR1'),
- (28, 'P'),
- (29, 'Q'),
- (30, 'R'),
- (31, 'S'),
-]
-
+sys.path.insert(1, os.path.dirname(os.path.dirname(__file__)))
+from pylib.dex import dex_parser
def _ParseOptions():
args = build_utils.ExpandFileArgs(sys.argv[1:])
@@ -84,10 +71,6 @@ def _ParseOptions():
parser.add_argument(
'--repackage-classes', help='Package all optimized classes are put in.')
parser.add_argument(
- '--disable-outlining',
- action='store_true',
- help='Disable the outlining optimization provided by R8.')
- parser.add_argument(
'--disable-checks',
action='store_true',
help='Disable -checkdiscard directives and missing symbols check')
@@ -305,11 +288,8 @@ def _OptimizeWithR8(options,
# R8 OOMs with the default xmx=1G.
cmd = build_utils.JavaCmd(options.warnings_as_errors, xmx='2G') + [
- '-Dcom.android.tools.r8.allowTestProguardOptions=1',
'-Dcom.android.tools.r8.disableHorizontalClassMerging=1',
]
- if options.disable_outlining:
- cmd += ['-Dcom.android.tools.r8.disableOutlining=1']
if options.dump_inputs:
cmd += ['-Dcom.android.tools.r8.dumpinputtofile=r8inputs.zip']
cmd += [
@@ -424,7 +404,7 @@ def _OptimizeWithR8(options,
# Mapping files generated by R8 include comments that may break
# some of our tooling so remove those (specifically: apkanalyzer).
out_file.writelines(l for l in in_file if not l.startswith('#'))
- return base_context
+ return split_contexts_by_name
def _OutputKeepRules(r8_path, input_paths, classpath, targets_re_string,
@@ -501,8 +481,8 @@ def _CheckForMissingSymbols(r8_path, dex_files, classpath, warnings_as_errors,
stderr = build_utils.FilterLines(
stderr, '|'.join(re.escape(x) for x in ignored_lines))
if stderr:
- if ' ' in stderr:
- stderr = error_title + """
+ if 'Missing' in stderr:
+ stderr = 'TraceReferences failed: ' + error_title + """
Tip: Build with:
is_java_debug=false
treat_warnings_as_errors=false
@@ -525,7 +505,6 @@ https://chromium.googlesource.com/chromium/src.git/+/main/docs/ui/android/byteco
stderr = ''
return stderr
- logging.debug('cmd: %s', ' '.join(cmd))
build_utils.CheckOutput(cmd,
print_stdout=True,
stderr_filter=stderr_filter,
@@ -566,7 +545,7 @@ def _CombineConfigs(configs,
ret.extend(format_config_contents(config, contents))
- for path, contents in sorted(embedded_configs.items(), key=lambda x: x[0]):
+ for path, contents in sorted(embedded_configs.items()):
ret.extend(format_config_contents(path, contents))
@@ -598,17 +577,27 @@ def _CreateDynamicConfig(options):
return '\n'.join(ret)
-def _ExtractEmbeddedConfigs(jar_paths):
- embedded_configs = {}
- for jar_path in jar_paths:
- with zipfile.ZipFile(jar_path) as z:
- for info in z.infolist():
- if info.is_dir():
- continue
- if info.filename.startswith('META-INF/proguard/'):
- config_path = '{}:{}'.format(jar_path, info.filename)
- embedded_configs[config_path] = z.read(info).decode('utf-8').rstrip()
- return embedded_configs
+def _ExtractEmbeddedConfigs(jar_path, embedded_configs):
+ with zipfile.ZipFile(jar_path) as z:
+ proguard_names = []
+ r8_names = []
+ for info in z.infolist():
+ if info.is_dir():
+ continue
+ if info.filename.startswith('META-INF/proguard/'):
+ proguard_names.append(info.filename)
+ elif info.filename.startswith('META-INF/com.android.tools/r8/'):
+ r8_names.append(info.filename)
+ elif info.filename.startswith('META-INF/com.android.tools/r8-from'):
+ # Assume our version of R8 is always latest.
+ if '-upto-' not in info.filename:
+ r8_names.append(info.filename)
+
+ # Give preference to r8-from-*, then r8/, then proguard/.
+ active = r8_names or proguard_names
+ for filename in active:
+ config_path = '{}:{}'.format(jar_path, filename)
+ embedded_configs[config_path] = z.read(filename).decode('utf-8').rstrip()
def _ContainsDebuggingConfig(config_str):
@@ -625,6 +614,47 @@ def _MaybeWriteStampAndDepFile(options, inputs):
build_utils.WriteDepfile(options.depfile, output, inputs=inputs)
+def _IterParentContexts(context_name, split_contexts_by_name):
+ while context_name:
+ context = split_contexts_by_name[context_name]
+ yield context
+ context_name = context.parent_name
+
+
+def _DoTraceReferencesChecks(options, split_contexts_by_name):
+ # Set of all contexts that are a parent to another.
+ parent_splits_context_names = {
+ c.parent_name
+ for c in split_contexts_by_name.values() if c.parent_name
+ }
+ context_sets = [
+ list(_IterParentContexts(n, split_contexts_by_name))
+ for n in parent_splits_context_names
+ ]
+ # Visit them in order of: base, base+chrome, base+chrome+thing.
+ context_sets.sort(key=lambda x: (len(x), x[0].name))
+
+ # Ensure there are no missing references when considering all dex files.
+ error_title = 'DEX contains references to non-existent symbols after R8.'
+ dex_files = sorted(c.final_output_path
+ for c in split_contexts_by_name.values())
+ _CheckForMissingSymbols(options.r8_path, dex_files, options.classpath,
+ options.warnings_as_errors, error_title)
+
+ for context_set in context_sets:
+ # Ensure there are no references from base -> chrome module, or from
+ # chrome -> feature modules.
+ error_title = (f'DEX within module "{context_set[0].name}" contains '
+ 'reference(s) to symbols within child splits')
+ dex_files = [c.final_output_path for c in context_set]
+ # Each check currently takes about 3 seconds on a fast dev machine, and we
+ # run 3 of them (all, base, base+chrome).
+ # We could run them concurrently, to shave off 5-6 seconds, but would need
+ # to make sure that the order is maintained.
+ _CheckForMissingSymbols(options.r8_path, dex_files, options.classpath,
+ options.warnings_as_errors, error_title)
+
+
def main():
build_utils.InitLogging('PROGUARD_DEBUG')
options = _ParseOptions()
@@ -649,7 +679,10 @@ def main():
# If a jar is part of input no need to include it as library jar.
if p not in libraries and p not in options.input_paths:
libraries.append(p)
- embedded_configs = _ExtractEmbeddedConfigs(options.input_paths + libraries)
+
+ embedded_configs = {}
+ for jar_path in options.input_paths + libraries:
+ _ExtractEmbeddedConfigs(jar_path, embedded_configs)
# ProGuard configs that are derived from flags.
merged_configs = _CombineConfigs(proguard_configs,
@@ -673,27 +706,21 @@ def main():
options.keep_rules_output_path)
return
- base_context = _OptimizeWithR8(options, proguard_configs, libraries,
- dynamic_config_data, print_stdout)
+ # TODO(agrieve): Stop appending to dynamic_config_data once R8 natively
+ # supports finding configs in the "tools" directory.
+ # https://issuetracker.google.com/227983179
+ tools_configs = {
+ k: v
+ for k, v in embedded_configs.items() if 'com.android.tools' in k
+ }
+ dynamic_config_data += '\n' + _CombineConfigs([], None, tools_configs)
+
+ split_contexts_by_name = _OptimizeWithR8(options, proguard_configs, libraries,
+ dynamic_config_data, print_stdout)
if not options.disable_checks:
logging.debug('Running tracereferences')
- all_dex_files = []
- if options.output_path:
- all_dex_files.append(options.output_path)
- if options.dex_dests:
- all_dex_files.extend(options.dex_dests)
- error_title = 'DEX contains references to non-existent symbols after R8.'
- _CheckForMissingSymbols(options.r8_path, all_dex_files, options.classpath,
- options.warnings_as_errors, error_title)
- # Also ensure that base module doesn't have any references to child dex
- # symbols.
- # TODO(agrieve): Remove this check once r8 desugaring is fixed to not put
- # synthesized classes in the base module.
- error_title = 'Base module DEX contains references symbols within DFMs.'
- _CheckForMissingSymbols(options.r8_path, [base_context.final_output_path],
- options.classpath, options.warnings_as_errors,
- error_title)
+ _DoTraceReferencesChecks(options, split_contexts_by_name)
for output in options.extra_mapping_output_paths:
shutil.copy(options.mapping_output, output)
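
[Editor's aside: a self-contained sketch of how _IterParentContexts walks a split back to the base module, using a hypothetical stand-in for the split context objects returned by _OptimizeWithR8:

import collections

SplitContext = collections.namedtuple(
    'SplitContext', ['name', 'parent_name', 'final_output_path'])

split_contexts_by_name = {
    'base': SplitContext('base', None, 'base.dex'),
    'chrome': SplitContext('chrome', 'base', 'chrome.dex'),
    'feature': SplitContext('feature', 'chrome', 'feature.dex'),
}

def iter_parent_contexts(name, by_name):
    while name:
        context = by_name[name]
        yield context
        name = context.parent_name

print([c.name for c in iter_parent_contexts('feature', split_contexts_by_name)])
# ['feature', 'chrome', 'base']

Sorting these chains by (len, name), as _DoTraceReferencesChecks does, yields the base, base+chrome, base+chrome+thing check order described in its comments.]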
diff --git a/chromium/build/android/gyp/proguard.pydeps b/chromium/build/android/gyp/proguard.pydeps
index c1de73b57e8..ebb536bd10c 100644
--- a/chromium/build/android/gyp/proguard.pydeps
+++ b/chromium/build/android/gyp/proguard.pydeps
@@ -2,7 +2,6 @@
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/proguard.pydeps build/android/gyp/proguard.py
../../gn_helpers.py
../../print_python_deps.py
-../convert_dex_profile.py
../pylib/__init__.py
../pylib/dex/__init__.py
../pylib/dex/dex_parser.py
diff --git a/chromium/build/android/gyp/util/resource_utils.py b/chromium/build/android/gyp/util/resource_utils.py
index c78b1a21bb5..db724bfb0f9 100644
--- a/chromium/build/android/gyp/util/resource_utils.py
+++ b/chromium/build/android/gyp/util/resource_utils.py
@@ -704,6 +704,10 @@ def _RenderRootRJavaSource(package, all_resources_by_type, rjava_build_options,
extends_string = 'extends {{ parent_path }}.R.{{ resource_type }} '
dep_path = GetCustomPackagePath(grandparent_custom_package_name)
+ # Don't actually mark fields as "final" or else R8 complains when aapt2 uses
+ # --proguard-conditional-keep-rules. E.g.:
+ # Rule precondition matches static final fields javac has inlined.
+ # Such rules are unsound as the shrinker cannot infer the inlining precisely.
template = Template("""/* AUTO-GENERATED FILE. DO NOT MODIFY. */
package {{ package }};
@@ -712,7 +716,7 @@ public final class R {
{% for resource_type in resource_types %}
public static class {{ resource_type }} """ + extends_string + """ {
{% for e in final_resources[resource_type] %}
- public static final {{ e.java_type }} {{ e.name }} = {{ e.value }};
+ public static {{ e.java_type }} {{ e.name }} = {{ e.value }};
{% endfor %}
{% for e in non_final_resources[resource_type] %}
{% if e.value != '0' %}
diff --git a/chromium/build/android/gyp/write_build_config.py b/chromium/build/android/gyp/write_build_config.py
index 62aa5a82fc3..bf3d6a3825a 100755
--- a/chromium/build/android/gyp/write_build_config.py
+++ b/chromium/build/android/gyp/write_build_config.py
@@ -243,11 +243,7 @@ by compiling sources, or providing them with a prebuilt jar.
* `deps_info['public_deps_configs']`: List of paths to the `.build_config` files
of *direct* dependencies of the current target which are exposed as part of the
-current target's public API. This should be a subset of
-deps_info['deps_configs'].
-
-* `deps_info['ignore_dependency_public_deps']`: If true, 'public_deps' will not
-be collected from the current target's direct deps.
+current target's public API.
* `deps_info['unprocessed_jar_path']`:
Path to the original .jar file for this target, before any kind of processing
@@ -567,13 +563,12 @@ This type corresponds to an Android app bundle (`.aab` file).
--------------- END_MARKDOWN ---------------------------------------------------
"""
-from __future__ import print_function
-
import collections
import itertools
import json
import optparse
import os
+import shutil
import sys
import xml.dom.minidom
@@ -587,6 +582,9 @@ _ROOT_TYPES = ('android_apk', 'java_binary', 'java_annotation_processor',
# Types that should not allow code deps to pass through.
_RESOURCE_TYPES = ('android_assets', 'android_resources', 'system_java_library')
+# Cache of path -> JSON dict.
+_dep_config_cache = {}
+
class OrderedSet(collections.OrderedDict):
@staticmethod
@@ -653,12 +651,15 @@ class AndroidManifest:
return self.manifest.getAttribute('package')
-dep_config_cache = {}
-def GetDepConfig(path):
- if not path in dep_config_cache:
+def GetDepConfigRoot(path):
+ if not path in _dep_config_cache:
with open(path) as jsonfile:
- dep_config_cache[path] = json.load(jsonfile)['deps_info']
- return dep_config_cache[path]
+ _dep_config_cache[path] = json.load(jsonfile)
+ return _dep_config_cache[path]
+
+
+def GetDepConfig(path):
+ return GetDepConfigRoot(path)['deps_info']
def DepsOfType(wanted_type, configs):
@@ -670,13 +671,20 @@ def DepPathsOfType(wanted_type, config_paths):
def GetAllDepsConfigsInOrder(deps_config_paths, filter_func=None):
- def GetDeps(path):
+ def apply_filter(paths):
+ if filter_func:
+ return [p for p in paths if filter_func(GetDepConfig(p))]
+ return paths
+
+ def discover(path):
config = GetDepConfig(path)
- if filter_func and not filter_func(config):
- return []
- return config['deps_configs']
+ all_deps = config['deps_configs'] + config.get('public_deps_configs', [])
+ return apply_filter(all_deps)
- return build_utils.GetSortedTransitiveDependencies(deps_config_paths, GetDeps)
+ deps_config_paths = apply_filter(deps_config_paths)
+ deps_config_paths = build_utils.GetSortedTransitiveDependencies(
+ deps_config_paths, discover)
+ return deps_config_paths
def GetObjectByPath(obj, key_path):
@@ -716,18 +724,6 @@ class Deps:
return self._direct_deps_configs
return DepsOfType(wanted_type, self._direct_deps_configs)
- def DirectAndChildPublicDeps(self, wanted_type=None):
- """Returns direct dependencies and dependencies exported via public_deps of
- direct dependencies.
- """
- dep_paths = set(self._direct_deps_config_paths)
- for direct_dep in self._direct_deps_configs:
- dep_paths.update(direct_dep.get('public_deps_configs', []))
- deps_list = [GetDepConfig(p) for p in dep_paths]
- if wanted_type is None:
- return deps_list
- return DepsOfType(wanted_type, deps_list)
-
def AllConfigPaths(self):
return self._all_deps_config_paths
@@ -751,7 +747,9 @@ class Deps:
if config['is_prebuilt']:
pass
elif config['gradle_treat_as_prebuilt']:
- helper(Deps(config['deps_configs']))
+ all_deps = config['deps_configs'] + config.get(
+ 'public_deps_configs', [])
+ helper(Deps(all_deps))
elif config not in ret:
ret.append(config)
@@ -799,22 +797,23 @@ def _MergeAssets(all_assets):
return create_list(compressed), create_list(uncompressed), locale_paks
-def _ResolveGroups(config_paths):
+def _ResolveGroupsAndPublicDeps(config_paths):
"""Returns a list of configs with all groups inlined."""
- ret = list(config_paths)
- ret_set = set(config_paths)
- while True:
- group_paths = DepPathsOfType('group', ret)
- if not group_paths:
- return ret
- for group_path in group_paths:
- index = ret.index(group_path)
- expanded_config_paths = []
- for deps_config_path in GetDepConfig(group_path)['deps_configs']:
- if not deps_config_path in ret_set:
- expanded_config_paths.append(deps_config_path)
- ret[index:index + 1] = expanded_config_paths
- ret_set.update(expanded_config_paths)
+
+ def helper(config_path):
+ config = GetDepConfig(config_path)
+ if config['type'] == 'group':
+ # Groups combine public_deps with deps_configs, so no need to check
+ # public_config_paths separately.
+ return config['deps_configs']
+ if config['type'] == 'android_resources':
+ # android_resources targets do not support public_deps, but instead treat
+ # all resource deps as public deps.
+ return DepPathsOfType('android_resources', config['deps_configs'])
+
+ return config.get('public_deps_configs', [])
+
+ return build_utils.GetSortedTransitiveDependencies(config_paths, helper)
def _DepsFromPaths(dep_paths,
@@ -856,6 +855,18 @@ def _DepsFromPaths(dep_paths,
return _DepsFromPathsWithFilters(dep_paths, blocklist, allowlist)
+def _FilterConfigPaths(dep_paths, blocklist=None, allowlist=None):
+ if not blocklist and not allowlist:
+ return dep_paths
+ configs = [GetDepConfig(p) for p in dep_paths]
+ if blocklist:
+ configs = [c for c in configs if c['type'] not in blocklist]
+ if allowlist:
+ configs = [c for c in configs if c['type'] in allowlist]
+
+ return [c['path'] for c in configs]
+
+
def _DepsFromPathsWithFilters(dep_paths, blocklist=None, allowlist=None):
"""Resolves all groups and trims dependency branches that we never want.
@@ -868,17 +879,17 @@ def _DepsFromPathsWithFilters(dep_paths, blocklist=None, allowlist=None):
about (i.e. we wish to prune all other branches that do not start from one of
these).
"""
- group_paths = DepPathsOfType('group', dep_paths)
- config_paths = dep_paths
- if group_paths:
- config_paths = _ResolveGroups(dep_paths) + group_paths
- configs = [GetDepConfig(p) for p in config_paths]
- if blocklist:
- configs = [c for c in configs if c['type'] not in blocklist]
+ # Filter both before and after so that public_deps of blocked targets are not
+ # added.
+ allowlist_with_groups = None
if allowlist:
- configs = [c for c in configs if c['type'] in allowlist]
+ allowlist_with_groups = set(allowlist)
+ allowlist_with_groups.add('group')
+ dep_paths = _FilterConfigPaths(dep_paths, blocklist, allowlist_with_groups)
+ dep_paths = _ResolveGroupsAndPublicDeps(dep_paths)
+ dep_paths = _FilterConfigPaths(dep_paths, blocklist, allowlist)
- return Deps([c['path'] for c in configs])
+ return Deps(dep_paths)
def _ExtractSharedLibsFromRuntimeDeps(runtime_deps_file):
@@ -934,10 +945,23 @@ def _CompareClasspathPriority(dep):
return 1 if dep.get('low_classpath_priority') else 0
+def _CopyBuildConfigsForDebugging(debug_dir):
+ shutil.rmtree(debug_dir, ignore_errors=True)
+ os.makedirs(debug_dir)
+ for src_path in _dep_config_cache:
+ dst_path = os.path.join(debug_dir, src_path)
+ assert dst_path.startswith(debug_dir), dst_path
+ os.makedirs(os.path.dirname(dst_path), exist_ok=True)
+ shutil.copy(src_path, dst_path)
+ print(f'Copied {len(_dep_config_cache)} .build_config.json into {debug_dir}')
+
+
def main(argv):
parser = optparse.OptionParser()
build_utils.AddDepfileOption(parser)
parser.add_option('--build-config', help='Path to build_config output.')
+ parser.add_option('--store-deps-for-debugging-to',
+ help='Path to copy all transitive build config files to.')
parser.add_option(
'--type',
help='Type of this target (e.g. android_library).')
@@ -989,11 +1013,6 @@ def main(argv):
parser.add_option('--public-deps-configs',
help='GN list of config files of deps which are exposed as '
'part of the target\'s public API.')
- parser.add_option(
- '--ignore-dependency-public-deps',
- action='store_true',
- help='If true, \'public_deps\' will not be collected from the current '
- 'target\'s direct deps.')
parser.add_option('--aar-path', help='Path to containing .aar file.')
parser.add_option('--device-jar-path', help='Path to .jar for dexing.')
parser.add_option('--host-jar-path', help='Path to .jar for java_binary.')
@@ -1262,9 +1281,13 @@ def main(argv):
}
deps_configs_paths = build_utils.ParseGnList(options.deps_configs)
+ public_deps_configs_paths = build_utils.ParseGnList(
+ options.public_deps_configs)
+ deps_configs_paths += public_deps_configs_paths
deps = _DepsFromPaths(deps_configs_paths,
options.type,
recursive_resource_deps=options.recursive_resource_deps)
+ public_deps = _DepsFromPaths(public_deps_configs_paths, options.type)
processor_deps = _DepsFromPaths(
build_utils.ParseGnList(options.annotation_processor_configs or ''),
options.type, filter_root_targets=False)
@@ -1281,18 +1304,20 @@ def main(argv):
allowlist=['java_library'])
all_inputs.extend(recursive_java_deps.AllConfigPaths())
- direct_deps = deps.Direct()
system_library_deps = deps.Direct('system_java_library')
all_deps = deps.All()
all_library_deps = deps.All('java_library')
- all_resources_deps = deps.All('android_resources')
if options.type == 'java_library':
- java_library_deps = _DepsFromPathsWithFilters(
- deps_configs_paths, allowlist=['android_resources'])
- # for java libraries, we only care about resources that are directly
- # reachable without going through another java_library.
- all_resources_deps = java_library_deps.All('android_resources')
+ # For Java libraries, restrict to resource targets that are direct deps, or
+ # are indirect via other resource targets.
+ # The indirect-through-other-targets ones are picked up because
+ # _ResolveGroupsAndPublicDeps() treats resource deps of resource targets as
+ # public_deps.
+ all_resources_deps = deps.Direct('android_resources')
+ else:
+ all_resources_deps = deps.All('android_resources')
+
if options.type == 'android_resources' and options.recursive_resource_deps:
# android_resources targets that want recursive resource deps also need to
# collect package_names from all library deps. This ensures the R.java files
@@ -1314,8 +1339,8 @@ def main(argv):
base_module_build_config = None
if options.base_module_build_config:
- with open(options.base_module_build_config, 'r') as f:
- base_module_build_config = json.load(f)
+ base_module_build_config = GetDepConfigRoot(
+ options.base_module_build_config)
# Initialize some common config.
# Any value that needs to be queryable by dependents must go within deps_info.
@@ -1325,7 +1350,6 @@ def main(argv):
'path': options.build_config,
'type': options.type,
'gn_target': options.gn_target,
- 'deps_configs': [d['path'] for d in direct_deps],
'chromium_code': not options.non_chromium_code,
},
# Info needed only by generate_gradle.py.
@@ -1334,6 +1358,24 @@ def main(argv):
deps_info = config['deps_info']
gradle = config['gradle']
+ # The paths we record as deps can differ from deps_config_paths:
+ # 1) Paths can be removed when blocked by _ROOT_TYPES / _RESOURCE_TYPES.
+ # 2) Paths can be added when promoted from group deps or public_deps of deps.
+ # Deps are promoted from groups/public_deps in order to make the filtering
+ # of 1) work through group() targets (which themselves are not resource
+ # targets, but should be treated as such when depended on by a resource
+ # target. A more involved filtering implementation could work to maintain
+ # the semantics of 1) without the need to promote deps, but we've avoided
+ # such an undertaking so far.
+ public_deps_set = set()
+ if public_deps_configs_paths:
+ deps_info['public_deps_configs'] = [d['path'] for d in public_deps.Direct()]
+ public_deps_set = set(deps_info['public_deps_configs'])
+
+ deps_info['deps_configs'] = [
+ d['path'] for d in deps.Direct() if d['path'] not in public_deps_set
+ ]
+
if options.type == 'android_apk' and options.tested_apk_config:
tested_apk_deps = Deps([options.tested_apk_config])
tested_apk_config = tested_apk_deps.Direct()[0]
@@ -1361,21 +1403,24 @@ def main(argv):
deps_info['java_sources_file'] = options.java_sources_file
if is_java_target:
- if options.bundled_srcjars:
- gradle['bundled_srcjars'] = deps_info['bundled_srcjars']
-
- gradle['dependent_android_projects'] = []
- gradle['dependent_java_projects'] = []
- gradle['dependent_prebuilt_jars'] = deps.GradlePrebuiltJarPaths()
-
if options.main_class:
deps_info['main_class'] = options.main_class
+ dependent_prebuilt_jars = deps.GradlePrebuiltJarPaths()
+ dependent_prebuilt_jars.sort()
+ if dependent_prebuilt_jars:
+ gradle['dependent_prebuilt_jars'] = dependent_prebuilt_jars
+
+ dependent_android_projects = []
+ dependent_java_projects = []
for c in deps.GradleLibraryProjectDeps():
if c['requires_android']:
- gradle['dependent_android_projects'].append(c['path'])
+ dependent_android_projects.append(c['path'])
else:
- gradle['dependent_java_projects'].append(c['path'])
+ dependent_java_projects.append(c['path'])
+
+ gradle['dependent_android_projects'] = dependent_android_projects
+ gradle['dependent_java_projects'] = dependent_java_projects
if options.r_text_path:
deps_info['r_text_path'] = options.r_text_path
@@ -1383,14 +1428,16 @@ def main(argv):
# TODO(tiborg): Remove creation of JNI info for type group and java_library
# once we can generate the JNI registration based on APK / module targets as
# opposed to groups and libraries.
- if is_apk_or_module_target or options.type in (
- 'group', 'java_library', 'junit_binary'):
+ if is_apk_or_module_target or options.type in ('group', 'java_library',
+ 'junit_binary', 'dist_aar'):
deps_info['jni'] = {}
all_java_sources = [c['java_sources_file'] for c in all_library_deps
if 'java_sources_file' in c]
if options.java_sources_file:
all_java_sources.append(options.java_sources_file)
+ if is_apk_or_module_target or options.type in ('group', 'java_library',
+ 'junit_binary'):
if options.apk_proto_resources:
deps_info['proto_resources_path'] = options.apk_proto_resources
@@ -1440,9 +1487,6 @@ def main(argv):
if options.unprocessed_jar_path:
deps_info['unprocessed_jar_path'] = options.unprocessed_jar_path
deps_info['interface_jar_path'] = options.interface_jar_path
- if options.public_deps_configs:
- deps_info['public_deps_configs'] = build_utils.ParseGnList(
- options.public_deps_configs)
if options.device_jar_path:
deps_info['device_jar_path'] = options.device_jar_path
if options.host_jar_path:
@@ -1543,22 +1587,6 @@ def main(argv):
]
deps_info['dependency_r_txt_files'] = r_text_files
- # For feature modules, remove any resources that already exist in the base
- # module.
- if base_module_build_config:
- dependency_zips = [
- c for c in dependency_zips
- if c not in base_module_build_config['deps_info']['dependency_zips']
- ]
- dependency_zip_overlays = [
- c for c in dependency_zip_overlays if c not in
- base_module_build_config['deps_info']['dependency_zip_overlays']
- ]
- extra_package_names = [
- c for c in extra_package_names if c not in
- base_module_build_config['deps_info']['extra_package_names']
- ]
-
if options.type == 'android_apk' and options.tested_apk_config:
config['deps_info']['arsc_package_name'] = (
tested_apk_config['package_name'])
@@ -1571,17 +1599,23 @@ def main(argv):
if options.res_size_info:
config['deps_info']['res_size_info'] = options.res_size_info
+  # Safe to sort: the build checks that non-overlay resources have no overlap.
+ dependency_zips.sort()
config['deps_info']['dependency_zips'] = dependency_zips
config['deps_info']['dependency_zip_overlays'] = dependency_zip_overlays
+  # Order doesn't matter, so sort to make the output stable.
+ extra_package_names.sort()
config['deps_info']['extra_package_names'] = extra_package_names
# These are .jars to add to javac classpath but not to runtime classpath.
extra_classpath_jars = build_utils.ParseGnList(options.extra_classpath_jars)
if extra_classpath_jars:
+ extra_classpath_jars.sort()
deps_info['extra_classpath_jars'] = extra_classpath_jars
mergeable_android_manifests = build_utils.ParseGnList(
options.mergeable_android_manifests)
+ mergeable_android_manifests.sort()
if mergeable_android_manifests:
deps_info['mergeable_android_manifests'] = mergeable_android_manifests
@@ -1593,13 +1627,8 @@ def main(argv):
if is_java_target:
- if options.ignore_dependency_public_deps:
- classpath_direct_deps = deps.Direct()
- classpath_direct_library_deps = deps.Direct('java_library')
- else:
- classpath_direct_deps = deps.DirectAndChildPublicDeps()
- classpath_direct_library_deps = deps.DirectAndChildPublicDeps(
- 'java_library')
+ classpath_direct_deps = deps.Direct()
+ classpath_direct_library_deps = deps.Direct('java_library')
# The classpath used to compile this target when annotation processors are
# present.
@@ -1813,7 +1842,7 @@ def main(argv):
deps_info['extra_main_r_text_files'] = sorted(extra_main_r_text_files)
if is_apk_or_module_target or options.type in ('group', 'java_library',
- 'junit_binary'):
+ 'junit_binary', 'dist_aar'):
deps_info['jni']['all_source'] = sorted(set(all_java_sources))
system_jars = [c['unprocessed_jar_path'] for c in system_library_deps]
@@ -1942,8 +1971,8 @@ def main(argv):
config['javac']['processor_classpath'] += [
c['host_jar_path'] for c in processor_deps.All('java_library')
]
- config['javac']['processor_classes'] = [
- c['main_class'] for c in processor_deps.Direct()]
+ config['javac']['processor_classes'] = sorted(
+ c['main_class'] for c in processor_deps.Direct())
deps_info['javac_full_classpath'] = list(javac_full_classpath)
deps_info['javac_full_interface_classpath'] = list(
javac_full_interface_classpath)
@@ -1961,8 +1990,8 @@ def main(argv):
deps_info['javac_full_interface_classpath'] = list(
javac_full_interface_classpath)
- if options.type in ('android_apk', 'dist_jar', 'android_app_bundle_module',
- 'android_app_bundle'):
+ if options.type in ('android_apk', 'android_app_bundle',
+ 'android_app_bundle_module', 'dist_aar', 'dist_jar'):
deps_info['device_classpath'] = device_classpath
if options.add_view_trace_events:
trace_event_rewritten_device_classpath = []
@@ -2010,17 +2039,22 @@ def main(argv):
if options.secondary_abi_shared_libraries_runtime_deps:
secondary_abi_library_paths = _ExtractSharedLibsFromRuntimeDeps(
options.secondary_abi_shared_libraries_runtime_deps)
+ secondary_abi_library_paths.sort()
all_inputs.append(options.secondary_abi_shared_libraries_runtime_deps)
native_library_placeholder_paths = build_utils.ParseGnList(
options.native_lib_placeholders)
+ native_library_placeholder_paths.sort()
secondary_native_library_placeholder_paths = build_utils.ParseGnList(
options.secondary_native_lib_placeholders)
+ secondary_native_library_placeholder_paths.sort()
loadable_modules = build_utils.ParseGnList(options.loadable_modules)
+ loadable_modules.sort()
secondary_abi_loadable_modules = build_utils.ParseGnList(
options.secondary_abi_loadable_modules)
+ secondary_abi_loadable_modules.sort()
config['native'] = {
'libraries':
@@ -2064,6 +2098,7 @@ def main(argv):
if 'java_resources_jar' in d]
java_resources_jars = [jar for jar in java_resources_jars
if jar not in tested_apk_resource_jars]
+ java_resources_jars.sort()
config['java_resources_jars'] = java_resources_jars
if options.java_resources_jar_path:
@@ -2074,6 +2109,9 @@ def main(argv):
# are not duplicated on the feature module.
if base_module_build_config:
base = base_module_build_config
+ RemoveObjDups(config, base, 'deps_info', 'dependency_zips')
+ RemoveObjDups(config, base, 'deps_info', 'dependency_zip_overlays')
+ RemoveObjDups(config, base, 'deps_info', 'extra_package_names')
RemoveObjDups(config, base, 'deps_info', 'device_classpath')
RemoveObjDups(config, base, 'deps_info', 'javac_full_classpath')
RemoveObjDups(config, base, 'deps_info', 'javac_full_interface_classpath')
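
The three new RemoveObjDups calls take over the feature-module dedup that the deleted block earlier in this patch did by hand: entries already carried by the base module's config are stripped from the feature module's list. The helper is defined elsewhere in write_build_config.py; a plausible sketch of its behavior (inferred from the call sites, not copied from the source):

    def remove_obj_dups(obj, base_obj, *path):
        # Walk both dicts down to the parent of the target list.
        for key in path[:-1]:
            obj = obj[key]
            base_obj = base_obj[key]
        key = path[-1]
        # Drop anything the base module already carries.
        base_entries = set(base_obj.get(key, []))
        obj[key] = [x for x in obj.get(key, []) if x not in base_entries]

    cfg = {'deps_info': {'dependency_zips': ['base.zip', 'feature.zip']}}
    base = {'deps_info': {'dependency_zips': ['base.zip']}}
    remove_obj_dups(cfg, base, 'deps_info', 'dependency_zips')
    assert cfg['deps_info']['dependency_zips'] == ['feature.zip']
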
@@ -2098,15 +2136,19 @@ def main(argv):
  # Used by bytecode_processor to give a better error message when missing
# deps are found.
- config['deps_info']['javac_full_classpath_targets'] = [
- jar_to_target[x] for x in deps_info['javac_full_classpath']
- ]
+ config['deps_info']['javac_full_classpath_targets'] = sorted(
+ jar_to_target[x] for x in deps_info['javac_full_classpath'])
build_utils.WriteJson(config, options.build_config, only_if_changed=True)
if options.depfile:
build_utils.WriteDepfile(options.depfile, options.build_config,
sorted(set(all_inputs)))
+
+ if options.store_deps_for_debugging_to:
+ GetDepConfig(options.build_config) # Add it to cache.
+ _CopyBuildConfigsForDebugging(options.store_deps_for_debugging_to)
+
return 0
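
Most of the new .sort() calls in this patch exist so that the JSON written by build_utils.WriteJson above is byte-stable across runs: with only_if_changed=True the output file is presumably only replaced when its serialized contents differ, so deterministic list order keeps downstream build steps from rerunning spuriously. A rough sketch of that write pattern (an assumption about build_utils, not its actual code):

    import json
    import os

    def write_json_if_changed(obj, path):
        # sort_keys makes dict order deterministic; list order must already be
        # deterministic, which is what the .sort() calls guarantee.
        new_contents = json.dumps(obj, indent=2, sort_keys=True)
        if os.path.exists(path):
            with open(path) as f:
                if f.read() == new_contents:
                    return  # Unchanged: keep the old file and its mtime.
        with open(path, 'w') as f:
            f.write(new_contents)
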
diff --git a/chromium/build/android/gyp/write_native_libraries_java.py b/chromium/build/android/gyp/write_native_libraries_java.py
index 3200145d57d..9570bc88273 100755
--- a/chromium/build/android/gyp/write_native_libraries_java.py
+++ b/chromium/build/android/gyp/write_native_libraries_java.py
@@ -85,17 +85,19 @@ def main():
options = parser.parse_args(build_utils.ExpandFileArgs(sys.argv[1:]))
- assert (options.enable_chromium_linker or not options.load_library_from_apk)
-
- native_libraries_list = []
+ native_libraries = []
if options.main_component_library:
- native_libraries_list.append(
- _FormatLibraryName(options.main_component_library))
+ native_libraries.append(options.main_component_library)
elif options.native_libraries_list:
with open(options.native_libraries_list) as f:
- for path in f:
- path = path.strip()
- native_libraries_list.append(_FormatLibraryName(path))
+ native_libraries.extend(l.strip() for l in f)
+
+ if options.enable_chromium_linker and len(native_libraries) > 1:
+ sys.stderr.write(
+ 'Multiple libraries not supported when using chromium linker. Found:\n')
+ sys.stderr.write('\n'.join(native_libraries))
+ sys.stderr.write('\n')
+ sys.exit(1)
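
The replaced assert only tied the linker flag to load_library_from_apk; the new block instead enforces a single library whenever the chromium linker is enabled, printing the offending list before exiting. Extracted as a standalone check (the library names are hypothetical):

    import sys

    # Hypothetical inputs standing in for the parsed options.
    enable_chromium_linker = True
    native_libraries = ['libmonochrome.so', 'libextra.so']

    if enable_chromium_linker and len(native_libraries) > 1:
        sys.stderr.write('Multiple libraries not supported when using '
                         'chromium linker. Found:\n')
        sys.stderr.write('\n'.join(native_libraries))
        sys.stderr.write('\n')
        sys.exit(1)
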
def bool_str(value):
if value:
@@ -109,7 +111,7 @@ def main():
'USE_LINKER': bool_str(options.enable_chromium_linker),
'USE_LIBRARY_IN_ZIP_FILE': bool_str(options.load_library_from_apk),
'USE_MODERN_LINKER': bool_str(options.use_modern_linker),
- 'LIBRARIES': ','.join(native_libraries_list),
+ 'LIBRARIES': ','.join(_FormatLibraryName(n) for n in native_libraries),
'CPU_FAMILY': options.cpu_family,
}
with build_utils.AtomicOutput(options.output) as f: