summaryrefslogtreecommitdiffstats
path: root/chromium/build/android/gyp
diff options
context:
space:
mode:
authorAllan Sandfeld Jensen <allan.jensen@qt.io>2020-10-06 12:48:11 +0200
committerAllan Sandfeld Jensen <allan.jensen@qt.io>2020-10-13 09:33:43 +0000
commit7b5b123ac58f58ffde0f4f6e488bcd09aa4decd3 (patch)
treefa14ba0ca8d2683ba2efdabd246dc9b18a1229c6 /chromium/build/android/gyp
parent79b4f909db1049fca459c07cca55af56a9b54fe3 (diff)
BASELINE: Update Chromium to 84.0.4147.141
Change-Id: Ib85eb4cfa1cbe2b2b81e5022c8cad5c493969535 Reviewed-by: Allan Sandfeld Jensen <allan.jensen@qt.io>
Diffstat (limited to 'chromium/build/android/gyp')
-rwxr-xr-xchromium/build/android/gyp/apkbuilder.py269
-rw-r--r--chromium/build/android/gyp/apkbuilder.pydeps1
-rwxr-xr-xchromium/build/android/gyp/assert_static_initializers.py29
-rwxr-xr-xchromium/build/android/gyp/bytecode_processor.py12
-rwxr-xr-xchromium/build/android/gyp/compile_java.py4
-rwxr-xr-xchromium/build/android/gyp/compile_resources.py377
-rw-r--r--chromium/build/android/gyp/compile_resources.pydeps1
-rwxr-xr-xchromium/build/android/gyp/create_app_bundle.py8
-rwxr-xr-xchromium/build/android/gyp/create_bundle_wrapper_script.py7
-rwxr-xr-xchromium/build/android/gyp/create_size_info_files.py26
-rwxr-xr-xchromium/build/android/gyp/dex.py43
-rwxr-xr-xchromium/build/android/gyp/dex_jdk_libs.py59
-rw-r--r--chromium/build/android/gyp/dex_jdk_libs.pydeps6
-rwxr-xr-xchromium/build/android/gyp/dexsplitter.py10
-rwxr-xr-xchromium/build/android/gyp/extract_unwind_tables.py16
-rwxr-xr-xchromium/build/android/gyp/extract_unwind_tables_tests.py10
-rwxr-xr-xchromium/build/android/gyp/lint.py308
-rw-r--r--chromium/build/android/gyp/lint.pydeps23
-rwxr-xr-xchromium/build/android/gyp/prepare_resources.py4
-rwxr-xr-xchromium/build/android/gyp/proguard.py184
-rwxr-xr-xchromium/build/android/gyp/util/diff_utils.py22
-rw-r--r--chromium/build/android/gyp/util/manifest_utils.py30
-rw-r--r--chromium/build/android/gyp/util/protoresources.py308
-rw-r--r--chromium/build/android/gyp/util/resource_utils.py105
-rwxr-xr-xchromium/build/android/gyp/util/resource_utils_test.py17
-rwxr-xr-xchromium/build/android/gyp/write_build_config.py183
26 files changed, 1448 insertions, 614 deletions
diff --git a/chromium/build/android/gyp/apkbuilder.py b/chromium/build/android/gyp/apkbuilder.py
index fe6e4e1ec71..7f8403919ca 100755
--- a/chromium/build/android/gyp/apkbuilder.py
+++ b/chromium/build/android/gyp/apkbuilder.py
@@ -18,6 +18,7 @@ import zlib
import finalize_apk
from util import build_utils
+from util import diff_utils
from util import zipalign
# Input dex.jar files are zipaligned.
@@ -57,6 +58,9 @@ def _ParseArgs(args):
default='apk', help='Specify output format.')
parser.add_argument('--dex-file',
help='Path to the classes.dex to use')
+ parser.add_argument(
+ '--jdk-libs-dex-file',
+ help='Path to classes.dex created by dex_jdk_libs.py')
parser.add_argument('--uncompress-dex', action='store_true',
help='Store .dex files uncompressed in the APK')
parser.add_argument('--native-libs',
@@ -107,6 +111,33 @@ def _ParseArgs(args):
'--best-compression',
action='store_true',
help='Use zip -9 rather than zip -1')
+ parser.add_argument(
+ '--stamp', help='If passed, the script touches the passed in stamp file.')
+ parser.add_argument(
+ '--expected-native-libs-and-assets',
+ help='Expected list of native libraries and assets.')
+ parser.add_argument(
+ '--native-libs-and-assets-expectation-failure-file',
+ help='Write to this file if the expected list of native libraries and '
+ 'assets does not match the actual list.')
+ parser.add_argument(
+ '--only-verify-expectations',
+ action="store_true",
+ help='When passed, the script ends execution after verifying '
+ 'expectations.')
+ parser.add_argument(
+ '--fail-on-expectations',
+ action="store_true",
+ help='When passed, fails the build on libraries and assets expectation '
+ 'mismatches.')
+ parser.add_argument(
+ '--library-always-compress',
+ action='append',
+ help='The list of library files that we always compress.')
+ parser.add_argument(
+ '--library-renames',
+ action='append',
help='The list of library files that get "crazy." prepended to their names.')
options = parser.parse_args(args)
options.assets = build_utils.ParseGnList(options.assets)
options.uncompressed_assets = build_utils.ParseGnList(
@@ -119,6 +150,9 @@ def _ParseArgs(args):
options.native_libs = build_utils.ParseGnList(options.native_libs)
options.secondary_native_libs = build_utils.ParseGnList(
options.secondary_native_libs)
+ options.library_always_compress = build_utils.ParseGnList(
+ options.library_always_compress)
+ options.library_renames = build_utils.ParseGnList(options.library_renames)
# --apksigner-jar, --zipalign-path, --key-xxx arguments are
# required when building an APK, but not a bundle module.
@@ -145,6 +179,9 @@ def _ParseArgs(args):
and options.secondary_android_abi):
raise Exception('Must specify --is-multi-abi with both --android-abi '
'and --secondary-android-abi.')
+ if options.only_verify_expectations and not options.stamp:
+ raise Exception('Must specify --stamp when using '
+ '--only-verify-expectations.')
return options
@@ -181,14 +218,25 @@ def _ExpandPaths(paths):
return ret
-def _AddAssets(apk, path_tuples, fast_align, disable_compression=False):
- """Adds the given paths to the apk.
+def _GetAssetsToAdd(path_tuples,
+ fast_align,
+ disable_compression=False,
+ allow_reads=True):
+ """Returns the list of file_detail tuples for assets in the apk.
Args:
- apk: ZipFile to write to.
- paths: List of paths (with optional :zipPath suffix) to add.
+ path_tuples: List of src_path, dest_path tuples to add.
+ fast_align: Whether to perform alignment in python zipfile (alternatively
+ alignment can be done using the zipalign utility out of band).
disable_compression: Whether to disable compression.
+ allow_reads: If false, we do not try to read the files from disk (to find
+ their size for example).
+
+ Returns: A list of (apk_path, src_path, compress, alignment) tuples
+ representing what and how assets are added.
"""
+ assets_to_add = []
+
# Group all uncompressed assets together in the hope that it will increase
# locality of mmap'ed files.
for target_compress in (False, True):
@@ -199,56 +247,120 @@ def _AddAssets(apk, path_tuples, fast_align, disable_compression=False):
if target_compress == compress:
# AddToZipHermetic() uses this logic to avoid growing small files.
# We need it here in order to set alignment correctly.
- if compress and os.path.getsize(src_path) < 16:
+ if allow_reads and compress and os.path.getsize(src_path) < 16:
compress = False
apk_path = 'assets/' + dest_path
- try:
- apk.getinfo(apk_path)
- # Should never happen since write_build_config.py handles merging.
- raise Exception('Multiple targets specified the asset path: %s' %
- apk_path)
- except KeyError:
- zipalign.AddToZipHermetic(
- apk,
- apk_path,
- src_path=src_path,
- compress=compress,
- alignment=0 if compress and not fast_align else 4)
-
-
-def _AddNativeLibraries(out_apk, native_libs, android_abi, uncompress,
- fast_align):
- """Add native libraries to APK."""
- has_crazy_linker = any(
- 'android_linker' in os.path.basename(p) for p in native_libs)
- has_monochrome = any('monochrome' in os.path.basename(p) for p in native_libs)
+ alignment = 0 if compress and not fast_align else 4
+ assets_to_add.append((apk_path, src_path, compress, alignment))
+ return assets_to_add
+
+
+def _AddFiles(apk, details):
+ """Adds files to the apk.
+
+ Args:
+ apk: path to APK to add to.
+ details: A list of file detail tuples (src_path, apk_path, compress,
+ alignment) representing what and how files are added to the APK.
+ """
+ for apk_path, src_path, compress, alignment in details:
+ # This check is only relevant for assets, but it should not matter if it is
+ # checked for the whole list of files.
+ try:
+ apk.getinfo(apk_path)
+ # Should never happen since write_build_config.py handles merging.
+ raise Exception(
+ 'Multiple targets specified the asset path: %s' % apk_path)
+ except KeyError:
+ zipalign.AddToZipHermetic(
+ apk,
+ apk_path,
+ src_path=src_path,
+ compress=compress,
+ alignment=alignment)
+
+
+def _GetNativeLibrariesToAdd(native_libs, android_abi, uncompress, fast_align,
+ lib_always_compress, lib_renames):
+ """Returns the list of file_detail tuples for native libraries in the apk.
+
+ Returns: A list of (apk_path, src_path, compress, alignment) tuples
+ representing what and how native libraries are added.
+ """
+ libraries_to_add = []
+
for path in native_libs:
basename = os.path.basename(path)
- compress = True
- if uncompress and os.path.splitext(basename)[1] == '.so':
- # Trichrome
- if has_crazy_linker and has_monochrome:
- compress = False
- elif ('android_linker' not in basename
- and (not has_crazy_linker or 'clang_rt' not in basename)
- and (not has_crazy_linker or 'crashpad_handler' not in basename)):
- compress = False
- if has_crazy_linker and not has_monochrome:
- basename = 'crazy.' + basename
+ compress = not uncompress or any(lib_name in basename
+ for lib_name in lib_always_compress)
+ rename = any(lib_name in basename for lib_name in lib_renames)
+ if rename:
+ basename = 'crazy.' + basename
lib_android_abi = android_abi
if path.startswith('android_clang_arm64_hwasan/'):
lib_android_abi = 'arm64-v8a-hwasan'
apk_path = 'lib/%s/%s' % (lib_android_abi, basename)
- zipalign.AddToZipHermetic(
- out_apk,
- apk_path,
- src_path=path,
- compress=compress,
- alignment=0 if compress and not fast_align else 0x1000)
+ alignment = 0 if compress and not fast_align else 0x1000
+ libraries_to_add.append((apk_path, path, compress, alignment))
+
+ return libraries_to_add
+
+
+def _VerifyNativeLibsAndAssets(
+ native_libs, assets, expectation_file_path,
+ unexpected_native_libs_and_assets_failure_file_path, fail_on_mismatch):
+ """Verifies the native libraries and assets are as expected.
+
+ Check that the native libraries and assets being added are consistent with
+ the expectation file.
+ """
+
+ native_libs = sorted(native_libs)
+ assets = sorted(assets)
+
+ with tempfile.NamedTemporaryFile() as generated_output:
+ for apk_path, _, compress, alignment in native_libs + assets:
+ generated_output.write('apk_path=%s, compress=%s, alignment=%s\n' %
+ (apk_path, compress, alignment))
+
+ generated_output.flush()
+
+ msg = diff_utils.DiffFileContents(
+ expectation_file_path, generated_output.name, show_files_compared=False)
+ if not msg:
+ return
+
+ msg_header = """\
+Native Libraries and Assets expectations file needs updating. For details see:
+https://chromium.googlesource.com/chromium/src/+/HEAD/chrome/android/java/README.md
+"""
+ sys.stderr.write(msg_header)
+ sys.stderr.write(msg)
+ if unexpected_native_libs_and_assets_failure_file_path:
+ build_utils.MakeDirectory(
+ os.path.dirname(unexpected_native_libs_and_assets_failure_file_path))
+ with open(unexpected_native_libs_and_assets_failure_file_path, 'w') as f:
+ f.write(msg_header)
+ f.write(msg)
+
+ if fail_on_mismatch:
+ sys.exit(1)
+
+
+def _MaybeWriteDepAndStampFiles(options, depfile_deps):
+ if options.stamp:
+ build_utils.Touch(options.stamp)
+ if options.depfile:
+ if options.only_verify_expectations:
+ output = options.stamp
+ else:
+ output = options.output_apk
+ build_utils.WriteDepfile(
+ options.depfile, output, inputs=depfile_deps, add_pydeps=False)
def main(args):
@@ -318,6 +430,50 @@ def main(args):
apk_root_dir = ''
apk_dex_dir = ''
+ def _GetAssetDetails(assets, uncompressed_assets, fast_align, allow_reads):
+ asset_details = _GetAssetsToAdd(
+ assets, fast_align, disable_compression=False, allow_reads=allow_reads)
+ asset_details.extend(
+ _GetAssetsToAdd(
+ uncompressed_assets,
+ fast_align,
+ disable_compression=True,
+ allow_reads=allow_reads))
+ return asset_details
+
+ # We compute expectations without reading the files. This allows us to check
+ # expectations for different targets by just generating their build_configs
+ # and not have to first generate all the actual files and all their
+ # dependencies (for example by just passing --only-verify-expectations).
+ expectation_asset_details = _GetAssetDetails(
+ assets, uncompressed_assets, fast_align, allow_reads=False)
+
+ libs_to_add = _GetNativeLibrariesToAdd(
+ native_libs, options.android_abi, options.uncompress_shared_libraries,
+ fast_align, options.library_always_compress, options.library_renames)
+ if options.secondary_android_abi:
+ libs_to_add.extend(
+ _GetNativeLibrariesToAdd(
+ secondary_native_libs, options.secondary_android_abi,
+ options.uncompress_shared_libraries, fast_align,
+ options.library_always_compress, options.library_renames))
+
+ if options.expected_native_libs_and_assets:
+ _VerifyNativeLibsAndAssets(
+ libs_to_add, expectation_asset_details,
+ options.expected_native_libs_and_assets,
+ options.native_libs_and_assets_expectation_failure_file,
+ options.fail_on_expectations)
+ if options.only_verify_expectations:
+ _MaybeWriteDepAndStampFiles(options, depfile_deps)
+ return
+
+ # If we are past this point, we are going to actually create the final apk so
+ # we should recompute asset details again but maybe perform some optimizations
+ # based on the size of the files on disk.
+ assets_to_add = _GetAssetDetails(
+ assets, uncompressed_assets, fast_align, allow_reads=True)
+
# Targets generally do not depend on apks, so no need for only_if_changed.
with build_utils.AtomicOutput(options.output_apk, only_if_changed=False) as f:
with zipfile.ZipFile(options.resource_apk) as resource_apk, \
@@ -348,37 +504,39 @@ def main(args):
# 2. Assets
logging.debug('Adding assets/')
- _AddAssets(out_apk, assets, fast_align, disable_compression=False)
- _AddAssets(
- out_apk, uncompressed_assets, fast_align, disable_compression=True)
+ _AddFiles(out_apk, assets_to_add)
# 3. Dex files
logging.debug('Adding classes.dex')
if options.dex_file:
with open(options.dex_file) as dex_file_obj:
if options.dex_file.endswith('.dex'):
+ max_dex_number = 1
# This is the case for incremental_install=true.
add_to_zip(
apk_dex_dir + 'classes.dex',
dex_file_obj.read(),
compress=not options.uncompress_dex)
else:
+ max_dex_number = 0
with zipfile.ZipFile(dex_file_obj) as dex_zip:
for dex in (d for d in dex_zip.namelist() if d.endswith('.dex')):
+ max_dex_number += 1
add_to_zip(
apk_dex_dir + dex,
dex_zip.read(dex),
compress=not options.uncompress_dex)
+ if options.jdk_libs_dex_file:
+ with open(options.jdk_libs_dex_file) as dex_file_obj:
+ add_to_zip(
+ apk_dex_dir + 'classes{}.dex'.format(max_dex_number + 1),
+ dex_file_obj.read(),
+ compress=not options.uncompress_dex)
+
# 4. Native libraries.
logging.debug('Adding lib/')
- _AddNativeLibraries(out_apk, native_libs, options.android_abi,
- options.uncompress_shared_libraries, fast_align)
-
- if options.secondary_android_abi:
- _AddNativeLibraries(out_apk, secondary_native_libs,
- options.secondary_android_abi,
- options.uncompress_shared_libraries, fast_align)
+ _AddFiles(out_apk, libs_to_add)
# Add a placeholder lib if the APK should be multi ABI but is missing libs
# for one of the ABIs.
@@ -441,12 +599,7 @@ def main(args):
options.key_name, int(options.min_sdk_version))
logging.debug('Moving file into place')
- if options.depfile:
- build_utils.WriteDepfile(
- options.depfile,
- options.output_apk,
- inputs=depfile_deps,
- add_pydeps=False)
+ _MaybeWriteDepAndStampFiles(options, depfile_deps)
if __name__ == '__main__':
diff --git a/chromium/build/android/gyp/apkbuilder.pydeps b/chromium/build/android/gyp/apkbuilder.pydeps
index 9f94da137fe..e6122edd2f1 100644
--- a/chromium/build/android/gyp/apkbuilder.pydeps
+++ b/chromium/build/android/gyp/apkbuilder.pydeps
@@ -5,4 +5,5 @@ apkbuilder.py
finalize_apk.py
util/__init__.py
util/build_utils.py
+util/diff_utils.py
util/zipalign.py
diff --git a/chromium/build/android/gyp/assert_static_initializers.py b/chromium/build/android/gyp/assert_static_initializers.py
index 2f1a8a64175..c7333726656 100755
--- a/chromium/build/android/gyp/assert_static_initializers.py
+++ b/chromium/build/android/gyp/assert_static_initializers.py
@@ -71,7 +71,8 @@ def _ReadInitArray(so_path, tool_prefix, expect_no_initializers):
else:
return 0
elif not match:
- raise Exception('Did not find section: .init_array in:\n' + stdout)
+ raise Exception('Did not find section: .init_array in {}:\n{}'.format(
+ so_path, stdout))
size_str = re.split(r'\W+', match.group(0))[5]
return int(size_str, 16)
@@ -94,11 +95,11 @@ def _CountStaticInitializers(so_path, tool_prefix, expect_no_initializers):
return init_array_size / word_size
-def _AnalyzeStaticInitializers(apk_filename, tool_prefix, dump_sis, out_dir,
+def _AnalyzeStaticInitializers(apk_or_aab, tool_prefix, dump_sis, out_dir,
ignored_libs, no_initializers_libs):
# Static initializer counting mostly copies logic in
# infra/scripts/legacy/scripts/slave/chromium/sizes.py.
- with zipfile.ZipFile(apk_filename) as z:
+ with zipfile.ZipFile(apk_or_aab) as z:
so_files = [
f for f in z.infolist() if f.filename.endswith('.so')
and f.file_size > 0 and os.path.basename(f.filename) not in ignored_libs
@@ -109,11 +110,18 @@ def _AnalyzeStaticInitializers(apk_filename, tool_prefix, dump_sis, out_dir,
has_64 = any('64' in f.filename for f in so_files)
files_to_check = [f for f in so_files if not has_64 or '64' in f.filename]
+ # Do not check partitioned libs. They have no ".init_array" section since
+ # all SIs are considered "roots" by the linker, and so end up in the base
+ # module.
+ files_to_check = [
+ f for f in files_to_check if not f.filename.endswith('_partition.so')
+ ]
+
si_count = 0
for f in files_to_check:
- expect_no_initializers = (os.path.basename(
- f.filename) in no_initializers_libs)
- with tempfile.NamedTemporaryFile() as temp:
+ lib_basename = os.path.basename(f.filename)
+ expect_no_initializers = lib_basename in no_initializers_libs
+ with tempfile.NamedTemporaryFile(prefix=lib_basename) as temp:
temp.write(z.read(f))
temp.flush()
si_count += _CountStaticInitializers(temp.name, tool_prefix,
@@ -134,7 +142,7 @@ def main():
parser.add_argument('--expected-count', required=True, type=int,
help='Fail if number of static initializers is not '
'equal to this value.')
- parser.add_argument('apk', help='APK file path.')
+ parser.add_argument('apk_or_aab', help='Path to .apk or .aab file.')
args = parser.parse_args()
# TODO(crbug.com/838414): add support for files included via loadable_modules.
@@ -143,8 +151,9 @@ def main():
# regular check throw. It should not have any.
no_initializers_libs = ['libchromium_android_linker.so']
- si_count = _AnalyzeStaticInitializers(args.apk, args.tool_prefix, False, '.',
- ignored_libs, no_initializers_libs)
+ si_count = _AnalyzeStaticInitializers(args.apk_or_aab, args.tool_prefix,
+ False, '.', ignored_libs,
+ no_initializers_libs)
if si_count != args.expected_count:
print('Expected {} static initializers, but found {}.'.format(
args.expected_count, si_count))
@@ -155,7 +164,7 @@ def main():
else:
print('Dumping static initializers via dump-static-initializers.py:')
sys.stdout.flush()
- _AnalyzeStaticInitializers(args.apk, args.tool_prefix, True, '.',
+ _AnalyzeStaticInitializers(args.apk_or_aab, args.tool_prefix, True, '.',
ignored_libs, no_initializers_libs)
print()
print('If the above list is not useful, consider listing them with:')
diff --git a/chromium/build/android/gyp/bytecode_processor.py b/chromium/build/android/gyp/bytecode_processor.py
index 86aa46ec83b..3d78347998d 100755
--- a/chromium/build/android/gyp/bytecode_processor.py
+++ b/chromium/build/android/gyp/bytecode_processor.py
@@ -31,8 +31,8 @@ def main(argv):
action='append', default=[],
help='Extra inputs, passed last to the binary script.')
parser.add_argument('-v', '--verbose', action='store_true')
+ parser.add_argument('--missing-classes-allowlist')
_AddSwitch(parser, '--is-prebuilt')
- _AddSwitch(parser, '--enable-custom-resources')
_AddSwitch(parser, '--enable-thread-annotations')
_AddSwitch(parser, '--enable-check-class-path')
args = parser.parse_args(argv)
@@ -46,6 +46,8 @@ def main(argv):
extra_classpath_jars = []
for a in args.extra_jars:
extra_classpath_jars.extend(build_utils.ParseGnList(a))
+ args.missing_classes_allowlist = build_utils.ParseGnList(
+ args.missing_classes_allowlist)
if args.verbose:
verbose = '--verbose'
@@ -54,10 +56,10 @@ def main(argv):
cmd = ([
args.script, args.input_jar, args.output_jar, verbose, args.is_prebuilt,
- args.enable_custom_resources, args.enable_thread_annotations,
- args.enable_check_class_path,
- str(len(sdk_jars))
- ] + sdk_jars + [str(len(direct_jars))] + direct_jars + extra_classpath_jars)
+ args.enable_thread_annotations, args.enable_check_class_path
+ ] + [str(len(args.missing_classes_allowlist))] +
+ args.missing_classes_allowlist + [str(len(sdk_jars))] + sdk_jars +
+ [str(len(direct_jars))] + direct_jars + extra_classpath_jars)
subprocess.check_call(cmd)
diff --git a/chromium/build/android/gyp/compile_java.py b/chromium/build/android/gyp/compile_java.py
index e912edb1984..d3ace6dc36a 100755
--- a/chromium/build/android/gyp/compile_java.py
+++ b/chromium/build/android/gyp/compile_java.py
@@ -137,6 +137,9 @@ ERRORPRONE_WARNINGS_TO_TURN_OFF = [
# Must be off since we are now passing in annotation processor generated
# code as a source jar (deduplicating work with turbine).
'RefersToDaggerCodegen',
+ # We already have presubmit checks for this. Not necessary to warn on
+ # every build.
+ 'RemoveUnusedImports',
]
# Full list of checks: https://errorprone.info/bugpatterns
@@ -150,7 +153,6 @@ ERRORPRONE_WARNINGS_TO_ERROR = [
'LongLiteralLowerCaseSuffix',
'MultiVariableDeclaration',
'RedundantOverride',
- 'RemoveUnusedImports',
'StaticQualifiedUsingExpression',
'StringEquality',
'TimeUnitMismatch',
diff --git a/chromium/build/android/gyp/compile_resources.py b/chromium/build/android/gyp/compile_resources.py
index 6ed38475948..2ca4ec781c8 100755
--- a/chromium/build/android/gyp/compile_resources.py
+++ b/chromium/build/android/gyp/compile_resources.py
@@ -18,7 +18,7 @@ import contextlib
import filecmp
import hashlib
import logging
-import multiprocessing.pool
+import multiprocessing.dummy
import os
import re
import shutil
@@ -26,6 +26,7 @@ import subprocess
import sys
import tempfile
import textwrap
+import time
import zipfile
from xml.etree import ElementTree
@@ -33,30 +34,9 @@ from util import build_utils
from util import diff_utils
from util import manifest_utils
from util import md5_check
+from util import protoresources
from util import resource_utils
-# `Resources_pb2` module imports `descriptor`, which imports `six`.
-sys.path.insert(
- 1,
- os.path.join(
- os.path.dirname(__file__), os.pardir, os.pardir, os.pardir,
- 'third_party', 'six', 'src'))
-
-# Import jinja2 from third_party/jinja2
-sys.path.insert(1, os.path.join(build_utils.DIR_SOURCE_ROOT, 'third_party'))
-from jinja2 import Template # pylint: disable=F0401
-
-# Make sure the pb2 files are able to import google.protobuf
-sys.path.insert(
- 1,
- os.path.join(build_utils.DIR_SOURCE_ROOT, 'third_party', 'protobuf',
- 'python'))
-from proto import Resources_pb2
-
-_JETIFY_SCRIPT_PATH = os.path.join(build_utils.DIR_SOURCE_ROOT, 'third_party',
- 'jetifier_standalone', 'bin',
- 'jetifier-standalone')
-
# Pngs that we shouldn't convert to webp. Please add rationale when updating.
_PNG_WEBP_EXCLUSION_PATTERN = re.compile('|'.join([
# Crashes on Galaxy S5 running L (https://crbug.com/807059).
@@ -81,8 +61,10 @@ def _ParseArgs(args):
input_opts.add_argument(
'--android-manifest', required=True, help='AndroidManifest.xml path.')
input_opts.add_argument(
- '--android-manifest-expected',
- help='Expected contents for the final manifest.')
+ '--expected-file',
help='Expected contents for the check. If '
'--expected-manifest-base-expectation is set, this is a diff file. If '
'not, this is an AndroidManifest file.')
input_opts.add_argument(
'--android-manifest-normalized', help='Normalized manifest.')
input_opts.add_argument(
@@ -90,6 +72,20 @@ def _ParseArgs(args):
help='Write to this file if expected manifest contents do not match '
'final manifest contents.')
input_opts.add_argument(
+ '--fail-on-expectations',
+ action="store_true",
+ help='When passed, fails the build on AndroidManifest expectation '
+ 'mismatches.')
+ input_opts.add_argument(
+ '--expected-manifest-base-expectation',
help='When we expect the actual normalized manifest is different from '
'the file from --expected-file, this file specifies the '
'difference.')
+ input_opts.add_argument(
+ '--only-verify-expectations',
+ action='store_true',
+ help='If passed, only verify the android manifest expectation and exit.')
+ input_opts.add_argument(
'--r-java-root-package-name',
default='base',
help='Short package name for this target\'s root R java file (ex. '
@@ -180,17 +176,19 @@ def _ParseArgs(args):
help='GN list of languages to include. All other language configs will '
'be stripped out. List may include a combination of Android locales '
'or Chrome locales.')
-
input_opts.add_argument(
'--resource-exclusion-regex',
default='',
- help='Do not include matching drawables.')
-
+ help='File-based filter for resources (applied before compiling)')
input_opts.add_argument(
'--resource-exclusion-exceptions',
default='[]',
- help='GN list of globs that say which excluded images to include even '
+ help='GN list of globs that say which files to include even '
'when --resource-exclusion-regex is set.')
+ input_opts.add_argument(
+ '--values-filter-rules',
+ help='GN list of source_glob:regex for filtering resources after they '
+ 'are compiled. Use this to filter out entries within values/ files.')
input_opts.add_argument('--png-to-webp', action='store_true',
help='Convert png files to webp format.')
@@ -252,6 +250,11 @@ def _ParseArgs(args):
help='Path to file produced by aapt2 that maps original resource paths '
'to shortened resource paths inside the apk or module.')
+ input_opts.add_argument(
+ '--is-bundle-module',
+ action='store_true',
+ help='Whether resources are being generated for a bundle module.')
+
options = parser.parse_args(args)
resource_utils.HandleCommonOptions(options)
@@ -261,6 +264,8 @@ def _ParseArgs(args):
options.shared_resources_allowlist_locales)
options.resource_exclusion_exceptions = build_utils.ParseGnList(
options.resource_exclusion_exceptions)
+ options.values_filter_rules = build_utils.ParseGnList(
+ options.values_filter_rules)
options.extra_main_r_text_files = build_utils.ParseGnList(
options.extra_main_r_text_files)
@@ -480,11 +485,23 @@ def _FixManifest(options, temp_dir):
return debug_manifest_path, orig_package
-def _VerifyManifest(actual_manifest, expected_manifest, normalized_manifest,
- unexpected_manifest_failure_file):
+def _VerifyManifest(actual_manifest, expected_file, normalized_manifest,
+ expected_manifest_base_expectation,
+ unexpected_manifest_failure_file, fail_on_mismatch):
with build_utils.AtomicOutput(normalized_manifest) as normalized_output:
normalized_output.write(manifest_utils.NormalizeManifest(actual_manifest))
- msg = diff_utils.DiffFileContents(expected_manifest, normalized_manifest)
+
+ if expected_manifest_base_expectation:
+ with tempfile.NamedTemporaryFile() as generated_diff:
+ actual_diff_content = diff_utils.GenerateDiffWithOnlyAdditons(
+ expected_manifest_base_expectation, normalized_manifest)
+ generated_diff.write(actual_diff_content)
+ generated_diff.flush()
+
+ msg = diff_utils.DiffFileContents(expected_file, generated_diff.name)
+ else:
+ msg = diff_utils.DiffFileContents(expected_file, normalized_manifest)
+
if not msg:
return
@@ -499,6 +516,8 @@ https://chromium.googlesource.com/chromium/src/+/HEAD/chrome/android/java/README
with open(unexpected_manifest_failure_file, 'w') as f:
f.write(msg_header)
f.write(msg)
+ if fail_on_mismatch:
+ sys.exit(1)
def _CreateKeepPredicate(resource_exclusion_regex,
@@ -527,23 +546,28 @@ def _CreateKeepPredicate(resource_exclusion_regex,
build_utils.MatchesGlob(path, resource_exclusion_exceptions))
-def _ConvertToWebP(webp_binary, png_files, path_info, webp_cache_dir):
- pool = multiprocessing.pool.ThreadPool(10)
+def _ConvertToWebP(webp_binary, png_paths, path_info, webp_cache_dir):
+ pool = multiprocessing.dummy.Pool(10)
build_utils.MakeDirectory(webp_cache_dir)
cwebp_version = subprocess.check_output([webp_binary, '-version']).rstrip()
cwebp_arguments = ['-mt', '-quiet', '-m', '6', '-q', '100', '-lossless']
+ sha1_time = [0]
+ cwebp_time = [0]
+ cache_hits = [0]
+
def cal_sha1(png_path):
+ start = time.time()
with open(png_path, 'rb') as f:
png_content = f.read()
- sha1_hex = hashlib.sha1(png_content).hexdigest()
- return sha1_hex
+ sha1_hex = hashlib.sha1(png_content).hexdigest()
+ sha1_time[0] += time.time() - start
+ return sha1_hex
- def get_converted_image(png_path_dir_tuple):
- png_path, original_dir = png_path_dir_tuple
+ def get_converted_image(png_path):
sha1_hash = cal_sha1(png_path)
webp_cache_path = os.path.join(
@@ -553,12 +577,15 @@ def _ConvertToWebP(webp_binary, png_files, path_info, webp_cache_dir):
webp_path = os.path.splitext(png_path)[0]
if os.path.exists(webp_cache_path):
+ cache_hits[0] += 1
os.link(webp_cache_path, webp_path)
else:
# We place the generated webp image to webp_path, instead of in the
# webp_cache_dir to avoid concurrency issues.
+ start = time.time()
args = [webp_binary, png_path] + cwebp_arguments + ['-o', webp_path]
subprocess.check_call(args)
+ cwebp_time[0] += time.time() - start
try:
os.link(webp_path, webp_cache_path)
@@ -568,40 +595,19 @@ def _ConvertToWebP(webp_binary, png_files, path_info, webp_cache_dir):
pass
os.remove(png_path)
+ original_dir = os.path.dirname(os.path.dirname(png_path))
path_info.RegisterRename(
os.path.relpath(png_path, original_dir),
os.path.relpath(webp_path, original_dir))
- pool.map(
- get_converted_image,
- [f for f in png_files if not _PNG_WEBP_EXCLUSION_PATTERN.match(f[0])])
- pool.close()
- pool.join()
-
-
-def _JetifyArchive(dep_path, output_path):
- """Runs jetify script on a directory.
-
- This converts resources to reference androidx over android support libraries.
- Directories will be put in a zip file, jetified, then unzipped as jetify
- only runs on archives.
- """
- # Jetify script only works on archives.
- with tempfile.NamedTemporaryFile() as temp_archive:
- build_utils.ZipDir(temp_archive.name, dep_path)
-
- # Use -l error to avoid warnings when nothing is jetified.
- jetify_cmd = [
- _JETIFY_SCRIPT_PATH, '-i', temp_archive.name, '-o', temp_archive.name,
- '-l', 'error'
- ]
- env = os.environ.copy()
- env['JAVA_HOME'] = build_utils.JAVA_HOME
- subprocess.check_call(jetify_cmd, env=env)
- with zipfile.ZipFile(temp_archive.name) as zf:
- zf.extractall(output_path)
-
- return output_path
+ png_paths = [f for f in png_paths if not _PNG_WEBP_EXCLUSION_PATTERN.match(f)]
+ try:
+ pool.map(get_converted_image, png_paths)
+ finally:
+ pool.close()
+ pool.join()
+ logging.debug('png->webp: cache: %d/%d sha1 time: %.1fms cwebp time: %.1fms',
+ cache_hits[0], len(png_paths), sha1_time[0], cwebp_time[0])
def _RemoveImageExtensions(directory, path_info):
@@ -621,126 +627,66 @@ def _RemoveImageExtensions(directory, path_info):
os.path.relpath(path_no_extension, directory))
-def _CompileDeps(aapt2_path, dep_subdirs, temp_dir):
- partials_dir = os.path.join(temp_dir, 'partials')
- build_utils.MakeDirectory(partials_dir)
- partial_compile_command = [
+def _CompileSingleDep(args):
+ index, dep_path, aapt2_path, partials_dir, exclusion_rules = args
+ basename = os.path.basename(dep_path)
+ unique_name = '{}_{}'.format(index, basename)
+ partial_path = os.path.join(partials_dir, '{}.zip'.format(unique_name))
+
+ compile_command = [
aapt2_path,
'compile',
# TODO(wnwen): Turn this on once aapt2 forces 9-patch to be crunched.
# '--no-crunch',
+ '--dir',
+ dep_path,
+ '-o',
+ partial_path
]
- pool = multiprocessing.pool.ThreadPool(10)
-
- def compile_partial(params):
- index, dep_path = params
- basename = os.path.basename(dep_path)
- unique_name = '{}_{}'.format(index, basename)
- partial_path = os.path.join(partials_dir, '{}.zip'.format(unique_name))
-
- jetify_dir = os.path.join(partials_dir, 'jetify')
- build_utils.MakeDirectory(jetify_dir)
- working_jetify_path = os.path.join(jetify_dir, 'jetify_' + partial_path)
- jetified_dep = _JetifyArchive(dep_path, working_jetify_path)
- dep_path = jetified_dep
-
- compile_command = (
- partial_compile_command + ['--dir', dep_path, '-o', partial_path])
-
- # There are resources targeting API-versions lower than our minapi. For
- # various reasons it's easier to let aapt2 ignore these than for us to
- # remove them from our build (e.g. it's from a 3rd party library).
- build_utils.CheckOutput(
- compile_command,
- stderr_filter=lambda output:
- build_utils.FilterLines(
- output, r'ignoring configuration .* for (styleable|attribute)'))
- return partial_path
-
- partials = pool.map(compile_partial, enumerate(dep_subdirs))
- pool.close()
- pool.join()
- return partials
-
-
-def _ProcessProtoItem(item):
- if not item.HasField('ref'):
- return
- # If this is a dynamic attribute (type ATTRIBUTE, package ID 0), hardcode
- # the package to 0x02.
- if item.ref.type == Resources_pb2.Reference.ATTRIBUTE and not (
- item.ref.id & 0xff000000):
- item.ref.id |= 0x02000000
- item.ref.ClearField('is_dynamic')
-
-
-def _ProcessProtoValue(value):
- if value.HasField('item'):
- _ProcessProtoItem(value.item)
- else:
- compound_value = value.compound_value
- if compound_value.HasField('style'):
- for entry in compound_value.style.entry:
- _ProcessProtoItem(entry.item)
- elif compound_value.HasField('array'):
- for element in compound_value.array.element:
- _ProcessProtoItem(element.item)
- elif compound_value.HasField('plural'):
- for entry in compound_value.plural.entry:
- _ProcessProtoItem(entry.item)
-
-
-def _ProcessProtoXmlNode(xml_node):
- if not xml_node.HasField('element'):
- return
-
- for attribute in xml_node.element.attribute:
- _ProcessProtoItem(attribute.compiled_item)
-
- for child in xml_node.element.child:
- _ProcessProtoXmlNode(child)
-
-
-def _HardcodeSharedLibraryDynamicAttributes(zip_path):
- """Hardcodes the package IDs of dynamic attributes to 0x02.
-
- This is a workaround for b/147674078, which affects Android versions pre-N.
+ # There are resources targeting API-versions lower than our minapi. For
+ # various reasons it's easier to let aapt2 ignore these than for us to
+ # remove them from our build (e.g. it's from a 3rd party library).
+ build_utils.CheckOutput(
+ compile_command,
+ stderr_filter=lambda output: build_utils.FilterLines(
+ output, r'ignoring configuration .* for (styleable|attribute)'))
- Args:
- zip_path: Path to proto APK file.
- """
- with build_utils.TempDir() as tmp_dir:
- build_utils.ExtractAll(zip_path, path=tmp_dir)
+ # Filtering these files is expensive, so only apply filters to the partials
+ # that have been explicitly targeted.
+ keep_predicate = _CreateValuesKeepPredicate(exclusion_rules, dep_path)
+ if keep_predicate:
+ logging.debug('Applying .arsc filtering to %s', dep_path)
+ protoresources.StripUnwantedResources(partial_path, keep_predicate)
+ return partial_path
- # First process the resources file.
- table = Resources_pb2.ResourceTable()
- with open(os.path.join(tmp_dir, 'resources.pb')) as f:
- table.ParseFromString(f.read())
- for package in table.package:
- for _type in package.type:
- for entry in _type.entry:
- for config_value in entry.config_value:
- _ProcessProtoValue(config_value.value)
+def _CompileDeps(aapt2_path, dep_subdirs, temp_dir, exclusion_rules):
+ partials_dir = os.path.join(temp_dir, 'partials')
+ build_utils.MakeDirectory(partials_dir)
- with open(os.path.join(tmp_dir, 'resources.pb'), 'w') as f:
- f.write(table.SerializeToString())
+ def iter_params():
+ for i, dep_path in enumerate(dep_subdirs):
+ yield i, dep_path, aapt2_path, partials_dir, exclusion_rules
- # Next process all the XML files.
- xml_files = build_utils.FindInDirectory(tmp_dir, '*.xml')
- for xml_file in xml_files:
- xml_node = Resources_pb2.XmlNode()
- with open(xml_file) as f:
- xml_node.ParseFromString(f.read())
+ pool = multiprocessing.dummy.Pool(10)
+ try:
+ return pool.map(_CompileSingleDep, iter_params())
+ finally:
+ pool.close()
+ pool.join()
- _ProcessProtoXmlNode(xml_node)
- with open(xml_file, 'w') as f:
- f.write(xml_node.SerializeToString())
+def _CreateValuesKeepPredicate(exclusion_rules, dep_path):
+ patterns = [
+ x[1] for x in exclusion_rules
+ if build_utils.MatchesGlob(dep_path, [x[0]])
+ ]
+ if not patterns:
+ return None
- # Overwrite the original zip file.
- build_utils.ZipDir(zip_path, tmp_dir)
+ regexes = [re.compile(p) for p in patterns]
+ return lambda x: not any(r.search(x) for r in regexes)
def _CreateResourceInfoFile(path_info, info_path, dependencies_res_zips):
@@ -758,11 +704,6 @@ def _RemoveUnwantedLocalizedStrings(dep_subdirs, options):
dep_subdirs: List of resource dependency directories.
options: Command-line options namespace.
"""
- if (not options.locale_allowlist
- and not options.shared_resources_allowlist_locales):
- # Keep everything, there is nothing to do.
- return
-
# Collect locale and file paths from the existing subdirs.
# The following variable maps Android locale names to
# sets of corresponding xml file paths.
@@ -816,6 +757,21 @@ def _RemoveUnwantedLocalizedStrings(dep_subdirs, options):
path, lambda x: x not in shared_names_allowlist)
+def _FilterResourceFiles(dep_subdirs, keep_predicate):
+ # Create a function that selects which resource files should be packaged
+ # into the final output. Any file that does not pass the predicate will
+ # be removed below.
+ png_paths = []
+ for directory in dep_subdirs:
+ for f in _IterFiles(directory):
+ if not keep_predicate(f):
+ os.remove(f)
+ elif f.endswith('.png'):
+ png_paths.append(f)
+
+ return png_paths
+
+
def _PackageApk(options, build):
"""Compile and link resources with aapt2.
@@ -834,21 +790,15 @@ def _PackageApk(options, build):
_DuplicateZhResources(dep_subdirs, path_info)
_RenameLocaleResourceDirs(dep_subdirs, path_info)
- _RemoveUnwantedLocalizedStrings(dep_subdirs, options)
-
- # Create a function that selects which resource files should be packaged
- # into the final output. Any file that does not pass the predicate will
- # be removed below.
logging.debug('Applying file-based exclusions')
keep_predicate = _CreateKeepPredicate(options.resource_exclusion_regex,
options.resource_exclusion_exceptions)
- png_paths = []
- for directory in dep_subdirs:
- for f in _IterFiles(directory):
- if not keep_predicate(f):
- os.remove(f)
- elif f.endswith('.png'):
- png_paths.append((f, directory))
+ png_paths = _FilterResourceFiles(dep_subdirs, keep_predicate)
+
+ if options.locale_allowlist or options.shared_resources_allowlist_locales:
+ logging.debug('Applying locale-based string exclusions')
+ _RemoveUnwantedLocalizedStrings(dep_subdirs, options)
+
if png_paths and options.png_to_webp:
logging.debug('Converting png->webp')
_ConvertToWebP(options.webp_binary, png_paths, path_info,
@@ -858,6 +808,11 @@ def _PackageApk(options, build):
_MoveImagesToNonMdpiFolders(directory, path_info)
_RemoveImageExtensions(directory, path_info)
+ logging.debug('Running aapt2 compile')
+ exclusion_rules = [x.split(':', 1) for x in options.values_filter_rules]
+ partials = _CompileDeps(options.aapt2_path, dep_subdirs, build.temp_dir,
+ exclusion_rules)
+
link_command = [
options.aapt2_path,
'link',
@@ -907,10 +862,6 @@ def _PackageApk(options, build):
options, build.temp_dir)
if options.rename_manifest_package:
desired_manifest_package_name = options.rename_manifest_package
- if options.android_manifest_expected:
- _VerifyManifest(fixed_manifest, options.android_manifest_expected,
- options.android_manifest_normalized,
- options.android_manifest_expectations_failure_file)
link_command += [
'--manifest', fixed_manifest, '--rename-manifest-package',
@@ -924,7 +875,6 @@ def _PackageApk(options, build):
desired_manifest_package_name)
link_command += ['--stable-ids', build.stable_ids_path]
- partials = _CompileDeps(options.aapt2_path, dep_subdirs, build.temp_dir)
for partial in partials:
link_command += ['-R', partial]
@@ -971,7 +921,10 @@ def _PackageApk(options, build):
# affect WebView usage, since WebView does not used dynamic attributes.
if options.shared_resources:
logging.debug('Hardcoding dynamic attributes')
- _HardcodeSharedLibraryDynamicAttributes(build.proto_path)
+ protoresources.HardcodeSharedLibraryDynamicAttributes(
+ build.proto_path, options.is_bundle_module,
+ options.shared_resources_allowlist)
+
build_utils.CheckOutput([
options.aapt2_path, 'convert', '--output-format', 'binary', '-o',
build.arsc_path, build.proto_path
@@ -1100,6 +1053,16 @@ def _WriteOutputs(options, build):
shutil.move(temp, final)
+def _VerifyExpectations(options):
+ with build_utils.TempDir() as tempdir:
+ fixed_manifest, _ = _FixManifest(options, tempdir)
+ _VerifyManifest(fixed_manifest, options.expected_file,
+ options.android_manifest_normalized,
+ options.expected_manifest_base_expectation,
+ options.android_manifest_expectations_failure_file,
+ options.fail_on_expectations)
+
+
def _OnStaleMd5(options):
path = options.arsc_path or options.proto_path
debug_temp_resources_dir = os.environ.get('TEMP_RESOURCES_DIR')
@@ -1110,14 +1073,14 @@ def _OnStaleMd5(options):
# path of resources: crbug.com/939984
path = path + '.tmpdir'
build_utils.DeleteDirectory(path)
- build_utils.MakeDirectory(path)
with resource_utils.BuildContext(
temp_dir=path, keep_files=bool(debug_temp_resources_dir)) as build:
+
manifest_package_name = _PackageApk(options, build)
- # If --shared-resources-allowlist is used, the all resources listed in
- # the corresponding R.txt file will be non-final, and an onResourcesLoaded()
+ # If --shared-resources-allowlist is used, all the resources listed in the
+ # corresponding R.txt file will be non-final, and an onResourcesLoaded()
# will be generated to adjust them at runtime.
#
# Otherwise, if --shared-resources is used, the all resources will be
@@ -1130,6 +1093,11 @@ def _OnStaleMd5(options):
rjava_build_options.ExportSomeResources(
options.shared_resources_allowlist)
rjava_build_options.GenerateOnResourcesLoaded()
+ if options.shared_resources:
+ # The final resources will only be used in WebLayer, so hardcode the
+ # package ID to be what WebLayer expects.
+ rjava_build_options.SetFinalPackageId(
+ protoresources.SHARED_LIBRARY_HARDCODED_ID)
elif options.shared_resources or options.app_as_shared_lib:
rjava_build_options.ExportAllResources()
rjava_build_options.GenerateOnResourcesLoaded()
@@ -1183,6 +1151,11 @@ def main(args):
args = build_utils.ExpandFileArgs(args)
options = _ParseArgs(args)
+ if options.expected_file:
+ _VerifyExpectations(options)
+ if options.only_verify_expectations:
+ return
+
depfile_deps = (
options.dependencies_res_zips + options.extra_main_r_text_files +
options.extra_r_text_files + options.include_resources)
@@ -1190,7 +1163,8 @@ def main(args):
possible_input_paths = depfile_deps + [
options.aapt2_path,
options.android_manifest,
- options.android_manifest_expected,
+ options.expected_file,
+ options.expected_manifest_base_expectation,
options.resources_config_path,
options.shared_resources_allowlist,
options.use_resource_ids_path,
@@ -1222,6 +1196,7 @@ def main(args):
options.strip_resource_names,
options.support_zh_hk,
options.target_sdk_version,
+ options.values_filter_rules,
options.version_code,
options.version_name,
options.webp_cache_dir,
diff --git a/chromium/build/android/gyp/compile_resources.pydeps b/chromium/build/android/gyp/compile_resources.pydeps
index 53282f7d324..f34926c185e 100644
--- a/chromium/build/android/gyp/compile_resources.pydeps
+++ b/chromium/build/android/gyp/compile_resources.pydeps
@@ -55,4 +55,5 @@ util/build_utils.py
util/diff_utils.py
util/manifest_utils.py
util/md5_check.py
+util/protoresources.py
util/resource_utils.py
diff --git a/chromium/build/android/gyp/create_app_bundle.py b/chromium/build/android/gyp/create_app_bundle.py
index fa2f040973a..336ecf16fd9 100755
--- a/chromium/build/android/gyp/create_app_bundle.py
+++ b/chromium/build/android/gyp/create_app_bundle.py
@@ -238,9 +238,13 @@ def _RewriteLanguageAssetPath(src_path):
locale = src_path[len(_LOCALES_SUBDIR):-4]
android_locale = resource_utils.ToAndroidLocaleName(locale)
- # The locale format is <lang>-<region> or <lang>. Extract the language.
+  # The locale format is <lang>-<region> or <lang> or BCP-47 (e.g. b+sr+Latn).
+ # Extract the language.
pos = android_locale.find('-')
- if pos >= 0:
+ if android_locale.startswith('b+'):
+ # If locale is in BCP-47 the language is the second tag (e.g. b+sr+Latn)
+ android_language = android_locale.split('+')[1]
+ elif pos >= 0:
android_language = android_locale[:pos]
else:
android_language = android_locale
diff --git a/chromium/build/android/gyp/create_bundle_wrapper_script.py b/chromium/build/android/gyp/create_bundle_wrapper_script.py
index 89051ff3ea2..5f576acf4dc 100755
--- a/chromium/build/android/gyp/create_bundle_wrapper_script.py
+++ b/chromium/build/android/gyp/create_bundle_wrapper_script.py
@@ -45,13 +45,13 @@ def main():
command_line_flags_file=${FLAGS_FILE},
proguard_mapping_path=resolve(${MAPPING_PATH}),
target_cpu=${TARGET_CPU},
- system_image_locales=${SYSTEM_IMAGE_LOCALES})
+ system_image_locales=${SYSTEM_IMAGE_LOCALES},
+ default_modules=${DEFAULT_MODULES})
if __name__ == '__main__':
sys.exit(main())
""")
-
def main(args):
args = build_utils.ExpandFileArgs(args)
parser = argparse.ArgumentParser()
@@ -74,6 +74,7 @@ def main(args):
parser.add_argument('--proguard-mapping-path')
parser.add_argument('--target-cpu')
parser.add_argument('--system-image-locales')
+ parser.add_argument('--default-modules', nargs='*', default=[])
args = parser.parse_args(args)
def relativize(path):
@@ -114,6 +115,8 @@ def main(args):
repr(args.target_cpu),
'SYSTEM_IMAGE_LOCALES':
repr(build_utils.ParseGnList(args.system_image_locales)),
+ 'DEFAULT_MODULES':
+ repr(args.default_modules),
}
script.write(SCRIPT_TEMPLATE.substitute(script_dict))
os.chmod(args.script_output_path, 0750)
diff --git a/chromium/build/android/gyp/create_size_info_files.py b/chromium/build/android/gyp/create_size_info_files.py
index 0928fac2094..27046db1150 100755
--- a/chromium/build/android/gyp/create_size_info_files.py
+++ b/chromium/build/android/gyp/create_size_info_files.py
@@ -8,6 +8,7 @@
import argparse
import os
+import re
import sys
import zipfile
@@ -15,13 +16,30 @@ from util import build_utils
from util import jar_info_utils
+_AAR_VERSION_PATTERN = re.compile(r'/[^/]*?(\.aar/|\.jar/)')
+
+
+def _TransformAarPaths(path):
+ # .aar files within //third_party/android_deps have a version suffix.
+ # The suffix changes each time .aar files are updated, which makes size diffs
+ # hard to compare (since the before/after have different source paths).
+ # Rather than changing how android_deps works, we employ this work-around
+ # to normalize the paths.
+ # From: .../androidx_appcompat_appcompat/appcompat-1.1.0.aar/res/...
+ # To: .../androidx_appcompat_appcompat.aar/res/...
+ # https://crbug.com/1056455
+ if 'android_deps' not in path:
+ return path
+ return _AAR_VERSION_PATTERN.sub(r'\1', path)
+
+
def _MergeResInfoFiles(res_info_path, info_paths):
# Concatenate them all.
# only_if_changed=False since no build rules depend on this as an input.
with build_utils.AtomicOutput(res_info_path, only_if_changed=False) as dst:
for p in info_paths:
with open(p) as src:
- dst.write(src.read())
+ dst.writelines(_TransformAarPaths(l) for l in src)
def _PakInfoPathsForAssets(assets):
@@ -32,7 +50,7 @@ def _MergePakInfoFiles(merged_path, pak_infos):
info_lines = set()
for pak_info_path in pak_infos:
with open(pak_info_path, 'r') as src_info_file:
- info_lines.update(src_info_file.readlines())
+ info_lines.update(_TransformAarPaths(x) for x in src_info_file)
# only_if_changed=False since no build rules depend on this as an input.
with build_utils.AtomicOutput(merged_path, only_if_changed=False) as f:
f.writelines(sorted(info_lines))
@@ -92,8 +110,8 @@ def _MergeJarInfoFiles(output, inputs):
for name in zip_info.namelist():
fully_qualified_name = _FullJavaNameFromClassFilePath(name)
if fully_qualified_name:
- info_data[fully_qualified_name] = '{}/{}'.format(
- attributed_path, name)
+ info_data[fully_qualified_name] = _TransformAarPaths('{}/{}'.format(
+ attributed_path, name))
# only_if_changed=False since no build rules depend on this as an input.
with build_utils.AtomicOutput(output, only_if_changed=False) as f:
diff --git a/chromium/build/android/gyp/dex.py b/chromium/build/android/gyp/dex.py
index 82ddac000b1..0b3dcbd28b9 100755
--- a/chromium/build/android/gyp/dex.py
+++ b/chromium/build/android/gyp/dex.py
@@ -24,7 +24,12 @@ import convert_dex_profile
_IGNORE_WARNINGS = (
# A play services library triggers this.
- 'Type `libcore.io.Memory` was not found', )
+ r'Type `libcore.io.Memory` was not found',
+ # Filter out warnings caused by our fake main dex list used to enable
+ # multidex on library targets.
+ # Warning: Application does not contain `Foo` as referenced in main-dex-list
+ r'does not contain `Foo`',
+)
def _ParseArgs(args):
@@ -64,6 +69,8 @@ def _ParseArgs(args):
action='append',
help='GN-list of bootclasspath. Needed for --desugar')
parser.add_argument(
+ '--desugar-jdk-libs-json', help='Path to desugar_jdk_libs.json.')
+ parser.add_argument(
'--classpath',
action='append',
help='GN-list of full classpath. Needed for --desugar')
@@ -114,11 +121,6 @@ def _ParseArgs(args):
if options.main_dex_list_path and not options.multi_dex:
parser.error('--main-dex-list-path is unused if multidex is not enabled')
- if options.desugar and options.classpath is None:
- parser.error('--classpath required with use of --desugar')
- if options.desugar and options.bootclasspath is None:
- parser.error('--bootclasspath required with use of --desugar')
-
options.class_inputs = build_utils.ParseGnList(options.class_inputs)
options.class_inputs_filearg = build_utils.ParseGnList(
options.class_inputs_filearg)
@@ -135,12 +137,18 @@ def _RunD8(dex_cmd, input_paths, output_path):
dex_cmd = dex_cmd + ['--output', output_path] + input_paths
def stderr_filter(output):
- # Filter out warnings caused by our fake main dex list used to enable
- # multidex on library targets.
- # Warning: Application does not contain `Foo` as referenced in main-dex-list
- pattern = r'does not contain `Foo`'
- pattern += '|' + '|'.join(re.escape(p) for p in _IGNORE_WARNINGS)
- output = build_utils.FilterLines(output, pattern)
+ patterns = _IGNORE_WARNINGS
+ # No classpath means we are using Bazel's Desugar tool to desugar lambdas
+ # and interface methods, in which case we intentionally do not pass a
+ # classpath to D8.
+ # Not having a classpath makes incremental dexing much more effective.
+ # D8 will still be used for backported method desugaring.
+    # Warnings it emits about interface methods are expected and filtered below.
+ if '--classpath' not in dex_cmd:
+ patterns = list(patterns) + ['default or static interface methods']
+
+ combined_pattern = '|'.join(re.escape(p) for p in patterns)
+ output = build_utils.FilterLines(output, combined_pattern)
# Each warning has a prefix line of tthe file it's from. If we've filtered
# out the warning, then also filter out the file header.
@@ -252,7 +260,7 @@ def _ZipMultidex(file_dir, dex_files):
"""
ordered_files = [] # List of (archive name, file name)
for f in dex_files:
- if f.endswith('classes.dex.zip'):
+ if f.endswith('dex.jar'):
ordered_files.append(('classes.dex', f))
break
if not ordered_files:
@@ -478,7 +486,10 @@ def main(args):
if options.min_api:
dex_cmd += ['--min-api', options.min_api]
- if options.desugar:
+ if not options.desugar:
+ dex_cmd += ['--no-desugaring']
+ elif options.classpath:
+ # Don't pass classpath when Desugar.jar is doing interface desugaring.
dex_cmd += ['--lib', build_utils.JAVA_HOME]
for path in options.bootclasspath:
dex_cmd += ['--lib', path]
@@ -488,9 +499,9 @@ def main(args):
depfile_deps += options.bootclasspath
input_paths += options.classpath
input_paths += options.bootclasspath
- else:
- dex_cmd += ['--no-desugaring']
+ if options.desugar_jdk_libs_json:
+ dex_cmd += ['--desugared-lib', options.desugar_jdk_libs_json]
if options.force_enable_assertions:
dex_cmd += ['--force-enable-assertions']
diff --git a/chromium/build/android/gyp/dex_jdk_libs.py b/chromium/build/android/gyp/dex_jdk_libs.py
new file mode 100755
index 00000000000..0cda991a4c9
--- /dev/null
+++ b/chromium/build/android/gyp/dex_jdk_libs.py
@@ -0,0 +1,59 @@
+#!/usr/bin/env python
+#
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import shutil
+import subprocess
+import sys
+import zipfile
+
+from util import build_utils
+
+
+def _ParseArgs(args):
+ args = build_utils.ExpandFileArgs(args)
+ parser = argparse.ArgumentParser()
+
+ parser.add_argument('--output', required=True, help='Dex output path.')
+ parser.add_argument('--r8-path', required=True, help='Path to R8 jar.')
+ parser.add_argument(
+ '--desugar-jdk-libs-json', help='Path to desugar_jdk_libs.json.')
+ parser.add_argument(
+ '--desugar-jdk-libs-jar', help='Path to desugar_jdk_libs.jar.')
+ parser.add_argument('--min-api', help='minSdkVersion', required=True)
+ options = parser.parse_args(args)
+ return options
+
+
+def main(args):
+ options = _ParseArgs(args)
+
+ # TODO(agrieve): Spews a lot of stderr about missing classes.
+ with build_utils.TempDir() as tmp_dir:
+ cmd = [
+ build_utils.JAVA_PATH,
+ '-jar',
+ options.r8_path,
+ 'l8',
+ '--min-api',
+ options.min_api,
+ #'--lib', build_utils.JAVA_HOME,
+ '--desugared-lib',
+ options.desugar_jdk_libs_json,
+ '--output',
+ tmp_dir,
+ options.desugar_jdk_libs_jar
+ ]
+ subprocess.check_output(cmd, stderr=subprocess.STDOUT)
+
+ if os.path.exists(os.path.join(tmp_dir, 'classes2.dex')):
+ raise Exception('Achievement unlocked: desugar_jdk_libs is multidex!')
+ shutil.move(os.path.join(tmp_dir, 'classes.dex'), options.output)
+
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
diff --git a/chromium/build/android/gyp/dex_jdk_libs.pydeps b/chromium/build/android/gyp/dex_jdk_libs.pydeps
new file mode 100644
index 00000000000..28d181f528e
--- /dev/null
+++ b/chromium/build/android/gyp/dex_jdk_libs.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/dex_jdk_libs.pydeps build/android/gyp/dex_jdk_libs.py
+../../gn_helpers.py
+dex_jdk_libs.py
+util/__init__.py
+util/build_utils.py
diff --git a/chromium/build/android/gyp/dexsplitter.py b/chromium/build/android/gyp/dexsplitter.py
index 7bbc066f076..8e8230b97bc 100755
--- a/chromium/build/android/gyp/dexsplitter.py
+++ b/chromium/build/android/gyp/dexsplitter.py
@@ -101,15 +101,15 @@ def main(args):
if os.path.exists(module_dex_file):
curr_location_to_dest.append((module_dex_file, dest))
else:
- module_dex_file += '.zip'
+ module_dex_file += '.jar'
assert os.path.exists(
module_dex_file), 'Dexsplitter tool output not found.'
- curr_location_to_dest.append((module_dex_file + '.zip', dest))
+ curr_location_to_dest.append((module_dex_file + '.jar', dest))
for curr_location, dest in curr_location_to_dest:
with build_utils.AtomicOutput(dest) as f:
- if curr_location.endswith('.zip'):
- if dest.endswith('.zip'):
+ if curr_location.endswith('.jar'):
+ if dest.endswith('.jar'):
shutil.copy(curr_location, f.name)
else:
with zipfile.ZipFile(curr_location, 'r') as z:
@@ -119,7 +119,7 @@ def main(args):
options.input_dex_zip)
z.extract(namelist[0], f.name)
else:
- if dest.endswith('.zip'):
+ if dest.endswith('.jar'):
build_utils.ZipDir(
f.name, os.path.abspath(os.path.join(curr_location, os.pardir)))
else:
diff --git a/chromium/build/android/gyp/extract_unwind_tables.py b/chromium/build/android/gyp/extract_unwind_tables.py
index ea13c6a3e79..b20f740764a 100755
--- a/chromium/build/android/gyp/extract_unwind_tables.py
+++ b/chromium/build/android/gyp/extract_unwind_tables.py
@@ -254,10 +254,8 @@ def _WriteCfiData(cfi_data, out_file):
_Write2Bytes(out_file, data)
-def _ParseCfiData(sym_file, output_path):
- with open(sym_file, 'r') as f:
- cfi_data = _GetAllCfiRows(f)
-
+def _ParseCfiData(sym_stream, output_path):
+ cfi_data = _GetAllCfiRows(sym_stream)
with open(output_path, 'wb') as out_file:
_WriteCfiData(cfi_data, out_file)
@@ -275,13 +273,11 @@ def main():
help='The path of the dump_syms binary')
args = parser.parse_args()
+ cmd = ['./' + args.dump_syms_path, args.input_path]
+ proc = subprocess.Popen(cmd, bufsize=-1, stdout=subprocess.PIPE)
+ _ParseCfiData(proc.stdout, args.output_path)
+ assert proc.wait() == 0
- with tempfile.NamedTemporaryFile() as sym_file:
- out = subprocess.call(
- ['./' +args.dump_syms_path, args.input_path], stdout=sym_file)
- assert not out
- sym_file.flush()
- _ParseCfiData(sym_file.name, args.output_path)
return 0
if __name__ == '__main__':
diff --git a/chromium/build/android/gyp/extract_unwind_tables_tests.py b/chromium/build/android/gyp/extract_unwind_tables_tests.py
index e686607b188..7f9d0de7344 100755
--- a/chromium/build/android/gyp/extract_unwind_tables_tests.py
+++ b/chromium/build/android/gyp/extract_unwind_tables_tests.py
@@ -24,9 +24,8 @@ from util import build_utils
class TestExtractUnwindTables(unittest.TestCase):
def testExtractCfi(self):
- with tempfile.NamedTemporaryFile() as input_file, \
- tempfile.NamedTemporaryFile() as output_file:
- input_file.write("""
+ with tempfile.NamedTemporaryFile() as output_file:
+ test_data_lines = """
MODULE Linux arm CDE12FE1DF2B37A9C6560B4CBEE056420 lib_chrome.so
INFO CODE_ID E12FE1CD2BDFA937C6560B4CBEE05642
FILE 0 ../../base/allocator/allocator_check.cc
@@ -63,9 +62,8 @@ STACK CFI INIT 3b92114 6c .cfa: sp 0 + .ra: lr
STACK CFI 3b92118 .cfa: r7 16 + .ra: .cfa -20 + ^
STACK CFI INIT 3b93214 fffff .cfa: sp 0 + .ra: lr
STACK CFI 3b93218 .cfa: r7 16 + .ra: .cfa -4 + ^
-""")
- input_file.flush()
- extract_unwind_tables._ParseCfiData(input_file.name, output_file.name)
+""".splitlines()
+ extract_unwind_tables._ParseCfiData(test_data_lines, output_file.name)
expected_cfi_data = {
0xe1a1e4 : [0x2, 0x11, 0x4, 0x50],
diff --git a/chromium/build/android/gyp/lint.py b/chromium/build/android/gyp/lint.py
index cfbf47ebfd1..fb751bd6ed6 100755
--- a/chromium/build/android/gyp/lint.py
+++ b/chromium/build/android/gyp/lint.py
@@ -6,43 +6,61 @@
"""Runs Android's lint tool."""
+
from __future__ import print_function
import argparse
+import logging
import os
import re
import shutil
import sys
+import time
import traceback
from xml.dom import minidom
from xml.etree import ElementTree
from util import build_utils
from util import manifest_utils
-from util import md5_check
+from util import resource_utils
_LINT_MD_URL = 'https://chromium.googlesource.com/chromium/src/+/master/build/android/docs/lint.md' # pylint: disable=line-too-long
+# These checks are not useful for test targets and add an unnecessary burden
+# to suppress them.
+_DISABLED_FOR_TESTS = [
+ # We should not require test strings.xml files to explicitly add
+ # translatable=false since they are not translated and not used in
+ # production.
+ "MissingTranslation",
+ # Test strings.xml files often have simple names and are not translatable,
+ # so it may conflict with a production string and cause this error.
+ "Untranslatable",
+ # Test targets often use the same strings target and resources target as the
+ # production targets but may not use all of them.
+ "UnusedResources",
+]
+
def _RunLint(lint_path,
config_path,
- processed_config_path,
manifest_path,
result_path,
product_dir,
sources,
- jar_path,
cache_dir,
android_sdk_version,
srcjars,
min_sdk_version,
manifest_package,
resource_sources,
- disable=None,
- classpath=None,
+ resource_zips,
+ android_sdk_root,
+ testonly_target=False,
can_fail_build=False,
include_unexpected=False,
silent=False):
+ logging.info('Lint starting')
def _RebasePath(path):
"""Returns relative path to top-level src dir.
@@ -56,19 +74,6 @@ def _RunLint(lint_path,
ret = os.path.abspath(path)
return ret
- def _ProcessConfigFile():
- if not config_path or not processed_config_path:
- return
- if not build_utils.IsTimeStale(processed_config_path, [config_path]):
- return
-
- with open(config_path, 'rb') as f:
- content = f.read().replace(
- 'PRODUCT_DIR', _RebasePath(product_dir))
-
- with open(processed_config_path, 'wb') as f:
- f.write(content)
-
def _ProcessResultFile():
with open(result_path, 'rb') as f:
content = f.read().replace(
@@ -88,11 +93,7 @@ def _RunLint(lint_path,
location_elem = issue.getElementsByTagName('location')[0]
path = location_elem.attributes['file'].value
line = location_elem.getAttribute('line')
- if line:
- error = '%s:%s %s: %s [warning]' % (path, line, message, issue_id)
- else:
- # Issues in class files don't have a line number.
- error = '%s %s: %s [warning]' % (path, message, issue_id)
+ error = '%s:%s %s: %s [warning]' % (path, line, message, issue_id)
print(error.encode('utf-8'), file=sys.stderr)
for attr in ['errorLine1', 'errorLine2']:
error_line = issue.getAttribute(attr)
@@ -101,17 +102,25 @@ def _RunLint(lint_path,
return len(issues)
with build_utils.TempDir() as temp_dir:
- _ProcessConfigFile()
-
cmd = [
- _RebasePath(lint_path), '-Werror', '--exitcode', '--showall',
- '--xml', _RebasePath(result_path),
+ _RebasePath(lint_path),
+ '-Werror',
+ '--exitcode',
+ '--showall',
+ '--xml',
+ _RebasePath(result_path),
+ # An explicit sdk root needs to be specified since we have an extra
+        # intermediate 'latest' directory under cmdline-tools which prevents
+ # lint from automatically deducing the location of the sdk. The sdk is
+ # required for many checks (e.g. NewApi). Lint also requires absolute
+ # paths.
+ '--sdk-home',
+ os.path.abspath(android_sdk_root),
]
- if jar_path:
- # --classpath is just for .class files for this one target.
- cmd.extend(['--classpath', _RebasePath(jar_path)])
- if processed_config_path:
- cmd.extend(['--config', _RebasePath(processed_config_path)])
+ if config_path:
+ cmd.extend(['--config', _RebasePath(config_path)])
+ if testonly_target:
+ cmd.extend(['--disable', ','.join(_DISABLED_FOR_TESTS)])
tmp_dir_counter = [0]
def _NewTempSubdir(prefix, append_digit=True):
@@ -124,25 +133,17 @@ def _RunLint(lint_path,
os.makedirs(new_dir)
return new_dir
- resource_dirs = []
- for resource_source in resource_sources:
- if os.path.isdir(resource_source):
- resource_dirs.append(resource_source)
- else:
- # This is a zip file with generated resources (e. g. strings from GRD).
- # Extract it to temporary folder.
- resource_dir = _NewTempSubdir(resource_source, append_digit=False)
- resource_dirs.append(resource_dir)
- build_utils.ExtractAll(resource_source, path=resource_dir)
+ resource_dirs = resource_utils.DeduceResourceDirsFromFileList(
+ resource_sources)
+ # These are zip files with generated resources (e. g. strings from GRD).
+ for resource_zip in resource_zips:
+ resource_dir = _NewTempSubdir(resource_zip, append_digit=False)
+ resource_dirs.append(resource_dir)
+ build_utils.ExtractAll(resource_zip, path=resource_dir)
for resource_dir in resource_dirs:
cmd.extend(['--resources', _RebasePath(resource_dir)])
- if classpath:
- # --libraries is the classpath (excluding active target).
- cp = ':'.join(_RebasePath(p) for p in classpath)
- cmd.extend(['--libraries', cp])
-
# There may be multiple source files with the same basename (but in
# different directories). It is difficult to determine what part of the path
# corresponds to the java package, and so instead just link the source files
@@ -177,17 +178,21 @@ def _RunLint(lint_path,
os.symlink(src, dst)
if srcjars:
- srcjar_paths = build_utils.ParseGnList(srcjars)
- if srcjar_paths:
- srcjar_dir = _NewTempSubdir('SRC_ROOT')
- cmd.extend(['--sources', _RebasePath(srcjar_dir)])
- for srcjar in srcjar_paths:
- build_utils.ExtractAll(srcjar, path=srcjar_dir)
-
- if disable:
- cmd.extend(['--disable', ','.join(disable)])
-
- project_dir = _NewTempSubdir('SRC_ROOT')
+ srcjar_dir = _NewTempSubdir('GENERATED_SRC_ROOT', append_digit=False)
+ cmd.extend(['--sources', _RebasePath(srcjar_dir)])
+ for srcjar in srcjars:
+ # We choose to allow srcjars that contain java files which have the
+ # same package and name to clobber each other. This happens for
+ # generated files like BuildConfig.java. It is generated for
+ # targets like base_build_config_gen as well as targets like
+ # chrome_modern_public_base_bundle_module__build_config_srcjar.
+ # Although we could extract each srcjar to a separate folder, that
+ # slows down some invocations of lint by 20 seconds or more.
+ # TODO(wnwen): Switch lint.py to generate a project.xml file which
+ # supports srcjar inputs by default.
+ build_utils.ExtractAll(srcjar, path=srcjar_dir, no_clobber=False)
+
+ project_dir = _NewTempSubdir('PROJECT_ROOT', append_digit=False)
if android_sdk_version:
# Create dummy project.properties file in a temporary "project" directory.
# It is the only way to add Android SDK to the Lint's classpath. Proper
@@ -250,9 +255,13 @@ def _RunLint(lint_path,
try:
env['JAVA_HOME'] = os.path.relpath(build_utils.JAVA_HOME,
build_utils.DIR_SOURCE_ROOT)
+ logging.debug('Lint command %s', cmd)
+ start = time.time()
build_utils.CheckOutput(cmd, cwd=build_utils.DIR_SOURCE_ROOT,
env=env or None, stderr_filter=stderr_filter,
fail_func=fail_func)
+ end = time.time() - start
+ logging.info('Lint command took %ss', end)
except build_utils.CalledProcessError:
# There is a problem with lint usage
if not os.path.exists(result_path):
@@ -296,6 +305,8 @@ def _RunLint(lint_path,
if can_fail_build:
raise Exception('Lint failed.')
+ logging.info('Lint completed')
+
def _FindInDirectories(directories, filename_filter):
all_files = []
@@ -304,10 +315,17 @@ def _FindInDirectories(directories, filename_filter):
return all_files
-def main():
+def _ParseArgs(argv):
parser = argparse.ArgumentParser()
build_utils.AddDepfileOption(parser)
-
+ parser.add_argument('--android-sdk-root',
+ required=True,
+ help='Lint needs an explicit path to the android sdk.')
+ parser.add_argument('--testonly',
+ action='store_true',
+ help='If set, some checks like UnusedResources will be '
+ 'disabled since they are not helpful for test '
+ 'targets.')
parser.add_argument('--lint-path', required=True,
help='Path to lint executable.')
parser.add_argument('--product-dir', required=True,
@@ -330,28 +348,22 @@ def main():
' if lint itself crashes with unexpected failures.')
parser.add_argument('--config-path',
help='Path to lint suppressions file.')
- parser.add_argument('--disable',
- help='List of checks to disable.')
- parser.add_argument('--jar-path',
- help='Jar file containing class files.')
- parser.add_argument('--java-sources-file',
- help='File containing a list of java files.')
+ parser.add_argument('--java-sources',
+ help='File containing a list of java sources files.')
parser.add_argument('--manifest-path',
help='Path to AndroidManifest.xml')
- parser.add_argument('--classpath', default=[], action='append',
- help='GYP-list of classpath .jar files')
- parser.add_argument('--processed-config-path',
- help='Path to processed lint suppressions file.')
- parser.add_argument('--resource-dir',
- help='Path to resource dir.')
- parser.add_argument('--resource-sources', default=[], action='append',
- help='GYP-list of resource sources (directories with '
- 'resources or archives created by resource-generating '
- 'tasks.')
+ parser.add_argument('--resource-sources',
+ default=[],
+ action='append',
+ help='GYP-list of resource sources files, similar to '
+ 'java sources files, but for resource files.')
+ parser.add_argument('--resource-zips',
+ default=[],
+ action='append',
+ help='GYP-list of resource zips, zip files of generated '
+ 'resource files.')
parser.add_argument('--silent', action='store_true',
help='If set, script will not log anything.')
- parser.add_argument('--src-dirs',
- help='Directories containing java files.')
parser.add_argument('--srcjars',
help='GN list of included srcjars.')
parser.add_argument('--stamp', help='Path to stamp upon success.')
@@ -362,99 +374,61 @@ def main():
parser.add_argument(
'--manifest-package', help='Package name of the AndroidManifest.xml.')
- args = parser.parse_args(build_utils.ExpandFileArgs(sys.argv[1:]))
+ args = parser.parse_args(build_utils.ExpandFileArgs(argv))
+
+ args.java_sources = build_utils.ParseGnList(args.java_sources)
+ args.srcjars = build_utils.ParseGnList(args.srcjars)
+ args.resource_sources = build_utils.ParseGnList(args.resource_sources)
+ args.resource_zips = build_utils.ParseGnList(args.resource_zips)
+
+ return args
+
+
+def main():
+ build_utils.InitLogging('LINT_DEBUG')
+ args = _ParseArgs(sys.argv[1:])
sources = []
- if args.src_dirs:
- src_dirs = build_utils.ParseGnList(args.src_dirs)
- sources = _FindInDirectories(src_dirs, '*.java')
- elif args.java_sources_file:
- sources.extend(build_utils.ReadSourcesList(args.java_sources_file))
-
- if args.config_path and not args.processed_config_path:
- parser.error('--config-path specified without --processed-config-path')
- elif args.processed_config_path and not args.config_path:
- parser.error('--processed-config-path specified without --config-path')
-
- input_paths = [
- args.lint_path,
- args.platform_xml_path,
- ]
- if args.config_path:
- input_paths.append(args.config_path)
- if args.jar_path:
- input_paths.append(args.jar_path)
- if args.manifest_path:
- input_paths.append(args.manifest_path)
- if sources:
- input_paths.extend(sources)
- classpath = []
- for gyp_list in args.classpath:
- classpath.extend(build_utils.ParseGnList(gyp_list))
- input_paths.extend(classpath)
+ for java_sources_file in args.java_sources:
+ sources.extend(build_utils.ReadSourcesList(java_sources_file))
resource_sources = []
- if args.resource_dir:
- # Backward compatibility with GYP
- resource_sources += [ args.resource_dir ]
-
- for gyp_list in args.resource_sources:
- resource_sources += build_utils.ParseGnList(gyp_list)
-
- for resource_source in resource_sources:
- if os.path.isdir(resource_source):
- input_paths.extend(build_utils.FindInDirectory(resource_source, '*'))
- else:
- input_paths.append(resource_source)
-
- input_strings = [
- args.can_fail_build,
- args.include_unexpected_failures,
- args.silent,
- ]
- if args.android_sdk_version:
- input_strings.append(args.android_sdk_version)
- if args.processed_config_path:
- input_strings.append(args.processed_config_path)
-
- disable = []
- if args.disable:
- disable = build_utils.ParseGnList(args.disable)
- input_strings.extend(disable)
-
- output_paths = [args.stamp]
-
- def on_stale_md5():
- _RunLint(
- args.lint_path,
- args.config_path,
- args.processed_config_path,
- args.manifest_path,
- args.result_path,
- args.product_dir,
- sources,
- args.jar_path,
- args.cache_dir,
- args.android_sdk_version,
- args.srcjars,
- args.min_sdk_version,
- args.manifest_package,
- resource_sources,
- disable=disable,
- classpath=classpath,
- can_fail_build=args.can_fail_build,
- include_unexpected=args.include_unexpected_failures,
- silent=args.silent)
-
- build_utils.Touch(args.stamp)
-
- md5_check.CallAndWriteDepfileIfStale(
- on_stale_md5,
- args,
- input_paths=input_paths,
- input_strings=input_strings,
- output_paths=output_paths,
- depfile_deps=classpath)
+ for resource_sources_file in args.resource_sources:
+ resource_sources.extend(build_utils.ReadSourcesList(resource_sources_file))
+
+ possible_depfile_deps = (args.srcjars + args.resource_zips + sources +
+ resource_sources + [
+ args.manifest_path,
+ ])
+
+ depfile_deps = [p for p in possible_depfile_deps if p]
+
+ _RunLint(args.lint_path,
+ args.config_path,
+ args.manifest_path,
+ args.result_path,
+ args.product_dir,
+ sources,
+ args.cache_dir,
+ args.android_sdk_version,
+ args.srcjars,
+ args.min_sdk_version,
+ args.manifest_package,
+ resource_sources,
+ args.resource_zips,
+ args.android_sdk_root,
+ testonly_target=args.testonly,
+ can_fail_build=args.can_fail_build,
+ include_unexpected=args.include_unexpected_failures,
+ silent=args.silent)
+ logging.info('Creating stamp file')
+ build_utils.Touch(args.stamp)
+
+ if args.depfile:
+ build_utils.WriteDepfile(args.depfile,
+ args.stamp,
+ depfile_deps,
+ add_pydeps=False) # pydeps listed in GN.
if __name__ == '__main__':
diff --git a/chromium/build/android/gyp/lint.pydeps b/chromium/build/android/gyp/lint.pydeps
index 49fbf17c407..d9a96c70194 100644
--- a/chromium/build/android/gyp/lint.pydeps
+++ b/chromium/build/android/gyp/lint.pydeps
@@ -1,8 +1,29 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/lint.pydeps build/android/gyp/lint.py
+../../../third_party/jinja2/__init__.py
+../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/bccache.py
+../../../third_party/jinja2/compiler.py
+../../../third_party/jinja2/defaults.py
+../../../third_party/jinja2/environment.py
+../../../third_party/jinja2/exceptions.py
+../../../third_party/jinja2/filters.py
+../../../third_party/jinja2/idtracking.py
+../../../third_party/jinja2/lexer.py
+../../../third_party/jinja2/loaders.py
+../../../third_party/jinja2/nodes.py
+../../../third_party/jinja2/optimizer.py
+../../../third_party/jinja2/parser.py
+../../../third_party/jinja2/runtime.py
+../../../third_party/jinja2/tests.py
+../../../third_party/jinja2/utils.py
+../../../third_party/jinja2/visitor.py
+../../../third_party/markupsafe/__init__.py
+../../../third_party/markupsafe/_compat.py
+../../../third_party/markupsafe/_native.py
../../gn_helpers.py
lint.py
util/__init__.py
util/build_utils.py
util/manifest_utils.py
-util/md5_check.py
+util/resource_utils.py
diff --git a/chromium/build/android/gyp/prepare_resources.py b/chromium/build/android/gyp/prepare_resources.py
index 854c057ef83..dd5d1e43139 100755
--- a/chromium/build/android/gyp/prepare_resources.py
+++ b/chromium/build/android/gyp/prepare_resources.py
@@ -71,8 +71,8 @@ def _ParseArgs(args):
resource_utils.HandleCommonOptions(options)
with open(options.res_sources_path) as f:
- options.sources = [line.strip() for line in f.readlines()]
- options.resource_dirs = resource_utils.ExtractResourceDirsFromFileList(
+ options.sources = f.read().splitlines()
+ options.resource_dirs = resource_utils.DeduceResourceDirsFromFileList(
options.sources)
return options
diff --git a/chromium/build/android/gyp/proguard.py b/chromium/build/android/gyp/proguard.py
index 3d58b9be49c..18919589382 100755
--- a/chromium/build/android/gyp/proguard.py
+++ b/chromium/build/android/gyp/proguard.py
@@ -9,6 +9,7 @@ import os
import re
import shutil
import sys
+import tempfile
import zipfile
from util import build_utils
@@ -110,9 +111,12 @@ def _ParseOptions():
group.add_argument('--proguard-path', help='Path to the proguard.jar to use.')
group.add_argument('--r8-path', help='Path to the R8.jar to use.')
parser.add_argument(
- '--input-paths', required=True, help='GN-list of .jar files to optimize.')
- parser.add_argument(
- '--output-path', required=True, help='Path to the generated .jar file.')
+ '--desugar-jdk-libs-json', help='Path to desugar_jdk_libs.json.')
+ parser.add_argument('--input-paths',
+ action='append',
+ required=True,
+ help='GN-list of .jar files to optimize.')
+ parser.add_argument('--output-path', help='Path to the generated .jar file.')
parser.add_argument(
'--proguard-configs',
action='append',
@@ -138,6 +142,16 @@ def _ParseOptions():
help='Path to file written to if the expected merged ProGuard configs '
'differ from the generated merged ProGuard configs.')
parser.add_argument(
+ '--fail-on-expectations',
+ action="store_true",
+ help='When passed fails the build on proguard config expectation '
+ 'mismatches.')
+ parser.add_argument(
+ '--only-verify-expectations',
+ action='store_true',
+ help='If passed only verifies that the proguard configs match '
+ 'expectations but does not do any optimization with proguard/R8.')
+ parser.add_argument(
'--classpath',
action='append',
help='GN-list of .jar files to include as libraries.')
@@ -166,11 +180,33 @@ def _ParseOptions():
action='store_true',
help='Forcefully enable javac generated assertion code.')
parser.add_argument(
- '--desugar', action='store_true', help='Enable R8 Desugaring')
-
+ '--feature-jars',
+ action='append',
+ help='GN list of path to jars which comprise the corresponding feature.')
+ parser.add_argument(
+ '--dex-dest',
+ action='append',
+ dest='dex_dests',
+ help='Destination for dex file of the corresponding feature.')
+ parser.add_argument(
+ '--feature-name',
+ action='append',
+ dest='feature_names',
+ help='The name of the feature module.')
+ parser.add_argument(
+ '--stamp',
+ help='File to touch upon success. Mutually exclusive with --output-path')
options = parser.parse_args(args)
+ if options.feature_names:
+ if options.output_path:
+ parser.error('Feature splits cannot specify an output in GN.')
+ if not options.stamp:
+ parser.error('Feature splits require a stamp file as output.')
+ elif not options.output_path:
+ parser.error('Output path required when feature splits aren\'t used')
+
if options.main_dex_rules_path and not options.r8_path:
parser.error('R8 must be enabled to pass main dex rules.')
@@ -180,16 +216,31 @@ def _ParseOptions():
if options.proguard_path and options.disable_outlining:
parser.error('--disable-outlining requires --r8-path')
+ if options.only_verify_expectations and not options.stamp:
+ parser.error('--only-verify-expectations requires --stamp')
+
options.classpath = build_utils.ParseGnList(options.classpath)
options.proguard_configs = build_utils.ParseGnList(options.proguard_configs)
options.input_paths = build_utils.ParseGnList(options.input_paths)
options.extra_mapping_output_paths = build_utils.ParseGnList(
options.extra_mapping_output_paths)
+ if options.feature_names:
+ if 'base' not in options.feature_names:
+ parser.error('"base" feature required when feature arguments are used.')
+ if len(options.feature_names) != len(options.feature_jars) or len(
+ options.feature_names) != len(options.dex_dests):
+ parser.error('Invalid feature argument lengths.')
+
+ options.feature_jars = [
+ build_utils.ParseGnList(x) for x in options.feature_jars
+ ]
+
return options
-def _VerifyExpectedConfigs(expected_path, actual_path, failure_file_path):
+def _VerifyExpectedConfigs(expected_path, actual_path, failure_file_path,
+ fail_on_mismatch):
msg = diff_utils.DiffFileContents(expected_path, actual_path)
if not msg:
return
@@ -205,6 +256,35 @@ https://chromium.googlesource.com/chromium/src/+/HEAD/chrome/android/java/README
with open(failure_file_path, 'w') as f:
f.write(msg_header)
f.write(msg)
+ if fail_on_mismatch:
+ sys.exit(1)
+
+
+class _DexPathContext(object):
+ def __init__(self, name, output_path, input_jars, work_dir):
+ self.name = name
+ self.input_paths = input_jars
+ self._final_output_path = output_path
+ self.staging_dir = os.path.join(work_dir, name)
+ os.mkdir(self.staging_dir)
+
+ def CreateOutput(self):
+ found_files = build_utils.FindInDirectory(self.staging_dir)
+ if not found_files:
+ raise Exception('Missing dex outputs in {}'.format(self.staging_dir))
+
+ if self._final_output_path.endswith('.dex'):
+ if len(found_files) != 1:
+ raise Exception('Expected exactly 1 dex file output, found: {}'.format(
+ '\t'.join(found_files)))
+ shutil.move(found_files[0], self._final_output_path)
+ return
+
+ # Add to .jar using Python rather than having R8 output to a .zip directly
+ # in order to disable compression of the .jar, saving ~500ms.
+ tmp_jar_output = self.staging_dir + '.jar'
+ build_utils.DoZip(found_files, tmp_jar_output, base_dir=self.staging_dir)
+ shutil.move(tmp_jar_output, self._final_output_path)
def _OptimizeWithR8(options,
@@ -227,25 +307,33 @@ def _OptimizeWithR8(options,
tmp_output = os.path.join(tmp_dir, 'r8out')
os.mkdir(tmp_output)
+ feature_contexts = []
+ if options.feature_names:
+ for name, dest_dex, input_paths in zip(
+ options.feature_names, options.dex_dests, options.feature_jars):
+ feature_context = _DexPathContext(name, dest_dex, input_paths,
+ tmp_output)
+ if name == 'base':
+ base_dex_context = feature_context
+ else:
+ feature_contexts.append(feature_context)
+ else:
+ base_dex_context = _DexPathContext('base', options.output_path,
+ options.input_paths, tmp_output)
+
cmd = [
build_utils.JAVA_PATH,
'-jar',
options.r8_path,
'--no-data-resources',
'--output',
- tmp_output,
+ base_dex_context.staging_dir,
'--pg-map-output',
tmp_mapping_path,
]
- if not options.desugar:
- cmd += ['--no-desugaring']
-
- for lib in libraries:
- cmd += ['--lib', lib]
-
- for config_file in config_paths:
- cmd += ['--pg-conf', config_file]
+ if options.desugar_jdk_libs_json:
+ cmd += ['--desugared-lib', options.desugar_jdk_libs_json]
if options.min_api:
cmd += ['--min-api', options.min_api]
@@ -253,11 +341,31 @@ def _OptimizeWithR8(options,
if options.force_enable_assertions:
cmd += ['--force-enable-assertions']
+ for lib in libraries:
+ cmd += ['--lib', lib]
+
+ for config_file in config_paths:
+ cmd += ['--pg-conf', config_file]
+
if options.main_dex_rules_path:
for main_dex_rule in options.main_dex_rules_path:
cmd += ['--main-dex-rules', main_dex_rule]
- cmd += options.input_paths
+ module_input_jars = set(base_dex_context.input_paths)
+ for feature in feature_contexts:
+ feature_input_jars = [
+ p for p in feature.input_paths if p not in module_input_jars
+ ]
+ module_input_jars.update(feature_input_jars)
+ cmd += [
+ '--feature-jar',
+ feature.staging_dir + ':' + ':'.join(feature_input_jars)
+ ]
+
+ cmd += base_dex_context.input_paths
+ # Add any extra input jars to the base module (e.g. desugar runtime).
+ extra_jars = set(options.input_paths) - module_input_jars
+ cmd += sorted(extra_jars)
env = os.environ.copy()
stderr_filter = lambda l: re.sub(r'.*_JAVA_OPTIONS.*\n?', '', l)
@@ -274,17 +382,9 @@ def _OptimizeWithR8(options,
'android/docs/java_optimization.md#Debugging-common-failures\n'))
raise ProguardProcessError(err, debugging_link)
- found_files = build_utils.FindInDirectory(tmp_output)
- if not options.output_path.endswith('.dex'):
- # Add to .jar using Python rather than having R8 output to a .zip directly
- # in order to disable compression of the .jar, saving ~500ms.
- tmp_jar_output = tmp_output + '.jar'
- build_utils.DoZip(found_files, tmp_jar_output, base_dir=tmp_output)
- shutil.move(tmp_jar_output, options.output_path)
- else:
- if len(found_files) > 1:
- raise Exception('Too many files created: {}'.format(found_files))
- shutil.move(found_files[0], options.output_path)
+ base_dex_context.CreateOutput()
+ for feature in feature_contexts:
+ feature.CreateOutput()
with open(options.mapping_output, 'w') as out_file, \
open(tmp_mapping_path) as in_file:
@@ -439,6 +539,16 @@ def _ContainsDebuggingConfig(config_str):
return any(config in config_str for config in debugging_configs)
+def _MaybeWriteStampAndDepFile(options, inputs):
+ output = options.output_path
+ if options.stamp:
+ build_utils.Touch(options.stamp)
+ output = options.stamp
+ if options.depfile:
+ build_utils.WriteDepfile(
+ options.depfile, output, inputs=inputs, add_pydeps=False)
+
+
def main():
options = _ParseOptions()
@@ -466,6 +576,18 @@ def main():
proguard_configs, dynamic_config_data, exclude_generated=True)
print_stdout = _ContainsDebuggingConfig(merged_configs) or options.verbose
+
+ if options.expected_configs_file:
+ with tempfile.NamedTemporaryFile() as f:
+ f.write(merged_configs)
+ f.flush()
+ _VerifyExpectedConfigs(options.expected_configs_file, f.name,
+ options.proguard_expectations_failure_file,
+ options.fail_on_expectations)
+ if options.only_verify_expectations:
+ _MaybeWriteStampAndDepFile(options, options.proguard_configs)
+ return
+
# Writing the config output before we know ProGuard is going to succeed isn't
# great, since then a failure will result in one of the outputs being updated.
# We do it anyways though because the error message prints out the path to the
@@ -475,11 +597,6 @@ def main():
with open(options.output_config, 'w') as f:
f.write(merged_configs)
- if options.expected_configs_file:
- _VerifyExpectedConfigs(options.expected_configs_file,
- options.output_config,
- options.proguard_expectations_failure_file)
-
if options.r8_path:
_OptimizeWithR8(options, proguard_configs, libraries, dynamic_config_data,
print_stdout)
@@ -495,8 +612,7 @@ def main():
if options.apply_mapping:
inputs.append(options.apply_mapping)
- build_utils.WriteDepfile(
- options.depfile, options.output_path, inputs=inputs, add_pydeps=False)
+ _MaybeWriteStampAndDepFile(options, inputs)
if __name__ == '__main__':
diff --git a/chromium/build/android/gyp/util/diff_utils.py b/chromium/build/android/gyp/util/diff_utils.py
index bac85280fa5..5d9179eeb7f 100755
--- a/chromium/build/android/gyp/util/diff_utils.py
+++ b/chromium/build/android/gyp/util/diff_utils.py
@@ -23,7 +23,18 @@ def _SkipOmitted(line):
return line
-def DiffFileContents(expected_path, actual_path):
+def GenerateDiffWithOnlyAdditons(expected_path, actual_path):
+ """Generate a diff that only contains additions"""
+ with open(expected_path) as expected, open(actual_path) as actual:
+ expected_lines = expected.readlines()
+ actual_lines = actual.readlines()
+
+ diff = difflib.ndiff(expected_lines, actual_lines)
+ filtered_diff = (line for line in diff if line.startswith('+'))
+ return ''.join(filtered_diff)
+
+
+def DiffFileContents(expected_path, actual_path, show_files_compared=True):
"""Check file contents for equality and return the diff or None."""
with open(expected_path) as f_expected, open(actual_path) as f_actual:
expected_lines = f_expected.readlines()
@@ -42,12 +53,13 @@ def DiffFileContents(expected_path, actual_path):
tofile=os.path.join('after', expected_path),
n=0)
- # Space added before "patch" so that giant command is not put in bash history.
- return """\
+ files_compared_msg = """\
Files Compared:
* {}
* {}
+ """.format(expected_path, actual_path)
+ patch_msg = """\
If you are looking at this through LogDog, click "Raw log" before copying.
See https://bugs.chromium.org/p/chromium/issues/detail?id=984616.
@@ -57,4 +69,6 @@ To update the file, run:
{}
END_DIFF
############ END ############
-""".format(expected_path, actual_path, ''.join(diff).rstrip())
+""".format(''.join(diff).rstrip())
+
+ return files_compared_msg + patch_msg if show_files_compared else patch_msg
diff --git a/chromium/build/android/gyp/util/manifest_utils.py b/chromium/build/android/gyp/util/manifest_utils.py
index 309230fd860..984201ecdd0 100644
--- a/chromium/build/android/gyp/util/manifest_utils.py
+++ b/chromium/build/android/gyp/util/manifest_utils.py
@@ -120,6 +120,34 @@ def NormalizeManifest(path):
with open(path) as f:
# This also strips comments and sorts node attributes alphabetically.
root = ElementTree.fromstring(f.read())
+ package = GetPackage(root)
+
+ # Trichrome's static library version number is updated daily. To avoid
+ # frequent manifest check failures, we remove the exact version number
+ # during normalization.
+ app_node = root.find('application')
+ if app_node is not None:
+ for node in app_node.getchildren():
+ if (node.tag in ['uses-static-library', 'static-library']
+ and '{%s}version' % ANDROID_NAMESPACE in node.keys()
+ and '{%s}name' % ANDROID_NAMESPACE in node.keys()):
+ node.set('{%s}version' % ANDROID_NAMESPACE, '$VERSION_NUMBER')
+
+ # We also remove the exact package name (except the one at the root level)
+ # to avoid noise during manifest comparison.
+ def blur_package_name(node):
+ for key in node.keys():
+ node.set(key, node.get(key).replace(package, '$PACKAGE'))
+
+ for child in node.getchildren():
+ blur_package_name(child)
+
+ # We only blur the package names of non-root nodes because they generate a lot
+ # of diffs when doing manifest checks for upstream targets. We still want to
+ # have 1 piece of package name not blurred just in case the package name is
+ # mistakenly changed.
+ for child in root.getchildren():
+ blur_package_name(child)
# Sort nodes alphabetically, recursively.
_SortAndStripElementTree(root, reverse_toplevel=True)
@@ -138,4 +166,4 @@ def NormalizeManifest(path):
else:
lines.append(l)
- return '\n'.join(lines)
+ return '\n'.join(lines) + '\n'
diff --git a/chromium/build/android/gyp/util/protoresources.py b/chromium/build/android/gyp/util/protoresources.py
new file mode 100644
index 00000000000..1d5472d1734
--- /dev/null
+++ b/chromium/build/android/gyp/util/protoresources.py
@@ -0,0 +1,308 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Functions that modify resources in protobuf format.
+
+Format reference:
+https://cs.android.com/android/platform/superproject/+/master:frameworks/base/tools/aapt2/Resources.proto
+"""
+
+import logging
+import os
+import struct
+import sys
+import zipfile
+
+from util import build_utils
+from util import resource_utils
+
+sys.path[1:1] = [
+ # `Resources_pb2` module imports `descriptor`, which imports `six`.
+ os.path.join(build_utils.DIR_SOURCE_ROOT, 'third_party', 'six', 'src'),
+ # Make sure the pb2 files are able to import google.protobuf
+ os.path.join(build_utils.DIR_SOURCE_ROOT, 'third_party', 'protobuf',
+ 'python'),
+]
+
+from proto import Resources_pb2
+
+# First bytes in an .flat.arsc file.
+# uint32: Magic ("ARSC"), version (1), num_entries (1), type (0)
+_FLAT_ARSC_HEADER = 'AAPT\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00'
+
+# The package ID hardcoded for shared libraries. See
+# _HardcodeSharedLibraryDynamicAttributes() for more details. If this value
+# changes make sure to change REQUIRED_PACKAGE_IDENTIFIER in WebLayerImpl.java.
+SHARED_LIBRARY_HARDCODED_ID = 12
+
+
+def _ProcessZip(zip_path, process_func):
+ """Filters a .zip file via: new_bytes = process_func(filename, data)."""
+ has_changes = False
+ zip_entries = []
+ with zipfile.ZipFile(zip_path) as src_zip:
+ for info in src_zip.infolist():
+ data = src_zip.read(info)
+ new_data = process_func(info.filename, data)
+ if new_data is not data:
+ has_changes = True
+ data = new_data
+ zip_entries.append((info, data))
+
+ # Overwrite the original zip file.
+ if has_changes:
+ with zipfile.ZipFile(zip_path, 'w') as f:
+ for info, data in zip_entries:
+ f.writestr(info, data)
+
+
+def _ProcessProtoItem(item):
+ if not item.HasField('ref'):
+ return
+
+ # If this is a dynamic attribute (type ATTRIBUTE, package ID 0), hardcode
+ # the package to SHARED_LIBRARY_HARDCODED_ID.
+ if item.ref.type == Resources_pb2.Reference.ATTRIBUTE and not (item.ref.id
+ & 0xff000000):
+ item.ref.id |= (0x01000000 * SHARED_LIBRARY_HARDCODED_ID)
+ item.ref.ClearField('is_dynamic')
+
+
+def _ProcessProtoValue(value):
+ if value.HasField('item'):
+ _ProcessProtoItem(value.item)
+ return
+
+ compound_value = value.compound_value
+ if compound_value.HasField('style'):
+ for entry in compound_value.style.entry:
+ _ProcessProtoItem(entry.item)
+ elif compound_value.HasField('array'):
+ for element in compound_value.array.element:
+ _ProcessProtoItem(element.item)
+ elif compound_value.HasField('plural'):
+ for entry in compound_value.plural.entry:
+ _ProcessProtoItem(entry.item)
+
+
+def _ProcessProtoXmlNode(xml_node):
+ if not xml_node.HasField('element'):
+ return
+
+ for attribute in xml_node.element.attribute:
+ _ProcessProtoItem(attribute.compiled_item)
+
+ for child in xml_node.element.child:
+ _ProcessProtoXmlNode(child)
+
+
+def _SplitLocaleResourceType(_type, allowed_resource_names):
+ """Splits locale specific resources out of |_type| and returns them.
+
+ Any locale specific resources will be removed from |_type|, and a new
+ Resources_pb2.Type value will be returned which contains those resources.
+
+ Args:
+ _type: A Resources_pb2.Type value
+ allowed_resource_names: Names of locale resources that should be kept in the
+ main type.
+ """
+ locale_entries = []
+ for entry in _type.entry:
+ if entry.name in allowed_resource_names:
+ continue
+
+ # First collect all resources values with a locale set.
+ config_values_with_locale = []
+ for config_value in entry.config_value:
+ if config_value.config.locale:
+ config_values_with_locale.append(config_value)
+
+ if config_values_with_locale:
+ # Remove the locale resources from the original entry
+ for value in config_values_with_locale:
+ entry.config_value.remove(value)
+
+ # Add locale resources to a new Entry, and save for later.
+ locale_entry = Resources_pb2.Entry()
+ locale_entry.CopyFrom(entry)
+ del locale_entry.config_value[:]
+ locale_entry.config_value.extend(config_values_with_locale)
+ locale_entries.append(locale_entry)
+
+ if not locale_entries:
+ return None
+
+ # Copy the original type and replace the entries with |locale_entries|.
+ locale_type = Resources_pb2.Type()
+ locale_type.CopyFrom(_type)
+ del locale_type.entry[:]
+ locale_type.entry.extend(locale_entries)
+ return locale_type
+
+
+def _HardcodeInTable(table, is_bundle_module, shared_resources_allowlist):
+ translations_package = None
+ if is_bundle_module:
+ # A separate top level package will be added to the resources, which
+ # contains only locale specific resources. The package ID of the locale
+ # resources is hardcoded to SHARED_LIBRARY_HARDCODED_ID. This causes
+ # resources in locale splits to all get assigned
+ # SHARED_LIBRARY_HARDCODED_ID as their package ID, which prevents a bug
+ # in shared library bundles where each split APK gets a separate dynamic
+ # ID, and cannot be accessed by the main APK.
+ translations_package = Resources_pb2.Package()
+ translations_package.package_id.id = SHARED_LIBRARY_HARDCODED_ID
+ translations_package.package_name = (table.package[0].package_name +
+ '_translations')
+
+ # These resources are allowed in the base resources, since they are needed
+ # by WebView.
+ allowed_resource_names = set()
+ if shared_resources_allowlist:
+ allowed_resource_names = set(
+ resource_utils.GetRTxtStringResourceNames(shared_resources_allowlist))
+
+ for package in table.package:
+ for _type in package.type:
+ for entry in _type.entry:
+ for config_value in entry.config_value:
+ _ProcessProtoValue(config_value.value)
+
+ if translations_package is not None:
+ locale_type = _SplitLocaleResourceType(_type, allowed_resource_names)
+ if locale_type:
+ translations_package.type.add().CopyFrom(locale_type)
+
+ if translations_package is not None:
+ table.package.add().CopyFrom(translations_package)
+
+
+def HardcodeSharedLibraryDynamicAttributes(zip_path,
+ is_bundle_module,
+ shared_resources_allowlist=None):
+ """Hardcodes the package IDs of dynamic attributes and locale resources.
+
+ Hardcoding dynamic attribute package IDs is a workaround for b/147674078,
+ which affects Android versions pre-N. Hardcoding locale resource package IDs
+ is a workaround for b/155437035, which affects resources built with
+ --shared-lib on all Android versions
+
+ Args:
+ zip_path: Path to proto APK file.
+ is_bundle_module: True for bundle modules.
+ shared_resources_allowlist: Set of resource names to not extract out of the
+ main package.
+ """
+
+ def process_func(filename, data):
+ if filename == 'resources.pb':
+ table = Resources_pb2.ResourceTable()
+ table.ParseFromString(data)
+ _HardcodeInTable(table, is_bundle_module, shared_resources_allowlist)
+ data = table.SerializeToString()
+ elif filename.endswith('.xml'):
+ xml_node = Resources_pb2.XmlNode()
+ xml_node.ParseFromString(data)
+ _ProcessProtoXmlNode(xml_node)
+ data = xml_node.SerializeToString()
+ return data
+
+ _ProcessZip(zip_path, process_func)
+
+
+class _ResourceStripper(object):
+ def __init__(self, partial_path, keep_predicate):
+ self.partial_path = partial_path
+ self.keep_predicate = keep_predicate
+ self._has_changes = False
+
+ @staticmethod
+ def _IterStyles(entry):
+ for config_value in entry.config_value:
+ value = config_value.value
+ if value.HasField('compound_value'):
+ compound_value = value.compound_value
+ if compound_value.HasField('style'):
+ yield compound_value.style
+
+ def _StripStyles(self, entry, type_and_name):
+ # Strip style entries that refer to attributes that have been stripped.
+ for style in self._IterStyles(entry):
+ entries = style.entry
+ new_entries = []
+ for entry in entries:
+ full_name = '{}/{}'.format(type_and_name, entry.key.name)
+ if not self.keep_predicate(full_name):
+ logging.debug('Stripped %s/%s', self.partial_path, full_name)
+ else:
+ new_entries.append(entry)
+
+ if len(new_entries) != len(entries):
+ self._has_changes = True
+ del entries[:]
+ entries.extend(new_entries)
+
+ def _StripEntries(self, entries, type_name):
+ new_entries = []
+ for entry in entries:
+ type_and_name = '{}/{}'.format(type_name, entry.name)
+ if not self.keep_predicate(type_and_name):
+ logging.debug('Stripped %s/%s', self.partial_path, type_and_name)
+ else:
+ new_entries.append(entry)
+ self._StripStyles(entry, type_and_name)
+
+ if len(new_entries) != len(entries):
+ self._has_changes = True
+ del entries[:]
+ entries.extend(new_entries)
+
+ def StripTable(self, table):
+ self._has_changes = False
+ for package in table.package:
+ for _type in package.type:
+ self._StripEntries(_type.entry, _type.name)
+ return self._has_changes
+
+
+def _TableFromFlatBytes(data):
+ # https://cs.android.com/android/platform/superproject/+/master:frameworks/base/tools/aapt2/format/Container.cpp
+ size_idx = len(_FLAT_ARSC_HEADER)
+ proto_idx = size_idx + 8
+ if data[:size_idx] != _FLAT_ARSC_HEADER:
+ raise Exception('Error parsing {} in {}'.format(info.filename, zip_path))
+ # Size is stored as uint64.
+ size = struct.unpack('<Q', data[size_idx:proto_idx])[0]
+ table = Resources_pb2.ResourceTable()
+ proto_bytes = data[proto_idx:proto_idx + size]
+ table.ParseFromString(proto_bytes)
+ return table
+
+
+def _FlatBytesFromTable(table):
+ proto_bytes = table.SerializeToString()
+ size = struct.pack('<Q', len(proto_bytes))
+ overage = len(proto_bytes) % 4
+ padding = '\0' * (4 - overage) if overage else ''
+ return ''.join((_FLAT_ARSC_HEADER, size, proto_bytes, padding))
+
+
+def StripUnwantedResources(partial_path, keep_predicate):
+ """Removes resources from .arsc.flat files inside of a .zip.
+
+ Args:
+ partial_path: Path to a .zip containing .arsc.flat entries
+ keep_predicate: Given "$partial_path/$res_type/$res_name", returns
+ whether to keep the resource.
+ """
+ stripper = _ResourceStripper(partial_path, keep_predicate)
+
+ def process_file(filename, data):
+ if filename.endswith('.arsc.flat'):
+ table = _TableFromFlatBytes(data)
+ if stripper.StripTable(table):
+ data = _FlatBytesFromTable(table)
+ return data
+
+ _ProcessZip(partial_path, process_file)
diff --git a/chromium/build/android/gyp/util/resource_utils.py b/chromium/build/android/gyp/util/resource_utils.py
index ea4696c41c4..1b92c4fb49e 100644
--- a/chromium/build/android/gyp/util/resource_utils.py
+++ b/chromium/build/android/gyp/util/resource_utils.py
@@ -5,6 +5,7 @@
import argparse
import collections
import contextlib
+import itertools
import os
import re
import shutil
@@ -28,6 +29,7 @@ from jinja2 import Template # pylint: disable=F0401
# //ui/android/java/src/org/chromium/base/LocalizationUtils.java
_CHROME_TO_ANDROID_LOCALE_MAP = {
'es-419': 'es-rUS',
+ 'sr-Latn': 'b+sr+Latn',
'fil': 'tl',
'he': 'iw',
'id': 'in',
@@ -91,35 +93,38 @@ def ToAndroidLocaleName(chromium_locale):
_RE_ANDROID_LOCALE_QUALIFIER_1 = re.compile(r'^([a-z]{2,3})(\-r([A-Z]+))?$')
# Starting with Android 7.0/Nougat, BCP 47 codes are supported but must
-# be prefixed with 'b+', and may include optional tags. e.g. 'b+en+US',
-# 'b+ja+Latn', 'b+ja+JP+Latn'
+# be prefixed with 'b+', and may include optional tags.
+# e.g. 'b+en+US', 'b+ja+Latn', 'b+ja+Latn+JP'
_RE_ANDROID_LOCALE_QUALIFIER_2 = re.compile(r'^b\+([a-z]{2,3})(\+.+)?$')
-# Matches an all-uppercase region name.
-_RE_ALL_UPPERCASE = re.compile(r'^[A-Z]+$')
-
def ToChromiumLocaleName(android_locale):
"""Convert an Android locale name into a Chromium one."""
lang = None
region = None
+ script = None
m = _RE_ANDROID_LOCALE_QUALIFIER_1.match(android_locale)
if m:
lang = m.group(1)
if m.group(2):
region = m.group(3)
- else:
- m = _RE_ANDROID_LOCALE_QUALIFIER_2.match(android_locale)
- if m:
- lang = m.group(1)
- if m.group(2):
- tags = m.group(2).split('+')
- # First all-uppercase tag is a region. This deals with cases where
- # a special tag is placed before it (e.g. 'cmn+Hant-TW')
- for tag in tags:
- if _RE_ALL_UPPERCASE.match(tag):
- region = tag
- break
+ elif _RE_ANDROID_LOCALE_QUALIFIER_2.match(android_locale):
+ # Split an Android BCP-47 locale (e.g. b+sr+Latn+RS)
+ tags = android_locale.split('+')
+
+ # The Lang tag is always the first tag.
+ lang = tags[1]
+
+ # The optional region tag is a 2ALPHA or 3DIGIT tag in pos 1 or 2.
+ # The optional script tag is 4ALPHA and always in pos 1.
+ optional_tags = iter(tags[2:])
+
+ next_tag = next(optional_tags, None)
+ if next_tag and len(next_tag) == 4:
+ script = next_tag
+ next_tag = next(optional_tags, None)
+ if next_tag and len(next_tag) < 4:
+ region = next_tag
if not lang:
return None
@@ -129,6 +134,10 @@ def ToChromiumLocaleName(android_locale):
return 'es-419'
lang = _ANDROID_TO_CHROMIUM_LANGUAGE_MAP.get(lang, lang)
+
+ if script:
+ lang = '%s-%s' % (lang, script)
+
if not region:
return lang
@@ -179,8 +188,7 @@ def _GenerateGlobs(pattern):
return pattern.replace('!', '').split(':')
-def ExtractResourceDirsFromFileList(resource_files,
- ignore_pattern=AAPT_IGNORE_PATTERN):
+def DeduceResourceDirsFromFileList(resource_files):
"""Return a list of resource directories from a list of resource files."""
# Directory list order is important, cannot use set or other data structures
# that change order. This is because resource files of the same name in
@@ -188,15 +196,27 @@ def ExtractResourceDirsFromFileList(resource_files,
# Thus the order must be maintained to prevent non-deterministic and possibly
# flakey builds.
resource_dirs = []
- globs = _GenerateGlobs(ignore_pattern)
for resource_path in resource_files:
- if build_utils.MatchesGlob(os.path.basename(resource_path), globs):
- # Ignore non-resource files like OWNERS and the like.
- continue
# Resources are always 1 directory deep under res/.
res_dir = os.path.dirname(os.path.dirname(resource_path))
if res_dir not in resource_dirs:
resource_dirs.append(res_dir)
+
+ # Check if any resource_dirs are children of other ones. This indicates that a
+ # file was listed that is not exactly 1 directory deep under res/.
+ # E.g.:
+ # sources = ["java/res/values/foo.xml", "java/res/README.md"]
+ # ^^ This will cause "java" to be detected as resource directory.
+ for a, b in itertools.permutations(resource_dirs, 2):
+ if not os.path.relpath(a, b).startswith('..'):
+ bad_sources = (s for s in resource_files
+ if os.path.dirname(os.path.dirname(s)) == b)
+ msg = """\
+Resource(s) found that are not in a proper directory structure:
+ {}
+All resource files must follow a structure of "$ROOT/$SUBDIR/$FILE"."""
+ raise Exception(msg.format('\n '.join(bad_sources)))
+
return resource_dirs
@@ -318,12 +338,12 @@ def _FixPackageIds(resource_value):
# Resource IDs for resources belonging to regular APKs have their first byte
# as 0x7f (package id). However with webview, since it is not a regular apk
# but used as a shared library, aapt is passed the --shared-resources flag
- # which changes some of the package ids to 0x00 and 0x02. This function
- # normalises these (0x00 and 0x02) package ids to 0x7f, which the generated
- # code in R.java changes to the correct package id at runtime.
- # resource_value is a string with either, a single value '0x12345678', or an
- # array of values like '{ 0xfedcba98, 0x01234567, 0x56789abc }'
- return re.sub(r'0x(?:00|02)', r'0x7f', resource_value)
+ # which changes some of the package ids to 0x00. This function normalises
+ # these (0x00) package ids to 0x7f, which the generated code in R.java changes
+ # to the correct package id at runtime. resource_value is a string with
+ # either a single value '0x12345678', or an array of values like '{
+ # 0xfedcba98, 0x01234567, 0x56789abc }'
+ return resource_value.replace('0x00', '0x7f')
def _GetRTxtResourceNames(r_txt_path):
@@ -379,6 +399,7 @@ class RJavaBuildOptions:
self.resources_allowlist = None
self.has_on_resources_loaded = False
self.export_const_styleable = False
+ self.final_package_id = None
def ExportNoResources(self):
"""Make all resource IDs final, and don't generate a method."""
@@ -422,6 +443,30 @@ class RJavaBuildOptions:
"""
self.has_on_resources_loaded = True
+ def SetFinalPackageId(self, package_id):
+ """Sets a package ID to be used for resources marked final."""
+ self.final_package_id = package_id
+
+ def _MaybeRewriteRTxtPackageIds(self, r_txt_path):
+ """Rewrites package IDs in the R.txt file if necessary.
+
+ If SetFinalPackageId() was called, some of the resource IDs may have had
+ their package ID changed. This function rewrites the R.txt file to match
+ those changes.
+ """
+ if self.final_package_id is None:
+ return
+
+ entries = _ParseTextSymbolsFile(r_txt_path)
+ with open(r_txt_path, 'w') as f:
+ for entry in entries:
+ value = entry.value
+ if self._IsResourceFinal(entry):
+ value = re.sub(r'0x(?:00|7f)',
+ '0x{:02x}'.format(self.final_package_id), value)
+ f.write('{} {} {} {}\n'.format(entry.java_type, entry.resource_type,
+ entry.name, value))
+
def _IsResourceFinal(self, entry):
"""Determines whether a resource should be final or not.
@@ -482,6 +527,7 @@ def CreateRJavaFiles(srcjar_dir,
"""
assert len(extra_res_packages) == len(extra_r_txt_files), \
'Need one R.txt file per package'
+ rjava_build_options._MaybeRewriteRTxtPackageIds(main_r_txt_file)
packages = list(extra_res_packages)
r_txt_files = list(extra_r_txt_files)
@@ -833,6 +879,7 @@ class _ResourceBuildContext(object):
if temp_dir:
self.temp_dir = temp_dir
self.remove_on_exit = not keep_files
+ os.makedirs(temp_dir)
else:
self.temp_dir = tempfile.mkdtemp()
self.remove_on_exit = True
diff --git a/chromium/build/android/gyp/util/resource_utils_test.py b/chromium/build/android/gyp/util/resource_utils_test.py
index 60bfcf906c9..3026889460e 100755
--- a/chromium/build/android/gyp/util/resource_utils_test.py
+++ b/chromium/build/android/gyp/util/resource_utils_test.py
@@ -179,11 +179,17 @@ class ResourceUtilsTest(unittest.TestCase):
_TEST_ANDROID_TO_CHROMIUM_LOCALE_MAP = {
'foo': 'foo',
'foo-rBAR': 'foo-BAR',
- 'b+foo': 'foo',
- 'b+foo+BAR': 'foo-BAR',
- 'b+foo+BAR+Whatever': 'foo-BAR',
- 'b+foo+Whatever+BAR': 'foo-BAR',
- 'b+foo+Whatever': 'foo',
+ 'b+lll': 'lll',
+ 'b+ll+Extra': 'll',
+ 'b+ll+RR': 'll-RR',
+ 'b+lll+RR+Extra': 'lll-RR',
+ 'b+ll+RRR+Extra': 'll-RRR',
+ 'b+ll+Ssss': 'll-Ssss',
+ 'b+ll+Ssss+Extra': 'll-Ssss',
+ 'b+ll+Ssss+RR': 'll-Ssss-RR',
+ 'b+ll+Ssss+RRR': 'll-Ssss-RRR',
+ 'b+ll+Ssss+RRR+Extra': 'll-Ssss-RRR',
+ 'b+ll+Whatever': 'll',
'en': 'en',
'en-rUS': 'en-US',
'en-US': None,
@@ -195,6 +201,7 @@ class ResourceUtilsTest(unittest.TestCase):
'fil': 'fil',
'iw': 'he',
'iw-rIL': 'he-IL',
+ 'b+iw+IL': 'he-IL',
'in': 'id',
'in-rBAR': 'id-BAR',
'id-rBAR': 'id-BAR',
diff --git a/chromium/build/android/gyp/write_build_config.py b/chromium/build/android/gyp/write_build_config.py
index f96e3292201..02b02fcd538 100755
--- a/chromium/build/android/gyp/write_build_config.py
+++ b/chromium/build/android/gyp/write_build_config.py
@@ -264,13 +264,25 @@ In this case, `deps_info['unprocessed_jar_path']` will point to the source
Path to a single `.sources` file listing all the Java sources that were used
to generate the library (simple text format, one `.jar` path per line).
-* `deps_info['owned_resource_dirs']`:
-List of all resource directories belonging to all resource dependencies for
-this target.
+* `deps_info['lint_android_manifest']`:
+Path to an AndroidManifest.xml file to use for this lint target.
-* `deps_info['owned_resource_zips']`:
-List of all resource zip files belonging to all resource dependencies for this
-target.
+* `deps_info['lint_java_sources']`:
+The list of all `deps_info['java_sources_file']` entries for all library
+dependencies that are chromium code. Note: this is a list of files, where each
+file contains a list of Java source files. This is used for lint.
+
+* `deps_info['lint_srcjars']`:
+List of all bundled srcjars of all transitive java library targets. Excludes
+non-chromium java libraries.
+
+* `deps_info['lint_resource_sources']`:
+List of all resource sources files belonging to all transitive resource
+dependencies of this target. Excludes resources owned by non-chromium code.
+
+* `deps_info['lint_resource_zips']`:
+List of all resource zip files belonging to all transitive resource dependencies
+of this target. Excludes resources owned by non-chromium code.
* `deps_info['owned_resource_srcjars']`:
List of all .srcjar files belonging to all resource dependencies for this
@@ -386,6 +398,12 @@ or instrumentation libraries.
* `native['secondary_abi_loadable_modules']`
Secondary ABI version of loadable_modules
+* `native['library_always_compress']`
+A list of library files that we always compress.
+
+* `native['library_renames']`
+A list of library files that we prepend "crazy." to their file names.
+
* `assets`
A list of assets stored compressed in the APK. Each entry has the format
`<source-path>:<destination-path>`, where `<source-path>` is relative to
@@ -860,8 +878,9 @@ def main(argv):
parser.add_option('--jar-path', help='Path to target\'s jar output.')
parser.add_option('--unprocessed-jar-path',
help='Path to the .jar to use for javac classpath purposes.')
- parser.add_option('--interface-jar-path',
- help='Path to the .interface.jar to use for javac classpath purposes.')
+ parser.add_option(
+ '--interface-jar-path',
+ help='Path to the interface .jar to use for javac classpath purposes.')
parser.add_option(
'--jetified-jar-path',
help='Path to the jetified.jar to use for javac classpath purposes.')
@@ -892,6 +911,10 @@ def main(argv):
help='Path to JAR that contains java resources. Everything '
'from this JAR except meta-inf/ content and .class files '
'will be added to the final APK.')
+ parser.add_option(
+ '--non-chromium-code',
+ action='store_true',
+ help='True if a java library is not chromium code, used for lint.')
# android library options
parser.add_option('--dex-path', help='Path to target\'s dex output.')
@@ -929,6 +952,14 @@ def main(argv):
parser.add_option('--uncompress-shared-libraries', default=False,
action='store_true',
help='Whether to store native libraries uncompressed')
+ parser.add_option(
+ '--library-always-compress',
+ help='The list of library files that we always compress.')
+ parser.add_option(
+ '--library-renames',
+ default=[],
+ help='The list of library files that we prepend crazy. to their names.')
+
# apk options
parser.add_option('--apk-path', help='Path to the target\'s apk output.')
parser.add_option('--incremental-apk-path',
@@ -1048,11 +1079,19 @@ def main(argv):
is_apk_or_module_target = options.type in ('android_apk',
'android_app_bundle_module')
- if options.uncompress_shared_libraries:
- if not is_apk_or_module_target:
+ if not is_apk_or_module_target:
+ if options.uncompress_shared_libraries:
raise Exception('--uncompressed-shared-libraries can only be used '
'with --type=android_apk or '
'--type=android_app_bundle_module')
+ if options.library_always_compress:
+ raise Exception(
+ '--library-always-compress can only be used with --type=android_apk '
+ 'or --type=android_app_bundle_module')
+ if options.library_renames:
+ raise Exception(
+ '--library-renames can only be used with --type=android_apk or '
+ '--type=android_app_bundle_module')
if options.jar_path and options.supports_android and not options.dex_path:
raise Exception('java_library that supports Android requires a dex path.')
@@ -1109,14 +1148,15 @@ def main(argv):
# Initialize some common config.
# Any value that needs to be queryable by dependents must go within deps_info.
config = {
- 'deps_info': {
- 'name': os.path.basename(options.build_config),
- 'path': options.build_config,
- 'type': options.type,
- 'deps_configs': deps.direct_deps_config_paths
- },
- # Info needed only by generate_gradle.py.
- 'gradle': {}
+ 'deps_info': {
+ 'name': os.path.basename(options.build_config),
+ 'path': options.build_config,
+ 'type': options.type,
+ 'deps_configs': deps.direct_deps_config_paths,
+ 'chromium_code': not options.non_chromium_code,
+ },
+ # Info needed only by generate_gradle.py.
+ 'gradle': {}
}
deps_info = config['deps_info']
gradle = config['gradle']
@@ -1140,12 +1180,16 @@ def main(argv):
if options.android_manifest:
deps_info['android_manifest'] = options.android_manifest
+ if options.bundled_srcjars:
+ deps_info['bundled_srcjars'] = build_utils.ParseGnList(
+ options.bundled_srcjars)
+
+ if options.java_sources_file:
+ deps_info['java_sources_file'] = options.java_sources_file
+
if is_java_target:
- if options.java_sources_file:
- deps_info['java_sources_file'] = options.java_sources_file
if options.bundled_srcjars:
- gradle['bundled_srcjars'] = (
- build_utils.ParseGnList(options.bundled_srcjars))
+ gradle['bundled_srcjars'] = deps_info['bundled_srcjars']
gradle['dependent_android_projects'] = []
gradle['dependent_java_projects'] = []
@@ -1262,47 +1306,24 @@ def main(argv):
deps_info['package_name'] = manifest.GetPackageName()
if options.package_name:
deps_info['package_name'] = options.package_name
-
-
deps_info['res_sources_path'] = ''
if options.res_sources_path:
deps_info['res_sources_path'] = options.res_sources_path
if options.requires_android and is_java_target:
- # Lint all resources that are not already linted by a dependent library.
- owned_resource_dirs = set()
- owned_resource_zips = set()
owned_resource_srcjars = set()
for c in all_resources_deps:
- # Always use resources_dirs in favour of resources_zips so that lint error
- # messages have paths that are closer to reality (and to avoid needing to
- # extract during lint).
- if c['res_sources_path']:
- with open(c['res_sources_path']) as f:
- resource_files = f.readlines()
- resource_dirs = resource_utils.ExtractResourceDirsFromFileList(
- resource_files)
- owned_resource_dirs.update(resource_dirs)
- all_inputs.append(c['res_sources_path'])
- else:
- owned_resource_zips.add(c['resources_zip'])
srcjar = c.get('srcjar')
if srcjar:
owned_resource_srcjars.add(srcjar)
-
for c in all_library_deps:
- if c['requires_android']:
- owned_resource_dirs.difference_update(c['owned_resources_dirs'])
- owned_resource_zips.difference_update(c['owned_resources_zips'])
+ if c['requires_android'] and not c['is_prebuilt']:
# Many .aar files include R.class files in them, as it makes it easier
# for IDEs to resolve symbols. However, including them is not required
# and not all prebuilts do. Rather than try to detect their presense,
# just assume they are not there. The only consequence is redundant
# compilation of the R.class.
- if not c['is_prebuilt']:
- owned_resource_srcjars.difference_update(c['owned_resource_srcjars'])
- deps_info['owned_resources_dirs'] = sorted(owned_resource_dirs)
- deps_info['owned_resources_zips'] = sorted(owned_resource_zips)
+ owned_resource_srcjars.difference_update(c['owned_resource_srcjars'])
deps_info['owned_resource_srcjars'] = sorted(owned_resource_srcjars)
if options.type == 'java_library':
@@ -1452,19 +1473,79 @@ def main(argv):
deps_info['proguard_configs'] = list(all_configs)
extra_proguard_classpath_jars = []
+ # We allow lint to be run on android_apk targets, so we collect lint
+ # artifacts for them.
+ # We allow lint to be run on android_app_bundle targets, so we need to
+ # collect lint artifacts for the android_app_bundle_module targets that the
+ # bundle includes. Different android_app_bundle targets may include different
+ # android_app_bundle_module targets, so the bundle needs to be able to
+ # de-duplicate these lint artifacts.
+ if options.type in ('android_app_bundle_module', 'android_apk'):
+ # Collect all sources and resources at the apk/bundle_module level.
+ lint_srcjars = set()
+ lint_java_sources = set()
+ lint_resource_sources = set()
+ lint_resource_zips = set()
+
+ if options.java_sources_file:
+ lint_java_sources.add(options.java_sources_file)
+ if options.bundled_srcjars:
+ lint_srcjars.update(deps_info['bundled_srcjars'])
+ for c in all_library_deps:
+ if c['chromium_code'] and c['requires_android']:
+ if 'java_sources_file' in c:
+ lint_java_sources.add(c['java_sources_file'])
+ lint_srcjars.update(c['bundled_srcjars'])
+
+ if options.res_sources_path:
+ lint_resource_sources.add(options.res_sources_path)
+ if options.resources_zip:
+ lint_resource_zips.add(options.resources_zip)
+ for c in all_resources_deps:
+ if c['chromium_code']:
+ # Prefer res_sources_path to resources_zips so that lint errors have
+ # real paths and to avoid needing to extract during lint.
+ if c['res_sources_path']:
+ lint_resource_sources.add(c['res_sources_path'])
+ else:
+ lint_resource_zips.add(c['resources_zip'])
+
+ deps_info['lint_srcjars'] = sorted(lint_srcjars)
+ deps_info['lint_java_sources'] = sorted(lint_java_sources)
+ deps_info['lint_resource_sources'] = sorted(lint_resource_sources)
+ deps_info['lint_resource_zips'] = sorted(lint_resource_zips)
+
+ if options.type == 'android_apk':
+ assert options.android_manifest, 'Android APKs must define a manifest'
+ deps_info['lint_android_manifest'] = options.android_manifest
+
if options.type == 'android_app_bundle':
module_configs = [
GetDepConfig(c)
for c in build_utils.ParseGnList(options.module_build_configs)
]
jni_all_source = set()
+ lint_srcjars = set()
+ lint_java_sources = set()
+ lint_resource_sources = set()
+ lint_resource_zips = set()
for c in module_configs:
if c['is_base_module']:
assert 'base_module_config' not in deps_info, (
'Must have exactly 1 base module!')
deps_info['base_module_config'] = c['path']
+ # Use the base module's android manifest for linting.
+ deps_info['lint_android_manifest'] = c['android_manifest']
jni_all_source.update(c['jni']['all_source'])
+ lint_srcjars.update(c['lint_srcjars'])
+ lint_java_sources.update(c['lint_java_sources'])
+ lint_resource_sources.update(c['lint_resource_sources'])
+ lint_resource_zips.update(c['lint_resource_zips'])
deps_info['jni'] = {'all_source': sorted(jni_all_source)}
+ deps_info['lint_srcjars'] = sorted(lint_srcjars)
+ deps_info['lint_java_sources'] = sorted(lint_java_sources)
+ deps_info['lint_resource_sources'] = sorted(lint_resource_sources)
+ deps_info['lint_resource_zips'] = sorted(lint_resource_zips)
# Map configs to classpath entries that should be included in their final dex.
classpath_entries_by_owning_config = collections.defaultdict(list)
@@ -1602,10 +1683,6 @@ def main(argv):
# is not proguarded, but it's easy enough to support.
deps_info['proguard_under_test_mapping'] = ''
- expected_tested_package = tested_apk_config['package_name']
- AndroidManifest(options.android_manifest).CheckInstrumentationElements(
- expected_tested_package)
-
# Add all tested classes to the test's classpath to ensure that the test's
# java code is a superset of the tested apk's java code
java_full_classpath.extend(
@@ -1730,6 +1807,10 @@ def main(argv):
java_libraries_list,
'uncompress_shared_libraries':
options.uncompress_shared_libraries,
+ 'library_always_compress':
+ options.library_always_compress,
+ 'library_renames':
+ options.library_renames,
'loadable_modules':
loadable_modules,
'secondary_abi_loadable_modules':