summaryrefslogtreecommitdiffstats
path: root/chromium/build/android/gyp
diff options
context:
space:
mode:
authorAllan Sandfeld Jensen <allan.jensen@qt.io>2020-10-12 14:27:29 +0200
committerAllan Sandfeld Jensen <allan.jensen@qt.io>2020-10-13 09:35:20 +0000
commitc30a6232df03e1efbd9f3b226777b07e087a1122 (patch)
treee992f45784689f373bcc38d1b79a239ebe17ee23 /chromium/build/android/gyp
parent7b5b123ac58f58ffde0f4f6e488bcd09aa4decd3 (diff)
BASELINE: Update Chromium to 85.0.4183.140 (85-based)
Change-Id: Iaa42f4680837c57725b1344f108c0196741f6057
Reviewed-by: Allan Sandfeld Jensen <allan.jensen@qt.io>
Diffstat (limited to 'chromium/build/android/gyp')
-rw-r--r--chromium/build/android/gyp/OWNERS2
-rwxr-xr-xchromium/build/android/gyp/aar.py79
-rw-r--r--chromium/build/android/gyp/aar.pydeps1
-rwxr-xr-xchromium/build/android/gyp/apkbuilder.py3
-rwxr-xr-xchromium/build/android/gyp/bytecode_processor.py56
-rw-r--r--chromium/build/android/gyp/compile_java.pydeps1
-rwxr-xr-xchromium/build/android/gyp/compile_resources.py157
-rw-r--r--chromium/build/android/gyp/compile_resources.pydeps2
-rwxr-xr-xchromium/build/android/gyp/copy_ex.py3
-rw-r--r--chromium/build/android/gyp/create_app_bundle_apks.pydeps1
-rw-r--r--chromium/build/android/gyp/create_bundle_wrapper_script.pydeps3
-rwxr-xr-xchromium/build/android/gyp/create_size_info_files.py8
-rwxr-xr-xchromium/build/android/gyp/create_ui_locale_resources.py4
-rw-r--r--chromium/build/android/gyp/create_ui_locale_resources.pydeps28
-rwxr-xr-xchromium/build/android/gyp/desugar.py8
-rwxr-xr-xchromium/build/android/gyp/dex.py39
-rw-r--r--chromium/build/android/gyp/dex.pydeps1
-rwxr-xr-xchromium/build/android/gyp/dex_jdk_libs.py42
-rwxr-xr-xchromium/build/android/gyp/dexsplitter.py4
-rwxr-xr-xchromium/build/android/gyp/dist_aar.py9
-rwxr-xr-xchromium/build/android/gyp/filter_zip.py33
-rwxr-xr-xchromium/build/android/gyp/gcc_preprocess.py2
-rwxr-xr-xchromium/build/android/gyp/java_cpp_enum.py17
-rwxr-xr-xchromium/build/android/gyp/java_cpp_enum_tests.py36
-rwxr-xr-xchromium/build/android/gyp/java_cpp_strings.py33
-rwxr-xr-xchromium/build/android/gyp/java_cpp_strings_tests.py25
-rwxr-xr-xchromium/build/android/gyp/lint.py511
-rw-r--r--chromium/build/android/gyp/lint.pydeps22
-rwxr-xr-xchromium/build/android/gyp/main_dex_list.py11
-rwxr-xr-xchromium/build/android/gyp/merge_manifest.py7
-rw-r--r--chromium/build/android/gyp/prepare_resources.pydeps1
-rwxr-xr-xchromium/build/android/gyp/proguard.py130
-rw-r--r--chromium/build/android/gyp/proguard.pydeps1
-rw-r--r--chromium/build/android/gyp/turbine.pydeps1
-rw-r--r--chromium/build/android/gyp/util/build_utils.py47
-rw-r--r--chromium/build/android/gyp/util/md5_check.py34
-rwxr-xr-xchromium/build/android/gyp/util/md5_check_test.py17
-rw-r--r--chromium/build/android/gyp/util/parallel.py214
-rw-r--r--chromium/build/android/gyp/util/resource_utils.py1
-rwxr-xr-xchromium/build/android/gyp/write_build_config.py188
-rwxr-xr-xchromium/build/android/gyp/write_native_libraries_java.py7
-rwxr-xr-xchromium/build/android/gyp/zip.py5
42 files changed, 1039 insertions, 755 deletions
diff --git a/chromium/build/android/gyp/OWNERS b/chromium/build/android/gyp/OWNERS
index 7defba6b1ae..25557e1fc55 100644
--- a/chromium/build/android/gyp/OWNERS
+++ b/chromium/build/android/gyp/OWNERS
@@ -2,5 +2,3 @@ agrieve@chromium.org
digit@chromium.org
smaier@chromium.org
wnwen@chromium.org
-
-# COMPONENT: Build
diff --git a/chromium/build/android/gyp/aar.py b/chromium/build/android/gyp/aar.py
index 87f189014a1..ffd6cf8fa12 100755
--- a/chromium/build/android/gyp/aar.py
+++ b/chromium/build/android/gyp/aar.py
@@ -23,15 +23,30 @@ sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__),
import gn_helpers
+# Regular expression to extract -checkdiscard / -check* lines.
+# Does not support nested comments with "}" in them (oh well).
+_CHECKDISCARD_PATTERN = re.compile(r'^\s*?-check.*?}\s*',
+ re.DOTALL | re.MULTILINE)
+
+_PROGUARD_TXT = 'proguard.txt'
+_PROGUARD_CHECKS_TXT = 'proguard-checks.txt'
+
+
def _IsManifestEmpty(manifest_str):
- """Returns whether the given manifest has merge-worthy elements.
+ """Decides whether the given manifest has merge-worthy elements.
E.g.: <activity>, <service>, etc.
+
+ Args:
+ manifest_str: Content of a manifiest XML.
+
+ Returns:
+ Whether the manifest has merge-worthy elements.
"""
doc = ElementTree.fromstring(manifest_str)
for node in doc:
if node.tag == 'application':
- if len(node):
+ if node.getchildren():
return False
elif node.tag != 'uses-sdk':
return False
@@ -40,6 +55,14 @@ def _IsManifestEmpty(manifest_str):
def _CreateInfo(aar_file):
+ """Extracts and return .info data from an .aar file.
+
+ Args:
+ aar_file: Path to an input .aar file.
+
+ Returns:
+ A dict containing .info data.
+ """
data = {}
data['aidl'] = []
data['assets'] = []
@@ -76,16 +99,40 @@ def _CreateInfo(aar_file):
data['native_libraries'] = [name]
elif name == 'classes.jar':
data['has_classes_jar'] = True
- elif name == 'proguard.txt':
+ elif name == _PROGUARD_TXT:
data['has_proguard_flags'] = True
elif name == 'R.txt':
# Some AARs, e.g. gvr_controller_java, have empty R.txt. Such AARs
# have no resources as well. We treat empty R.txt as having no R.txt.
- data['has_r_text_file'] = (z.read('R.txt').strip() != '')
+ data['has_r_text_file'] = bool(z.read('R.txt').strip())
+
+ if data['has_proguard_flags']:
+ config = z.read(_PROGUARD_TXT)
+ if _CHECKDISCARD_PATTERN.search(config):
+ data['has_proguard_check_flags'] = True
+
return data
-def _PerformExtract(aar_file, output_dir, name_allowlist):
+def _SplitProguardConfig(tmp_dir):
+ # Put -checkdiscard (and friends) into a separate proguard config.
+ # https://crbug.com/1093831
+ main_flag_path = os.path.join(tmp_dir, _PROGUARD_TXT)
+ check_flag_path = os.path.join(tmp_dir, _PROGUARD_CHECKS_TXT)
+ with open(main_flag_path) as f:
+ config_data = f.read()
+ with open(main_flag_path, 'w') as f:
+ MSG = ('# Check flag moved to proguard-checks.txt by '
+ '//build/android/gyp/aar.py\n')
+ f.write(_CHECKDISCARD_PATTERN.sub(MSG, config_data))
+ with open(check_flag_path, 'w') as f:
+ f.write('# Check flags extracted by //build/android/gyp/aar.py\n\n')
+ for m in _CHECKDISCARD_PATTERN.finditer(config_data):
+ f.write(m.group(0))
+
+
+def _PerformExtract(aar_file, output_dir, name_allowlist,
+ has_proguard_check_flags):
with build_utils.TempDir() as tmp_dir:
tmp_dir = os.path.join(tmp_dir, 'staging')
os.mkdir(tmp_dir)
@@ -94,6 +141,10 @@ def _PerformExtract(aar_file, output_dir, name_allowlist):
# Write a breadcrumb so that SuperSize can attribute files back to the .aar.
with open(os.path.join(tmp_dir, 'source.info'), 'w') as f:
f.write('source={}\n'.format(aar_file))
+
+ if has_proguard_check_flags:
+ _SplitProguardConfig(tmp_dir)
+
shutil.rmtree(output_dir, ignore_errors=True)
shutil.move(tmp_dir, output_dir)
@@ -135,7 +186,7 @@ def main():
# Generated by //build/android/gyp/aar.py
# To regenerate, use "update_android_aar_prebuilts = true" and run "gn gen".
-""" + gn_helpers.ToGNString(aar_info)
+""" + gn_helpers.ToGNString(aar_info, pretty=True)
if args.command == 'extract':
if args.assert_info_file:
@@ -150,12 +201,20 @@ def main():
if args.ignore_resources:
names = [n for n in names if not n.startswith('res')]
+ has_proguard_check_flags = aar_info.get('has_proguard_check_flags')
output_paths = [os.path.join(args.output_dir, n) for n in names]
output_paths.append(os.path.join(args.output_dir, 'source.info'))
- md5_check.CallAndRecordIfStale(
- lambda: _PerformExtract(args.aar_file, args.output_dir, set(names)),
- input_paths=[args.aar_file],
- output_paths=output_paths)
+ if has_proguard_check_flags:
+ output_paths.append(os.path.join(args.output_dir, _PROGUARD_CHECKS_TXT))
+
+ def on_stale_md5():
+ _PerformExtract(args.aar_file, args.output_dir, set(names),
+ has_proguard_check_flags)
+
+ md5_check.CallAndRecordIfStale(on_stale_md5,
+ input_strings=[aar_info],
+ input_paths=[args.aar_file],
+ output_paths=output_paths)
elif args.command == 'list':
aar_output_present = args.output != '-' and os.path.isfile(args.output)
diff --git a/chromium/build/android/gyp/aar.pydeps b/chromium/build/android/gyp/aar.pydeps
index e08c5475e3d..edb351d2fc8 100644
--- a/chromium/build/android/gyp/aar.pydeps
+++ b/chromium/build/android/gyp/aar.pydeps
@@ -1,6 +1,7 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/aar.pydeps build/android/gyp/aar.py
../../gn_helpers.py
+../../print_python_deps.py
aar.py
util/__init__.py
util/build_utils.py
diff --git a/chromium/build/android/gyp/apkbuilder.py b/chromium/build/android/gyp/apkbuilder.py
index 7f8403919ca..dd2175bbe9c 100755
--- a/chromium/build/android/gyp/apkbuilder.py
+++ b/chromium/build/android/gyp/apkbuilder.py
@@ -359,8 +359,7 @@ def _MaybeWriteDepAndStampFiles(options, depfile_deps):
output = options.stamp
else:
output = options.output_apk
- build_utils.WriteDepfile(
- options.depfile, output, inputs=depfile_deps, add_pydeps=False)
+ build_utils.WriteDepfile(options.depfile, output, inputs=depfile_deps)
def main(args):
diff --git a/chromium/build/android/gyp/bytecode_processor.py b/chromium/build/android/gyp/bytecode_processor.py
index 3d78347998d..850a809d5d2 100755
--- a/chromium/build/android/gyp/bytecode_processor.py
+++ b/chromium/build/android/gyp/bytecode_processor.py
@@ -3,7 +3,7 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-"""Wraps bin/helper/java_bytecode_rewriter and expands @FileArgs."""
+"""Wraps bin/helper/bytecode_processor and expands @FileArgs."""
import argparse
import os
@@ -23,45 +23,45 @@ def main(argv):
parser = argparse.ArgumentParser()
parser.add_argument('--script', required=True,
help='Path to the java binary wrapper script.')
+ parser.add_argument('--gn-target', required=True)
parser.add_argument('--input-jar', required=True)
- parser.add_argument('--output-jar', required=True)
- parser.add_argument('--direct-classpath-jars', required=True)
- parser.add_argument('--sdk-classpath-jars', required=True)
- parser.add_argument('--extra-classpath-jars', dest='extra_jars',
- action='append', default=[],
- help='Extra inputs, passed last to the binary script.')
+ parser.add_argument('--direct-classpath-jars')
+ parser.add_argument('--sdk-classpath-jars')
+ parser.add_argument('--full-classpath-jars')
+ parser.add_argument('--full-classpath-gn-targets')
+ parser.add_argument('--stamp')
parser.add_argument('-v', '--verbose', action='store_true')
parser.add_argument('--missing-classes-allowlist')
_AddSwitch(parser, '--is-prebuilt')
- _AddSwitch(parser, '--enable-thread-annotations')
- _AddSwitch(parser, '--enable-check-class-path')
args = parser.parse_args(argv)
- sdk_jars = build_utils.ParseGnList(args.sdk_classpath_jars)
- assert len(sdk_jars) > 0
-
- direct_jars = build_utils.ParseGnList(args.direct_classpath_jars)
- assert len(direct_jars) > 0
-
- extra_classpath_jars = []
- for a in args.extra_jars:
- extra_classpath_jars.extend(build_utils.ParseGnList(a))
+ args.sdk_classpath_jars = build_utils.ParseGnList(args.sdk_classpath_jars)
+ args.direct_classpath_jars = build_utils.ParseGnList(
+ args.direct_classpath_jars)
+ args.full_classpath_jars = build_utils.ParseGnList(args.full_classpath_jars)
+ args.full_classpath_gn_targets = build_utils.ParseGnList(
+ args.full_classpath_gn_targets)
args.missing_classes_allowlist = build_utils.ParseGnList(
args.missing_classes_allowlist)
- if args.verbose:
- verbose = '--verbose'
- else:
- verbose = '--not-verbose'
+ verbose = '--verbose' if args.verbose else '--not-verbose'
- cmd = ([
- args.script, args.input_jar, args.output_jar, verbose, args.is_prebuilt,
- args.enable_thread_annotations, args.enable_check_class_path
- ] + [str(len(args.missing_classes_allowlist))] +
- args.missing_classes_allowlist + [str(len(sdk_jars))] + sdk_jars +
- [str(len(direct_jars))] + direct_jars + extra_classpath_jars)
+ cmd = [args.script, args.gn_target, args.input_jar, verbose, args.is_prebuilt]
+ cmd += [str(len(args.missing_classes_allowlist))]
+ cmd += args.missing_classes_allowlist
+ cmd += [str(len(args.sdk_classpath_jars))]
+ cmd += args.sdk_classpath_jars
+ cmd += [str(len(args.direct_classpath_jars))]
+ cmd += args.direct_classpath_jars
+ cmd += [str(len(args.full_classpath_jars))]
+ cmd += args.full_classpath_jars
+ cmd += [str(len(args.full_classpath_gn_targets))]
+ cmd += args.full_classpath_gn_targets
subprocess.check_call(cmd)
+ if args.stamp:
+ build_utils.Touch(args.stamp)
+
if __name__ == '__main__':
sys.exit(main(sys.argv))
diff --git a/chromium/build/android/gyp/compile_java.pydeps b/chromium/build/android/gyp/compile_java.pydeps
index a128f47280a..f24bdcbb879 100644
--- a/chromium/build/android/gyp/compile_java.pydeps
+++ b/chromium/build/android/gyp/compile_java.pydeps
@@ -7,6 +7,7 @@
../../../third_party/colorama/src/colorama/win32.py
../../../third_party/colorama/src/colorama/winterm.py
../../gn_helpers.py
+../../print_python_deps.py
compile_java.py
util/__init__.py
util/build_utils.py
diff --git a/chromium/build/android/gyp/compile_resources.py b/chromium/build/android/gyp/compile_resources.py
index 2ca4ec781c8..eece2eb3fea 100755
--- a/chromium/build/android/gyp/compile_resources.py
+++ b/chromium/build/android/gyp/compile_resources.py
@@ -18,7 +18,6 @@ import contextlib
import filecmp
import hashlib
import logging
-import multiprocessing.dummy
import os
import re
import shutil
@@ -26,7 +25,6 @@ import subprocess
import sys
import tempfile
import textwrap
-import time
import zipfile
from xml.etree import ElementTree
@@ -34,9 +32,11 @@ from util import build_utils
from util import diff_utils
from util import manifest_utils
from util import md5_check
+from util import parallel
from util import protoresources
from util import resource_utils
+
# Pngs that we shouldn't convert to webp. Please add rationale when updating.
_PNG_WEBP_EXCLUSION_PATTERN = re.compile('|'.join([
# Crashes on Galaxy S5 running L (https://crbug.com/807059).
@@ -546,68 +546,64 @@ def _CreateKeepPredicate(resource_exclusion_regex,
build_utils.MatchesGlob(path, resource_exclusion_exceptions))
-def _ConvertToWebP(webp_binary, png_paths, path_info, webp_cache_dir):
- pool = multiprocessing.dummy.Pool(10)
+def _ComputeSha1(path):
+ with open(path, 'rb') as f:
+ data = f.read()
+ return hashlib.sha1(data).hexdigest()
- build_utils.MakeDirectory(webp_cache_dir)
- cwebp_version = subprocess.check_output([webp_binary, '-version']).rstrip()
- cwebp_arguments = ['-mt', '-quiet', '-m', '6', '-q', '100', '-lossless']
+def _ConvertToWebPSingle(png_path, cwebp_binary, cwebp_version, webp_cache_dir):
+ sha1_hash = _ComputeSha1(png_path)
- sha1_time = [0]
- cwebp_time = [0]
- cache_hits = [0]
+ # The set of arguments that will appear in the cache key.
+ quality_args = ['-m', '6', '-q', '100', '-lossless']
- def cal_sha1(png_path):
- start = time.time()
- with open(png_path, 'rb') as f:
- png_content = f.read()
+ webp_cache_path = os.path.join(
+ webp_cache_dir, '{}-{}-{}'.format(sha1_hash, cwebp_version,
+ ''.join(quality_args)))
+ # No need to add .webp. Android can load images fine without them.
+ webp_path = os.path.splitext(png_path)[0]
- sha1_hex = hashlib.sha1(png_content).hexdigest()
- sha1_time[0] += time.time() - start
- return sha1_hex
+ cache_hit = os.path.exists(webp_cache_path)
+ if cache_hit:
+ os.link(webp_cache_path, webp_path)
+ else:
+ # We place the generated webp image to webp_path, instead of in the
+ # webp_cache_dir to avoid concurrency issues.
+ args = [cwebp_binary, png_path, '-o', webp_path, '-quiet'] + quality_args
+ subprocess.check_call(args)
- def get_converted_image(png_path):
- sha1_hash = cal_sha1(png_path)
+ try:
+ os.link(webp_path, webp_cache_path)
+ except OSError:
+ # Because of concurrent run, a webp image may already exists in
+ # webp_cache_path.
+ pass
- webp_cache_path = os.path.join(
- webp_cache_dir, '{}-{}-{}'.format(sha1_hash, cwebp_version,
- ''.join(cwebp_arguments)))
- # No need to add an extension, android can load images fine without them.
- webp_path = os.path.splitext(png_path)[0]
+ os.remove(png_path)
+ original_dir = os.path.dirname(os.path.dirname(png_path))
+ rename_tuple = (os.path.relpath(png_path, original_dir),
+ os.path.relpath(webp_path, original_dir))
+ return rename_tuple, cache_hit
- if os.path.exists(webp_cache_path):
- cache_hits[0] += 1
- os.link(webp_cache_path, webp_path)
- else:
- # We place the generated webp image to webp_path, instead of in the
- # webp_cache_dir to avoid concurrency issues.
- start = time.time()
- args = [webp_binary, png_path] + cwebp_arguments + ['-o', webp_path]
- subprocess.check_call(args)
- cwebp_time[0] += time.time() - start
-
- try:
- os.link(webp_path, webp_cache_path)
- except OSError:
- # Because of concurrent run, a webp image may already exists in
- # webp_cache_path.
- pass
-
- os.remove(png_path)
- original_dir = os.path.dirname(os.path.dirname(png_path))
- path_info.RegisterRename(
- os.path.relpath(png_path, original_dir),
- os.path.relpath(webp_path, original_dir))
-
- png_paths = [f for f in png_paths if not _PNG_WEBP_EXCLUSION_PATTERN.match(f)]
- try:
- pool.map(get_converted_image, png_paths)
- finally:
- pool.close()
- pool.join()
- logging.debug('png->webp: cache: %d/%d sha1 time: %.1fms cwebp time: %.1fms',
- cache_hits[0], len(png_paths), sha1_time[0], cwebp_time[0])
+
+def _ConvertToWebP(cwebp_binary, png_paths, path_info, webp_cache_dir):
+ cwebp_version = subprocess.check_output([cwebp_binary, '-version']).rstrip()
+ shard_args = [(f, ) for f in png_paths
+ if not _PNG_WEBP_EXCLUSION_PATTERN.match(f)]
+
+ build_utils.MakeDirectory(webp_cache_dir)
+ results = parallel.BulkForkAndCall(_ConvertToWebPSingle,
+ shard_args,
+ cwebp_binary=cwebp_binary,
+ cwebp_version=cwebp_version,
+ webp_cache_dir=webp_cache_dir)
+ total_cache_hits = 0
+ for rename_tuple, cache_hit in results:
+ path_info.RegisterRename(*rename_tuple)
+ total_cache_hits += int(cache_hit)
+
+ logging.debug('png->webp cache: %d/%d', total_cache_hits, len(shard_args))
def _RemoveImageExtensions(directory, path_info):
@@ -627,10 +623,9 @@ def _RemoveImageExtensions(directory, path_info):
os.path.relpath(path_no_extension, directory))
-def _CompileSingleDep(args):
- index, dep_path, aapt2_path, partials_dir, exclusion_rules = args
- basename = os.path.basename(dep_path)
- unique_name = '{}_{}'.format(index, basename)
+def _CompileSingleDep(index, dep_subdir, keep_predicate, aapt2_path,
+ partials_dir):
+ unique_name = '{}_{}'.format(index, os.path.basename(dep_subdir))
partial_path = os.path.join(partials_dir, '{}.zip'.format(unique_name))
compile_command = [
@@ -639,7 +634,7 @@ def _CompileSingleDep(args):
# TODO(wnwen): Turn this on once aapt2 forces 9-patch to be crunched.
# '--no-crunch',
'--dir',
- dep_path,
+ dep_subdir,
'-o',
partial_path
]
@@ -654,33 +649,16 @@ def _CompileSingleDep(args):
# Filtering these files is expensive, so only apply filters to the partials
# that have been explicitly targeted.
- keep_predicate = _CreateValuesKeepPredicate(exclusion_rules, dep_path)
if keep_predicate:
- logging.debug('Applying .arsc filtering to %s', dep_path)
+ logging.debug('Applying .arsc filtering to %s', dep_subdir)
protoresources.StripUnwantedResources(partial_path, keep_predicate)
return partial_path
-def _CompileDeps(aapt2_path, dep_subdirs, temp_dir, exclusion_rules):
- partials_dir = os.path.join(temp_dir, 'partials')
- build_utils.MakeDirectory(partials_dir)
-
- def iter_params():
- for i, dep_path in enumerate(dep_subdirs):
- yield i, dep_path, aapt2_path, partials_dir, exclusion_rules
-
- pool = multiprocessing.dummy.Pool(10)
- try:
- return pool.map(_CompileSingleDep, iter_params())
- finally:
- pool.close()
- pool.join()
-
-
-def _CreateValuesKeepPredicate(exclusion_rules, dep_path):
+def _CreateValuesKeepPredicate(exclusion_rules, dep_subdir):
patterns = [
x[1] for x in exclusion_rules
- if build_utils.MatchesGlob(dep_path, [x[0]])
+ if build_utils.MatchesGlob(dep_subdir, [x[0]])
]
if not patterns:
return None
@@ -689,6 +667,23 @@ def _CreateValuesKeepPredicate(exclusion_rules, dep_path):
return lambda x: not any(r.search(x) for r in regexes)
+def _CompileDeps(aapt2_path, dep_subdirs, temp_dir, exclusion_rules):
+ partials_dir = os.path.join(temp_dir, 'partials')
+ build_utils.MakeDirectory(partials_dir)
+
+ job_params = [(i, dep_subdir,
+ _CreateValuesKeepPredicate(exclusion_rules, dep_subdir))
+ for i, dep_subdir in enumerate(dep_subdirs)]
+
+ # Filtering is slow, so ensure jobs with keep_predicate are started first.
+ job_params.sort(key=lambda x: not x[2])
+ return list(
+ parallel.BulkForkAndCall(_CompileSingleDep,
+ job_params,
+ aapt2_path=aapt2_path,
+ partials_dir=partials_dir))
+
+
def _CreateResourceInfoFile(path_info, info_path, dependencies_res_zips):
for zip_file in dependencies_res_zips:
zip_info_file_path = zip_file + '.info'
diff --git a/chromium/build/android/gyp/compile_resources.pydeps b/chromium/build/android/gyp/compile_resources.pydeps
index f34926c185e..cb1d7621cd7 100644
--- a/chromium/build/android/gyp/compile_resources.pydeps
+++ b/chromium/build/android/gyp/compile_resources.pydeps
@@ -46,6 +46,7 @@
../../../third_party/protobuf/python/google/protobuf/text_format.py
../../../third_party/six/src/six.py
../../gn_helpers.py
+../../print_python_deps.py
compile_resources.py
proto/Configuration_pb2.py
proto/Resources_pb2.py
@@ -55,5 +56,6 @@ util/build_utils.py
util/diff_utils.py
util/manifest_utils.py
util/md5_check.py
+util/parallel.py
util/protoresources.py
util/resource_utils.py
diff --git a/chromium/build/android/gyp/copy_ex.py b/chromium/build/android/gyp/copy_ex.py
index 8451555ee57..f93597f973f 100755
--- a/chromium/build/android/gyp/copy_ex.py
+++ b/chromium/build/android/gyp/copy_ex.py
@@ -119,8 +119,7 @@ def main(args):
DoRenaming(options, deps)
if options.depfile:
- build_utils.WriteDepfile(
- options.depfile, options.stamp, deps, add_pydeps=False)
+ build_utils.WriteDepfile(options.depfile, options.stamp, deps)
if options.stamp:
build_utils.Touch(options.stamp)
diff --git a/chromium/build/android/gyp/create_app_bundle_apks.pydeps b/chromium/build/android/gyp/create_app_bundle_apks.pydeps
index bdee0af2c2d..064ab48f0f6 100644
--- a/chromium/build/android/gyp/create_app_bundle_apks.pydeps
+++ b/chromium/build/android/gyp/create_app_bundle_apks.pydeps
@@ -22,6 +22,7 @@
../../../third_party/markupsafe/_compat.py
../../../third_party/markupsafe/_native.py
../../gn_helpers.py
+../../print_python_deps.py
../pylib/__init__.py
../pylib/utils/__init__.py
../pylib/utils/app_bundle_utils.py
diff --git a/chromium/build/android/gyp/create_bundle_wrapper_script.pydeps b/chromium/build/android/gyp/create_bundle_wrapper_script.pydeps
index d8825145a19..65222c6976d 100644
--- a/chromium/build/android/gyp/create_bundle_wrapper_script.pydeps
+++ b/chromium/build/android/gyp/create_bundle_wrapper_script.pydeps
@@ -22,7 +22,7 @@
../../../third_party/catapult/devil/devil/android/constants/chrome.py
../../../third_party/catapult/devil/devil/android/constants/file_system.py
../../../third_party/catapult/devil/devil/android/decorators.py
-../../../third_party/catapult/devil/devil/android/device_blacklist.py
+../../../third_party/catapult/devil/devil/android/device_denylist.py
../../../third_party/catapult/devil/devil/android/device_errors.py
../../../third_party/catapult/devil/devil/android/device_signal.py
../../../third_party/catapult/devil/devil/android/device_temp_file.py
@@ -85,6 +85,7 @@
../../../third_party/markupsafe/_compat.py
../../../third_party/markupsafe/_native.py
../../gn_helpers.py
+../../print_python_deps.py
../adb_command_line.py
../apk_operations.py
../convert_dex_profile.py
diff --git a/chromium/build/android/gyp/create_size_info_files.py b/chromium/build/android/gyp/create_size_info_files.py
index 27046db1150..b446b7f5dd4 100755
--- a/chromium/build/android/gyp/create_size_info_files.py
+++ b/chromium/build/android/gyp/create_size_info_files.py
@@ -179,11 +179,9 @@ def main(args):
_MergeResInfoFiles(options.res_info_path, res_inputs)
all_inputs = jar_inputs + pak_inputs + res_inputs
- build_utils.WriteDepfile(
- options.depfile,
- options.jar_info_path,
- inputs=all_inputs,
- add_pydeps=False)
+ build_utils.WriteDepfile(options.depfile,
+ options.jar_info_path,
+ inputs=all_inputs)
if __name__ == '__main__':
diff --git a/chromium/build/android/gyp/create_ui_locale_resources.py b/chromium/build/android/gyp/create_ui_locale_resources.py
index 97868cbfde8..007afb37ec5 100755
--- a/chromium/build/android/gyp/create_ui_locale_resources.py
+++ b/chromium/build/android/gyp/create_ui_locale_resources.py
@@ -60,7 +60,6 @@ def main():
parser = argparse.ArgumentParser(
description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
- build_utils.AddDepfileOption(parser)
parser.add_argument(
'--locale-list',
required=True,
@@ -83,9 +82,6 @@ def main():
android_locale = resource_utils.ToAndroidLocaleName(locale)
_AddLocaleResourceFileToZip(out_zip, android_locale, locale)
- if args.depfile:
- build_utils.WriteDepfile(args.depfile, args.output_zip)
-
if __name__ == '__main__':
main()
diff --git a/chromium/build/android/gyp/create_ui_locale_resources.pydeps b/chromium/build/android/gyp/create_ui_locale_resources.pydeps
new file mode 100644
index 00000000000..663ed03619d
--- /dev/null
+++ b/chromium/build/android/gyp/create_ui_locale_resources.pydeps
@@ -0,0 +1,28 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_ui_locale_resources.pydeps build/android/gyp/create_ui_locale_resources.py
+../../../third_party/jinja2/__init__.py
+../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/bccache.py
+../../../third_party/jinja2/compiler.py
+../../../third_party/jinja2/defaults.py
+../../../third_party/jinja2/environment.py
+../../../third_party/jinja2/exceptions.py
+../../../third_party/jinja2/filters.py
+../../../third_party/jinja2/idtracking.py
+../../../third_party/jinja2/lexer.py
+../../../third_party/jinja2/loaders.py
+../../../third_party/jinja2/nodes.py
+../../../third_party/jinja2/optimizer.py
+../../../third_party/jinja2/parser.py
+../../../third_party/jinja2/runtime.py
+../../../third_party/jinja2/tests.py
+../../../third_party/jinja2/utils.py
+../../../third_party/jinja2/visitor.py
+../../../third_party/markupsafe/__init__.py
+../../../third_party/markupsafe/_compat.py
+../../../third_party/markupsafe/_native.py
+../../gn_helpers.py
+create_ui_locale_resources.py
+util/__init__.py
+util/build_utils.py
+util/resource_utils.py
diff --git a/chromium/build/android/gyp/desugar.py b/chromium/build/android/gyp/desugar.py
index 1e1c15678ee..f12aafbe74e 100755
--- a/chromium/build/android/gyp/desugar.py
+++ b/chromium/build/android/gyp/desugar.py
@@ -53,11 +53,9 @@ def main():
stderr_filter=build_utils.FilterReflectiveAccessJavaWarnings)
if options.depfile:
- build_utils.WriteDepfile(
- options.depfile,
- options.output_jar,
- inputs=options.bootclasspath + options.classpath,
- add_pydeps=False)
+ build_utils.WriteDepfile(options.depfile,
+ options.output_jar,
+ inputs=options.bootclasspath + options.classpath)
if __name__ == '__main__':
diff --git a/chromium/build/android/gyp/dex.py b/chromium/build/android/gyp/dex.py
index 0b3dcbd28b9..6fd0ab35dd8 100755
--- a/chromium/build/android/gyp/dex.py
+++ b/chromium/build/android/gyp/dex.py
@@ -62,6 +62,9 @@ def _ParseArgs(args):
'--multi-dex',
action='store_true',
help='Allow multiple dex files within output.')
+ parser.add_argument('--library',
+ action='store_true',
+ help='Allow numerous dex files within output.')
parser.add_argument('--r8-jar-path', required=True, help='Path to R8 jar.')
parser.add_argument('--desugar', action='store_true')
parser.add_argument(
@@ -159,9 +162,18 @@ def _RunD8(dex_cmd, input_paths, output_path):
output = re.sub(r'^Warning in .*?:\n(?! )', '', output, flags=re.MULTILINE)
return output
- # stdout sometimes spams with things like:
- # Stripped invalid locals information from 1 method.
- build_utils.CheckOutput(dex_cmd, stderr_filter=stderr_filter)
+ with tempfile.NamedTemporaryFile() as flag_file:
+ # Chosen arbitrarily. Needed to avoid command-line length limits.
+ MAX_ARGS = 50
+ if len(dex_cmd) > MAX_ARGS:
+ flag_file.write('\n'.join(dex_cmd[MAX_ARGS:]))
+ flag_file.flush()
+ dex_cmd = dex_cmd[:MAX_ARGS]
+ dex_cmd.append('@' + flag_file.name)
+
+ # stdout sometimes spams with things like:
+ # Stripped invalid locals information from 1 method.
+ build_utils.CheckOutput(dex_cmd, stderr_filter=stderr_filter)
def _EnvWithArtLibPath(binary_path):
@@ -325,13 +337,15 @@ def _PerformDexlayout(tmp_dir, tmp_dex_output, options):
def _CreateFinalDex(d8_inputs, output, tmp_dir, dex_cmd, options=None):
tmp_dex_output = os.path.join(tmp_dir, 'tmp_dex_output.zip')
- if (output.endswith('.dex')
- or not all(f.endswith('.dex') for f in d8_inputs)):
+ needs_dexing = not all(f.endswith('.dex') for f in d8_inputs)
+ needs_dexmerge = output.endswith('.dex') or not (options and options.library)
+ if needs_dexing or needs_dexmerge:
if options:
if options.main_dex_list_path:
dex_cmd = dex_cmd + ['--main-dex-list', options.main_dex_list_path]
- elif options.multi_dex and int(options.min_api or 1) < 21:
- # When dexing library targets, it doesn't matter what's in the main dex.
+ elif options.library and int(options.min_api or 1) < 21:
+ # When dexing D8 requires a main dex list pre-21. For library targets,
+ # it doesn't matter what's in the main dex, so just use a dummy one.
tmp_main_dex_list_path = os.path.join(tmp_dir, 'main_list.txt')
with open(tmp_main_dex_list_path, 'w') as f:
f.write('Foo.class\n')
@@ -420,7 +434,7 @@ def _CreateIntermediateDexFiles(changes, options, tmp_dir, dex_cmd):
# If the only change is deleting a file, class_files will be empty.
if class_files:
# Dex necessary classes into intermediate dex files.
- dex_cmd = dex_cmd + ['--intermediate', '--file-per-class']
+ dex_cmd = dex_cmd + ['--intermediate', '--file-per-class-file']
_RunD8(dex_cmd, class_files, options.incremental_dir)
logging.debug('Dexed class files.')
@@ -444,9 +458,9 @@ def _OnStaleMd5(changes, options, final_dex_inputs, dex_cmd):
def MergeDexForIncrementalInstall(r8_jar_path, src_paths, dest_dex_jar):
dex_cmd = [
build_utils.JAVA_PATH,
- '-jar',
+ '-cp',
r8_jar_path,
- 'd8',
+ 'com.android.tools.r8.D8',
]
with build_utils.TempDir() as tmp_dir:
_CreateFinalDex(src_paths, dest_dex_jar, tmp_dir, dex_cmd)
@@ -479,7 +493,10 @@ def main(args):
final_dex_inputs += options.dex_inputs
dex_cmd = [
- build_utils.JAVA_PATH, '-jar', options.r8_jar_path, 'd8',
+ build_utils.JAVA_PATH,
+ '-cp',
+ options.r8_jar_path,
+ 'com.android.tools.r8.D8',
]
if options.release:
dex_cmd += ['--release']
diff --git a/chromium/build/android/gyp/dex.pydeps b/chromium/build/android/gyp/dex.pydeps
index 5fe5b2b99c1..23856f3c847 100644
--- a/chromium/build/android/gyp/dex.pydeps
+++ b/chromium/build/android/gyp/dex.pydeps
@@ -1,6 +1,7 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/dex.pydeps build/android/gyp/dex.py
../../gn_helpers.py
+../../print_python_deps.py
../convert_dex_profile.py
dex.py
util/__init__.py
diff --git a/chromium/build/android/gyp/dex_jdk_libs.py b/chromium/build/android/gyp/dex_jdk_libs.py
index 0cda991a4c9..01dc3c93091 100755
--- a/chromium/build/android/gyp/dex_jdk_libs.py
+++ b/chromium/build/android/gyp/dex_jdk_libs.py
@@ -29,30 +29,44 @@ def _ParseArgs(args):
return options
-def main(args):
- options = _ParseArgs(args)
-
+def DexJdkLibJar(r8_path, min_api, desugar_jdk_libs_json, desugar_jdk_libs_jar,
+ keep_rule_file, output):
# TODO(agrieve): Spews a lot of stderr about missing classes.
with build_utils.TempDir() as tmp_dir:
cmd = [
build_utils.JAVA_PATH,
- '-jar',
- options.r8_path,
- 'l8',
+ '-cp',
+ r8_path,
+ 'com.android.tools.r8.L8',
'--min-api',
- options.min_api,
- #'--lib', build_utils.JAVA_HOME,
+ min_api,
+ '--lib',
+ build_utils.JAVA_HOME,
'--desugared-lib',
- options.desugar_jdk_libs_json,
- '--output',
- tmp_dir,
- options.desugar_jdk_libs_jar
+ desugar_jdk_libs_json,
]
- subprocess.check_output(cmd, stderr=subprocess.STDOUT)
+ if keep_rule_file:
+ cmd += ['--pg-conf', keep_rule_file]
+
+ cmd += ['--output', tmp_dir, desugar_jdk_libs_jar]
+
+ subprocess.check_output(cmd, stderr=subprocess.STDOUT)
if os.path.exists(os.path.join(tmp_dir, 'classes2.dex')):
raise Exception('Achievement unlocked: desugar_jdk_libs is multidex!')
- shutil.move(os.path.join(tmp_dir, 'classes.dex'), options.output)
+
+  # classes.dex might not exist if the "desugar_jdk_libs_jar" is not used
+  # at all.
+ if os.path.exists(os.path.join(tmp_dir, 'classes.dex')):
+ shutil.move(os.path.join(tmp_dir, 'classes.dex'), output)
+ return True
+ return False
+
+
+def main(args):
+ options = _ParseArgs(args)
+ DexJdkLibJar(options.r8_path, options.min_api, options.desugar_jdk_libs_json,
+ options.desugar_jdk_libs_jar, None, options.output)
if __name__ == '__main__':
diff --git a/chromium/build/android/gyp/dexsplitter.py b/chromium/build/android/gyp/dexsplitter.py
index 8e8230b97bc..47bea7ee80c 100755
--- a/chromium/build/android/gyp/dexsplitter.py
+++ b/chromium/build/android/gyp/dexsplitter.py
@@ -50,9 +50,9 @@ def _ParseOptions(args):
def _RunDexsplitter(options, output_dir):
cmd = [
build_utils.JAVA_PATH,
- '-jar',
+ '-cp',
options.r8_path,
- 'dexsplitter',
+ 'com.android.tools.r8.dexsplitter.DexSplitter',
'--output',
output_dir,
'--proguard-map',
diff --git a/chromium/build/android/gyp/dist_aar.py b/chromium/build/android/gyp/dist_aar.py
index a74037af07a..fed1983b9ca 100755
--- a/chromium/build/android/gyp/dist_aar.py
+++ b/chromium/build/android/gyp/dist_aar.py
@@ -14,7 +14,7 @@ import sys
import tempfile
import zipfile
-from filter_zip import CreatePathTransform
+import filter_zip
from util import build_utils
@@ -117,8 +117,8 @@ def main(args):
build_utils.AddToZipHermetic(
z, 'AndroidManifest.xml', src_path=options.android_manifest)
- path_transform = CreatePathTransform(options.jar_excluded_globs,
- options.jar_included_globs, [])
+ path_transform = filter_zip.CreatePathTransform(
+ options.jar_excluded_globs, options.jar_included_globs, [])
with tempfile.NamedTemporaryFile() as jar_file:
build_utils.MergeZips(
jar_file.name, options.jars, path_transform=path_transform)
@@ -152,8 +152,7 @@ def main(args):
if options.depfile:
all_inputs = (options.jars + options.dependencies_res_zips +
options.r_text_files + options.proguard_configs)
- build_utils.WriteDepfile(options.depfile, options.output, all_inputs,
- add_pydeps=False)
+ build_utils.WriteDepfile(options.depfile, options.output, all_inputs)
if __name__ == '__main__':
diff --git a/chromium/build/android/gyp/filter_zip.py b/chromium/build/android/gyp/filter_zip.py
index 6f854191254..9b52288c7a9 100755
--- a/chromium/build/android/gyp/filter_zip.py
+++ b/chromium/build/android/gyp/filter_zip.py
@@ -5,6 +5,7 @@
# found in the LICENSE file.
import argparse
+import shutil
import sys
from util import build_utils
@@ -20,6 +21,21 @@ _RESOURCE_CLASSES = [
def CreatePathTransform(exclude_globs, include_globs,
strip_resource_classes_for):
+ """Returns a function to strip paths for the given patterns.
+
+ Args:
+ exclude_globs: List of globs that if matched should be excluded.
+ include_globs: List of globs that if not matched should be excluded.
+ strip_resource_classes_for: List of Java packages for which to strip
+ R.java classes from.
+
+ Returns:
+ * None if no filters are needed.
+ * A function "(path) -> path" that returns None when |path| should be
+ stripped, or |path| otherwise.
+ """
+ if not (exclude_globs or include_globs or strip_resource_classes_for):
+ return None
exclude_globs = list(exclude_globs or [])
if strip_resource_classes_for:
exclude_globs.extend(p.replace('.', '/') + '/' + f
@@ -52,19 +68,18 @@ def main():
argv = build_utils.ExpandFileArgs(sys.argv[1:])
args = parser.parse_args(argv)
- if args.exclude_globs:
- args.exclude_globs = build_utils.ParseGnList(args.exclude_globs)
- if args.include_globs:
- args.include_globs= build_utils.ParseGnList(args.include_globs)
- if args.strip_resource_classes_for:
- args.strip_resource_classes_for = build_utils.ParseGnList(
- args.strip_resource_classes_for)
+ args.exclude_globs = build_utils.ParseGnList(args.exclude_globs)
+ args.include_globs = build_utils.ParseGnList(args.include_globs)
+ args.strip_resource_classes_for = build_utils.ParseGnList(
+ args.strip_resource_classes_for)
path_transform = CreatePathTransform(args.exclude_globs, args.include_globs,
args.strip_resource_classes_for)
with build_utils.AtomicOutput(args.output) as f:
- build_utils.MergeZips(
- f.name, [args.input], path_transform=path_transform)
+ if path_transform:
+ build_utils.MergeZips(f.name, [args.input], path_transform=path_transform)
+ else:
+ shutil.copy(args.input, f.name)
if __name__ == '__main__':
diff --git a/chromium/build/android/gyp/gcc_preprocess.py b/chromium/build/android/gyp/gcc_preprocess.py
index 8b3444c2b01..8c5c404c744 100755
--- a/chromium/build/android/gyp/gcc_preprocess.py
+++ b/chromium/build/android/gyp/gcc_preprocess.py
@@ -47,7 +47,7 @@ def main(args):
DoGcc(options)
if options.depfile:
- build_utils.WriteDepfile(options.depfile, options.output, add_pydeps=False)
+ build_utils.WriteDepfile(options.depfile, options.output)
if __name__ == '__main__':
diff --git a/chromium/build/android/gyp/java_cpp_enum.py b/chromium/build/android/gyp/java_cpp_enum.py
index 39cba3d785d..0b9ee541e4e 100755
--- a/chromium/build/android/gyp/java_cpp_enum.py
+++ b/chromium/build/android/gyp/java_cpp_enum.py
@@ -124,15 +124,20 @@ class EnumDefinition(object):
def _TransformKeys(d, func):
"""Normalize keys in |d| and update references to old keys in |d| values."""
- normal_keys = {k: func(k) for k in d}
+ keys_map = {k: func(k) for k in d}
ret = collections.OrderedDict()
for k, v in d.items():
# Need to transform values as well when the entry value was explicitly set
# (since it could contain references to other enum entry values).
if isinstance(v, str):
- for normal_key in normal_keys:
- v = v.replace(normal_key, normal_keys[normal_key])
- ret[normal_keys[k]] = v
+ # First check if a full replacement is available. This avoids issues when
+ # one key is a substring of another.
+ if v in d:
+ v = keys_map[v]
+ else:
+ for old_key, new_key in keys_map.items():
+ v = v.replace(old_key, new_key)
+ ret[keys_map[k]] = v
return ret
@@ -412,7 +417,6 @@ ${ENUM_ENTRIES}
def DoMain(argv):
usage = 'usage: %prog [options] [output_dir] input_file(s)...'
parser = optparse.OptionParser(usage=usage)
- build_utils.AddDepfileOption(parser)
parser.add_option('--srcjar',
help='When specified, a .srcjar at the given path is '
@@ -429,9 +433,6 @@ def DoMain(argv):
for output_path, data in DoGenerate(input_paths):
build_utils.AddToZipHermetic(srcjar, output_path, data=data)
- if options.depfile:
- build_utils.WriteDepfile(options.depfile, options.srcjar, add_pydeps=False)
-
if __name__ == '__main__':
DoMain(sys.argv[1:])
diff --git a/chromium/build/android/gyp/java_cpp_enum_tests.py b/chromium/build/android/gyp/java_cpp_enum_tests.py
index 088c450aeb6..1acb57f82c4 100755
--- a/chromium/build/android/gyp/java_cpp_enum_tests.py
+++ b/chromium/build/android/gyp/java_cpp_enum_tests.py
@@ -484,6 +484,42 @@ public @interface ClassName {
self.assertEqual(collections.OrderedDict([('A', 0)]),
definition.entries)
+ def testParseEnumClassOneValueSubstringOfAnother(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum class SafeBrowsingStatus {
+ kChecking = 0,
+ kEnabled = 1,
+ kDisabled = 2,
+ kDisabledByAdmin = 3,
+ kDisabledByExtension = 4,
+ kEnabledStandard = 5,
+ kEnabledEnhanced = 6,
+ // New enum values must go above here.
+ kMaxValue = kEnabledEnhanced,
+ };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ self.assertEqual(1, len(definitions))
+ definition = definitions[0]
+ self.assertEqual('SafeBrowsingStatus', definition.class_name)
+ self.assertEqual('test.namespace', definition.enum_package)
+ self.assertEqual(
+ collections.OrderedDict([
+ ('CHECKING', '0'),
+ ('ENABLED', '1'),
+ ('DISABLED', '2'),
+ ('DISABLED_BY_ADMIN', '3'),
+ ('DISABLED_BY_EXTENSION', '4'),
+ ('ENABLED_STANDARD', '5'),
+ ('ENABLED_ENHANCED', '6'),
+ ('MAX_VALUE', 'ENABLED_ENHANCED'),
+ ]), definition.entries)
+ self.assertEqual(
+ collections.OrderedDict([
+ ('MAX_VALUE', 'New enum values must go above here.')
+ ]), definition.comments)
+
def testParseEnumStruct(self):
test_data = """
// GENERATED_JAVA_ENUM_PACKAGE: test.namespace
diff --git a/chromium/build/android/gyp/java_cpp_strings.py b/chromium/build/android/gyp/java_cpp_strings.py
index acaaf223efb..498e05e3540 100755
--- a/chromium/build/android/gyp/java_cpp_strings.py
+++ b/chromium/build/android/gyp/java_cpp_strings.py
@@ -114,6 +114,8 @@ class StringFileParser(object):
if string_line.groups()[1]:
self._current_value = string_line.groups()[1]
self._AppendString()
+ else:
+ self._in_string = True
return True
else:
self._in_string = False
@@ -141,19 +143,19 @@ class StringFileParser(object):
return self._strings
-def _GenerateOutput(template, source_path, template_path, strings):
+def _GenerateOutput(template, source_paths, template_path, strings):
description_template = """
// This following string constants were inserted by
// {SCRIPT_NAME}
// From
- // {SOURCE_PATH}
+ // {SOURCE_PATHS}
// Into
// {TEMPLATE_PATH}
"""
values = {
'SCRIPT_NAME': java_cpp_utils.GetScriptName(),
- 'SOURCE_PATH': source_path,
+ 'SOURCE_PATHS': ',\n // '.join(source_paths),
'TEMPLATE_PATH': template_path,
}
description = description_template.format(**values)
@@ -173,15 +175,18 @@ def _ParseStringFile(path):
def _Generate(source_paths, template_path):
with open(template_path) as f:
lines = f.readlines()
- template = ''.join(lines)
- for source_path in source_paths:
- strings = _ParseStringFile(source_path)
- package, class_name = ParseTemplateFile(lines)
- package_path = package.replace('.', os.path.sep)
- file_name = class_name + '.java'
- output_path = os.path.join(package_path, file_name)
- output = _GenerateOutput(template, source_path, template_path, strings)
- yield output, output_path
+
+ template = ''.join(lines)
+ package, class_name = ParseTemplateFile(lines)
+ package_path = package.replace('.', os.path.sep)
+ file_name = class_name + '.java'
+ output_path = os.path.join(package_path, file_name)
+ strings = []
+ for source_path in source_paths:
+ strings.extend(_ParseStringFile(source_path))
+
+ output = _GenerateOutput(template, source_paths, template_path, strings)
+ return output, output_path
def _Main(argv):
@@ -205,8 +210,8 @@ def _Main(argv):
with build_utils.AtomicOutput(args.srcjar) as f:
with zipfile.ZipFile(f, 'w', zipfile.ZIP_STORED) as srcjar:
- for data, path in _Generate(args.inputs, args.template):
- build_utils.AddToZipHermetic(srcjar, path, data=data)
+ data, path = _Generate(args.inputs, args.template)
+ build_utils.AddToZipHermetic(srcjar, path, data=data)
if __name__ == '__main__':
diff --git a/chromium/build/android/gyp/java_cpp_strings_tests.py b/chromium/build/android/gyp/java_cpp_strings_tests.py
index acf51e428e0..3b7d5ca8f67 100755
--- a/chromium/build/android/gyp/java_cpp_strings_tests.py
+++ b/chromium/build/android/gyp/java_cpp_strings_tests.py
@@ -51,6 +51,10 @@ const char kAnotherSwitch[] = "another-value";
const char kAString[] = "a-value";
const char kNoComment[] = "no-comment";
+namespace myfeature {
+const char kMyFeatureNoComment[] = "myfeature.no-comment";
+}
+
// Single line switch with a big space.
const char kAStringWithSpace[] = "a-value";
@@ -58,23 +62,34 @@ const char kAStringWithSpace[] = "a-value";
const char kAStringWithAVeryLongNameThatWillHaveToWrap[] =
"a-string-with-a-very-long-name-that-will-have-to-wrap";
+// This one has no comment before it.
+
+const char kAStringWithAVeryLongNameThatWillHaveToWrap2[] =
+ "a-string-with-a-very-long-name-that-will-have-to-wrap2";
+
// This is erroneous and should be ignored.
const char kInvalidLineBreak[] =
"invalid-line-break";
""".split('\n')
strings = java_cpp_strings.StringFileParser(test_data).Parse()
- self.assertEqual(4, len(strings))
+ self.assertEqual(5, len(strings))
self.assertEqual('A_STRING', strings[0].name)
self.assertEqual('"a-value"', strings[0].value)
self.assertEqual('NO_COMMENT', strings[1].name)
self.assertEqual('"no-comment"', strings[1].value)
- self.assertEqual('A_STRING_WITH_SPACE', strings[2].name)
- self.assertEqual('"a-value"', strings[2].value)
+ self.assertEqual('MY_FEATURE_NO_COMMENT', strings[2].name)
+ self.assertEqual('"myfeature.no-comment"', strings[2].value)
+ self.assertEqual('A_STRING_WITH_SPACE', strings[3].name)
+ self.assertEqual('"a-value"', strings[3].value)
self.assertEqual('A_STRING_WITH_A_VERY_LONG_NAME_THAT_WILL_HAVE_TO_WRAP',
- strings[3].name)
+ strings[4].name)
self.assertEqual('"a-string-with-a-very-long-name-that-will-have-to-wrap"',
- strings[3].value)
+ strings[4].value)
+ self.assertEqual('A_STRING_WITH_A_VERY_LONG_NAME_THAT_WILL_HAVE_TO_WRAP2',
+ strings[5].name)
+ self.assertEqual('"a-string-with-a-very-long-name-that-will-have-to-wrap2"',
+ strings[5].value)
def testTemplateParsing(self):
test_data = """
diff --git a/chromium/build/android/gyp/lint.py b/chromium/build/android/gyp/lint.py
index fb751bd6ed6..fa526e6df88 100755
--- a/chromium/build/android/gyp/lint.py
+++ b/chromium/build/android/gyp/lint.py
@@ -3,10 +3,8 @@
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-
"""Runs Android's lint tool."""
-
from __future__ import print_function
import argparse
@@ -22,9 +20,8 @@ from xml.etree import ElementTree
from util import build_utils
from util import manifest_utils
-from util import resource_utils
-_LINT_MD_URL = 'https://chromium.googlesource.com/chromium/src/+/master/build/android/docs/lint.md' # pylint: disable=line-too-long
+_LINT_MD_URL = 'https://chromium.googlesource.com/chromium/src/+/master/build/android/docs/lint.md' # pylint: disable=line-too-long
# These checks are not useful for test targets and adds an unnecessary burden
# to suppress them.
@@ -41,12 +38,83 @@ _DISABLED_FOR_TESTS = [
"UnusedResources",
]
-
-def _RunLint(lint_path,
+_RES_ZIP_DIR = 'RESZIPS'
+_SRCJAR_DIR = 'SRCJARS'
+
+
+def _SrcRelative(path):
+ """Returns relative path to top-level src dir."""
+ return os.path.relpath(path, build_utils.DIR_SOURCE_ROOT)
+
+
+def _GenerateProjectFile(android_manifest,
+ android_sdk_root,
+ cache_dir,
+ sources=None,
+ srcjar_sources=None,
+ resource_sources=None,
+ android_sdk_version=None):
+ project = ElementTree.Element('project')
+ root = ElementTree.SubElement(project, 'root')
+ # An absolute path helps error paths to be shorter.
+ root.set('dir', os.path.abspath(build_utils.DIR_SOURCE_ROOT))
+ sdk = ElementTree.SubElement(project, 'sdk')
+ # Lint requires that the sdk path be an absolute path.
+ sdk.set('dir', os.path.abspath(android_sdk_root))
+ cache = ElementTree.SubElement(project, 'cache')
+ cache.set('dir', _SrcRelative(cache_dir))
+ main_module = ElementTree.SubElement(project, 'module')
+ main_module.set('name', 'main')
+ main_module.set('android', 'true')
+ main_module.set('library', 'false')
+ if android_sdk_version:
+ main_module.set('compile_sdk_version', android_sdk_version)
+ manifest = ElementTree.SubElement(main_module, 'manifest')
+ manifest.set('file', _SrcRelative(android_manifest))
+ if srcjar_sources:
+ for srcjar_file in srcjar_sources:
+ src = ElementTree.SubElement(main_module, 'src')
+ src.set('file', _SrcRelative(srcjar_file))
+ if sources:
+ for source in sources:
+ src = ElementTree.SubElement(main_module, 'src')
+ src.set('file', _SrcRelative(source))
+ if resource_sources:
+ for resource_file in resource_sources:
+ resource = ElementTree.SubElement(main_module, 'resource')
+ resource.set('file', _SrcRelative(resource_file))
+ return project
+
+
+def _GenerateAndroidManifest(original_manifest_path,
+ min_sdk_version,
+ manifest_package=None):
+ # Set minSdkVersion and package in the manifest to the correct values.
+ doc, manifest, _ = manifest_utils.ParseManifest(original_manifest_path)
+ uses_sdk = manifest.find('./uses-sdk')
+ if uses_sdk is None:
+ uses_sdk = ElementTree.Element('uses-sdk')
+ manifest.insert(0, uses_sdk)
+ uses_sdk.set('{%s}minSdkVersion' % manifest_utils.ANDROID_NAMESPACE,
+ min_sdk_version)
+ if manifest_package:
+ manifest.set('package', manifest_package)
+ return doc
+
+
+def _WriteXmlFile(root, path):
+ build_utils.MakeDirectory(os.path.dirname(path))
+ with build_utils.AtomicOutput(path) as f:
+ # Although we can write it just with ElementTree.tostring, using minidom
+ # makes it a lot easier to read as a human (also on code search).
+ f.write(
+ minidom.parseString(ElementTree.tostring(
+ root, encoding='utf-8')).toprettyxml(indent=' '))
+
+
+def _RunLint(lint_binary_path,
config_path,
manifest_path,
- result_path,
- product_dir,
sources,
cache_dir,
android_sdk_version,
@@ -56,268 +124,139 @@ def _RunLint(lint_path,
resource_sources,
resource_zips,
android_sdk_root,
+ lint_gen_dir,
testonly_target=False,
can_fail_build=False,
- include_unexpected=False,
silent=False):
logging.info('Lint starting')
- def _RebasePath(path):
- """Returns relative path to top-level src dir.
-
- Args:
- path: A path relative to cwd.
- """
- ret = os.path.relpath(os.path.abspath(path), build_utils.DIR_SOURCE_ROOT)
- # If it's outside of src/, just use abspath.
- if ret.startswith('..'):
- ret = os.path.abspath(path)
- return ret
-
- def _ProcessResultFile():
- with open(result_path, 'rb') as f:
- content = f.read().replace(
- _RebasePath(product_dir), 'PRODUCT_DIR')
-
- with open(result_path, 'wb') as f:
- f.write(content)
-
- def _ParseAndShowResultFile():
- dom = minidom.parse(result_path)
- issues = dom.getElementsByTagName('issue')
+ cmd = [
+ _SrcRelative(lint_binary_path),
+ # Consider all lint warnings as errors. Warnings should either always be
+ # fixed or completely suppressed in suppressions.xml. They should not
+ # bloat build output if they are not important enough to be fixed.
+ '-Werror',
+ '--exitcode', # Sets error code if there are errors.
+ '--quiet', # Silences lint's "." progress updates.
+ ]
+ if config_path:
+ cmd.extend(['--config', _SrcRelative(config_path)])
+ if testonly_target:
+ cmd.extend(['--disable', ','.join(_DISABLED_FOR_TESTS)])
+
+ if not manifest_path:
+ manifest_path = os.path.join(build_utils.DIR_SOURCE_ROOT, 'build',
+ 'android', 'AndroidManifest.xml')
+
+ logging.info('Generating Android manifest file')
+ android_manifest_tree = _GenerateAndroidManifest(manifest_path,
+ min_sdk_version,
+ manifest_package)
+ # Include the rebased manifest_path in the lint generated path so that it is
+ # clear in error messages where the original AndroidManifest.xml came from.
+ lint_android_manifest_path = os.path.join(lint_gen_dir,
+ _SrcRelative(manifest_path))
+ logging.info('Writing xml file %s', lint_android_manifest_path)
+ _WriteXmlFile(android_manifest_tree.getroot(), lint_android_manifest_path)
+
+ resource_root_dir = os.path.join(lint_gen_dir, _RES_ZIP_DIR)
+ # These are zip files with generated resources (e. g. strings from GRD).
+ logging.info('Extracting resource zips')
+ for resource_zip in resource_zips:
+ # Use a consistent root and name rather than a temporary file so that
+ # suppressions can be local to the lint target and the resource target.
+ resource_dir = os.path.join(resource_root_dir, resource_zip)
+ shutil.rmtree(resource_dir, True)
+ os.makedirs(resource_dir)
+ resource_sources.extend(
+ build_utils.ExtractAll(resource_zip, path=resource_dir))
+
+ logging.info('Extracting srcjars')
+ srcjar_root_dir = os.path.join(lint_gen_dir, _SRCJAR_DIR)
+ srcjar_sources = []
+ if srcjars:
+ for srcjar in srcjars:
+ # Use path without extensions since otherwise the file name includes
+ # .srcjar and lint treats it as a srcjar.
+ srcjar_dir = os.path.join(srcjar_root_dir, os.path.splitext(srcjar)[0])
+ shutil.rmtree(srcjar_dir, True)
+ os.makedirs(srcjar_dir)
+ # Sadly lint's srcjar support is broken since it only considers the first
+ # srcjar. Until we roll a lint version with that fixed, we need to extract
+ # it ourselves.
+ srcjar_sources.extend(build_utils.ExtractAll(srcjar, path=srcjar_dir))
+
+ logging.info('Generating project file')
+ project_file_root = _GenerateProjectFile(lint_android_manifest_path,
+ android_sdk_root, cache_dir, sources,
+ srcjar_sources, resource_sources,
+ android_sdk_version)
+
+ project_xml_path = os.path.join(lint_gen_dir, 'project.xml')
+ logging.info('Writing xml file %s', project_xml_path)
+ _WriteXmlFile(project_file_root, project_xml_path)
+ cmd += ['--project', _SrcRelative(project_xml_path)]
+
+ logging.info('Preparing environment variables')
+ env = os.environ.copy()
+ # It is important that lint uses the checked-in JDK11 as it is almost 50%
+ # faster than JDK8.
+ env['JAVA_HOME'] = os.path.relpath(build_utils.JAVA_HOME,
+ build_utils.DIR_SOURCE_ROOT)
+ # This filter is necessary for JDK11.
+ stderr_filter = build_utils.FilterReflectiveAccessJavaWarnings
+
+ try:
+ logging.debug('Lint command %s', cmd)
+ start = time.time()
+ # Lint outputs "No issues found" if it succeeds, and uses stderr when it
+ # fails, so we can safely ignore stdout.
+ build_utils.CheckOutput(cmd,
+ cwd=build_utils.DIR_SOURCE_ROOT,
+ env=env,
+ stderr_filter=stderr_filter)
+ end = time.time() - start
+ logging.info('Lint command took %ss', end)
+ except build_utils.CalledProcessError as e:
if not silent:
- print(file=sys.stderr)
- for issue in issues:
- issue_id = issue.attributes['id'].value
- message = issue.attributes['message'].value
- location_elem = issue.getElementsByTagName('location')[0]
- path = location_elem.attributes['file'].value
- line = location_elem.getAttribute('line')
- error = '%s:%s %s: %s [warning]' % (path, line, message, issue_id)
- print(error.encode('utf-8'), file=sys.stderr)
- for attr in ['errorLine1', 'errorLine2']:
- error_line = issue.getAttribute(attr)
- if error_line:
- print(error_line.encode('utf-8'), file=sys.stderr)
- return len(issues)
-
- with build_utils.TempDir() as temp_dir:
- cmd = [
- _RebasePath(lint_path),
- '-Werror',
- '--exitcode',
- '--showall',
- '--xml',
- _RebasePath(result_path),
- # An explicit sdk root needs to be specified since we have an extra
- # intermediate 'lastest' directory under cmdline-tools which prevents
- # lint from automatically deducing the location of the sdk. The sdk is
- # required for many checks (e.g. NewApi). Lint also requires absolute
- # paths.
- '--sdk-home',
- os.path.abspath(android_sdk_root),
- ]
- if config_path:
- cmd.extend(['--config', _RebasePath(config_path)])
- if testonly_target:
- cmd.extend(['--disable', ','.join(_DISABLED_FOR_TESTS)])
-
- tmp_dir_counter = [0]
- def _NewTempSubdir(prefix, append_digit=True):
- # Helper function to create a new sub directory based on the number of
- # subdirs created earlier.
- if append_digit:
- tmp_dir_counter[0] += 1
- prefix += str(tmp_dir_counter[0])
- new_dir = os.path.join(temp_dir, prefix)
- os.makedirs(new_dir)
- return new_dir
-
- resource_dirs = resource_utils.DeduceResourceDirsFromFileList(
- resource_sources)
- # These are zip files with generated resources (e. g. strings from GRD).
- for resource_zip in resource_zips:
- resource_dir = _NewTempSubdir(resource_zip, append_digit=False)
- resource_dirs.append(resource_dir)
- build_utils.ExtractAll(resource_zip, path=resource_dir)
-
- for resource_dir in resource_dirs:
- cmd.extend(['--resources', _RebasePath(resource_dir)])
-
- # There may be multiple source files with the same basename (but in
- # different directories). It is difficult to determine what part of the path
- # corresponds to the java package, and so instead just link the source files
- # into temporary directories (creating a new one whenever there is a name
- # conflict).
- def PathInDir(d, src):
- subpath = os.path.join(d, _RebasePath(src))
- subdir = os.path.dirname(subpath)
- if not os.path.exists(subdir):
- os.makedirs(subdir)
- return subpath
-
- src_dirs = []
- for src in sources:
- src_dir = None
- for d in src_dirs:
- if not os.path.exists(PathInDir(d, src)):
- src_dir = d
- break
- if not src_dir:
- src_dir = _NewTempSubdir('SRC_ROOT')
- src_dirs.append(src_dir)
- cmd.extend(['--sources', _RebasePath(src_dir)])
- # In cases where the build dir is outside of the src dir, this can
- # result in trying to symlink a file to itself for this file:
- # gen/components/version_info/android/java/org/chromium/
- # components/version_info/VersionConstants.java
- src = os.path.abspath(src)
- dst = PathInDir(src_dir, src)
- if src == dst:
- continue
- os.symlink(src, dst)
-
- if srcjars:
- srcjar_dir = _NewTempSubdir('GENERATED_SRC_ROOT', append_digit=False)
- cmd.extend(['--sources', _RebasePath(srcjar_dir)])
- for srcjar in srcjars:
- # We choose to allow srcjars that contain java files which have the
- # same package and name to clobber each other. This happens for
- # generated files like BuildConfig.java. It is generated for
- # targets like base_build_config_gen as well as targets like
- # chrome_modern_public_base_bundle_module__build_config_srcjar.
- # Although we could extract each srcjar to a separate folder, that
- # slows down some invocations of lint by 20 seconds or more.
- # TODO(wnwen): Switch lint.py to generate a project.xml file which
- # supports srcjar inputs by default.
- build_utils.ExtractAll(srcjar, path=srcjar_dir, no_clobber=False)
-
- project_dir = _NewTempSubdir('PROJECT_ROOT', append_digit=False)
- if android_sdk_version:
- # Create dummy project.properies file in a temporary "project" directory.
- # It is the only way to add Android SDK to the Lint's classpath. Proper
- # classpath is necessary for most source-level checks.
- with open(os.path.join(project_dir, 'project.properties'), 'w') \
- as propfile:
- print('target=android-{}'.format(android_sdk_version), file=propfile)
-
- # Put the manifest in a temporary directory in order to avoid lint detecting
- # sibling res/ and src/ directories (which should be pass explicitly if they
- # are to be included).
- if not manifest_path:
- manifest_path = os.path.join(
- build_utils.DIR_SOURCE_ROOT, 'build', 'android',
- 'AndroidManifest.xml')
- lint_manifest_path = os.path.join(project_dir, 'AndroidManifest.xml')
- shutil.copyfile(os.path.abspath(manifest_path), lint_manifest_path)
-
- # Check that minSdkVersion and package is correct and add it to the manifest
- # in case it does not exist.
- doc, manifest, _ = manifest_utils.ParseManifest(lint_manifest_path)
- manifest_utils.AssertUsesSdk(manifest, min_sdk_version)
- manifest_utils.AssertPackage(manifest, manifest_package)
- uses_sdk = manifest.find('./uses-sdk')
- if uses_sdk is None:
- uses_sdk = ElementTree.Element('uses-sdk')
- manifest.insert(0, uses_sdk)
- uses_sdk.set('{%s}minSdkVersion' % manifest_utils.ANDROID_NAMESPACE,
- min_sdk_version)
- if manifest_package:
- manifest.set('package', manifest_package)
- manifest_utils.SaveManifest(doc, lint_manifest_path)
-
- cmd.append(project_dir)
-
- if os.path.exists(result_path):
- os.remove(result_path)
-
- env = os.environ.copy()
- stderr_filter = build_utils.FilterReflectiveAccessJavaWarnings
- if cache_dir:
- env['_JAVA_OPTIONS'] = '-Duser.home=%s' % _RebasePath(cache_dir)
- # When _JAVA_OPTIONS is set, java prints to stderr:
- # Picked up _JAVA_OPTIONS: ...
- #
- # We drop all lines that contain _JAVA_OPTIONS from the output
- stderr_filter = lambda l: re.sub(
- r'.*_JAVA_OPTIONS.*\n?',
- '',
- build_utils.FilterReflectiveAccessJavaWarnings(l))
-
- def fail_func(returncode, stderr):
- if returncode != 0:
- return True
- if (include_unexpected and
- 'Unexpected failure during lint analysis' in stderr):
- return True
- return False
-
- try:
- env['JAVA_HOME'] = os.path.relpath(build_utils.JAVA_HOME,
- build_utils.DIR_SOURCE_ROOT)
- logging.debug('Lint command %s', cmd)
- start = time.time()
- build_utils.CheckOutput(cmd, cwd=build_utils.DIR_SOURCE_ROOT,
- env=env or None, stderr_filter=stderr_filter,
- fail_func=fail_func)
- end = time.time() - start
- logging.info('Lint command took %ss', end)
- except build_utils.CalledProcessError:
- # There is a problem with lint usage
- if not os.path.exists(result_path):
- raise
-
- # Sometimes produces empty (almost) files:
- if os.path.getsize(result_path) < 10:
- if can_fail_build:
- raise
- elif not silent:
- traceback.print_exc()
- return
-
- # There are actual lint issues
- try:
- num_issues = _ParseAndShowResultFile()
- except Exception: # pylint: disable=broad-except
- if not silent:
- print('Lint created unparseable xml file...')
- print('File contents:')
- with open(result_path) as f:
- print(f.read())
- if can_fail_build:
- traceback.print_exc()
- if can_fail_build:
- raise
- else:
- return
-
- _ProcessResultFile()
- if num_issues == 0 and include_unexpected:
- msg = 'Please refer to output above for unexpected lint failures.\n'
- else:
- msg = ('\nLint found %d new issues.\n'
- ' - For full explanation, please refer to %s\n'
- ' - For more information about lint and how to fix lint issues,'
- ' please refer to %s\n' %
- (num_issues, _RebasePath(result_path), _LINT_MD_URL))
- if not silent:
- print(msg, file=sys.stderr)
- if can_fail_build:
- raise Exception('Lint failed.')
+ print('Lint found new issues.\n'
+ ' - Here is the project.xml file passed to lint: {}\n'
+ ' - For more information about lint and how to fix lint issues,'
+ ' please refer to {}\n'.format(_SrcRelative(project_xml_path),
+ _LINT_MD_URL))
+ if can_fail_build:
+ raise
+ else:
+ print(e)
+ else:
+ # Lint succeeded, no need to keep generated files for debugging purposes.
+ shutil.rmtree(resource_root_dir, ignore_errors=True)
+ shutil.rmtree(srcjar_root_dir, ignore_errors=True)
logging.info('Lint completed')
-def _FindInDirectories(directories, filename_filter):
- all_files = []
- for directory in directories:
- all_files.extend(build_utils.FindInDirectory(directory, filename_filter))
- return all_files
-
-
def _ParseArgs(argv):
parser = argparse.ArgumentParser()
build_utils.AddDepfileOption(parser)
+ parser.add_argument('--lint-binary-path',
+ required=True,
+ help='Path to lint executable.')
+ parser.add_argument('--cache-dir',
+ required=True,
+ help='Path to the directory in which the android cache '
+ 'directory tree should be stored.')
+ parser.add_argument('--config-path', help='Path to lint suppressions file.')
+ parser.add_argument('--lint-gen-dir',
+ required=True,
+ help='Path to store generated xml files.')
+ parser.add_argument('--stamp', help='Path to stamp upon success.')
+ parser.add_argument('--android-sdk-version',
+ help='Version (API level) of the Android SDK used for '
+ 'building.')
+ parser.add_argument('--min-sdk-version',
+ required=True,
+ help='Minimal SDK version to lint against.')
parser.add_argument('--android-sdk-root',
required=True,
help='Lint needs an explicit path to the android sdk.')
@@ -326,32 +265,20 @@ def _ParseArgs(argv):
help='If set, some checks like UnusedResources will be '
'disabled since they are not helpful for test '
'targets.')
- parser.add_argument('--lint-path', required=True,
- help='Path to lint executable.')
- parser.add_argument('--product-dir', required=True,
- help='Path to product dir.')
- parser.add_argument('--result-path', required=True,
- help='Path to XML lint result file.')
- parser.add_argument('--cache-dir', required=True,
- help='Path to the directory in which the android cache '
- 'directory tree should be stored.')
- parser.add_argument('--platform-xml-path', required=True,
- help='Path to api-platforms.xml')
- parser.add_argument('--android-sdk-version',
- help='Version (API level) of the Android SDK used for '
- 'building.')
- parser.add_argument('--can-fail-build', action='store_true',
- help='If set, script will exit with nonzero exit status'
- ' if lint errors are present')
- parser.add_argument('--include-unexpected-failures', action='store_true',
+ parser.add_argument('--manifest-package',
+ help='Package name of the AndroidManifest.xml.')
+ parser.add_argument('--can-fail-build',
+ action='store_true',
help='If set, script will exit with nonzero exit status'
- ' if lint itself crashes with unexpected failures.')
- parser.add_argument('--config-path',
- help='Path to lint suppressions file.')
+ ' if lint errors are present')
+ parser.add_argument('--silent',
+ action='store_true',
+ help='If set, script will not log anything.')
parser.add_argument('--java-sources',
help='File containing a list of java sources files.')
+ parser.add_argument('--srcjars', help='GN list of included srcjars.')
parser.add_argument('--manifest-path',
- help='Path to AndroidManifest.xml')
+ help='Path to original AndroidManifest.xml')
parser.add_argument('--resource-sources',
default=[],
action='append',
@@ -362,25 +289,12 @@ def _ParseArgs(argv):
action='append',
help='GYP-list of resource zips, zip files of generated '
'resource files.')
- parser.add_argument('--silent', action='store_true',
- help='If set, script will not log anything.')
- parser.add_argument('--srcjars',
- help='GN list of included srcjars.')
- parser.add_argument('--stamp', help='Path to stamp upon success.')
- parser.add_argument(
- '--min-sdk-version',
- required=True,
- help='Minimal SDK version to lint against.')
- parser.add_argument(
- '--manifest-package', help='Package name of the AndroidManifest.xml.')
args = parser.parse_args(build_utils.ExpandFileArgs(argv))
-
args.java_sources = build_utils.ParseGnList(args.java_sources)
args.srcjars = build_utils.ParseGnList(args.srcjars)
args.resource_sources = build_utils.ParseGnList(args.resource_sources)
args.resource_zips = build_utils.ParseGnList(args.resource_zips)
-
return args
@@ -391,7 +305,6 @@ def main():
sources = []
for java_sources_file in args.java_sources:
sources.extend(build_utils.ReadSourcesList(java_sources_file))
-
resource_sources = []
for resource_sources_file in args.resource_sources:
resource_sources.extend(build_utils.ReadSourcesList(resource_sources_file))
@@ -400,14 +313,11 @@ def main():
resource_sources + [
args.manifest_path,
])
-
depfile_deps = [p for p in possible_depfile_deps if p]
- _RunLint(args.lint_path,
+ _RunLint(args.lint_binary_path,
args.config_path,
args.manifest_path,
- args.result_path,
- args.product_dir,
sources,
args.cache_dir,
args.android_sdk_version,
@@ -417,18 +327,15 @@ def main():
resource_sources,
args.resource_zips,
args.android_sdk_root,
+ args.lint_gen_dir,
testonly_target=args.testonly,
can_fail_build=args.can_fail_build,
- include_unexpected=args.include_unexpected_failures,
silent=args.silent)
logging.info('Creating stamp file')
build_utils.Touch(args.stamp)
if args.depfile:
- build_utils.WriteDepfile(args.depfile,
- args.stamp,
- depfile_deps,
- add_pydeps=False) # pydeps listed in GN.
+ build_utils.WriteDepfile(args.depfile, args.stamp, depfile_deps)
if __name__ == '__main__':
diff --git a/chromium/build/android/gyp/lint.pydeps b/chromium/build/android/gyp/lint.pydeps
index d9a96c70194..68a62f6bf94 100644
--- a/chromium/build/android/gyp/lint.pydeps
+++ b/chromium/build/android/gyp/lint.pydeps
@@ -1,29 +1,7 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/lint.pydeps build/android/gyp/lint.py
-../../../third_party/jinja2/__init__.py
-../../../third_party/jinja2/_compat.py
-../../../third_party/jinja2/bccache.py
-../../../third_party/jinja2/compiler.py
-../../../third_party/jinja2/defaults.py
-../../../third_party/jinja2/environment.py
-../../../third_party/jinja2/exceptions.py
-../../../third_party/jinja2/filters.py
-../../../third_party/jinja2/idtracking.py
-../../../third_party/jinja2/lexer.py
-../../../third_party/jinja2/loaders.py
-../../../third_party/jinja2/nodes.py
-../../../third_party/jinja2/optimizer.py
-../../../third_party/jinja2/parser.py
-../../../third_party/jinja2/runtime.py
-../../../third_party/jinja2/tests.py
-../../../third_party/jinja2/utils.py
-../../../third_party/jinja2/visitor.py
-../../../third_party/markupsafe/__init__.py
-../../../third_party/markupsafe/_compat.py
-../../../third_party/markupsafe/_native.py
../../gn_helpers.py
lint.py
util/__init__.py
util/build_utils.py
util/manifest_utils.py
-util/resource_utils.py
diff --git a/chromium/build/android/gyp/main_dex_list.py b/chromium/build/android/gyp/main_dex_list.py
index 9c36063468a..75ca886b833 100755
--- a/chromium/build/android/gyp/main_dex_list.py
+++ b/chromium/build/android/gyp/main_dex_list.py
@@ -56,8 +56,9 @@ def main():
args = _ParseArgs()
proguard_cmd = [
build_utils.JAVA_PATH,
- '-jar',
+ '-cp',
args.r8_path,
+ 'com.android.tools.r8.R8',
'--classfile',
'--no-desugaring',
'--lib',
@@ -131,11 +132,9 @@ def main():
f.write(main_dex_list)
if args.depfile:
- build_utils.WriteDepfile(
- args.depfile,
- args.main_dex_list_path,
- inputs=args.class_inputs_filearg,
- add_pydeps=False)
+ build_utils.WriteDepfile(args.depfile,
+ args.main_dex_list_path,
+ inputs=args.class_inputs_filearg)
if __name__ == '__main__':
diff --git a/chromium/build/android/gyp/merge_manifest.py b/chromium/build/android/gyp/merge_manifest.py
index 3f784588206..f205aa42b48 100755
--- a/chromium/build/android/gyp/merge_manifest.py
+++ b/chromium/build/android/gyp/merge_manifest.py
@@ -22,8 +22,8 @@ _MANIFEST_MERGER_JARS = [
os.path.join('common', 'common.jar'),
os.path.join('sdk-common', 'sdk-common.jar'),
os.path.join('sdklib', 'sdklib.jar'),
- os.path.join('external', 'com', 'google', 'guava', 'guava', '27.1-jre',
- 'guava-27.1-jre.jar'),
+ os.path.join('external', 'com', 'google', 'guava', 'guava', '28.1-jre',
+ 'guava-28.1-jre.jar'),
os.path.join('external', 'kotlin-plugin-ij', 'Kotlin', 'kotlinc', 'lib',
'kotlin-stdlib.jar'),
os.path.join('external', 'com', 'google', 'code', 'gson', 'gson', '2.8.5',
@@ -137,8 +137,7 @@ def main(argv):
if args.depfile:
inputs = extras + classpath.split(':')
- build_utils.WriteDepfile(args.depfile, args.output, inputs=inputs,
- add_pydeps=False)
+ build_utils.WriteDepfile(args.depfile, args.output, inputs=inputs)
if __name__ == '__main__':
diff --git a/chromium/build/android/gyp/prepare_resources.pydeps b/chromium/build/android/gyp/prepare_resources.pydeps
index 1066a5ff1ec..c0d225db2c6 100644
--- a/chromium/build/android/gyp/prepare_resources.pydeps
+++ b/chromium/build/android/gyp/prepare_resources.pydeps
@@ -22,6 +22,7 @@
../../../third_party/markupsafe/_compat.py
../../../third_party/markupsafe/_native.py
../../gn_helpers.py
+../../print_python_deps.py
prepare_resources.py
util/__init__.py
util/build_utils.py
diff --git a/chromium/build/android/gyp/proguard.py b/chromium/build/android/gyp/proguard.py
index 18919589382..c151be70aef 100755
--- a/chromium/build/android/gyp/proguard.py
+++ b/chromium/build/android/gyp/proguard.py
@@ -12,6 +12,7 @@ import sys
import tempfile
import zipfile
+import dex_jdk_libs
from util import build_utils
from util import diff_utils
@@ -107,15 +108,17 @@ def _ParseOptions():
args = build_utils.ExpandFileArgs(sys.argv[1:])
parser = argparse.ArgumentParser()
build_utils.AddDepfileOption(parser)
- group = parser.add_mutually_exclusive_group(required=True)
- group.add_argument('--proguard-path', help='Path to the proguard.jar to use.')
- group.add_argument('--r8-path', help='Path to the R8.jar to use.')
+ parser.add_argument('--r8-path',
+ required=True,
+ help='Path to the R8.jar to use.')
parser.add_argument(
'--desugar-jdk-libs-json', help='Path to desugar_jdk_libs.json.')
parser.add_argument('--input-paths',
action='append',
required=True,
help='GN-list of .jar files to optimize.')
+ parser.add_argument('--desugar-jdk-libs-jar',
+ help='Path to desugar_jdk_libs.jar.')
parser.add_argument('--output-path', help='Path to the generated .jar file.')
parser.add_argument(
'--proguard-configs',
@@ -196,6 +199,8 @@ def _ParseOptions():
parser.add_argument(
'--stamp',
help='File to touch upon success. Mutually exclusive with --output-path')
+ parser.add_argument('--desugared-library-keep-rule-output',
+ help='Path to desugared library keep rule output file.')
options = parser.parse_args(args)
@@ -213,9 +218,6 @@ def _ParseOptions():
if options.expected_configs_file and not options.output_config:
parser.error('--expected-configs-file requires --output-config')
- if options.proguard_path and options.disable_outlining:
- parser.error('--disable-outlining requires --r8-path')
-
if options.only_verify_expectations and not options.stamp:
parser.error('--only-verify-expectations requires --stamp')
@@ -268,12 +270,18 @@ class _DexPathContext(object):
self.staging_dir = os.path.join(work_dir, name)
os.mkdir(self.staging_dir)
- def CreateOutput(self):
+ def CreateOutput(self, has_imported_lib=False, keep_rule_output=None):
found_files = build_utils.FindInDirectory(self.staging_dir)
if not found_files:
raise Exception('Missing dex outputs in {}'.format(self.staging_dir))
if self._final_output_path.endswith('.dex'):
+ if has_imported_lib:
+ raise Exception(
+ 'Trying to create a single .dex file, but a dependency requires '
+            'JDK Library Desugaring (which necessitates a second file). '
+ 'Refer to %s to see what desugaring was required' %
+ keep_rule_output)
if len(found_files) != 1:
raise Exception('Expected exactly 1 dex file output, found: {}'.format(
'\t'.join(found_files)))
@@ -323,8 +331,9 @@ def _OptimizeWithR8(options,
cmd = [
build_utils.JAVA_PATH,
- '-jar',
+ '-cp',
options.r8_path,
+ 'com.android.tools.r8.R8',
'--no-data-resources',
'--output',
base_dex_context.staging_dir,
@@ -333,7 +342,12 @@ def _OptimizeWithR8(options,
]
if options.desugar_jdk_libs_json:
- cmd += ['--desugared-lib', options.desugar_jdk_libs_json]
+ cmd += [
+ '--desugared-lib',
+ options.desugar_jdk_libs_json,
+ '--desugared-lib-pg-conf-output',
+ options.desugared_library_keep_rule_output,
+ ]
if options.min_api:
cmd += ['--min-api', options.min_api]
@@ -357,10 +371,8 @@ def _OptimizeWithR8(options,
p for p in feature.input_paths if p not in module_input_jars
]
module_input_jars.update(feature_input_jars)
- cmd += [
- '--feature-jar',
- feature.staging_dir + ':' + ':'.join(feature_input_jars)
- ]
+ for in_jar in feature_input_jars:
+ cmd += ['--feature', in_jar, feature.staging_dir]
cmd += base_dex_context.input_paths
# Add any extra input jars to the base module (e.g. desugar runtime).
@@ -382,7 +394,18 @@ def _OptimizeWithR8(options,
'android/docs/java_optimization.md#Debugging-common-failures\n'))
raise ProguardProcessError(err, debugging_link)
- base_dex_context.CreateOutput()
+ base_has_imported_lib = False
+ if options.desugar_jdk_libs_json:
+ existing_files = build_utils.FindInDirectory(base_dex_context.staging_dir)
+ base_has_imported_lib = dex_jdk_libs.DexJdkLibJar(
+ options.r8_path, options.min_api, options.desugar_jdk_libs_json,
+ options.desugar_jdk_libs_jar,
+ options.desugared_library_keep_rule_output,
+ os.path.join(base_dex_context.staging_dir,
+ 'classes%d.dex' % (len(existing_files) + 1)))
+
+ base_dex_context.CreateOutput(base_has_imported_lib,
+ options.desugared_library_keep_rule_output)
for feature in feature_contexts:
feature.CreateOutput()
@@ -393,65 +416,6 @@ def _OptimizeWithR8(options,
out_file.writelines(l for l in in_file if not l.startswith('#'))
-def _OptimizeWithProguard(options,
- config_paths,
- libraries,
- dynamic_config_data,
- print_stdout=False):
- with build_utils.TempDir() as tmp_dir:
- combined_injars_path = os.path.join(tmp_dir, 'injars.jar')
- combined_libjars_path = os.path.join(tmp_dir, 'libjars.jar')
- combined_proguard_configs_path = os.path.join(tmp_dir, 'includes.txt')
- tmp_mapping_path = os.path.join(tmp_dir, 'mapping.txt')
- tmp_output_jar = os.path.join(tmp_dir, 'output.jar')
-
- build_utils.MergeZips(combined_injars_path, options.input_paths)
- build_utils.MergeZips(combined_libjars_path, libraries)
- with open(combined_proguard_configs_path, 'w') as f:
- f.write(_CombineConfigs(config_paths, dynamic_config_data))
-
- if options.proguard_path.endswith('.jar'):
- cmd = [
- build_utils.JAVA_PATH, '-jar', options.proguard_path, '-include',
- combined_proguard_configs_path
- ]
- else:
- cmd = [options.proguard_path, '@' + combined_proguard_configs_path]
-
- cmd += [
- '-forceprocessing',
- '-libraryjars',
- combined_libjars_path,
- '-injars',
- combined_injars_path,
- '-outjars',
- tmp_output_jar,
- '-printmapping',
- tmp_mapping_path,
- ]
-
- # Warning: and Error: are sent to stderr, but messages and Note: are sent
- # to stdout.
- stdout_filter = None
- stderr_filter = None
- if print_stdout:
- stdout_filter = _ProguardOutputFilter()
- stderr_filter = _ProguardOutputFilter()
- build_utils.CheckOutput(
- cmd,
- print_stdout=True,
- print_stderr=True,
- stdout_filter=stdout_filter,
- stderr_filter=stderr_filter)
-
- # ProGuard will skip writing if the file would be empty.
- build_utils.Touch(tmp_mapping_path)
-
- # Copy output files to correct locations.
- shutil.move(tmp_output_jar, options.output_path)
- shutil.move(tmp_mapping_path, options.mapping_output)
-
-
def _CombineConfigs(configs, dynamic_config_data, exclude_generated=False):
ret = []
@@ -508,11 +472,14 @@ def _CreateDynamicConfig(options):
if api_level > _min_api:
ret.append('-keep @interface %s' % annotation_name)
ret.append("""\
--keep,allowobfuscation,allowoptimization @%s class ** {
- <methods>;
+-if @%s class * {
+ *** *(...);
+}
+-keep,allowobfuscation class <1> {
+ *** <2>(...);
}""" % annotation_name)
ret.append("""\
--keepclassmembers,allowobfuscation,allowoptimization class ** {
+-keepclassmembers,allowobfuscation class ** {
@%s <methods>;
}""" % annotation_name)
return '\n'.join(ret)
@@ -545,8 +512,7 @@ def _MaybeWriteStampAndDepFile(options, inputs):
build_utils.Touch(options.stamp)
output = options.stamp
if options.depfile:
- build_utils.WriteDepfile(
- options.depfile, output, inputs=inputs, add_pydeps=False)
+ build_utils.WriteDepfile(options.depfile, output, inputs=inputs)
def main():
@@ -597,12 +563,8 @@ def main():
with open(options.output_config, 'w') as f:
f.write(merged_configs)
- if options.r8_path:
- _OptimizeWithR8(options, proguard_configs, libraries, dynamic_config_data,
- print_stdout)
- else:
- _OptimizeWithProguard(options, proguard_configs, libraries,
- dynamic_config_data, print_stdout)
+ _OptimizeWithR8(options, proguard_configs, libraries, dynamic_config_data,
+ print_stdout)
# After ProGuard / R8 has run:
for output in options.extra_mapping_output_paths:
diff --git a/chromium/build/android/gyp/proguard.pydeps b/chromium/build/android/gyp/proguard.pydeps
index 98934d7aae2..11f51cc0f0d 100644
--- a/chromium/build/android/gyp/proguard.pydeps
+++ b/chromium/build/android/gyp/proguard.pydeps
@@ -1,6 +1,7 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/proguard.pydeps build/android/gyp/proguard.py
../../gn_helpers.py
+dex_jdk_libs.py
proguard.py
util/__init__.py
util/build_utils.py
diff --git a/chromium/build/android/gyp/turbine.pydeps b/chromium/build/android/gyp/turbine.pydeps
index 19396459519..45b0d27d3f6 100644
--- a/chromium/build/android/gyp/turbine.pydeps
+++ b/chromium/build/android/gyp/turbine.pydeps
@@ -1,6 +1,7 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/turbine.pydeps build/android/gyp/turbine.py
../../gn_helpers.py
+../../print_python_deps.py
turbine.py
util/__init__.py
util/build_utils.py
diff --git a/chromium/build/android/gyp/util/build_utils.py b/chromium/build/android/gyp/util/build_utils.py
index bc15fbb61f2..067f62e4b9b 100644
--- a/chromium/build/android/gyp/util/build_utils.py
+++ b/chromium/build/android/gyp/util/build_utils.py
@@ -541,49 +541,6 @@ def GetSortedTransitiveDependencies(top, deps_func):
return list(deps_map)
-def ComputePythonDependencies():
- """Gets the paths of imported non-system python modules.
-
- A path is assumed to be a "system" import if it is outside of chromium's
- src/. The paths will be relative to the current directory.
- """
- _ForceLazyModulesToLoad()
- module_paths = (m.__file__ for m in sys.modules.values()
- if m is not None and hasattr(m, '__file__'))
- abs_module_paths = map(os.path.abspath, module_paths)
-
- abs_dir_source_root = os.path.abspath(DIR_SOURCE_ROOT)
- non_system_module_paths = [
- p for p in abs_module_paths if p.startswith(abs_dir_source_root)
- ]
-
- def ConvertPycToPy(s):
- if s.endswith('.pyc'):
- return s[:-1]
- return s
-
- non_system_module_paths = map(ConvertPycToPy, non_system_module_paths)
- non_system_module_paths = map(os.path.relpath, non_system_module_paths)
- return sorted(set(non_system_module_paths))
-
-
-def _ForceLazyModulesToLoad():
- """Forces any lazily imported modules to fully load themselves.
-
- Inspecting the modules' __file__ attribute causes lazily imported modules
- (e.g. from email) to get fully imported and update sys.modules. Iterate
- over the values until sys.modules stabilizes so that no modules are missed.
- """
- while True:
- num_modules_before = len(sys.modules.keys())
- for m in sys.modules.values():
- if m is not None and hasattr(m, '__file__'):
- _ = m.__file__
- num_modules_after = len(sys.modules.keys())
- if num_modules_before == num_modules_after:
- break
-
-
def InitLogging(enabling_env):
logging.basicConfig(
level=logging.DEBUG if os.environ.get(enabling_env) else logging.WARNING,
@@ -611,12 +568,10 @@ def AddDepfileOption(parser):
help='Path to depfile (refer to `gn help depfile`)')
-def WriteDepfile(depfile_path, first_gn_output, inputs=None, add_pydeps=True):
+def WriteDepfile(depfile_path, first_gn_output, inputs=None):
assert depfile_path != first_gn_output # http://crbug.com/646165
assert not isinstance(inputs, string_types) # Easy mistake to make
inputs = inputs or []
- if add_pydeps:
- inputs = ComputePythonDependencies() + inputs
MakeDirectory(os.path.dirname(depfile_path))
# Ninja does not support multiple outputs in depfiles.
with open(depfile_path, 'w') as depfile:
diff --git a/chromium/build/android/gyp/util/md5_check.py b/chromium/build/android/gyp/util/md5_check.py
index a8a815e7e4f..2830d25c969 100644
--- a/chromium/build/android/gyp/util/md5_check.py
+++ b/chromium/build/android/gyp/util/md5_check.py
@@ -14,6 +14,9 @@ import zipfile
from util import build_utils
+sys.path.insert(1, os.path.join(build_utils.DIR_SOURCE_ROOT, 'build'))
+import print_python_deps
+
# When set and a difference is detected, a diff of what changed is printed.
PRINT_EXPLANATIONS = int(os.environ.get('PRINT_BUILD_EXPLANATIONS', 0))
@@ -48,7 +51,7 @@ def CallAndWriteDepfileIfStale(on_stale_md5,
input_strings = list(input_strings or [])
output_paths = list(output_paths or [])
- input_paths += build_utils.ComputePythonDependencies()
+ input_paths += print_python_deps.ComputePythonDependencies()
CallAndRecordIfStale(
on_stale_md5,
@@ -64,8 +67,7 @@ def CallAndWriteDepfileIfStale(on_stale_md5,
# on bots that build with & without patch, and the patch changes the depfile
# location.
if hasattr(options, 'depfile') and options.depfile:
- build_utils.WriteDepfile(
- options.depfile, output_paths[0], depfile_deps, add_pydeps=False)
+ build_utils.WriteDepfile(options.depfile, output_paths[0], depfile_deps)
def CallAndRecordIfStale(function,
@@ -125,15 +127,21 @@ def CallAndRecordIfStale(function,
old_metadata = None
force = force or _FORCE_REBUILD
missing_outputs = [x for x in output_paths if force or not os.path.exists(x)]
+ too_new = []
# When outputs are missing, don't bother gathering change information.
if not missing_outputs and os.path.exists(record_path):
- with open(record_path, 'r') as jsonfile:
- try:
- old_metadata = _Metadata.FromFile(jsonfile)
- except: # pylint: disable=bare-except
- pass # Not yet using new file format.
-
- changes = Changes(old_metadata, new_metadata, force, missing_outputs)
+ record_mtime = os.path.getmtime(record_path)
+ # Outputs newer than the change information must have been modified outside
+ # of the build, and should be considered stale.
+ too_new = [x for x in output_paths if os.path.getmtime(x) > record_mtime]
+ if not too_new:
+ with open(record_path, 'r') as jsonfile:
+ try:
+ old_metadata = _Metadata.FromFile(jsonfile)
+ except: # pylint: disable=bare-except
+ pass # Not yet using new file format.
+
+ changes = Changes(old_metadata, new_metadata, force, missing_outputs, too_new)
if not changes.HasChanges():
return
@@ -153,11 +161,13 @@ def CallAndRecordIfStale(function,
class Changes(object):
"""Provides and API for querying what changed between runs."""
- def __init__(self, old_metadata, new_metadata, force, missing_outputs):
+ def __init__(self, old_metadata, new_metadata, force, missing_outputs,
+ too_new):
self.old_metadata = old_metadata
self.new_metadata = new_metadata
self.force = force
self.missing_outputs = missing_outputs
+ self.too_new = too_new
def _GetOldTag(self, path, subpath=None):
return self.old_metadata and self.old_metadata.GetTag(path, subpath)
@@ -254,6 +264,8 @@ class Changes(object):
return 'force=True'
elif self.missing_outputs:
return 'Outputs do not exist:\n ' + '\n '.join(self.missing_outputs)
+ elif self.too_new:
+ return 'Outputs newer than stamp file:\n ' + '\n '.join(self.too_new)
elif self.old_metadata is None:
return 'Previous stamp file not found.'
diff --git a/chromium/build/android/gyp/util/md5_check_test.py b/chromium/build/android/gyp/util/md5_check_test.py
index 9b3b9039f39..2169320ee54 100755
--- a/chromium/build/android/gyp/util/md5_check_test.py
+++ b/chromium/build/android/gyp/util/md5_check_test.py
@@ -47,13 +47,21 @@ class TestMd5Check(unittest.TestCase):
outputs_missing=False,
expected_changes=None,
added_or_modified_only=None,
- track_subentries=False):
+ track_subentries=False,
+ output_newer_than_record=False):
output_paths = None
if outputs_specified:
output_file1 = tempfile.NamedTemporaryFile()
if outputs_missing:
output_file1.close() # Gets deleted on close().
output_paths = [output_file1.name]
+ if output_newer_than_record:
+ output_mtime = os.path.getmtime(output_file1.name)
+ os.utime(record_path.name, (output_mtime - 1, output_mtime - 1))
+ else:
+      # Touch the record file so it doesn't look like it's older than
+      # the output we've just created.
+ os.utime(record_path.name, None)
self.called = False
self.changes = None
@@ -97,6 +105,13 @@ class TestMd5Check(unittest.TestCase):
outputs_specified=True, outputs_missing=True,
expected_changes='Outputs do not exist:*',
added_or_modified_only=False)
+ CheckCallAndRecord(True,
+ 'should call when output is newer than record',
+ expected_changes='Outputs newer than stamp file:*',
+ outputs_specified=True,
+ outputs_missing=False,
+ added_or_modified_only=False,
+ output_newer_than_record=True)
CheckCallAndRecord(True, force=True, message='should call when forced',
expected_changes='force=True',
added_or_modified_only=False)
diff --git a/chromium/build/android/gyp/util/parallel.py b/chromium/build/android/gyp/util/parallel.py
new file mode 100644
index 00000000000..082ad97225e
--- /dev/null
+++ b/chromium/build/android/gyp/util/parallel.py
@@ -0,0 +1,214 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Helpers related to multiprocessing.
+
+Based on: //tools/binary_size/libsupersize/parallel.py
+"""
+
+import atexit
+import logging
+import multiprocessing
+import os
+import sys
+import threading
+import traceback
+
+DISABLE_ASYNC = os.environ.get('DISABLE_ASYNC') == '1'
+if DISABLE_ASYNC:
+ logging.warning('Running in synchronous mode.')
+
+_all_pools = None
+_is_child_process = False
+_silence_exceptions = False
+
+# Used to pass parameters to forked processes without pickling.
+_fork_params = None
+_fork_kwargs = None
+
+
+class _ImmediateResult(object):
+ def __init__(self, value):
+ self._value = value
+
+ def get(self):
+ return self._value
+
+ def wait(self):
+ pass
+
+ def ready(self):
+ return True
+
+ def successful(self):
+ return True
+
+
+class _ExceptionWrapper(object):
+ """Used to marshal exception messages back to main process."""
+
+ def __init__(self, msg, exception_type=None):
+ self.msg = msg
+ self.exception_type = exception_type
+
+ def MaybeThrow(self):
+ if self.exception_type:
+ raise getattr(__builtins__,
+ self.exception_type)('Originally caused by: ' + self.msg)
+
+
+class _FuncWrapper(object):
+ """Runs on the fork()'ed side to catch exceptions and spread *args."""
+
+ def __init__(self, func):
+ global _is_child_process
+ _is_child_process = True
+ self._func = func
+
+ def __call__(self, index, _=None):
+ try:
+ return self._func(*_fork_params[index], **_fork_kwargs)
+ except Exception as e:
+ # Only keep the exception type for builtin exception types or else risk
+ # further marshalling exceptions.
+ exception_type = None
+ if hasattr(__builtins__, type(e).__name__):
+ exception_type = type(e).__name__
+ # multiprocessing is supposed to catch and return exceptions automatically
+ # but it doesn't seem to work properly :(.
+ return _ExceptionWrapper(traceback.format_exc(), exception_type)
+ except: # pylint: disable=bare-except
+ return _ExceptionWrapper(traceback.format_exc())
+
+
+class _WrappedResult(object):
+ """Allows for host-side logic to be run after child process has terminated.
+
+ * Unregisters associated pool _all_pools.
+ * Raises exception caught by _FuncWrapper.
+ """
+
+ def __init__(self, result, pool=None):
+ self._result = result
+ self._pool = pool
+
+ def get(self):
+ self.wait()
+ value = self._result.get()
+ _CheckForException(value)
+ return value
+
+ def wait(self):
+ self._result.wait()
+ if self._pool:
+ _all_pools.remove(self._pool)
+ self._pool = None
+
+ def ready(self):
+ return self._result.ready()
+
+ def successful(self):
+ return self._result.successful()
+
+
+def _TerminatePools():
+ """Calls .terminate() on all active process pools.
+
+ Not supposed to be necessary according to the docs, but seems to be required
+ when child process throws an exception or Ctrl-C is hit.
+ """
+ global _silence_exceptions
+ _silence_exceptions = True
+ # Child processes cannot have pools, but atexit runs this function because
+ # it was registered before fork()ing.
+ if _is_child_process:
+ return
+
+ def close_pool(pool):
+ try:
+ pool.terminate()
+ except: # pylint: disable=bare-except
+ pass
+
+ for i, pool in enumerate(_all_pools):
+ # Without calling terminate() on a separate thread, the call can block
+ # forever.
+ thread = threading.Thread(name='Pool-Terminate-{}'.format(i),
+ target=close_pool,
+ args=(pool, ))
+ thread.daemon = True
+ thread.start()
+
+
+def _CheckForException(value):
+ if isinstance(value, _ExceptionWrapper):
+ global _silence_exceptions
+ if not _silence_exceptions:
+ value.MaybeThrow()
+ _silence_exceptions = True
+ logging.error('Subprocess raised an exception:\n%s', value.msg)
+ sys.exit(1)
+
+
+def _MakeProcessPool(job_params, **job_kwargs):
+ global _all_pools
+ global _fork_params
+ global _fork_kwargs
+ assert _fork_params is None
+ assert _fork_kwargs is None
+ pool_size = min(len(job_params), multiprocessing.cpu_count())
+ _fork_params = job_params
+ _fork_kwargs = job_kwargs
+ ret = multiprocessing.Pool(pool_size)
+ _fork_params = None
+ _fork_kwargs = None
+ if _all_pools is None:
+ _all_pools = []
+ atexit.register(_TerminatePools)
+ _all_pools.append(ret)
+ return ret
+
+
+def ForkAndCall(func, args):
+ """Runs |func| in a fork'ed process.
+
+ Returns:
+ A Result object (call .get() to get the return value)
+ """
+ if DISABLE_ASYNC:
+ pool = None
+ result = _ImmediateResult(func(*args))
+ else:
+ pool = _MakeProcessPool([args]) # Omit |kwargs|.
+ result = pool.apply_async(_FuncWrapper(func), (0, ))
+ pool.close()
+ return _WrappedResult(result, pool=pool)
+
+
+def BulkForkAndCall(func, arg_tuples, **kwargs):
+ """Calls |func| in a fork'ed process for each set of args within |arg_tuples|.
+
+ Args:
+ kwargs: Common keyword arguments to be passed to |func|.
+
+ Yields the return values in order.
+ """
+ arg_tuples = list(arg_tuples)
+ if not arg_tuples:
+ return
+
+ if DISABLE_ASYNC:
+ for args in arg_tuples:
+ yield func(*args, **kwargs)
+ return
+
+ pool = _MakeProcessPool(arg_tuples, **kwargs)
+ wrapped_func = _FuncWrapper(func)
+ try:
+ for result in pool.imap(wrapped_func, xrange(len(arg_tuples))):
+ _CheckForException(result)
+ yield result
+ finally:
+ pool.close()
+ pool.join()
+ _all_pools.remove(pool)
diff --git a/chromium/build/android/gyp/util/resource_utils.py b/chromium/build/android/gyp/util/resource_utils.py
index 1b92c4fb49e..7b16949f9d3 100644
--- a/chromium/build/android/gyp/util/resource_utils.py
+++ b/chromium/build/android/gyp/util/resource_utils.py
@@ -57,6 +57,7 @@ AAPT_IGNORE_PATTERN = ':'.join([
'*~', # Some editors create these as temp files.
'.*', # Never makes sense to include dot(files/dirs).
'*.d.stamp', # Ignore stamp files
+ '*.backup', # Some tools create temporary backup files.
])
MULTIPLE_RES_MAGIC_STRING = b'magic'
diff --git a/chromium/build/android/gyp/write_build_config.py b/chromium/build/android/gyp/write_build_config.py
index 02b02fcd538..5e3897c4913 100755
--- a/chromium/build/android/gyp/write_build_config.py
+++ b/chromium/build/android/gyp/write_build_config.py
@@ -242,18 +242,22 @@ through Proguard or other tools. For most targets this is generated
from sources, with a name like `$target_name.javac.jar`. However, when using
a prebuilt jar, this will point to the source archive directly.
-* `deps_info['jar_path']`:
+* `deps_info['device_jar_path']`:
Path to a file that is the result of processing
-`deps_info['unprocessed_jar_path']` with various tools.
+`deps_info['unprocessed_jar_path']` with various tools (ready to be dexed).
+
+* `deps_info['host_jar_path']`:
+Path to a file that is the result of processing
+`deps_info['unprocessed_jar_path']` with various tools (used by java_binary).
* `deps_info['interface_jar_path']:
Path to the interface jar generated for this library. This corresponds to
a jar file that only contains declarations. Generated by running the `ijar` on
-`deps_info['jar_path']` or the `turbine` tool on source files.
+`deps_info['unprocessed_jar_path']` or the `turbine` tool on source files.
* `deps_info['dex_path']`:
-Path to the `.dex` file generated for this target, from `deps_info['jar_path']`
-unless this comes from a prebuilt `.aar` archive.
+Path to the `.dex` file generated for this target, from
+`deps_info['device_jar_path']` unless this comes from a prebuilt `.aar` archive.
* `deps_info['is_prebuilt']`:
True to indicate that this target corresponds to a prebuilt `.jar` file.
@@ -323,10 +327,10 @@ all entries from the `java_library` type, and adds:
* `deps_info['main_class']`:
Name of the main Java class that serves as an entry point for the binary.
-* `deps_info['java_runtime_classpath']`:
+* `deps_info['device_classpath']`:
The classpath used when running a Java or Android binary. Essentially the
-collection of all `deps_info['jar_path']` entries for the target and all its
-dependencies.
+collection of all `deps_info['device_jar_path']` entries for the target and all
+its dependencies.
## <a name="target_junit_binary">Target type `junit_binary`</a>:
@@ -701,8 +705,8 @@ class Deps(object):
def helper(cur):
for config in cur.Direct('java_library'):
if config['is_prebuilt'] or config['gradle_treat_as_prebuilt']:
- if config['jar_path'] not in ret:
- ret.append(config['jar_path'])
+ if config['unprocessed_jar_path'] not in ret:
+ ret.append(config['unprocessed_jar_path'])
helper(self)
return ret
@@ -837,6 +841,15 @@ def _CreateJavaLocaleListFromAssets(assets, locale_paks):
return '{%s}' % ','.join(['"%s"' % l for l in sorted(locales)])
+def _AddJarMapping(jar_to_target, configs):
+ for config in configs:
+ jar = config.get('unprocessed_jar_path')
+ if jar:
+ jar_to_target[jar] = config['gn_target']
+ for jar in config.get('extra_classpath_jars', []):
+ jar_to_target[jar] = config['gn_target']
+
+
def main(argv):
parser = optparse.OptionParser()
build_utils.AddDepfileOption(parser)
@@ -844,6 +857,7 @@ def main(argv):
parser.add_option(
'--type',
help='Type of this target (e.g. android_library).')
+ parser.add_option('--gn-target', help='GN label for this target')
parser.add_option(
'--deps-configs',
help='GN-list of dependent build_config files.')
@@ -875,7 +889,8 @@ def main(argv):
help='Consider the assets as locale paks in BuildConfig.java')
# java library options
- parser.add_option('--jar-path', help='Path to target\'s jar output.')
+ parser.add_option('--device-jar-path', help='Path to .jar for dexing.')
+ parser.add_option('--host-jar-path', help='Path to .jar for java_binary.')
parser.add_option('--unprocessed-jar-path',
help='Path to the .jar to use for javac classpath purposes.')
parser.add_option(
@@ -884,10 +899,6 @@ def main(argv):
parser.add_option(
'--jetified-jar-path',
help='Path to the jetified.jar to use for javac classpath purposes.')
- parser.add_option(
- '--skip-jetify',
- action='store_true',
- help='Whether to use jetified or non-jetified classpath.')
parser.add_option('--is-prebuilt', action='store_true',
help='Whether the jar was compiled or pre-compiled.')
parser.add_option('--java-sources-file', help='Path to .sources file')
@@ -1039,11 +1050,13 @@ def main(argv):
if options.fail:
parser.error('\n'.join(build_utils.ParseGnList(options.fail)))
- jar_path_options = ['jar_path', 'unprocessed_jar_path', 'interface_jar_path']
+ lib_options = ['unprocessed_jar_path', 'interface_jar_path']
+ device_lib_options = ['device_jar_path', 'dex_path']
required_options_map = {
- 'android_apk': ['build_config', 'dex_path'] + jar_path_options,
- 'android_app_bundle_module': ['build_config', 'dex_path',
- 'final_dex_path', 'res_size_info'] + jar_path_options,
+ 'android_apk': ['build_config'] + lib_options + device_lib_options,
+ 'android_app_bundle_module':
+ ['build_config', 'final_dex_path', 'res_size_info'] + lib_options +
+ device_lib_options,
'android_assets': ['build_config'],
'android_resources': ['build_config', 'resources_zip'],
'dist_aar': ['build_config'],
@@ -1051,9 +1064,9 @@ def main(argv):
'group': ['build_config'],
'java_annotation_processor': ['build_config', 'main_class'],
'java_binary': ['build_config'],
- 'java_library': ['build_config'] + jar_path_options,
+ 'java_library': ['build_config', 'host_jar_path'] + lib_options,
'junit_binary': ['build_config'],
- 'system_java_library': ['build_config'],
+ 'system_java_library': ['build_config', 'unprocessed_jar_path'],
'android_app_bundle': ['build_config', 'module_build_configs'],
}
required_options = required_options_map.get(options.type)
@@ -1093,10 +1106,10 @@ def main(argv):
'--library-renames can only be used with --type=android_apk or '
'--type=android_app_bundle_module')
- if options.jar_path and options.supports_android and not options.dex_path:
+ if options.device_jar_path and not options.dex_path:
raise Exception('java_library that supports Android requires a dex path.')
- if any(getattr(options, x) for x in jar_path_options):
- for attr in jar_path_options:
+ if any(getattr(options, x) for x in lib_options):
+ for attr in lib_options:
if not getattr(options, attr):
raise('Expected %s to be set.' % attr)
@@ -1152,6 +1165,7 @@ def main(argv):
'name': os.path.basename(options.build_config),
'path': options.build_config,
'type': options.type,
+ 'gn_target': options.gn_target,
'deps_configs': deps.direct_deps_config_paths,
'chromium_code': not options.non_chromium_code,
},
@@ -1254,20 +1268,21 @@ def main(argv):
raise Exception('Not all deps support the Android platform: '
+ str(deps_not_support_android))
- if is_apk_or_module_target:
+ if is_apk_or_module_target or options.type == 'dist_jar':
all_dex_files = [c['dex_path'] for c in all_library_deps]
if is_java_target:
# Classpath values filled in below (after applying tested_apk_config).
config['javac'] = {}
- if options.jar_path:
- deps_info['jar_path'] = options.jar_path
+ if options.unprocessed_jar_path:
deps_info['unprocessed_jar_path'] = options.unprocessed_jar_path
deps_info['interface_jar_path'] = options.interface_jar_path
- if options.skip_jetify:
- deps_info['jetified_jar_path'] = options.interface_jar_path
- else:
- deps_info['jetified_jar_path'] = options.jetified_jar_path
+ if options.device_jar_path:
+ deps_info['device_jar_path'] = options.device_jar_path
+ if options.host_jar_path:
+ deps_info['host_jar_path'] = options.host_jar_path
+ deps_info['jetified_jar_path'] = (options.jetified_jar_path
+ or options.interface_jar_path)
if options.dex_path:
deps_info['dex_path'] = options.dex_path
if is_apk_or_module_target:
@@ -1403,7 +1418,7 @@ def main(argv):
# Adding base module to classpath to compile against its R.java file
if base_module_build_config:
javac_full_classpath.append(
- base_module_build_config['deps_info']['jar_path'])
+ base_module_build_config['deps_info']['unprocessed_jar_path'])
javac_full_interface_classpath.append(
base_module_build_config['deps_info']['interface_jar_path'])
jetified_full_jar_classpath.append(
@@ -1459,15 +1474,24 @@ def main(argv):
if is_java_target or options.type == 'android_app_bundle':
# The classpath to use to run this target (or as an input to ProGuard).
- java_full_classpath = []
- if is_java_target and options.jar_path:
- java_full_classpath.append(options.jar_path)
- java_full_classpath.extend(c['jar_path'] for c in all_library_deps)
+ device_classpath = []
+ if is_java_target and options.device_jar_path:
+ device_classpath.append(options.device_jar_path)
+ device_classpath.extend(
+ c.get('device_jar_path') for c in all_library_deps
+ if c.get('device_jar_path'))
if options.type == 'android_app_bundle':
for d in deps.Direct('android_app_bundle_module'):
- java_full_classpath.extend(
- c for c in d.get('java_runtime_classpath', [])
- if c not in java_full_classpath)
+ device_classpath.extend(c for c in d.get('device_classpath', [])
+ if c not in device_classpath)
+
+ if options.type in ('dist_jar', 'java_binary', 'junit_binary'):
+ # The classpath to use to run this target.
+ host_classpath = []
+ if options.host_jar_path:
+ host_classpath.append(options.host_jar_path)
+ host_classpath.extend(c['host_jar_path'] for c in all_library_deps)
+ deps_info['host_classpath'] = host_classpath
all_configs = build_utils.ParseGnList(options.proguard_configs)
deps_info['proguard_configs'] = list(all_configs)
@@ -1563,7 +1587,7 @@ def main(argv):
if dep_config['type'] == 'android_app_bundle':
base_config = GetDepConfig(dep_config['base_module_config'])
extra_main_r_text_files.append(base_config['r_text_path'])
- static_lib_jar_paths[config_path] = base_config['jar_path']
+ static_lib_jar_paths[config_path] = base_config['device_jar_path']
all_configs.extend(dep_config['proguard_all_configs'])
extra_proguard_classpath_jars.extend(
dep_config['proguard_classpath_jars'])
@@ -1578,19 +1602,19 @@ def main(argv):
for package in base_config['extra_package_names']:
if package not in extra_package_names:
extra_package_names.append(package)
- for cp_entry in dep_config['java_runtime_classpath']:
+ for cp_entry in dep_config['device_classpath']:
configs_by_classpath_entry[cp_entry].append(config_path)
- for cp_entry in java_full_classpath:
+ for cp_entry in device_classpath:
configs_by_classpath_entry[cp_entry].append(options.build_config)
for cp_entry, candidate_configs in configs_by_classpath_entry.iteritems():
config_path = (candidate_configs[0]
if len(candidate_configs) == 1 else options.build_config)
classpath_entries_by_owning_config[config_path].append(cp_entry)
- java_full_classpath.append(cp_entry)
+ device_classpath.append(cp_entry)
- java_full_classpath = sorted(set(java_full_classpath))
+ device_classpath = sorted(set(device_classpath))
deps_info['static_library_proguard_mapping_output_paths'] = sorted([
d['proguard_mapping_path']
@@ -1606,7 +1630,7 @@ def main(argv):
'junit_binary'):
deps_info['jni']['all_source'] = sorted(set(all_java_sources))
- system_jars = [c['jar_path'] for c in system_library_deps]
+ system_jars = [c['unprocessed_jar_path'] for c in system_library_deps]
system_interface_jars = [c['interface_jar_path'] for c in system_library_deps]
if system_library_deps:
config['android'] = {}
@@ -1635,7 +1659,7 @@ def main(argv):
deps_proguard_enabled = []
deps_proguard_disabled = []
for d in deps.Direct('android_app_bundle_module'):
- if not d['java_runtime_classpath']:
+ if not d['device_classpath']:
# We don't care about modules that have no Java code for proguarding.
continue
if d['proguard_enabled']:
@@ -1685,9 +1709,10 @@ def main(argv):
# Add all tested classes to the test's classpath to ensure that the test's
# java code is a superset of the tested apk's java code
- java_full_classpath.extend(
- p for p in tested_apk_config['java_runtime_classpath']
- if p not in java_full_classpath)
+ device_classpath_extended = list(device_classpath)
+ device_classpath_extended.extend(
+ p for p in tested_apk_config['device_classpath']
+ if p not in device_classpath)
# Include in the classpath classes that are added directly to the apk under
# test (those that are not a part of a java_library).
javac_classpath.append(tested_apk_config['unprocessed_jar_path'])
@@ -1706,13 +1731,13 @@ def main(argv):
p for p in tested_apk_config['javac_full_classpath']
if p not in javac_full_classpath)
- # Exclude dex files from the test apk that exist within the apk under test.
- # TODO(agrieve): When proguard is enabled, this filtering logic happens
- # within proguard.py. Move the logic for the proguard case to here.
+ # Exclude .jar files from the test apk that exist within the apk under test.
tested_apk_library_deps = tested_apk_deps.All('java_library')
- tested_apk_deps_dex_files = [c['dex_path'] for c in tested_apk_library_deps]
- all_dex_files = [
- p for p in all_dex_files if not p in tested_apk_deps_dex_files
+ tested_apk_dex_files = {c['dex_path'] for c in tested_apk_library_deps}
+ all_dex_files = [p for p in all_dex_files if p not in tested_apk_dex_files]
+ tested_apk_jar_files = set(tested_apk_config['device_classpath'])
+ device_classpath = [
+ p for p in device_classpath if p not in tested_apk_jar_files
]
if options.type in ('android_apk', 'dist_aar', 'dist_jar',
@@ -1722,20 +1747,27 @@ def main(argv):
set(extra_proguard_classpath_jars))
# Dependencies for the final dex file of an apk.
- if is_apk_or_module_target or options.final_dex_path:
+ if (is_apk_or_module_target or options.final_dex_path
+ or options.type == 'dist_jar'):
config['final_dex'] = {}
dex_config = config['final_dex']
dex_config['path'] = options.final_dex_path
- if is_apk_or_module_target:
+ if is_apk_or_module_target or options.type == 'dist_jar':
dex_config['all_dex_files'] = all_dex_files
if is_java_target:
config['javac']['classpath'] = javac_classpath
config['javac']['interface_classpath'] = javac_interface_classpath
- # Direct() will be of type 'java_annotation_processor'.
+ # Direct() will be of type 'java_annotation_processor', and so not included
+ # in All('java_library').
+ # Annotation processors run as part of the build, so need host_jar_path.
config['javac']['processor_classpath'] = [
- c['jar_path'] for c in processor_deps.Direct() if c.get('jar_path')] + [
- c['jar_path'] for c in processor_deps.All('java_library')]
+ c['host_jar_path'] for c in processor_deps.Direct()
+ if c.get('host_jar_path')
+ ]
+ config['javac']['processor_classpath'] += [
+ c['host_jar_path'] for c in processor_deps.All('java_library')
+ ]
config['javac']['processor_classes'] = [
c['main_class'] for c in processor_deps.Direct()]
deps_info['javac_full_classpath'] = javac_full_classpath
@@ -1746,16 +1778,18 @@ def main(argv):
javac_full_classpath = set()
for d in deps.Direct('android_app_bundle_module'):
javac_full_classpath.update(p for p in d['javac_full_classpath'])
- javac_full_classpath.add(d['jar_path'])
+ javac_full_classpath.add(d['unprocessed_jar_path'])
deps_info['javac_full_classpath'] = sorted(javac_full_classpath)
- if options.type in ('android_apk', 'dist_jar', 'java_binary', 'junit_binary',
- 'android_app_bundle_module', 'android_app_bundle'):
- deps_info['java_runtime_classpath'] = java_full_classpath
+ if options.type in ('android_apk', 'dist_jar', 'android_app_bundle_module',
+ 'android_app_bundle'):
+ deps_info['device_classpath'] = device_classpath
+ if options.tested_apk_config:
+ deps_info['java_runtime_classpath_extended'] = (device_classpath_extended)
if options.type in ('android_apk', 'dist_jar'):
all_interface_jars = []
- if options.jar_path:
+ if options.interface_jar_path:
all_interface_jars.append(options.interface_jar_path)
all_interface_jars.extend(c['interface_jar_path'] for c in all_library_deps)
@@ -1847,7 +1881,7 @@ def main(argv):
# are not duplicated on the feature module.
if base_module_build_config:
base = base_module_build_config
- RemoveObjDups(config, base, 'deps_info', 'java_runtime_classpath')
+ RemoveObjDups(config, base, 'deps_info', 'device_classpath')
RemoveObjDups(config, base, 'deps_info', 'javac_full_classpath')
RemoveObjDups(config, base, 'deps_info', 'javac_full_interface_classpath')
RemoveObjDups(config, base, 'deps_info', 'jetified_full_jar_classpath')
@@ -1855,11 +1889,29 @@ def main(argv):
RemoveObjDups(config, base, 'final_dex', 'all_dex_files')
RemoveObjDups(config, base, 'extra_android_manifests')
+ if is_java_target:
+ jar_to_target = {}
+ _AddJarMapping(jar_to_target, [deps_info])
+ _AddJarMapping(jar_to_target, deps.all_deps_configs)
+ if base_module_build_config:
+ _AddJarMapping(jar_to_target, [base_module_build_config['deps_info']])
+ if options.tested_apk_config:
+ _AddJarMapping(jar_to_target, [tested_apk_config])
+ for jar, target in itertools.izip(
+ tested_apk_config['javac_full_classpath'],
+ tested_apk_config['javac_full_classpath_targets']):
+ jar_to_target[jar] = target
+
+ # Used by bytecode_processor to give better error message when missing
+ # deps are found.
+ config['deps_info']['javac_full_classpath_targets'] = [
+ jar_to_target[x] for x in deps_info['javac_full_classpath']
+ ]
+
build_utils.WriteJson(config, options.build_config, only_if_changed=True)
if options.depfile:
- build_utils.WriteDepfile(options.depfile, options.build_config, all_inputs,
- add_pydeps=False) # pydeps listed in GN.
+ build_utils.WriteDepfile(options.depfile, options.build_config, all_inputs)
if __name__ == '__main__':
diff --git a/chromium/build/android/gyp/write_native_libraries_java.py b/chromium/build/android/gyp/write_native_libraries_java.py
index 65688b9fd05..cb0c5d398ec 100755
--- a/chromium/build/android/gyp/write_native_libraries_java.py
+++ b/chromium/build/android/gyp/write_native_libraries_java.py
@@ -26,6 +26,7 @@ def _FormatLibraryName(library_name):
def main():
parser = argparse.ArgumentParser()
+ build_utils.AddDepfileOption(parser)
parser.add_argument('--final', action='store_true', help='Use final fields.')
parser.add_argument(
'--enable-chromium-linker',
@@ -97,6 +98,12 @@ def main():
zip_path='org/chromium/base/library_loader/NativeLibraries.java',
data=NATIVE_LIBRARIES_TEMPLATE.format(**format_dict))
+ if options.depfile:
+ assert options.native_libraries_list
+ build_utils.WriteDepfile(options.depfile,
+ options.output,
+ inputs=[options.native_libraries_list])
+
if __name__ == '__main__':
sys.exit(main())
diff --git a/chromium/build/android/gyp/zip.py b/chromium/build/android/gyp/zip.py
index b9503960fa3..ed8f61a9c9e 100755
--- a/chromium/build/android/gyp/zip.py
+++ b/chromium/build/android/gyp/zip.py
@@ -63,8 +63,9 @@ def main(args):
# Depfile used only by dist_jar().
if options.depfile:
- build_utils.WriteDepfile(
- options.depfile, options.output, inputs=depfile_deps, add_pydeps=False)
+ build_utils.WriteDepfile(options.depfile,
+ options.output,
+ inputs=depfile_deps)
if __name__ == '__main__':