Diffstat (limited to 'chromium/build/android/gyp')
-rwxr-xr-x  chromium/build/android/gyp/bundletool.py                       |   4
-rwxr-xr-x  chromium/build/android/gyp/bytecode_processor.py               |  14
-rwxr-xr-x  chromium/build/android/gyp/compile_resources.py                |  91
-rwxr-xr-x  chromium/build/android/gyp/create_app_bundle.py                |  17
-rw-r--r--  chromium/build/android/gyp/create_bundle_wrapper_script.pydeps |   1
-rwxr-xr-x  chromium/build/android/gyp/create_java_binary_script.py        |  16
-rwxr-xr-x  chromium/build/android/gyp/desugar.py                          |   7
-rwxr-xr-x  chromium/build/android/gyp/dex.py                              | 187
-rwxr-xr-x  chromium/build/android/gyp/dexsplitter.py                      |   2
-rwxr-xr-x  chromium/build/android/gyp/dist_aar.py                         |  40
-rw-r--r--  chromium/build/android/gyp/dist_aar.pydeps                     |   1
-rwxr-xr-x  chromium/build/android/gyp/filter_zip.py                       |   6
-rwxr-xr-x  chromium/build/android/gyp/generate_linker_version_script.py   |   7
-rwxr-xr-x  chromium/build/android/gyp/jacoco_instr.py                     | 152
-rwxr-xr-x  chromium/build/android/gyp/java_cpp_enum.py                    |   2
-rwxr-xr-x  chromium/build/android/gyp/java_cpp_enum_tests.py              |   2
-rwxr-xr-x  chromium/build/android/gyp/javac.py                            |   7
-rwxr-xr-x  chromium/build/android/gyp/lint.py                             |   3
-rwxr-xr-x  chromium/build/android/gyp/main_dex_list.py                    | 111
-rwxr-xr-x  chromium/build/android/gyp/merge_manifest.py                   |   2
-rwxr-xr-x  chromium/build/android/gyp/prepare_resources.py                |   3
-rwxr-xr-x  chromium/build/android/gyp/proguard.py                         |  40
-rw-r--r--  chromium/build/android/gyp/util/build_utils.py                 |  69
-rw-r--r--  chromium/build/android/gyp/util/md5_check.py                   |  71
-rwxr-xr-x  chromium/build/android/gyp/util/md5_check_test.py              |  44
-rwxr-xr-x  chromium/build/android/gyp/write_build_config.py               |  23
26 files changed, 607 insertions(+), 315 deletions(-)
diff --git a/chromium/build/android/gyp/bundletool.py b/chromium/build/android/gyp/bundletool.py
index 2201cc12373..65be46e31dd 100755
--- a/chromium/build/android/gyp/bundletool.py
+++ b/chromium/build/android/gyp/bundletool.py
@@ -13,6 +13,8 @@ import os
import subprocess
import sys
+from util import build_utils
+
# Assume this is stored under build/android/gyp/
BUNDLETOOL_DIR = os.path.abspath(os.path.join(
__file__, '..', '..', '..', '..', 'third_party', 'android_build_tools',
@@ -24,7 +26,7 @@ BUNDLETOOL_JAR_PATH = os.path.join(
BUNDLETOOL_DIR, 'bundletool-all-%s.jar' % BUNDLETOOL_VERSION)
def RunBundleTool(args):
- args = ['java', '-jar', BUNDLETOOL_JAR_PATH] + args
+ args = [build_utils.JAVA_PATH, '-jar', BUNDLETOOL_JAR_PATH] + args
logging.debug(' '.join(args))
subprocess.check_call(args)
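
For illustration only: a minimal sketch of the hermetic-JDK invocation this change switches to. JAVA_PATH is the constant added to util/build_utils.py later in this diff; the source-root fallback value here is a stand-in, not the real computation.

import os
import subprocess

# CHECKOUT_SOURCE_ROOT stands in for the DIR_SOURCE_ROOT computation done in
# util/build_utils.py; '.' is only a placeholder fallback.
DIR_SOURCE_ROOT = os.environ.get('CHECKOUT_SOURCE_ROOT', '.')
JAVA_PATH = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'jdk', 'current',
                         'bin', 'java')

def run_bundletool_jar(jar_path, *args):
  # Use the checked-in JDK rather than whatever 'java' happens to be on PATH.
  subprocess.check_call([JAVA_PATH, '-jar', jar_path] + list(args))
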
diff --git a/chromium/build/android/gyp/bytecode_processor.py b/chromium/build/android/gyp/bytecode_processor.py
index adff1625056..76775d3958a 100755
--- a/chromium/build/android/gyp/bytecode_processor.py
+++ b/chromium/build/android/gyp/bytecode_processor.py
@@ -36,12 +36,6 @@ def main(argv):
_AddSwitch(parser, '--enable-assert')
_AddSwitch(parser, '--enable-thread-annotations')
_AddSwitch(parser, '--enable-check-class-path')
- parser.add_argument('--enable-class-deps-output', default='')
- parser.add_argument(
- '--split-compat-class-names',
- action='append',
- default=[],
- help='Names of classes that need to be made SplitCompat-enabled.')
args = parser.parse_args(argv)
sdk_jars = build_utils.ParseGnList(args.sdk_classpath_jars)
@@ -54,9 +48,6 @@ def main(argv):
for a in args.extra_jars:
extra_classpath_jars.extend(build_utils.ParseGnList(a))
- split_compat_class_names = build_utils.ParseGnList(
- args.split_compat_class_names)
-
if args.verbose:
verbose = '--verbose'
else:
@@ -66,11 +57,8 @@ def main(argv):
args.script, args.input_jar, args.output_jar, verbose, args.is_prebuilt,
args.enable_assert, args.enable_custom_resources,
args.enable_thread_annotations, args.enable_check_class_path,
- args.enable_class_deps_output,
str(len(sdk_jars))
- ] + sdk_jars + [str(len(direct_jars))] + direct_jars + [
- str(len(split_compat_class_names))
- ] + split_compat_class_names + extra_classpath_jars)
+ ] + sdk_jars + [str(len(direct_jars))] + direct_jars + extra_classpath_jars)
subprocess.check_call(cmd)
diff --git a/chromium/build/android/gyp/compile_resources.py b/chromium/build/android/gyp/compile_resources.py
index 95b8c2aa0d7..4ac6d64bf18 100755
--- a/chromium/build/android/gyp/compile_resources.py
+++ b/chromium/build/android/gyp/compile_resources.py
@@ -23,6 +23,7 @@ import shutil
import subprocess
import sys
import tempfile
+import textwrap
import zipfile
from xml.etree import ElementTree
@@ -50,13 +51,6 @@ _PNG_WEBP_BLACKLIST_PATTERN = re.compile('|'.join([
r'.*daydream_icon_.*\.png']))
-def _ListToDictionary(lst, separator):
- """Splits each element of the passed-in |lst| using |separator| and creates
- dictionary treating first element of the split as the key and second as the
- value."""
- return dict(item.split(separator, 1) for item in lst)
-
-
def _ParseArgs(args):
"""Parses command line options.
@@ -100,17 +94,13 @@ def _ParseArgs(args):
input_opts.add_argument(
'--package-id',
- help='Custom package ID for resources (instead of 0x7f). Cannot be used '
- 'with --shared-resources.')
-
- input_opts.add_argument(
- '--package-name-to-id-mapping',
- help='List containing mapping from package name to package IDs that will '
- 'be assigned.')
+ type=int,
+ help='Decimal integer representing custom package ID for resources '
+ '(instead of 127==0x7f). Cannot be used with --shared-resources.')
input_opts.add_argument(
'--package-name',
- help='Package name that will be used to determine package ID.')
+ help='Package name that will be used to create R class.')
input_opts.add_argument(
'--rename-manifest-package', help='Package name to force AAPT to use.')
@@ -266,11 +256,8 @@ def _ParseArgs(args):
parser.error(
'--resources-path-map-out-path requires --short-resource-paths')
- if options.package_name_to_id_mapping:
- package_names_list = build_utils.ParseGnList(
- options.package_name_to_id_mapping)
- options.package_name_to_id_mapping = _ListToDictionary(
- package_names_list, '=')
+ if options.package_id and options.shared_resources:
+ parser.error('--package-id and --shared-resources are mutually exclusive')
return options
@@ -423,19 +410,6 @@ def _MoveImagesToNonMdpiFolders(res_root):
return renamed_paths
-def _PackageIdFromOptions(options):
- package_id = None
- if options.package_id:
- package_id = options.package_id
- if options.package_name:
- package_id = options.package_name_to_id_mapping.get(options.package_name)
- if package_id is None:
- raise Exception(
- 'Package name %s is not present in package_name_to_id_mapping.' %
- options.package_name)
- return package_id
-
-
def _FixManifest(options, temp_dir):
"""Fix the APK's AndroidManifest.xml.
@@ -684,6 +658,8 @@ def _PackageApk(options, build):
Args:
options: The command-line options.
build: BuildContext object.
+ Returns:
+ The manifest package name for the APK.
"""
dep_subdirs = resource_utils.ExtractDeps(options.dependencies_res_zips,
build.deps_dir)
@@ -751,9 +727,12 @@ def _PackageApk(options, build):
if options.no_xml_namespaces:
link_command.append('--no-xml-namespaces')
- package_id = _PackageIdFromOptions(options)
- if package_id is not None:
- link_command += ['--package-id', package_id, '--allow-reserved-package-id']
+ if options.package_id:
+ link_command += [
+ '--package-id',
+ hex(options.package_id),
+ '--allow-reserved-package-id',
+ ]
fixed_manifest, desired_manifest_package_name = _FixManifest(
options, build.temp_dir)
@@ -787,6 +766,19 @@ def _PackageApk(options, build):
build_utils.CheckOutput(link_command, print_stdout=False, print_stderr=False)
+ if options.proguard_file and (options.shared_resources
+ or options.app_as_shared_lib):
+ # Make sure the R class associated with the manifest package does not have
+ # its onResourcesLoaded method obfuscated or removed, so that the framework
+ # can call it in the case where the APK is being loaded as a library.
+ with open(build.proguard_path, 'a') as proguard_file:
+ keep_rule = '''
+ -keep class {package}.R {{
+ public static void onResourcesLoaded(int);
+ }}
+ '''.format(package=desired_manifest_package_name)
+ proguard_file.write(textwrap.dedent(keep_rule))
+
if options.proto_path and options.arsc_path:
build_utils.CheckOutput([
options.aapt2_path, 'convert', '-o', build.arsc_path, build.proto_path
@@ -799,6 +791,8 @@ def _PackageApk(options, build):
_OptimizeApk(build.optimized_arsc_path, options, build.temp_dir,
build.arsc_path, build.r_txt_path)
+ return desired_manifest_package_name
+
def _OptimizeApk(output, options, temp_dir, unoptimized_path, r_txt_path):
"""Optimize intermediate .ap_ file with aapt2.
@@ -921,7 +915,7 @@ def main(args):
build_utils.MakeDirectory(debug_temp_resources_dir)
with resource_utils.BuildContext(debug_temp_resources_dir) as build:
- _PackageApk(options, build)
+ manifest_package_name = _PackageApk(options, build)
# If --shared-resources-whitelist is used, the all resources listed in
# the corresponding R.txt file will be non-final, and an onResourcesLoaded()
@@ -953,19 +947,26 @@ def main(args):
custom_root_package_name = options.package_name
grandparent_custom_package_name = options.r_java_root_package_name
+ if options.shared_resources or options.app_as_shared_lib:
+ package_for_library = manifest_package_name
+ else:
+ package_for_library = None
+
resource_utils.CreateRJavaFiles(
- build.srcjar_dir, None, build.r_txt_path, options.extra_res_packages,
- options.extra_r_text_files, rjava_build_options, options.srcjar_out,
- custom_root_package_name, grandparent_custom_package_name,
- options.extra_main_r_text_files)
+ build.srcjar_dir, package_for_library, build.r_txt_path,
+ options.extra_res_packages, options.extra_r_text_files,
+ rjava_build_options, options.srcjar_out, custom_root_package_name,
+ grandparent_custom_package_name, options.extra_main_r_text_files)
build_utils.ZipDir(build.srcjar_path, build.srcjar_dir)
# Sanity check that the created resources have the expected package ID.
- expected_id = _PackageIdFromOptions(options)
- if expected_id is None:
- expected_id = '0x00' if options.shared_resources else '0x7f'
- expected_id = int(expected_id, 16)
+ if options.package_id:
+ expected_id = options.package_id
+ elif options.shared_resources:
+ expected_id = 0
+ else:
+ expected_id = 127 # == '0x7f'.
_, package_id = resource_utils.ExtractArscPackage(
options.aapt2_path,
build.arsc_path if options.arsc_path else build.proto_path)
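
For illustration: the ProGuard keep rule that _PackageApk() now appends when the APK acts as a shared-resources library, reconstructed as a standalone sketch (the package name below is an invented example).

import textwrap

def resources_keep_rule(manifest_package_name):
  # Mirrors the rule appended in _PackageApk() above.
  keep_rule = '''
  -keep class {package}.R {{
    public static void onResourcesLoaded(int);
  }}
  '''.format(package=manifest_package_name)
  return textwrap.dedent(keep_rule)

print(resources_keep_rule('org.example.shared_library'))
# -keep class org.example.shared_library.R {
#   public static void onResourcesLoaded(int);
# }
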
diff --git a/chromium/build/android/gyp/create_app_bundle.py b/chromium/build/android/gyp/create_app_bundle.py
index eeb665bb41b..ae3f4662b1b 100755
--- a/chromium/build/android/gyp/create_app_bundle.py
+++ b/chromium/build/android/gyp/create_app_bundle.py
@@ -411,12 +411,17 @@ def main(args):
with open(tmp_bundle_config, 'w') as f:
f.write(bundle_config)
- cmd_args = ['java', '-jar', bundletool.BUNDLETOOL_JAR_PATH, 'build-bundle']
- cmd_args += ['--modules=%s' % ','.join(module_zips)]
- cmd_args += ['--output=%s' % tmp_unsigned_bundle]
- cmd_args += ['--config=%s' % tmp_bundle_config]
-
- build_utils.CheckOutput(cmd_args, print_stdout=True, print_stderr=True)
+ cmd_args = [
+ build_utils.JAVA_PATH, '-jar', bundletool.BUNDLETOOL_JAR_PATH,
+ 'build-bundle', '--modules=' + ','.join(module_zips),
+ '--output=' + tmp_unsigned_bundle, '--config=' + tmp_bundle_config
+ ]
+
+ build_utils.CheckOutput(
+ cmd_args,
+ print_stdout=True,
+ print_stderr=True,
+ stderr_filter=build_utils.FilterReflectiveAccessJavaWarnings)
if options.keystore_path:
# NOTE: As stated by the public documentation, apksigner cannot be used
diff --git a/chromium/build/android/gyp/create_bundle_wrapper_script.pydeps b/chromium/build/android/gyp/create_bundle_wrapper_script.pydeps
index fb35bc02ea1..a83e696a26d 100644
--- a/chromium/build/android/gyp/create_bundle_wrapper_script.pydeps
+++ b/chromium/build/android/gyp/create_bundle_wrapper_script.pydeps
@@ -37,6 +37,7 @@
../../../third_party/catapult/devil/devil/android/sdk/aapt.py
../../../third_party/catapult/devil/devil/android/sdk/adb_wrapper.py
../../../third_party/catapult/devil/devil/android/sdk/build_tools.py
+../../../third_party/catapult/devil/devil/android/sdk/bundletool.py
../../../third_party/catapult/devil/devil/android/sdk/intent.py
../../../third_party/catapult/devil/devil/android/sdk/keyevent.py
../../../third_party/catapult/devil/devil/android/sdk/split_select.py
diff --git a/chromium/build/android/gyp/create_java_binary_script.py b/chromium/build/android/gyp/create_java_binary_script.py
index 4469381c7c0..7ea0efb22d8 100755
--- a/chromium/build/android/gyp/create_java_binary_script.py
+++ b/chromium/build/android/gyp/create_java_binary_script.py
@@ -37,13 +37,14 @@ if os.getcwd() != self_dir:
offset = os.path.relpath(self_dir, os.getcwd())
classpath = [os.path.join(offset, p) for p in classpath]
bootclasspath = [os.path.join(offset, p) for p in bootclasspath]
-java_cmd = ["java"]
-# This is a simple argparser for jvm and jar arguments.
+java_cmd = ['java']
+# This is a simple argparser for jvm, jar, and classpath arguments.
parser = argparse.ArgumentParser()
parser.add_argument('--jar-args')
parser.add_argument('--jvm-args')
-
+parser.add_argument('--classpath')
known_args, unknown_args = parser.parse_known_args(sys.argv[1:])
+
if known_args.jvm_args:
jvm_arguments = known_args.jvm_args.strip('"').split()
java_cmd.extend(jvm_arguments)
@@ -54,14 +55,17 @@ if known_args.jar_args:
else:
jar_arguments = unknown_args
+if known_args.classpath:
+ classpath += [known_args.classpath]
+
{noverify_flag}
if bootclasspath:
- java_cmd.append("-Xbootclasspath/p:" + ":".join(bootclasspath))
+ java_cmd.append('-Xbootclasspath/p:' + ':'.join(bootclasspath))
java_cmd.extend(
- ["-classpath", ":".join(classpath), "-enableassertions", \"{main_class}\"])
+ ['-classpath', ':'.join(classpath), '-enableassertions', \"{main_class}\"])
java_cmd.extend(extra_program_args)
java_cmd.extend(jar_arguments)
-os.execvp("java", java_cmd)
+os.execvp('java', java_cmd)
"""
def main(argv):
diff --git a/chromium/build/android/gyp/desugar.py b/chromium/build/android/gyp/desugar.py
index b9d04059e55..407b8f2c59c 100755
--- a/chromium/build/android/gyp/desugar.py
+++ b/chromium/build/android/gyp/desugar.py
@@ -31,7 +31,7 @@ def main():
options.classpath = build_utils.ParseGnList(options.classpath)
cmd = [
- 'java',
+ build_utils.JAVA_PATH,
'-jar',
options.desugar_jar,
'--input',
@@ -46,7 +46,10 @@ def main():
cmd += ['--bootclasspath_entry', path]
for path in options.classpath:
cmd += ['--classpath_entry', path]
- build_utils.CheckOutput(cmd, print_stdout=False)
+ build_utils.CheckOutput(
+ cmd,
+ print_stdout=False,
+ stderr_filter=build_utils.FilterReflectiveAccessJavaWarnings)
if options.depfile:
build_utils.WriteDepfile(
diff --git a/chromium/build/android/gyp/dex.py b/chromium/build/android/gyp/dex.py
index a2e17b4e282..043a08ab272 100755
--- a/chromium/build/android/gyp/dex.py
+++ b/chromium/build/android/gyp/dex.py
@@ -27,7 +27,27 @@ def _ParseArgs(args):
build_utils.AddDepfileOption(parser)
parser.add_argument('--output', required=True, help='Dex output path.')
- parser.add_argument('--input-list', help='GN-list of additional input paths.')
+ parser.add_argument(
+ '--class-inputs',
+ action='append',
+ help='GN-list of .jars with .class files.')
+ parser.add_argument(
+ '--class-inputs-filearg',
+ action='append',
+ help='GN-list of .jars with .class files (added to depfile).')
+ parser.add_argument(
+ '--dex-inputs', action='append', help='GN-list of .jars with .dex files.')
+ parser.add_argument(
+ '--dex-inputs-filearg',
+ action='append',
+ help='GN-list of .jars with .dex files (added to depfile).')
+ parser.add_argument(
+ '--incremental-dir',
+ help='Path of directory to put intermediate dex files.')
+ parser.add_argument(
+ '--merge-incrementals',
+ action='store_true',
+ help='Combine all per-class .dex files into a single classes.dex')
parser.add_argument(
'--main-dex-list-path',
help='File containing a list of the classes to include in the main dex.')
@@ -35,7 +55,7 @@ def _ParseArgs(args):
'--multi-dex',
action='store_true',
help='Allow multiple dex files within output.')
- parser.add_argument('--d8-jar-path', required=True, help='Path to D8 jar.')
+ parser.add_argument('--r8-jar-path', required=True, help='Path to R8 jar.')
parser.add_argument(
'--release',
action='store_true',
@@ -44,7 +64,6 @@ def _ParseArgs(args):
'main dex and keeps all line number information, and then some.')
parser.add_argument(
'--min-api', help='Minimum Android API level compatibility.')
- parser.add_argument('inputs', nargs='*', help='Input .jar files.')
group = parser.add_argument_group('Dexlayout')
group.add_argument(
@@ -79,8 +98,12 @@ def _ParseArgs(args):
if options.main_dex_list_path and not options.multi_dex:
parser.error('--main-dex-list-path is unused if multidex is not enabled')
- if options.input_list:
- options.inputs += build_utils.ParseGnList(options.input_list)
+ options.class_inputs = build_utils.ParseGnList(options.class_inputs)
+ options.class_inputs_filearg = build_utils.ParseGnList(
+ options.class_inputs_filearg)
+ options.dex_inputs = build_utils.ParseGnList(options.dex_inputs)
+ options.dex_inputs_filearg = build_utils.ParseGnList(
+ options.dex_inputs_filearg)
return options
@@ -249,48 +272,156 @@ def _PerformDexlayout(tmp_dir, tmp_dex_output, options):
return final_output
-def _PerformDexing(options):
- dex_cmd = ['java', '-jar', options.d8_jar_path, '--no-desugaring']
- if options.multi_dex and options.main_dex_list_path:
- dex_cmd += ['--main-dex-list', options.main_dex_list_path]
- if options.release:
- dex_cmd += ['--release']
- if options.min_api:
- dex_cmd += ['--min-api', options.min_api]
+def _CreateFinalDex(options, d8_inputs, tmp_dir, dex_cmd):
+ tmp_dex_output = os.path.join(tmp_dir, 'tmp_dex_output.zip')
+ if (options.merge_incrementals or options.output.endswith('.dex')
+ or not all(f.endswith('.dex') for f in d8_inputs)):
+ if options.multi_dex and options.main_dex_list_path:
+ # Provides a list of classes that should be included in the main dex file.
+ dex_cmd = dex_cmd + ['--main-dex-list', options.main_dex_list_path]
- with build_utils.TempDir() as tmp_dir:
tmp_dex_dir = os.path.join(tmp_dir, 'tmp_dex_dir')
os.mkdir(tmp_dex_dir)
- _RunD8(dex_cmd, options.inputs, tmp_dex_dir)
+ _RunD8(dex_cmd, d8_inputs, tmp_dex_dir)
+ logging.info('Performed dex merging')
+
dex_files = [os.path.join(tmp_dex_dir, f) for f in os.listdir(tmp_dex_dir)]
- if not options.output.endswith('.dex'):
- tmp_dex_output = os.path.join(tmp_dir, 'tmp_dex_output.zip')
- _ZipAligned(sorted(dex_files), tmp_dex_output)
- else:
- # Output to a .dex file.
+ if options.output.endswith('.dex'):
if len(dex_files) > 1:
raise Exception('%d files created, expected 1' % len(dex_files))
tmp_dex_output = dex_files[0]
+ else:
+ _ZipAligned(sorted(dex_files), tmp_dex_output)
+ else:
+ # Skip dexmerger. Just put all incrementals into the .jar individually.
+ _ZipAligned(sorted(d8_inputs), tmp_dex_output)
+ logging.info('Quick-zipped %d files', len(d8_inputs))
- if options.dexlayout_profile:
- tmp_dex_output = _PerformDexlayout(tmp_dir, tmp_dex_output, options)
+ if options.dexlayout_profile:
+ tmp_dex_output = _PerformDexlayout(tmp_dir, tmp_dex_output, options)
+
+ # The dex file is complete and can be moved out of tmp_dir.
+ shutil.move(tmp_dex_output, options.output)
+
+
+def _IntermediateDexFilePathsFromInputJars(class_inputs, incremental_dir):
+ """Returns a list of all intermediate dex file paths."""
+ dex_files = []
+ for jar in class_inputs:
+ with zipfile.ZipFile(jar, 'r') as z:
+ for subpath in z.namelist():
+ if subpath.endswith('.class'):
+ subpath = subpath[:-5] + 'dex'
+ dex_files.append(os.path.join(incremental_dir, subpath))
+ return dex_files
+
+
+def _DeleteStaleIncrementalDexFiles(dex_dir, dex_files):
+ """Deletes intermediate .dex files that are no longer needed."""
+ all_files = build_utils.FindInDirectory(dex_dir)
+ desired_files = set(dex_files)
+ for path in all_files:
+ if path not in desired_files:
+ os.unlink(path)
+
+
+def _ExtractClassFiles(changes, tmp_dir, class_inputs):
+ classes_list = []
+ for jar in class_inputs:
+ if changes:
+ changed_class_list = set(changes.IterChangedSubpaths(jar))
+ predicate = lambda x: x in changed_class_list and x.endswith('.class')
+ else:
+ predicate = lambda x: x.endswith('.class')
+
+ classes_list.extend(
+ build_utils.ExtractAll(jar, path=tmp_dir, predicate=predicate))
+ return classes_list
+
+
+def _CreateIntermediateDexFiles(changes, options, tmp_dir, dex_cmd):
+ # Create temporary directory for classes to be extracted to.
+ tmp_extract_dir = os.path.join(tmp_dir, 'tmp_extract_dir')
+ os.mkdir(tmp_extract_dir)
+
+ # Check whether changes were to a non-jar file, requiring full re-dex.
+ # E.g. r8.jar updated.
+ rebuild_all = changes.HasStringChanges() or not all(
+ p.endswith('.jar') for p in changes.IterChangedPaths())
+
+ if rebuild_all:
+ changes = None
+ class_files = _ExtractClassFiles(changes, tmp_extract_dir,
+ options.class_inputs)
+ logging.info('Extracted class files: %d', len(class_files))
+
+ # If the only change is deleting a file, class_files will be empty.
+ if class_files:
+ # Dex necessary classes into intermediate dex files.
+ dex_cmd = dex_cmd + ['--intermediate', '--file-per-class']
+ _RunD8(dex_cmd, class_files, options.incremental_dir)
+ logging.info('Dexed class files.')
+
+
+def _OnStaleMd5(changes, options, final_dex_inputs, dex_cmd):
+ logging.info('_OnStaleMd5')
+ with build_utils.TempDir() as tmp_dir:
+ if options.incremental_dir:
+ # Create directory for all intermediate dex files.
+ if not os.path.exists(options.incremental_dir):
+ os.makedirs(options.incremental_dir)
- # The dex file is complete and can be moved out of tmp_dir.
- shutil.move(tmp_dex_output, options.output)
+ _DeleteStaleIncrementalDexFiles(options.incremental_dir, final_dex_inputs)
+ logging.info('Stale files deleted')
+ _CreateIntermediateDexFiles(changes, options, tmp_dir, dex_cmd)
+
+ _CreateFinalDex(options, final_dex_inputs, tmp_dir, dex_cmd)
+ logging.info('Dex finished for: %s', options.output)
def main(args):
+ logging.basicConfig(
+ level=logging.INFO if os.environ.get('DEX_DEBUG') else logging.WARNING,
+ format='%(levelname).1s %(relativeCreated)6d %(message)s')
options = _ParseArgs(args)
- input_paths = list(options.inputs)
+ options.class_inputs += options.class_inputs_filearg
+ options.dex_inputs += options.dex_inputs_filearg
+
+ input_paths = options.class_inputs + options.dex_inputs
if options.multi_dex and options.main_dex_list_path:
input_paths.append(options.main_dex_list_path)
+ input_paths.append(options.r8_jar_path)
+
+ output_paths = [options.output]
+
+ if options.incremental_dir:
+ final_dex_inputs = _IntermediateDexFilePathsFromInputJars(
+ options.class_inputs, options.incremental_dir)
+ output_paths += final_dex_inputs
+ else:
+ final_dex_inputs = list(options.class_inputs)
+ final_dex_inputs += options.dex_inputs
- _PerformDexing(options)
+ dex_cmd = [
+ build_utils.JAVA_PATH, '-jar', options.r8_jar_path, 'd8',
+ '--no-desugaring'
+ ]
+ if options.release:
+ dex_cmd += ['--release']
+ if options.min_api:
+ dex_cmd += ['--min-api', options.min_api]
- build_utils.WriteDepfile(
- options.depfile, options.output, input_paths, add_pydeps=False)
+ build_utils.CallAndWriteDepfileIfStale(
+ lambda changes: _OnStaleMd5(changes, options, final_dex_inputs, dex_cmd),
+ options,
+ depfile_deps=options.class_inputs_filearg + options.dex_inputs_filearg,
+ output_paths=output_paths,
+ input_paths=input_paths,
+ input_strings=dex_cmd + [bool(options.incremental_dir)],
+ pass_changes=True,
+ track_subpaths_whitelist=options.class_inputs)
if __name__ == '__main__':
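
For illustration: how the new incremental mode maps each .class entry of an input jar to a per-class intermediate .dex path under --incremental-dir (the jar contents and directory below are invented).

import os

def intermediate_dex_paths(class_subpaths, incremental_dir):
  dex_files = []
  for subpath in class_subpaths:
    if subpath.endswith('.class'):
      # 'org/Foo.class' -> '<incremental_dir>/org/Foo.dex'
      dex_files.append(
          os.path.join(incremental_dir, subpath[:-len('class')] + 'dex'))
  return dex_files

print(intermediate_dex_paths(
    ['org/chromium/Foo.class', 'org/chromium/Foo$Inner.class', 'META-INF/x'],
    'out/Debug/gen/foo.dexdir'))
# ['out/Debug/gen/foo.dexdir/org/chromium/Foo.dex',
#  'out/Debug/gen/foo.dexdir/org/chromium/Foo$Inner.dex']
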
diff --git a/chromium/build/android/gyp/dexsplitter.py b/chromium/build/android/gyp/dexsplitter.py
index 926d2cdd502..7bbc066f076 100755
--- a/chromium/build/android/gyp/dexsplitter.py
+++ b/chromium/build/android/gyp/dexsplitter.py
@@ -49,7 +49,7 @@ def _ParseOptions(args):
def _RunDexsplitter(options, output_dir):
cmd = [
- 'java',
+ build_utils.JAVA_PATH,
'-jar',
options.r8_path,
'dexsplitter',
diff --git a/chromium/build/android/gyp/dist_aar.py b/chromium/build/android/gyp/dist_aar.py
index ed823f18b7b..a74037af07a 100755
--- a/chromium/build/android/gyp/dist_aar.py
+++ b/chromium/build/android/gyp/dist_aar.py
@@ -14,16 +14,19 @@ import sys
import tempfile
import zipfile
+from filter_zip import CreatePathTransform
from util import build_utils
_ANDROID_BUILD_DIR = os.path.dirname(os.path.dirname(__file__))
-def _MergeRTxt(r_paths):
+def _MergeRTxt(r_paths, include_globs):
"""Merging the given R.txt files and returns them as a string."""
all_lines = set()
for r_path in r_paths:
+ if include_globs and not build_utils.MatchesGlob(r_path, include_globs):
+ continue
with open(r_path) as f:
all_lines.update(f.readlines())
return ''.join(sorted(all_lines))
@@ -39,18 +42,21 @@ def _MergeProguardConfigs(proguard_configs):
return '\n'.join(ret)
-def _AddResources(aar_zip, resource_zips):
+def _AddResources(aar_zip, resource_zips, include_globs):
"""Adds all resource zips to the given aar_zip.
Ensures all res/values/* files have unique names by prefixing them.
"""
for i, path in enumerate(resource_zips):
+ if include_globs and not build_utils.MatchesGlob(path, include_globs):
+ continue
with zipfile.ZipFile(path) as res_zip:
for info in res_zip.infolist():
data = res_zip.read(info)
dirname, basename = posixpath.split(info.filename)
if 'values' in dirname:
- basename = '{}_{}'.format(basename, i)
+ root, ext = os.path.splitext(basename)
+ basename = '{}_{}{}'.format(root, i, ext)
info.filename = posixpath.join(dirname, basename)
info.filename = posixpath.join('res', info.filename)
aar_zip.writestr(info, data)
@@ -77,6 +83,15 @@ def main(args):
'ABI must be specified.')
parser.add_argument('--abi',
help='ABI (e.g. armeabi-v7a) for native libraries.')
+ parser.add_argument(
+ '--jar-excluded-globs',
+ help='GN-list of globs for paths to exclude in jar.')
+ parser.add_argument(
+ '--jar-included-globs',
+ help='GN-list of globs for paths to include in jar.')
+ parser.add_argument(
+ '--resource-included-globs',
+ help='GN-list of globs for paths to include in R.txt and resources zips.')
options = parser.parse_args(args)
@@ -89,6 +104,12 @@ def main(args):
options.r_text_files = build_utils.ParseGnList(options.r_text_files)
options.proguard_configs = build_utils.ParseGnList(options.proguard_configs)
options.native_libraries = build_utils.ParseGnList(options.native_libraries)
+ options.jar_excluded_globs = build_utils.ParseGnList(
+ options.jar_excluded_globs)
+ options.jar_included_globs = build_utils.ParseGnList(
+ options.jar_included_globs)
+ options.resource_included_globs = build_utils.ParseGnList(
+ options.resource_included_globs)
with tempfile.NamedTemporaryFile(delete=False) as staging_file:
try:
@@ -96,12 +117,18 @@ def main(args):
build_utils.AddToZipHermetic(
z, 'AndroidManifest.xml', src_path=options.android_manifest)
+ path_transform = CreatePathTransform(options.jar_excluded_globs,
+ options.jar_included_globs, [])
with tempfile.NamedTemporaryFile() as jar_file:
- build_utils.MergeZips(jar_file.name, options.jars)
+ build_utils.MergeZips(
+ jar_file.name, options.jars, path_transform=path_transform)
build_utils.AddToZipHermetic(z, 'classes.jar', src_path=jar_file.name)
build_utils.AddToZipHermetic(
- z, 'R.txt', data=_MergeRTxt(options.r_text_files))
+ z,
+ 'R.txt',
+ data=_MergeRTxt(options.r_text_files,
+ options.resource_included_globs))
build_utils.AddToZipHermetic(z, 'public.txt', data='')
if options.proguard_configs:
@@ -109,7 +136,8 @@ def main(args):
z, 'proguard.txt',
data=_MergeProguardConfigs(options.proguard_configs))
- _AddResources(z, options.dependencies_res_zips)
+ _AddResources(z, options.dependencies_res_zips,
+ options.resource_included_globs)
for native_library in options.native_libraries:
libname = os.path.basename(native_library)
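
For illustration: the before/after behaviour of the res/values renaming fix above, which keeps the file extension at the end so the uniquifying suffix no longer hides the file type (index 0 is just an example).

import os

def rename_values_file(basename, i):
  root, ext = os.path.splitext(basename)
  return '{}_{}{}'.format(root, i, ext)

print(rename_values_file('strings.xml', 0))  # strings_0.xml (new behaviour)
print('{}_{}'.format('strings.xml', 0))      # strings.xml_0 (old behaviour)
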
diff --git a/chromium/build/android/gyp/dist_aar.pydeps b/chromium/build/android/gyp/dist_aar.pydeps
index da5ea8da23d..d4f9aae9b34 100644
--- a/chromium/build/android/gyp/dist_aar.pydeps
+++ b/chromium/build/android/gyp/dist_aar.pydeps
@@ -2,6 +2,7 @@
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/dist_aar.pydeps build/android/gyp/dist_aar.py
../../gn_helpers.py
dist_aar.py
+filter_zip.py
util/__init__.py
util/build_utils.py
util/md5_check.py
diff --git a/chromium/build/android/gyp/filter_zip.py b/chromium/build/android/gyp/filter_zip.py
index 2182042df52..6f854191254 100755
--- a/chromium/build/android/gyp/filter_zip.py
+++ b/chromium/build/android/gyp/filter_zip.py
@@ -18,7 +18,7 @@ _RESOURCE_CLASSES = [
]
-def _CreatePathTransform(exclude_globs, include_globs,
+def CreatePathTransform(exclude_globs, include_globs,
strip_resource_classes_for):
exclude_globs = list(exclude_globs or [])
if strip_resource_classes_for:
@@ -60,8 +60,8 @@ def main():
args.strip_resource_classes_for = build_utils.ParseGnList(
args.strip_resource_classes_for)
- path_transform = _CreatePathTransform(
- args.exclude_globs, args.include_globs, args.strip_resource_classes_for)
+ path_transform = CreatePathTransform(args.exclude_globs, args.include_globs,
+ args.strip_resource_classes_for)
with build_utils.AtomicOutput(args.output) as f:
build_utils.MergeZips(
f.name, [args.input], path_transform=path_transform)
diff --git a/chromium/build/android/gyp/generate_linker_version_script.py b/chromium/build/android/gyp/generate_linker_version_script.py
index 0b6c2ef6968..526d636197b 100755
--- a/chromium/build/android/gyp/generate_linker_version_script.py
+++ b/chromium/build/android/gyp/generate_linker_version_script.py
@@ -42,6 +42,10 @@ def main():
dest='whitelists',
help='Path to an input file containing a whitelist of extra symbols to '
'export, one symbol per line. Multiple files may be specified.')
+ parser.add_argument(
+ '--export-feature-registrations',
+ action='store_true',
+ help='Export JNI_OnLoad_* methods')
options = parser.parse_args()
# JNI_OnLoad is always exported.
@@ -52,6 +56,9 @@ def main():
if options.export_java_symbols:
symbol_list.append('Java_*')
+ if options.export_feature_registrations:
+ symbol_list.append('JNI_OnLoad_*')
+
for whitelist in options.whitelists:
with open(whitelist, 'rt') as f:
for line in f:
diff --git a/chromium/build/android/gyp/jacoco_instr.py b/chromium/build/android/gyp/jacoco_instr.py
index 4deea439559..9f4f55f551d 100755
--- a/chromium/build/android/gyp/jacoco_instr.py
+++ b/chromium/build/android/gyp/jacoco_instr.py
@@ -6,7 +6,7 @@
"""Instruments classes and jar files.
-This script corresponds to the 'jacoco_instr' action in the java build process.
+This script corresponds to the 'jacoco_instr' action in the Java build process.
Depending on whether jacoco_instrument is set, the 'jacoco_instr' action will
call the instrument command which accepts a jar and instruments it using
jacococli.jar.
@@ -21,6 +21,7 @@ import os
import shutil
import sys
import tempfile
+import zipfile
from util import build_utils
@@ -53,6 +54,9 @@ def _AddArguments(parser):
help='File containing newline-separated .java paths')
parser.add_argument(
'--jacococli-jar', required=True, help='Path to jacococli.jar.')
+ parser.add_argument(
+ '--files-to-instrument',
+ help='Path to a file containing which source files are affected.')
def _GetSourceDirsFromSourceFiles(source_files):
@@ -96,49 +100,145 @@ def _CreateSourcesJsonFile(source_dirs, input_path, sources_json_file,
data = {}
data['source_dirs'] = relative_sources
- data['input_path'] = os.path.abspath(input_path)
+ data['input_path'] = []
+ if input_path:
+ data['input_path'].append(os.path.abspath(input_path))
with open(sources_json_file, 'w') as f:
json.dump(data, f)
-def _RunInstrumentCommand(parser):
- """Instruments jar files using Jacoco.
+def _GetAffectedClasses(jar_file, source_files):
+ """Gets affected classes by affected source files to a jar.
Args:
- parser: ArgumentParser object.
+ jar_file: The jar file to get all members.
+ source_files: The list of affected source files.
Returns:
- An exit code.
+ A tuple of affected classes and unaffected members.
"""
- args = parser.parse_args()
+ with zipfile.ZipFile(jar_file) as f:
+ members = f.namelist()
- temp_dir = tempfile.mkdtemp()
- try:
- cmd = [
- 'java', '-jar', args.jacococli_jar, 'instrument', args.input_path,
- '--dest', temp_dir
- ]
+ affected_classes = []
+ unaffected_members = []
- build_utils.CheckOutput(cmd)
+ for member in members:
+ if not member.endswith('.class'):
+ unaffected_members.append(member)
+ continue
- jars = os.listdir(temp_dir)
- if len(jars) != 1:
- print('Error: multiple output files in: %s' % (temp_dir))
- return 1
+ is_affected = False
+ index = member.find('$')
+ if index == -1:
+ index = member.find('.class')
+ for source_file in source_files:
+ if source_file.endswith(member[:index] + '.java'):
+ affected_classes.append(member)
+ is_affected = True
+ break
+ if not is_affected:
+ unaffected_members.append(member)
+
+ return affected_classes, unaffected_members
+
+
+def _InstrumentWholeJar(instrument_cmd, input_path, output_path, temp_dir):
+ """Instruments input jar to output_path.
+
+ Args:
+ instrument_cmd: JaCoCo instrument command.
+ input_path: The input path to non-instrumented jar.
+ output_path: The output path to instrumented jar.
+ temp_dir: The temporary directory.
+ """
+ instrument_cmd.extend([input_path, '--dest', temp_dir])
+
+ build_utils.CheckOutput(instrument_cmd)
+
+ jars = os.listdir(temp_dir)
+ if len(jars) != 1:
+ raise Exception('Error: multiple output files: %s' % jars)
+
+ # Delete output_path first to avoid modifying input_path in the case where
+ # input_path is a hardlink to output_path. http://crbug.com/571642
+ if os.path.exists(output_path):
+ os.unlink(output_path)
+ shutil.move(os.path.join(temp_dir, jars[0]), output_path)
+
+
+def _InstrumentClassFiles(instrument_cmd, input_path, output_path, temp_dir,
+ affected_source_files):
+ """Instruments affected class files from input jar.
+
+ Args:
+ instrument_cmd: JaCoCo instrument command.
+ input_path: The input path to non-instrumented jar.
+ output_path: The output path to instrumented jar.
+ temp_dir: The temporary directory.
+ affected_source_files: The affected source file paths to input jar.
+ """
+ affected_classes, unaffected_members = _GetAffectedClasses(
+ input_path, affected_source_files)
- # Delete output_path first to avoid modifying input_path in the case where
- # input_path is a hardlink to output_path. http://crbug.com/571642
- if os.path.exists(args.output_path):
- os.unlink(args.output_path)
- shutil.move(os.path.join(temp_dir, jars[0]), args.output_path)
- finally:
- shutil.rmtree(temp_dir)
+ # Extract affected class files.
+ with zipfile.ZipFile(input_path) as f:
+ f.extractall(temp_dir, affected_classes)
+
+ instrumented_dir = os.path.join(temp_dir, 'instrumented')
+
+ # Instrument extracted class files.
+ instrument_cmd.extend([temp_dir, '--dest', instrumented_dir])
+ build_utils.CheckOutput(instrument_cmd)
+
+ # Extract unaffected members to instrumented_dir.
+ with zipfile.ZipFile(input_path) as f:
+ f.extractall(instrumented_dir, unaffected_members)
+
+ # Zip all files to output_path
+ build_utils.ZipDir(output_path, instrumented_dir)
+
+
+def _RunInstrumentCommand(parser):
+ """Instruments class or Jar files using JaCoCo.
+
+ Args:
+ parser: ArgumentParser object.
+
+ Returns:
+ An exit code.
+ """
+ args = parser.parse_args()
source_files = []
if args.java_sources_file:
source_files.extend(build_utils.ReadSourcesList(args.java_sources_file))
- source_dirs = _GetSourceDirsFromSourceFiles(source_files)
+ with build_utils.TempDir() as temp_dir:
+ instrument_cmd = [
+ build_utils.JAVA_PATH, '-jar', args.jacococli_jar, 'instrument'
+ ]
+
+ if not args.files_to_instrument:
+ _InstrumentWholeJar(instrument_cmd, args.input_path, args.output_path,
+ temp_dir)
+ else:
+ affected_files = build_utils.ReadSourcesList(args.files_to_instrument)
+ source_set = set(source_files)
+ affected_source_files = [f for f in affected_files if f in source_set]
+
+ # Copy input_path to output_path and return if no source file affected.
+ if not affected_source_files:
+ shutil.copyfile(args.input_path, args.output_path)
+ # Create a dummy sources_json_file.
+ _CreateSourcesJsonFile([], None, args.sources_json_file,
+ build_utils.DIR_SOURCE_ROOT)
+ return 0
+ else:
+ _InstrumentClassFiles(instrument_cmd, args.input_path, args.output_path,
+ temp_dir, affected_source_files)
+
+ source_dirs = _GetSourceDirsFromSourceFiles(source_files)
# TODO(GYP): In GN, we are passed the list of sources, detecting source
# directories, then walking them to re-establish the list of sources.
# This can obviously be simplified!
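
For illustration: the matching rule behind _GetAffectedClasses(), written as a standalone predicate. A jar member such as 'org/Foo$Inner.class' counts as affected when some changed source path ends with 'org/Foo.java' (the paths below are invented).

def is_affected(member, changed_sources):
  if not member.endswith('.class'):
    return False
  index = member.find('$')  # inner classes map back to the outer .java file
  if index == -1:
    index = member.find('.class')
  return any(src.endswith(member[:index] + '.java') for src in changed_sources)

changed = ['java/src/org/chromium/base/Log.java']
print(is_affected('org/chromium/base/Log.class', changed))        # True
print(is_affected('org/chromium/base/Log$Inner.class', changed))  # True
print(is_affected('org/chromium/base/Other.class', changed))      # False
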
diff --git a/chromium/build/android/gyp/java_cpp_enum.py b/chromium/build/android/gyp/java_cpp_enum.py
index bacc8e3d46d..502e0715647 100755
--- a/chromium/build/android/gyp/java_cpp_enum.py
+++ b/chromium/build/android/gyp/java_cpp_enum.py
@@ -351,7 +351,7 @@ def GenerateOutput(source_path, enum_definition):
package ${PACKAGE};
-import android.support.annotation.IntDef;
+import androidx.annotation.IntDef;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
diff --git a/chromium/build/android/gyp/java_cpp_enum_tests.py b/chromium/build/android/gyp/java_cpp_enum_tests.py
index 5717047c7a3..08ef3b8773b 100755
--- a/chromium/build/android/gyp/java_cpp_enum_tests.py
+++ b/chromium/build/android/gyp/java_cpp_enum_tests.py
@@ -42,7 +42,7 @@ class TestPreprocess(unittest.TestCase):
package some.package;
-import android.support.annotation.IntDef;
+import androidx.annotation.IntDef;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
diff --git a/chromium/build/android/gyp/javac.py b/chromium/build/android/gyp/javac.py
index e8f27d562c2..9745a340373 100755
--- a/chromium/build/android/gyp/javac.py
+++ b/chromium/build/android/gyp/javac.py
@@ -501,7 +501,7 @@ def _ParseOptions(argv):
def main(argv):
logging.basicConfig(
- level=logging.INFO if os.environ.get('_JAVAC_DEBUG') else logging.WARNING,
+ level=logging.INFO if os.environ.get('JAVAC_DEBUG') else logging.WARNING,
format='%(levelname).1s %(relativeCreated)6d %(message)s')
colorama.init()
@@ -591,7 +591,7 @@ def main(argv):
options.jar_path + '.info',
]
- input_strings = javac_cmd + classpath
+ input_strings = javac_cmd + classpath + java_files
if options.jar_info_exclude_globs:
input_strings.append(options.jar_info_exclude_globs)
build_utils.CallAndWriteDepfileIfStale(
@@ -600,8 +600,7 @@ def main(argv):
depfile_deps=depfile_deps,
input_paths=input_paths,
input_strings=input_strings,
- output_paths=output_paths,
- add_pydeps=False)
+ output_paths=output_paths)
logging.info('Script complete: %s', __file__)
diff --git a/chromium/build/android/gyp/lint.py b/chromium/build/android/gyp/lint.py
index 605a478ec85..dfb9d08975b 100755
--- a/chromium/build/android/gyp/lint.py
+++ b/chromium/build/android/gyp/lint.py
@@ -439,8 +439,7 @@ def main():
input_paths=input_paths,
input_strings=input_strings,
output_paths=output_paths,
- depfile_deps=classpath,
- add_pydeps=False)
+ depfile_deps=classpath)
if __name__ == '__main__':
diff --git a/chromium/build/android/gyp/main_dex_list.py b/chromium/build/android/gyp/main_dex_list.py
index d610982699c..b75f8ee6ff4 100755
--- a/chromium/build/android/gyp/main_dex_list.py
+++ b/chromium/build/android/gyp/main_dex_list.py
@@ -5,8 +5,6 @@
# found in the LICENSE file.
import argparse
-import json
-import os
import sys
import tempfile
import zipfile
@@ -14,7 +12,7 @@ import zipfile
from util import build_utils
-def main(args):
+def _ParseArgs():
parser = argparse.ArgumentParser()
build_utils.AddDepfileOption(parser)
parser.add_argument('--shrinked-android-path', required=True,
@@ -28,33 +26,36 @@ def main(args):
'main dex.')
parser.add_argument('--main-dex-list-path', required=True,
help='The main dex list file to generate.')
- parser.add_argument('--inputs',
- help='JARs for which a main dex list should be '
- 'generated.')
+ parser.add_argument(
+ '--class-inputs',
+ action='append',
+ help='GN-list of .jars with .class files.')
+ parser.add_argument(
+ '--class-inputs-filearg',
+ action='append',
+ help='GN-list of .jars with .class files (added to depfile).')
parser.add_argument(
'--r8-path', required=True, help='Path to the r8 executable.')
parser.add_argument('--negative-main-dex-globs',
help='GN-list of globs of .class names (e.g. org/chromium/foo/Bar.class) '
'that will fail the build if they match files in the main dex.')
- parser.add_argument('paths', nargs='*', default=[],
- help='JARs for which a main dex list should be '
- 'generated.')
-
- args = parser.parse_args(build_utils.ExpandFileArgs(args))
+ args = parser.parse_args(build_utils.ExpandFileArgs(sys.argv[1:]))
- depfile_deps = []
- if args.inputs:
- args.inputs = build_utils.ParseGnList(args.inputs)
- depfile_deps = args.inputs
- args.paths.extend(args.inputs)
+ args.class_inputs = build_utils.ParseGnList(args.class_inputs)
+ args.class_inputs_filearg = build_utils.ParseGnList(args.class_inputs_filearg)
+ args.class_inputs += args.class_inputs_filearg
if args.negative_main_dex_globs:
args.negative_main_dex_globs = build_utils.ParseGnList(
args.negative_main_dex_globs)
+ return args
+
+def main():
+ args = _ParseArgs()
proguard_cmd = [
- 'java',
+ build_utils.JAVA_PATH,
'-jar',
args.r8_path,
'--classfile',
@@ -73,60 +74,16 @@ def main(args):
'-dontpreverify',
]
- main_dex_list_cmd = [
- 'java', '-cp', args.dx_path,
- 'com.android.multidex.MainDexListBuilder',
- # This workaround significantly increases main dex size and doesn't seem to
- # be needed by Chrome. See comment in the source:
- # https://android.googlesource.com/platform/dalvik/+/master/dx/src/com/android/multidex/MainDexListBuilder.java
- '--disable-annotation-resolution-workaround',
- ]
-
- input_paths = list(args.paths)
- input_paths += [
- args.shrinked_android_path,
- args.dx_path,
- ]
- input_paths += args.main_dex_rules_paths
-
- input_strings = [
- proguard_cmd,
- main_dex_list_cmd,
- ]
-
if args.negative_main_dex_globs:
- input_strings += args.negative_main_dex_globs
for glob in args.negative_main_dex_globs:
# Globs come with 1 asterix, but we want 2 to match subpackages.
proguard_flags.append('-checkdiscard class ' +
glob.replace('*', '**').replace('/', '.'))
- output_paths = [
- args.main_dex_list_path,
- ]
-
- def _LineLengthHelperForOnStaleMd5():
- _OnStaleMd5(proguard_cmd, proguard_flags, main_dex_list_cmd, args.paths,
- args.main_dex_list_path)
-
- build_utils.CallAndWriteDepfileIfStale(
- _LineLengthHelperForOnStaleMd5,
- args,
- input_paths=input_paths,
- input_strings=input_strings,
- output_paths=output_paths,
- depfile_deps=depfile_deps,
- add_pydeps=False)
-
- return 0
-
-
-def _OnStaleMd5(proguard_cmd, proguard_flags, main_dex_list_cmd, paths,
- main_dex_list_path):
main_dex_list = ''
try:
with tempfile.NamedTemporaryFile(suffix='.jar') as temp_jar:
- # Step 1: Use ProGuard to find all @MainDex code, and all code reachable
+ # Step 1: Use R8 to find all @MainDex code, and all code reachable
# from @MainDex code (recursive).
proguard_cmd += ['--output', temp_jar.name]
with tempfile.NamedTemporaryFile() as proguard_flags_file:
@@ -134,7 +91,7 @@ def _OnStaleMd5(proguard_cmd, proguard_flags, main_dex_list_cmd, paths,
proguard_flags_file.write(flag + '\n')
proguard_flags_file.flush()
proguard_cmd += ['--pg-conf', proguard_flags_file.name]
- for injar in paths:
+ for injar in args.class_inputs:
proguard_cmd.append(injar)
build_utils.CheckOutput(proguard_cmd, print_stderr=False)
@@ -142,12 +99,23 @@ def _OnStaleMd5(proguard_cmd, proguard_flags, main_dex_list_cmd, paths,
# for debugging what classes are kept by ProGuard vs. MainDexListBuilder.
with zipfile.ZipFile(temp_jar.name) as z:
kept_classes = [p for p in z.namelist() if p.endswith('.class')]
- with open(main_dex_list_path + '.partial', 'w') as f:
+ with open(args.main_dex_list_path + '.partial', 'w') as f:
f.write('\n'.join(kept_classes) + '\n')
# Step 2: Expand inclusion list to all classes referenced by the .class
# files of kept classes (non-recursive).
- main_dex_list_cmd += [temp_jar.name, ':'.join(paths)]
+ main_dex_list_cmd = [
+ build_utils.JAVA_PATH,
+ '-cp',
+ args.dx_path,
+ 'com.android.multidex.MainDexListBuilder',
+ # This workaround increases main dex size and does not seem to
+ # be needed by Chrome. See comment in the source:
+ # https://android.googlesource.com/platform/dalvik/+/master/dx/src/com/android/multidex/MainDexListBuilder.java
+ '--disable-annotation-resolution-workaround',
+ temp_jar.name,
+ ':'.join(args.class_inputs)
+ ]
main_dex_list = build_utils.CheckOutput(main_dex_list_cmd)
except build_utils.CalledProcessError as e:
@@ -158,9 +126,16 @@ def _OnStaleMd5(proguard_cmd, proguard_flags, main_dex_list_cmd, paths,
else:
raise
- with open(main_dex_list_path, 'w') as main_dex_list_file:
- main_dex_list_file.write(main_dex_list)
+ with build_utils.AtomicOutput(args.main_dex_list_path) as f:
+ f.write(main_dex_list)
+
+ if args.depfile:
+ build_utils.WriteDepfile(
+ args.depfile,
+ args.main_dex_list_path,
+ inputs=args.class_inputs_filearg,
+ add_pydeps=False)
if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
+ main()
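
For illustration: the -checkdiscard rule built from each --negative-main-dex-globs entry above. A single '*' is widened to '**' so subpackages match, and path separators become package separators (the glob value is an example).

def checkdiscard_rule(glob):
  return '-checkdiscard class ' + glob.replace('*', '**').replace('/', '.')

print(checkdiscard_rule('org/chromium/chrome/browser/*'))
# -checkdiscard class org.chromium.chrome.browser.**
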
diff --git a/chromium/build/android/gyp/merge_manifest.py b/chromium/build/android/gyp/merge_manifest.py
index 5680ad94028..61840bf6a1a 100755
--- a/chromium/build/android/gyp/merge_manifest.py
+++ b/chromium/build/android/gyp/merge_manifest.py
@@ -92,7 +92,7 @@ def main(argv):
with build_utils.AtomicOutput(args.output) as output:
cmd = [
- 'java',
+ build_utils.JAVA_PATH,
'-cp',
classpath,
_MANIFEST_MERGER_MAIN_CLASS,
diff --git a/chromium/build/android/gyp/prepare_resources.py b/chromium/build/android/gyp/prepare_resources.py
index 6147f1a087f..cb0a2446544 100755
--- a/chromium/build/android/gyp/prepare_resources.py
+++ b/chromium/build/android/gyp/prepare_resources.py
@@ -295,8 +295,7 @@ def main(args):
input_paths=input_paths,
input_strings=input_strings,
output_paths=output_paths,
- depfile_deps=depfile_deps,
- add_pydeps=False)
+ depfile_deps=depfile_deps)
if __name__ == '__main__':
diff --git a/chromium/build/android/gyp/proguard.py b/chromium/build/android/gyp/proguard.py
index 3284341100d..fc9364471bd 100755
--- a/chromium/build/android/gyp/proguard.py
+++ b/chromium/build/android/gyp/proguard.py
@@ -14,6 +14,18 @@ import zipfile
from util import build_utils
from util import diff_utils
+_API_LEVEL_VERSION_CODE = [
+ (21, 'L'),
+ (22, 'LolliopoMR1'),
+ (23, 'M'),
+ (24, 'N'),
+ (25, 'NMR1'),
+ (26, 'O'),
+ (27, 'OMR1'),
+ (28, 'P'),
+ (29, 'Q'),
+]
+
class _ProguardOutputFilter(object):
"""ProGuard outputs boring stuff to stdout (ProGuard version, jar path, etc)
@@ -99,8 +111,7 @@ def _ParseOptions():
parser.add_argument(
'--verbose', '-v', action='store_true', help='Print all ProGuard output')
parser.add_argument(
- '--repackage-classes',
- help='Unique package name given to an asynchronously proguarded module')
+ '--repackage-classes', help='Package all optimized classes are put in.')
parser.add_argument(
'--disable-outlining',
action='store_true',
@@ -123,9 +134,6 @@ def _ParseOptions():
options.extra_mapping_output_paths = build_utils.ParseGnList(
options.extra_mapping_output_paths)
- if options.apply_mapping:
- options.apply_mapping = os.path.abspath(options.apply_mapping)
-
return options
@@ -168,7 +176,7 @@ def _OptimizeWithR8(options,
os.mkdir(tmp_output)
cmd = [
- 'java',
+ build_utils.JAVA_PATH,
'-jar',
options.r8_path,
'--no-desugaring',
@@ -238,7 +246,7 @@ def _OptimizeWithProguard(options,
if options.proguard_path.endswith('.jar'):
cmd = [
- 'java', '-jar', options.proguard_path, '-include',
+ build_utils.JAVA_PATH, '-jar', options.proguard_path, '-include',
combined_proguard_configs_path
]
else:
@@ -319,9 +327,23 @@ def _CreateDynamicConfig(options):
}""" % options.min_api)
if options.apply_mapping:
- ret.append("-applymapping '%s'" % options.apply_mapping)
+ ret.append("-applymapping '%s'" % os.path.abspath(options.apply_mapping))
if options.repackage_classes:
ret.append("-repackageclasses '%s'" % options.repackage_classes)
+
+ _min_api = int(options.min_api) if options.min_api else 0
+ for api_level, version_code in _API_LEVEL_VERSION_CODE:
+ annotation_name = 'org.chromium.base.annotations.VerifiesOn' + version_code
+ if api_level > _min_api:
+ ret.append('-keep @interface %s' % annotation_name)
+ ret.append("""\
+-keep,allowobfuscation,allowoptimization @%s class ** {
+ <methods>;
+}""" % annotation_name)
+ ret.append("""\
+-keepclassmembers,allowobfuscation,allowoptimization class ** {
+ @%s <methods>;
+}""" % annotation_name)
return '\n'.join(ret)
@@ -387,7 +409,7 @@ def main():
inputs = options.proguard_configs + options.input_paths + libraries
if options.apply_mapping:
- inputs += options.apply_mapping
+ inputs.append(options.apply_mapping)
build_utils.WriteDepfile(
options.depfile, options.output_path, inputs=inputs, add_pydeps=False)
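
For illustration: a simplified sketch of the VerifiesOn* keep rules generated above for a given --min-api. Only the first of the rules emitted per API level is shown, and only an excerpt of the table is used.

_API_LEVEL_VERSION_CODE = [(24, 'N'), (25, 'NMR1'), (26, 'O')]  # excerpt

def verifies_on_rules(min_api):
  rules = []
  for api_level, version_code in _API_LEVEL_VERSION_CODE:
    if api_level > min_api:
      name = 'org.chromium.base.annotations.VerifiesOn' + version_code
      rules.append('-keep @interface %s' % name)
  return rules

print(verifies_on_rules(24))
# ['-keep @interface org.chromium.base.annotations.VerifiesOnNMR1',
#  '-keep @interface org.chromium.base.annotations.VerifiesOnO']
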
diff --git a/chromium/build/android/gyp/util/build_utils.py b/chromium/build/android/gyp/util/build_utils.py
index ecb41a3fe85..ca7f0c165b6 100644
--- a/chromium/build/android/gyp/util/build_utils.py
+++ b/chromium/build/android/gyp/util/build_utils.py
@@ -33,6 +33,8 @@ import gn_helpers
DIR_SOURCE_ROOT = os.environ.get('CHECKOUT_SOURCE_ROOT',
os.path.abspath(os.path.join(os.path.dirname(__file__),
os.pardir, os.pardir, os.pardir, os.pardir)))
+JAVA_PATH = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'jdk', 'current',
+ 'bin', 'java')
try:
string_types = basestring
@@ -70,7 +72,7 @@ def Touch(path, fail_if_missing=False):
os.utime(path, None)
-def FindInDirectory(directory, filename_filter):
+def FindInDirectory(directory, filename_filter='*'):
files = []
for root, _dirnames, filenames in os.walk(directory):
matched_files = fnmatch.filter(filenames, filename_filter)
@@ -206,6 +208,26 @@ def FilterLines(output, filter_string):
line for line in output.splitlines() if not re_filter.search(line))
+def FilterReflectiveAccessJavaWarnings(output):
+ """Filters out warnings about illegal reflective access operation.
+
+ These warnings were introduced in Java 9, and generally mean that dependencies
+ need to be updated.
+ """
+ # WARNING: An illegal reflective access operation has occurred
+ # WARNING: Illegal reflective access by ...
+ # WARNING: Please consider reporting this to the maintainers of ...
+ # WARNING: Use --illegal-access=warn to enable warnings of further ...
+ # WARNING: All illegal access operations will be denied in a future release
+ return FilterLines(
+ output, r'WARNING: ('
+ 'An illegal reflective|'
+ 'Illegal reflective access|'
+ 'Please consider reporting this to|'
+ 'Use --illegal-access=warn|'
+ 'All illegal access operations)')
+
+
# This can be used in most cases like subprocess.check_output(). The output,
# particularly when the command fails, better highlights the command's failure.
# If the command fails, raises a build_utils.CalledProcessError.
@@ -511,7 +533,7 @@ def _ComputePythonDependencies():
src/. The paths will be relative to the current directory.
"""
_ForceLazyModulesToLoad()
- module_paths = (m.__file__ for m in sys.modules.itervalues()
+ module_paths = (m.__file__ for m in sys.modules.values()
if m is not None and hasattr(m, '__file__'))
abs_module_paths = map(os.path.abspath, module_paths)
@@ -634,15 +656,20 @@ def ReadSourcesList(sources_list_file_name):
return [file_name.strip() for file_name in f]
-def CallAndWriteDepfileIfStale(function, options, record_path=None,
- input_paths=None, input_strings=None,
- output_paths=None, force=False,
- pass_changes=False, depfile_deps=None,
- add_pydeps=True):
+def CallAndWriteDepfileIfStale(on_stale_md5,
+ options,
+ record_path=None,
+ input_paths=None,
+ input_strings=None,
+ output_paths=None,
+ force=False,
+ pass_changes=False,
+ track_subpaths_whitelist=None,
+ depfile_deps=None):
"""Wraps md5_check.CallAndRecordIfStale() and writes a depfile if applicable.
Depfiles are automatically added to output_paths when present in the |options|
- argument. They are then created after |function| is called.
+ argument. They are then created after |on_stale_md5| is called.
By default, only python dependencies are added to the depfile. If there are
other input paths that are not captured by GN deps, then they should be listed
@@ -656,21 +683,7 @@ def CallAndWriteDepfileIfStale(function, options, record_path=None,
input_strings = list(input_strings or [])
output_paths = list(output_paths or [])
- python_deps = None
- if hasattr(options, 'depfile') and options.depfile:
- python_deps = _ComputePythonDependencies()
- input_paths += python_deps
- output_paths += [options.depfile]
-
- def on_stale_md5(changes):
- args = (changes,) if pass_changes else ()
- function(*args)
- if python_deps is not None:
- all_depfile_deps = list(python_deps) if add_pydeps else []
- if depfile_deps:
- all_depfile_deps.extend(depfile_deps)
- WriteDepfile(options.depfile, output_paths[0], all_depfile_deps,
- add_pydeps=False)
+ input_paths += _ComputePythonDependencies()
md5_check.CallAndRecordIfStale(
on_stale_md5,
@@ -679,4 +692,12 @@ def CallAndWriteDepfileIfStale(function, options, record_path=None,
input_strings=input_strings,
output_paths=output_paths,
force=force,
- pass_changes=True)
+ pass_changes=pass_changes,
+ track_subpaths_whitelist=track_subpaths_whitelist)
+
+ # Write depfile even when inputs have not changed to ensure build correctness
+ # on bots that build with & without patch, and the patch changes the depfile
+ # location.
+ if hasattr(options, 'depfile') and options.depfile:
+ WriteDepfile(
+ options.depfile, output_paths[0], depfile_deps, add_pydeps=False)
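
For illustration: what the new FilterReflectiveAccessJavaWarnings() stderr filter does, reconstructed as a standalone sketch (the sample output below is fabricated).

import re

_PATTERN = re.compile(
    r'WARNING: ('
    'An illegal reflective|'
    'Illegal reflective access|'
    'Please consider reporting this to|'
    'Use --illegal-access=warn|'
    'All illegal access operations)')

def filter_reflective_access_warnings(output):
  return '\n'.join(
      line for line in output.splitlines() if not _PATTERN.search(line))

sample = ('WARNING: An illegal reflective access operation has occurred\n'
          'Error: a real problem worth keeping\n')
print(filter_reflective_access_warnings(sample))
# Error: a real problem worth keeping
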
diff --git a/chromium/build/android/gyp/util/md5_check.py b/chromium/build/android/gyp/util/md5_check.py
index 9a15ee6e75a..0ad6f1b4003 100644
--- a/chromium/build/android/gyp/util/md5_check.py
+++ b/chromium/build/android/gyp/util/md5_check.py
@@ -20,9 +20,14 @@ PRINT_EXPLANATIONS = int(os.environ.get('PRINT_BUILD_EXPLANATIONS', 0))
_FORCE_REBUILD = int(os.environ.get('FORCE_REBUILD', 0))
-def CallAndRecordIfStale(
- function, record_path=None, input_paths=None, input_strings=None,
- output_paths=None, force=False, pass_changes=False):
+def CallAndRecordIfStale(function,
+ record_path=None,
+ input_paths=None,
+ input_strings=None,
+ output_paths=None,
+ force=False,
+ pass_changes=False,
+ track_subpaths_whitelist=None):
"""Calls function if outputs are stale.
Outputs are considered stale if:
@@ -43,6 +48,8 @@ def CallAndRecordIfStale(
force: Whether to treat outputs as missing regardless of whether they
actually are.
pass_changes: Whether to pass a Changes instance to |function|.
+ track_subpaths_whitelist: Relevant only when pass_changes=True. List of .zip
+ files from |input_paths| to make subpath information available for.
"""
assert record_path or output_paths
input_paths = input_paths or []
@@ -57,12 +64,15 @@ def CallAndRecordIfStale(
new_metadata = _Metadata(track_entries=pass_changes or PRINT_EXPLANATIONS)
new_metadata.AddStrings(input_strings)
+ zip_whitelist = set(track_subpaths_whitelist or [])
for path in input_paths:
- if _IsZipFile(path):
+ # It's faster to md5 an entire zip file than it is to just locate & hash
+ # its central directory (which is what this used to do).
+ if path in zip_whitelist:
entries = _ExtractZipEntries(path)
new_metadata.AddZipFile(path, entries)
else:
- new_metadata.AddFile(path, _Md5ForPath(path))
+ new_metadata.AddFile(path, _ComputeTagForPath(path))
old_metadata = None
force = force or _FORCE_REBUILD
@@ -106,19 +116,20 @@ class Changes(object):
def HasChanges(self):
"""Returns whether any changes exist."""
- return (self.force or
- not self.old_metadata or
- self.old_metadata.StringsMd5() != self.new_metadata.StringsMd5() or
- self.old_metadata.FilesMd5() != self.new_metadata.FilesMd5())
+ return (self.HasStringChanges()
+ or self.old_metadata.FilesMd5() != self.new_metadata.FilesMd5())
+
+ def HasStringChanges(self):
+ """Returns whether string metadata changed."""
+ return (self.force or not self.old_metadata
+ or self.old_metadata.StringsMd5() != self.new_metadata.StringsMd5())
def AddedOrModifiedOnly(self):
"""Returns whether the only changes were from added or modified (sub)files.
No missing outputs, no removed paths/subpaths.
"""
- if (self.force or
- not self.old_metadata or
- self.old_metadata.StringsMd5() != self.new_metadata.StringsMd5()):
+ if self.HasStringChanges():
return False
if any(self.IterRemovedPaths()):
return False
@@ -368,27 +379,15 @@ class _Metadata(object):
return (entry['path'] for entry in subentries)
-def _UpdateMd5ForFile(md5, path, block_size=2**16):
- with open(path, 'rb') as infile:
- while True:
- data = infile.read(block_size)
- if not data:
- break
- md5.update(data)
-
-
-def _UpdateMd5ForDirectory(md5, dir_path):
- for root, _, files in os.walk(dir_path):
- for f in files:
- _UpdateMd5ForFile(md5, os.path.join(root, f))
-
-
-def _Md5ForPath(path):
+def _ComputeTagForPath(path):
+ stat = os.stat(path)
+ if stat.st_size > 1 * 1024 * 1024:
+ # Fall back to mtime for files larger than 1 MiB so that md5_check does not
+ # take too long to run.
+ return stat.st_mtime
md5 = hashlib.md5()
- if os.path.isdir(path):
- _UpdateMd5ForDirectory(md5, path)
- else:
- _UpdateMd5ForFile(md5, path)
+ with open(path, 'rb') as f:
+ md5.update(f.read())
return md5.hexdigest()
@@ -400,14 +399,6 @@ def _ComputeInlineMd5(iterable):
return md5.hexdigest()
-def _IsZipFile(path):
- """Returns whether to treat the given file as a zip file."""
- # ijar doesn't set the CRC32 field.
- if path.endswith('.interface.jar'):
- return False
- return path[-4:] in ('.zip', '.apk', '.jar') or path.endswith('.srcjar')
-
-
def _ExtractZipEntries(path):
"""Returns a list of (path, CRC32) of all files within |path|."""
entries = []
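
Putting the md5_check changes together: callers now opt in to per-entry zip
tracking via track_subpaths_whitelist, while every other input is tagged by a
whole-file md5, or by mtime once the file exceeds 1 MiB. A hedged sketch of a
caller follows; the paths, flags, and _on_stale_md5 callback are illustrative
rather than taken from any real build script.

    from util import md5_check

    def _on_stale_md5(changes):
      # |changes| is the Changes instance passed because pass_changes=True.
      if changes.AddedOrModifiedOnly():
        print('Incremental work is safe: ' + changes.DescribeDifference())
      else:
        print('Full rebuild needed: ' + changes.DescribeDifference())

    input_jars = ['lib1.jar', 'lib2.jar']  # hypothetical inputs
    md5_check.CallAndRecordIfStale(
        _on_stale_md5,
        record_path='out.jar.md5.stamp',
        input_paths=input_jars,
        input_strings=['--some-flag'],
        output_paths=['out.jar'],
        pass_changes=True,
        # Only these zips get per-entry (subpath) tracking; other inputs fall
        # back to whole-file tagging as described above.
        track_subpaths_whitelist=input_jars)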
diff --git a/chromium/build/android/gyp/util/md5_check_test.py b/chromium/build/android/gyp/util/md5_check_test.py
index 41e9d3c248c..cba7a6a354a 100755
--- a/chromium/build/android/gyp/util/md5_check_test.py
+++ b/chromium/build/android/gyp/util/md5_check_test.py
@@ -36,12 +36,18 @@ class TestMd5Check(unittest.TestCase):
# Test out empty zip file to start.
_WriteZipFile(input_file2.name, [])
input_files = [input_file1.name, input_file2.name]
+ zip_paths = [input_file2.name]
record_path = tempfile.NamedTemporaryFile(suffix='.stamp')
- def CheckCallAndRecord(should_call, message, force=False,
- outputs_specified=False, outputs_missing=False,
- expected_changes=None, added_or_modified_only=None):
+ def CheckCallAndRecord(should_call,
+ message,
+ force=False,
+ outputs_specified=False,
+ outputs_missing=False,
+ expected_changes=None,
+ added_or_modified_only=None,
+ track_subentries=False):
output_paths = None
if outputs_specified:
output_file1 = tempfile.NamedTemporaryFile()
@@ -66,7 +72,8 @@ class TestMd5Check(unittest.TestCase):
input_strings=input_strings,
output_paths=output_paths,
force=force,
- pass_changes=(expected_changes or added_or_modified_only) is not None)
+ pass_changes=(expected_changes or added_or_modified_only) is not None,
+ track_subpaths_whitelist=zip_paths if track_subentries else None)
self.assertEqual(should_call, self.called, message)
if expected_changes:
description = self.changes.DescribeDifference()
@@ -81,6 +88,9 @@ class TestMd5Check(unittest.TestCase):
expected_changes='Previous stamp file not found.',
added_or_modified_only=False)
CheckCallAndRecord(False, 'should not call when nothing changed')
+ input_files = input_files[::-1]
+ CheckCallAndRecord(False, 'reordering of inputs shouldn\'t trigger call')
+
CheckCallAndRecord(False, 'should not call when nothing changed #2',
outputs_specified=True, outputs_missing=False)
CheckCallAndRecord(True, 'should call when output missing',
@@ -97,9 +107,6 @@ class TestMd5Check(unittest.TestCase):
expected_changes='*Modified: %s' % input_file1.name,
added_or_modified_only=True)
- input_files = input_files[::-1]
- CheckCallAndRecord(False, 'reordering of inputs shouldn\'t trigger call')
-
input_files = input_files[:1]
CheckCallAndRecord(True, 'removing file should trigger call',
expected_changes='*Removed: %s' % input_file1.name,
@@ -129,16 +136,21 @@ class TestMd5Check(unittest.TestCase):
added_or_modified_only=False)
_WriteZipFile(input_file2.name, [('path/1.txt', '1')])
- CheckCallAndRecord(True, 'added subpath should trigger call',
- expected_changes='*Modified: %s*Subpath added: %s' % (
- input_file2.name, 'path/1.txt'),
- added_or_modified_only=True)
+ CheckCallAndRecord(
+ True,
+ 'added subpath should trigger call',
+ expected_changes='*Modified: %s*Subpath added: %s' % (input_file2.name,
+ 'path/1.txt'),
+ added_or_modified_only=True,
+ track_subentries=True)
_WriteZipFile(input_file2.name, [('path/1.txt', '2')])
- CheckCallAndRecord(True, 'changed subpath should trigger call',
- expected_changes='*Modified: %s*Subpath modified: %s' % (
- input_file2.name, 'path/1.txt'),
- added_or_modified_only=True)
- CheckCallAndRecord(False, 'should not call when nothing changed')
+ CheckCallAndRecord(
+ True,
+ 'changed subpath should trigger call',
+ expected_changes='*Modified: %s*Subpath modified: %s' %
+ (input_file2.name, 'path/1.txt'),
+ added_or_modified_only=True,
+ track_subentries=True)
_WriteZipFile(input_file2.name, [])
CheckCallAndRecord(True, 'removed subpath should trigger call',
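
The test relies on a _WriteZipFile helper defined elsewhere in
md5_check_test.py; it is not shown in this diff, but a plausible
reconstruction (which may differ in detail from the real helper) is simply:

    import zipfile

    def _WriteZipFile(path, entries):
      # Rewrite |path| as a zip containing the given (subpath, data) entries,
      # so that entry CRCs change whenever the data does.
      with zipfile.ZipFile(path, 'w') as zip_file:
        for subpath, data in entries:
          zip_file.writestr(subpath, data)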
diff --git a/chromium/build/android/gyp/write_build_config.py b/chromium/build/android/gyp/write_build_config.py
index 3d842a3fc5e..b98bd4fa4a3 100755
--- a/chromium/build/android/gyp/write_build_config.py
+++ b/chromium/build/android/gyp/write_build_config.py
@@ -348,9 +348,9 @@ that will be merged into the final `.jar` file for distribution.
Path to the final classes.dex file (or classes.zip in case of multi-dex)
for this APK.
-* `deps_info['final_dex']['dependency_dex_files']`:
-The list of paths to all `deps_info['dex_path']` entries for all library
-dependencies for this APK.
+* `deps_info['final_dex']['all_dex_files']`:
+The list of paths to all `deps_info['dex_path']` entries for all libraries
+that comprise this APK. Valid only for debug builds.
* `native['libraries']`
List of native libraries for the primary ABI to be embedded in this APK.
@@ -1191,6 +1191,9 @@ def main(argv):
raise Exception('Not all deps support the Android platform: '
+ str(deps_not_support_android))
+ if is_apk_or_module_target:
+ all_dex_files = [c['dex_path'] for c in all_library_deps]
+
if is_java_target:
# Classpath values filled in below (after applying tested_apk_config).
config['javac'] = {}
@@ -1200,6 +1203,8 @@ def main(argv):
deps_info['interface_jar_path'] = options.interface_jar_path
if options.dex_path:
deps_info['dex_path'] = options.dex_path
+ if is_apk_or_module_target:
+ all_dex_files.append(options.dex_path)
if options.type == 'android_apk':
deps_info['apk_path'] = options.apk_path
deps_info['incremental_apk_path'] = options.incremental_apk_path
@@ -1318,9 +1323,6 @@ def main(argv):
if options.res_size_info:
config['deps_info']['res_size_info'] = options.res_size_info
- if is_apk_or_module_target:
- deps_dex_files = [c['dex_path'] for c in all_library_deps]
-
if options.type == 'group':
if options.extra_classpath_jars:
# These are .jars to add to javac classpath but not to runtime classpath.
@@ -1590,8 +1592,9 @@ def main(argv):
# within proguard.py. Move the logic for the proguard case to here.
tested_apk_library_deps = tested_apk_deps.All('java_library')
tested_apk_deps_dex_files = [c['dex_path'] for c in tested_apk_library_deps]
- deps_dex_files = [
- p for p in deps_dex_files if not p in tested_apk_deps_dex_files]
+ all_dex_files = [
+ p for p in all_dex_files if p not in tested_apk_deps_dex_files
+ ]
if options.type in ('android_apk', 'dist_aar', 'dist_jar',
'android_app_bundle_module', 'android_app_bundle'):
@@ -1605,7 +1608,7 @@ def main(argv):
dex_config = config['final_dex']
dex_config['path'] = options.final_dex_path
if is_apk_or_module_target:
- dex_config['dependency_dex_files'] = deps_dex_files
+ dex_config['all_dex_files'] = all_dex_files
if is_java_target:
config['javac']['classpath'] = javac_classpath
@@ -1723,7 +1726,7 @@ def main(argv):
RemoveObjDups(config, base, 'deps_info', 'javac_full_classpath')
RemoveObjDups(config, base, 'deps_info', 'javac_full_interface_classpath')
RemoveObjDups(config, base, 'deps_info', 'jni', 'all_source')
- RemoveObjDups(config, base, 'final_dex', 'dependency_dex_files')
+ RemoveObjDups(config, base, 'final_dex', 'all_dex_files')
RemoveObjDups(config, base, 'extra_android_manifests')
build_utils.WriteJson(config, options.build_config, only_if_changed=True)
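
Downstream scripts read the generated .build_config as JSON, so the rename
from dependency_dex_files to all_dex_files is visible to them directly. An
illustrative read of the new field (hypothetical path, not a real consumer):

    import json

    with open('gen/foo_apk.build_config') as f:
      config = json.load(f)

    final_dex = config['final_dex']
    print('final dex output:', final_dex['path'])
    # 'all_dex_files' replaces 'dependency_dex_files' and, per the docs above,
    # is only filled in for debug builds of APK/bundle-module targets.
    print('input dex files:', final_dex.get('all_dex_files', []))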