author     Allan Sandfeld Jensen <allan.jensen@qt.io>  2018-05-15 10:20:33 +0200
committer  Allan Sandfeld Jensen <allan.jensen@qt.io>  2018-05-15 10:28:57 +0000
commit     d17ea114e5ef69ad5d5d7413280a13e6428098aa
tree       2c01a75df69f30d27b1432467cfe7c1467a498da /chromium/build/android/gyp
parent     8c5c43c7b138c9b4b0bf56d946e61d3bbc111bec

BASELINE: Update Chromium to 67.0.3396.47

Change-Id: Idcb1341782e417561a2473eeecc82642dafda5b7
Reviewed-by: Michal Klocek <michal.klocek@qt.io>
Diffstat (limited to 'chromium/build/android/gyp')
-rwxr-xr-x  chromium/build/android/gyp/compile_resources.py            632
-rwxr-xr-x  chromium/build/android/gyp/extract_unwind_tables.py        288
-rwxr-xr-x  chromium/build/android/gyp/extract_unwind_tables_tests.py  121
-rwxr-xr-x  chromium/build/android/gyp/generate_split_manifest.py       96
-rwxr-xr-x  chromium/build/android/gyp/javac.py                          48
-rwxr-xr-x  chromium/build/android/gyp/main_dex_list.py                  77
-rwxr-xr-x  chromium/build/android/gyp/prepare_resources.py             285
-rwxr-xr-x  chromium/build/android/gyp/process_resources.py             995
-rwxr-xr-x  chromium/build/android/gyp/proguard.py                        1
-rw-r--r--  chromium/build/android/gyp/util/build_utils.py               42
-rwxr-xr-x  chromium/build/android/gyp/util/build_utils_test.py          44
-rw-r--r--  chromium/build/android/gyp/util/proguard_util.py              13
-rw-r--r--  chromium/build/android/gyp/util/resource_utils.py            469
-rwxr-xr-x  chromium/build/android/gyp/write_build_config.py            491
-rwxr-xr-x  chromium/build/android/gyp/write_ordered_libraries.py         4
15 files changed, 2450 insertions, 1156 deletions
diff --git a/chromium/build/android/gyp/compile_resources.py b/chromium/build/android/gyp/compile_resources.py
new file mode 100755
index 00000000000..5c9618894ab
--- /dev/null
+++ b/chromium/build/android/gyp/compile_resources.py
@@ -0,0 +1,632 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Compile Android resources into an intermediate APK.
+
+This can also generate an R.txt and a .srcjar file containing the proper
+final R.java classes for all resource packages the APK depends on.
+
+This will crunch images with aapt2.
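+
+A minimal example invocation (illustrative paths; the common input options,
+such as the aapt and Android SDK paths added by
+resource_utils.ResourceArgsParser, are omitted here):
+
+  compile_resources.py --android-manifest path/to/AndroidManifest.xml \
+      --apk-path out/gen/foo.ap_ --srcjar-out out/gen/foo.srcjar \
+      --r-text-out out/gen/R.txt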
+"""
+
+import argparse
+import collections
+import multiprocessing.pool
+import os
+import re
+import shutil
+import subprocess
+import sys
+import zipfile
+from xml.etree import ElementTree
+
+
+from util import build_utils
+from util import resource_utils
+
+_SOURCE_ROOT = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(
+ __file__))))
+# Import jinja2 from third_party/jinja2
+sys.path.insert(1, os.path.join(_SOURCE_ROOT, 'third_party'))
+from jinja2 import Template # pylint: disable=F0401
+
+# A variation of this list also exists in:
+# //base/android/java/src/org/chromium/base/LocaleUtils.java
+_CHROME_TO_ANDROID_LOCALE_MAP = {
+ 'en-GB': 'en-rGB',
+ 'en-US': 'en-rUS',
+ 'es-419': 'es-rUS',
+ 'fil': 'tl',
+ 'he': 'iw',
+ 'id': 'in',
+ 'pt-PT': 'pt-rPT',
+ 'pt-BR': 'pt-rBR',
+ 'yi': 'ji',
+ 'zh-CN': 'zh-rCN',
+ 'zh-TW': 'zh-rTW',
+}
+
+# Pngs that we shouldn't convert to webp. Please add rationale when updating.
+_PNG_WEBP_BLACKLIST_PATTERN = re.compile('|'.join([
+ # Crashes on Galaxy S5 running L (https://crbug.com/807059).
+ r'.*star_gray\.png',
+ # Android requires pngs for 9-patch images.
+ r'.*\.9\.png',
+ # Daydream requires pngs for icon files.
+ r'.*daydream_icon_.*\.png']))
+
+# Regular expression for package declaration in 'aapt dump resources' output.
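+# Example of a line this matches (illustrative):
+#   Package Group 0 id=0x7f packageCount=1 name=org.chromium.foo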
+_RE_PACKAGE_DECLARATION = re.compile(
+ r'^Package Group ([0-9]+) id=0x([0-9a-fA-F]+)')
+
+
+def _PackageIdArgument(x):
+ """Convert a string into a package ID while checking its range.
+
+ Args:
+ x: argument string.
+ Returns:
+ the package ID as an int, or -1 in case of error.
+ """
+ try:
+ x = int(x, 0)
+ if x < 0 or x > 127:
+ x = -1
+ except ValueError:
+ x = -1
+ return x
+
+
+def _ParseArgs(args):
+ """Parses command line options.
+
+ Returns:
+ An options object as from argparse.ArgumentParser.parse_args()
+ """
+ parser, input_opts, output_opts = resource_utils.ResourceArgsParser()
+
+ input_opts.add_argument('--android-manifest', required=True,
+ help='AndroidManifest.xml path')
+
+ input_opts.add_argument(
+ '--shared-resources',
+ action='store_true',
+ help='Make all resources in R.java non-final and allow the resource IDs '
+ 'to be reset to a different package index when the apk is loaded by '
+ 'another application at runtime.')
+
+ input_opts.add_argument(
+ '--app-as-shared-lib',
+ action='store_true',
+ help='Same as --shared-resources, but also ensures all resource IDs are '
+ 'directly usable from the APK loaded as an application.')
+
+ input_opts.add_argument(
+ '--shared-resources-whitelist',
+ help='An R.txt file acting as a whitelist for resources that should be '
+ 'non-final and have their package ID changed at runtime in R.java. '
+ 'Implies and overrides --shared-resources.')
+
+ input_opts.add_argument('--support-zh-hk', action='store_true',
+ help='Use zh-rTW resources for zh-rHK.')
+
+ input_opts.add_argument('--debuggable',
+ action='store_true',
+ help='Whether to add android:debuggable="true"')
+
+ input_opts.add_argument('--version-code', help='Version code for apk.')
+ input_opts.add_argument('--version-name', help='Version name for apk.')
+
+ input_opts.add_argument(
+ '--no-compress',
+ help='disables compression for the given comma-separated list of '
+ 'extensions')
+
+ input_opts.add_argument(
+ '--locale-whitelist',
+ default='[]',
+ help='GN list of languages to include. All other language configs will '
+ 'be stripped out. List may include a combination of Android locales '
+ 'or Chrome locales.')
+
+ input_opts.add_argument('--exclude-xxxhdpi', action='store_true',
+ help='Do not include xxxhdpi drawables.')
+
+ input_opts.add_argument(
+ '--xxxhdpi-whitelist',
+ default='[]',
+ help='GN list of globs that say which xxxhdpi images to include even '
+ 'when --exclude-xxxhdpi is set.')
+
+ input_opts.add_argument('--png-to-webp', action='store_true',
+ help='Convert png files to webp format.')
+
+ input_opts.add_argument('--webp-binary', default='',
+ help='Path to the cwebp binary.')
+
+ input_opts.add_argument('--no-xml-namespaces',
+ action='store_true',
+ help='Whether to strip xml namespaces from processed '
+ 'xml resources')
+
+ input_opts.add_argument(
+ '--check-resources-pkg-id', type=_PackageIdArgument,
+ help='Check the package ID of the generated resources table. '
+ 'Value must be integer in [0..127] range.')
+
+ output_opts.add_argument('--apk-path', required=True,
+ help='Path to output (partial) apk.')
+
+ output_opts.add_argument('--srcjar-out',
+ help='Path to srcjar to contain generated R.java.')
+
+ output_opts.add_argument('--r-text-out',
+ help='Path to store the generated R.txt file.')
+
+ output_opts.add_argument('--proguard-file',
+ help='Path to proguard.txt generated file')
+
+ output_opts.add_argument(
+ '--proguard-file-main-dex',
+ help='Path to proguard.txt generated file for main dex')
+
+ options = parser.parse_args(args)
+
+ resource_utils.HandleCommonOptions(options)
+
+ options.locale_whitelist = build_utils.ParseGnList(options.locale_whitelist)
+ options.xxxhdpi_whitelist = build_utils.ParseGnList(options.xxxhdpi_whitelist)
+
+ if options.check_resources_pkg_id is not None:
+ if options.check_resources_pkg_id < 0:
+ raise Exception(
+ 'Package resource id should be integer in [0..127] range.')
+
+ if options.shared_resources and options.app_as_shared_lib:
+ raise Exception('Only one of --app-as-shared-lib or --shared-resources '
+ 'can be used.')
+
+ return options
+
+
+def _ExtractPackageIdFromApk(apk_path, aapt_path):
+ """Extract the package ID of a given APK (even intermediate ones).
+
+ Args:
+ apk_path: Input apk path.
+ aapt_path: Path to aapt tool.
+ Returns:
+ An integer corresponding to the APK's package id.
+ Raises:
+ Exception if there is no resources table in the input file.
+ """
+ cmd_args = [ aapt_path, 'dump', 'resources', apk_path ]
+ output = build_utils.CheckOutput(cmd_args)
+
+ for line in output.splitlines():
+ m = _RE_PACKAGE_DECLARATION.match(line)
+ if m:
+ return int(m.group(2), 16)
+
+ raise Exception("No resources in this APK!")
+
+
+def _SortZip(original_path, sorted_path):
+ """Generate new zip archive by sorting all files in the original by name."""
+ with zipfile.ZipFile(sorted_path, 'w') as sorted_zip, \
+ zipfile.ZipFile(original_path, 'r') as original_zip:
+ for info in sorted(original_zip.infolist(), key=lambda i: i.filename):
+ sorted_zip.writestr(info, original_zip.read(info))
+
+
+def _DuplicateZhResources(resource_dirs):
+ """Duplicate Taiwanese resources into Hong-Kong specific directory."""
+ for resource_dir in resource_dirs:
+ # We use zh-TW resources for zh-HK (if we have zh-TW resources).
+ for path in build_utils.IterFiles(resource_dir):
+ if 'zh-rTW' in path:
+ hk_path = path.replace('zh-rTW', 'zh-rHK')
+ build_utils.MakeDirectory(os.path.dirname(hk_path))
+ shutil.copyfile(path, hk_path)
+
+
+def _ToAaptLocales(locale_whitelist, support_zh_hk):
+ """Converts the list of Chrome locales to aapt config locales."""
+ ret = set()
+ for locale in locale_whitelist:
+ locale = _CHROME_TO_ANDROID_LOCALE_MAP.get(locale, locale)
+ if locale is None or ('-' in locale and '-r' not in locale):
+ raise Exception('_CHROME_TO_ANDROID_LOCALE_MAP needs updating.'
+ ' Found: %s' % locale)
+ ret.add(locale)
+ # Always keep non-regional fall-backs.
+ language = locale.split('-')[0]
+ ret.add(language)
+
+ # We don't actually support zh-HK in Chrome on Android, but we mimic the
+ # native side behavior where we use zh-TW resources when the locale is set to
+ # zh-HK. See https://crbug.com/780847.
+ if support_zh_hk:
+ assert not any('HK' in l for l in locale_whitelist), (
+ 'Remove special logic if zh-HK is now supported (crbug.com/780847).')
+ ret.add('zh-rHK')
+ return sorted(ret)
+
+
+def _MoveImagesToNonMdpiFolders(res_root):
+ """Move images from drawable-*-mdpi-* folders to drawable-* folders.
+
+ Why? http://crbug.com/289843
+ """
+ for src_dir_name in os.listdir(res_root):
+ src_components = src_dir_name.split('-')
+ if src_components[0] != 'drawable' or 'mdpi' not in src_components:
+ continue
+ src_dir = os.path.join(res_root, src_dir_name)
+ if not os.path.isdir(src_dir):
+ continue
+ dst_components = [c for c in src_components if c != 'mdpi']
+ assert dst_components != src_components
+ dst_dir_name = '-'.join(dst_components)
+ dst_dir = os.path.join(res_root, dst_dir_name)
+ build_utils.MakeDirectory(dst_dir)
+ for src_file_name in os.listdir(src_dir):
+ if not os.path.splitext(src_file_name)[1] in ('.png', '.webp'):
+ continue
+ src_file = os.path.join(src_dir, src_file_name)
+ dst_file = os.path.join(dst_dir, src_file_name)
+ assert not os.path.lexists(dst_file)
+ shutil.move(src_file, dst_file)
+
+
+def _CreateLinkApkArgs(options):
+ """Create command-line arguments list to invoke 'aapt2 link'.
+
+ Args:
+ options: The command-line options tuple.
+ Returns:
+    A list of strings corresponding to the command-line invocation for
+    the command, matching the arguments from |options|.
+ """
+ link_command = [
+ options.aapt_path + '2',
+ 'link',
+ '--version-code', options.version_code,
+ '--version-name', options.version_name,
+ '--auto-add-overlay',
+ '--no-version-vectors',
+ '-I', options.android_sdk_jar,
+ '-o', options.apk_path,
+ ]
+
+ if options.proguard_file:
+ link_command += ['--proguard', options.proguard_file]
+ if options.proguard_file_main_dex:
+ link_command += ['--proguard-main-dex', options.proguard_file_main_dex]
+
+ if options.no_compress:
+ for ext in options.no_compress.split(','):
+ link_command += ['-0', ext]
+
+ if options.shared_resources:
+ link_command.append('--shared-lib')
+
+ if options.locale_whitelist:
+ aapt_locales = _ToAaptLocales(
+ options.locale_whitelist, options.support_zh_hk)
+ link_command += ['-c', ','.join(aapt_locales)]
+
+ if options.no_xml_namespaces:
+ link_command.append('--no-xml-namespaces')
+
+ return link_command
+
+
+def _ExtractVersionFromSdk(aapt_path, sdk_path):
+ """Extract version code and name from Android SDK .jar file.
+
+ Args:
+ aapt_path: Path to 'aapt' build tool.
+ sdk_path: Path to SDK-specific android.jar file.
+ Returns:
+ A (version_code, version_name) pair of strings.
+ """
+ output = subprocess.check_output([aapt_path, 'dump', 'badging', sdk_path])
+ version_code = re.search(r"versionCode='(.*?)'", output).group(1)
+ version_name = re.search(r"versionName='(.*?)'", output).group(1)
+ return version_code, version_name,
+
+
+def _FixManifest(options, temp_dir):
+ """Fix the APK's AndroidManifest.xml.
+
+ This adds any missing namespaces for 'android' and 'tools', and
+  sets certain elements like 'platformBuildVersionCode' or
+ 'android:debuggable' depending on the content of |options|.
+
+ Args:
+ options: The command-line arguments tuple.
+ temp_dir: A temporary directory where the fixed manifest will be written to.
+ Returns:
+ Path to the fixed manifest within |temp_dir|.
+ """
+ debug_manifest_path = os.path.join(temp_dir, 'AndroidManifest.xml')
+ _ANDROID_NAMESPACE = 'http://schemas.android.com/apk/res/android'
+ _TOOLS_NAMESPACE = 'http://schemas.android.com/tools'
+ ElementTree.register_namespace('android', _ANDROID_NAMESPACE)
+ ElementTree.register_namespace('tools', _TOOLS_NAMESPACE)
+ original_manifest = ElementTree.parse(options.android_manifest)
+
+ version_code, version_name = _ExtractVersionFromSdk(
+ options.aapt_path, options.android_sdk_jar)
+
+ # ElementTree.find does not work if the required tag is the root.
+ if original_manifest.getroot().tag == 'manifest':
+ manifest_node = original_manifest.getroot()
+ else:
+ manifest_node = original_manifest.find('manifest')
+
+ manifest_node.set('platformBuildVersionCode', version_code)
+ manifest_node.set('platformBuildVersionName', version_name)
+
+ if options.debuggable:
+ app_node = original_manifest.find('application')
+ app_node.set('{%s}%s' % (_ANDROID_NAMESPACE, 'debuggable'), 'true')
+
+ with open(debug_manifest_path, 'w') as debug_manifest:
+ debug_manifest.write(ElementTree.tostring(
+ original_manifest.getroot(), encoding='UTF-8'))
+
+ return debug_manifest_path
+
+
+def _ResourceNameFromPath(path):
+ return os.path.splitext(os.path.basename(path))[0]
+
+
+def _CreateKeepPredicate(resource_dirs, exclude_xxxhdpi, xxxhdpi_whitelist):
+ """Return a predicate lambda to determine which resource files to keep."""
+ if not exclude_xxxhdpi:
+    # Do not extract dotfiles (e.g. ".gitkeep"). aapt ignores them anyway.
+ return lambda path: os.path.basename(path)[0] != '.'
+
+ # Returns False only for xxxhdpi non-mipmap, non-whitelisted drawables.
+ naive_predicate = lambda path: (
+ not re.search(r'[/-]xxxhdpi[/-]', path) or
+ re.search(r'[/-]mipmap[/-]', path) or
+ build_utils.MatchesGlob(path, xxxhdpi_whitelist))
+
+ # Build a set of all non-xxxhdpi drawables to ensure that we never exclude any
+ # xxxhdpi drawable that does not exist in other densities.
+ non_xxxhdpi_drawables = set()
+ for resource_dir in resource_dirs:
+ for path in build_utils.IterFiles(resource_dir):
+ if re.search(r'[/-]drawable[/-]', path) and naive_predicate(path):
+ non_xxxhdpi_drawables.add(_ResourceNameFromPath(path))
+
+ return lambda path: (naive_predicate(path) or
+ _ResourceNameFromPath(path) not in non_xxxhdpi_drawables)
+
+
+def _ConvertToWebP(webp_binary, png_files):
+ pool = multiprocessing.pool.ThreadPool(10)
+ def convert_image(png_path):
+ root = os.path.splitext(png_path)[0]
+ webp_path = root + '.webp'
+ args = [webp_binary, png_path, '-mt', '-quiet', '-m', '6', '-q', '100',
+ '-lossless', '-o', webp_path]
+ subprocess.check_call(args)
+ os.remove(png_path)
+
+ pool.map(convert_image, [f for f in png_files
+ if not _PNG_WEBP_BLACKLIST_PATTERN.match(f)])
+ pool.close()
+ pool.join()
+
+
+def _CompileDeps(aapt_path, dep_subdirs, temp_dir):
+ partials_dir = os.path.join(temp_dir, 'partials')
+ build_utils.MakeDirectory(partials_dir)
+ partial_compile_command = [
+ aapt_path + '2',
+ 'compile',
+ # TODO(wnwen): Turn this on once aapt2 forces 9-patch to be crunched.
+ # '--no-crunch',
+ ]
+ pool = multiprocessing.pool.ThreadPool(10)
+ def compile_partial(directory):
+ dirname = os.path.basename(directory)
+ partial_path = os.path.join(partials_dir, dirname + '.zip')
+ compile_command = (partial_compile_command +
+ ['--dir', directory, '-o', partial_path])
+ build_utils.CheckOutput(compile_command)
+
+    # Sorting the files in the partial ensures deterministic output from the
+    # aapt2 link step, which depends on the order of files in the partial.
+ sorted_partial_path = os.path.join(partials_dir, dirname + '.sorted.zip')
+ _SortZip(partial_path, sorted_partial_path)
+
+ return sorted_partial_path
+
+ partials = pool.map(compile_partial, dep_subdirs)
+ pool.close()
+ pool.join()
+ return partials
+
+
+def _PackageApk(options, dep_subdirs, temp_dir, gen_dir, r_txt_path):
+ """Compile resources with aapt2 and generate intermediate .ap_ file.
+
+ Args:
+ options: The command-line options tuple. E.g. the generated apk
+ will be written to |options.apk_path|.
+ dep_subdirs: The list of directories where dependency resource zips
+ were extracted (its content will be altered by this function).
+ temp_dir: A temporary directory.
+ gen_dir: Another temp directory where some intermediate files are
+ generated.
+    r_txt_path: The path where the R.txt file will be written to.
+ """
+ _DuplicateZhResources(dep_subdirs)
+
+ keep_predicate = _CreateKeepPredicate(
+ dep_subdirs, options.exclude_xxxhdpi, options.xxxhdpi_whitelist)
+ png_paths = []
+ for directory in dep_subdirs:
+ for f in build_utils.IterFiles(directory):
+ if not keep_predicate(f):
+ os.remove(f)
+ elif f.endswith('.png'):
+ png_paths.append(f)
+ if png_paths and options.png_to_webp:
+ _ConvertToWebP(options.webp_binary, png_paths)
+ for directory in dep_subdirs:
+ _MoveImagesToNonMdpiFolders(directory)
+
+ link_command = _CreateLinkApkArgs(options)
+ link_command += ['--output-text-symbols', r_txt_path]
+ # TODO(digit): Is this below actually required for R.txt generation?
+ link_command += ['--java', gen_dir]
+
+ fixed_manifest = _FixManifest(options, temp_dir)
+ link_command += ['--manifest', fixed_manifest]
+
+ partials = _CompileDeps(options.aapt_path, dep_subdirs, temp_dir)
+ for partial in partials:
+ link_command += ['-R', partial]
+
+ # Creates a .zip with AndroidManifest.xml, resources.arsc, res/*
+ # Also creates R.txt
+ build_utils.CheckOutput(
+ link_command, print_stdout=False, print_stderr=False)
+
+
+def _WriteFinalRTxtFile(options, aapt_r_txt_path):
+ """Determine final R.txt and return its location.
+
+ This handles --r-text-in and --r-text-out options at the same time.
+
+ Args:
+ options: The command-line options tuple.
+ aapt_r_txt_path: The path to the R.txt generated by aapt.
+ Returns:
+ Path to the final R.txt file.
+ """
+ if options.r_text_in:
+ r_txt_file = options.r_text_in
+ else:
+ # When an empty res/ directory is passed, aapt does not write an R.txt.
+ r_txt_file = aapt_r_txt_path
+ if not os.path.exists(r_txt_file):
+ build_utils.Touch(r_txt_file)
+
+ if options.r_text_out:
+ shutil.copyfile(r_txt_file, options.r_text_out)
+
+ return r_txt_file
+
+
+def _OnStaleMd5(options):
+ with resource_utils.BuildContext() as build:
+ dep_subdirs = resource_utils.ExtractDeps(options.dependencies_res_zips,
+ build.deps_dir)
+
+ _PackageApk(options, dep_subdirs, build.temp_dir, build.gen_dir,
+ build.r_txt_path)
+
+ r_txt_path = _WriteFinalRTxtFile(options, build.r_txt_path)
+
+ package = resource_utils.ExtractPackageFromManifest(
+ options.android_manifest)
+
+    # If --shared-resources-whitelist is used, all resources listed in
+    # the corresponding R.txt file will be non-final, and an onResourcesLoaded()
+    # method will be generated to adjust them at runtime.
+    #
+    # Otherwise, if --shared-resources is used, all resources will be
+    # non-final, and an onResourcesLoaded() method will be generated too.
+ #
+ # Otherwise, all resources will be final, and no method will be generated.
+ #
+ rjava_build_options = resource_utils.RJavaBuildOptions()
+ if options.shared_resources_whitelist:
+ rjava_build_options.ExportSomeResources(
+ options.shared_resources_whitelist)
+ rjava_build_options.GenerateOnResourcesLoaded()
+ elif options.shared_resources or options.app_as_shared_lib:
+ rjava_build_options.ExportAllResources()
+ rjava_build_options.GenerateOnResourcesLoaded()
+
+ resource_utils.CreateRJavaFiles(
+ build.srcjar_dir, package, r_txt_path,
+ options.extra_res_packages,
+ options.extra_r_text_files,
+ rjava_build_options)
+
+ if options.srcjar_out:
+ build_utils.ZipDir(options.srcjar_out, build.srcjar_dir)
+
+ if options.check_resources_pkg_id is not None:
+ expected_id = options.check_resources_pkg_id
+ package_id = _ExtractPackageIdFromApk(options.apk_path,
+ options.aapt_path)
+ if package_id != expected_id:
+ raise Exception('Invalid package ID 0x%x (expected 0x%x)' %
+ (package_id, expected_id))
+
+
+def main(args):
+ args = build_utils.ExpandFileArgs(args)
+ options = _ParseArgs(args)
+
+ # Order of these must match order specified in GN so that the correct one
+ # appears first in the depfile.
+ possible_output_paths = [
+ options.apk_path,
+ options.r_text_out,
+ options.srcjar_out,
+ options.proguard_file,
+ options.proguard_file_main_dex,
+ ]
+ output_paths = [x for x in possible_output_paths if x]
+
+  # List python deps in input_strings rather than input_paths since their
+  # contents do not change what gets written to the depfile.
+ input_strings = options.extra_res_packages + [
+ options.shared_resources,
+ options.exclude_xxxhdpi,
+ options.xxxhdpi_whitelist,
+ str(options.debuggable),
+ str(options.png_to_webp),
+ str(options.support_zh_hk),
+ str(options.no_xml_namespaces),
+ ]
+
+ input_strings.extend(_CreateLinkApkArgs(options))
+
+ possible_input_paths = [
+ options.aapt_path,
+ options.android_manifest,
+ options.android_sdk_jar,
+ options.shared_resources_whitelist,
+ ]
+ input_paths = [x for x in possible_input_paths if x]
+ input_paths.extend(options.dependencies_res_zips)
+ input_paths.extend(options.extra_r_text_files)
+
+ if options.webp_binary:
+ input_paths.append(options.webp_binary)
+
+ build_utils.CallAndWriteDepfileIfStale(
+ lambda: _OnStaleMd5(options),
+ options,
+ input_paths=input_paths,
+ input_strings=input_strings,
+ output_paths=output_paths)
+
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
diff --git a/chromium/build/android/gyp/extract_unwind_tables.py b/chromium/build/android/gyp/extract_unwind_tables.py
new file mode 100755
index 00000000000..37a8421449b
--- /dev/null
+++ b/chromium/build/android/gyp/extract_unwind_tables.py
@@ -0,0 +1,288 @@
+#!/usr/bin/env python
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Extracts the unwind tables in from breakpad symbol files
+
+Runs dump_syms on the given binary file and extracts the CFI data into the
+given output file.
+The output file is a binary file containing CFI rows ordered by function
+address. To reduce the output size and keep the data in a compact format, the
+output file only contains rows that match the most popular rule type in the
+CFI table. See https://github.com/google/breakpad/blob/master/docs/symbol_files.md.
+1. The CFA rules should be of the postfix form "SP <val> +".
+2. The RA rules should be of the postfix form "CFA <val> + ^".
+Note: breakpad represents dereferencing an address with the '^' operator.
+
+The output file has 2 tables, UNW_INDEX and UNW_DATA, inspired by the ARM EHABI
+format. The first table contains function addresses and an index into the
+UNW_DATA table. The second table contains one or more rows for the function
+unwind information.
+
+The output file starts with 4 bytes giving the size of UNW_INDEX in bytes,
+followed by the UNW_INDEX table and then the UNW_DATA table.
+
+UNW_INDEX contains two columns of N rows each, where N is the number of
+functions.
+  1. The first column contains the 4-byte start address of every function, as
+     an offset from the start of the binary, in sorted order.
+  2. For each function address, the second column contains a 2-byte index. The
+     indices are offsets (counted in 2-byte units) of the CFI data from the
+     start of UNW_DATA.
+The last entry in the table always contains the CANT_UNWIND index to specify
+the end address of the last function.
+
+UNW_DATA contains the data of all the functions. Each function's data contains
+N rows. The data found at the address pointed to from UNW_INDEX is:
+  2 bytes: N - the number of rows that belong to the current function.
+  N * 4 bytes: N rows of data. Each row is:
+    16 bits: Address offset from the function start.
+    14 bits: CFA offset / 4.
+     2 bits: RA offset / 4.
+
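+For example (matching the encoding in _WriteCfiData below), a row at address
+offset 4 with CFA offset 12 and RA offset 8 is stored as the 2-byte pair
+(0x0004, 0x000e), since 12 | (8 / 4) == 0xe.
+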
+A function is not added to the unwind table under the following conditions:
+C1. If the length of the function code (number of instructions) is greater than
+    0xFFFF (2 byte address span). This is because we use 16 bits for the offset
+    of an instruction from the start of the function.
+C2. If the function moves the SP by more than 0xFFFF bytes. This is because we
+    use 14 bits to denote the CFA offset (the last 2 bits are 0).
+C3. If the return address is stored at an offset >= 16 from the CFA. Some
+    functions which have variable arguments can have an offset up to 16.
+    TODO(ssid): We can actually store offset 16 by subtracting 1 from RA/4 since
+    we never have 0.
+C4. Some functions do not have unwind information defined in DWARF info. These
+    functions have the index value CANT_UNWIND (0xFFFF) in the UNW_INDEX table.
+
+
+Usage:
+ extract_unwind_tables.py --input_path [root path to unstripped chrome.so]
+ --output_path [output path] --dump_syms_path [path to dump_syms binary]
+"""
+
+import argparse
+import re
+import struct
+import subprocess
+import sys
+import tempfile
+
+
+_CFA_REG = '.cfa'
+_RA_REG = '.ra'
+
+_ADDR_ENTRY = 0
+_LENGTH_ENTRY = 1
+
+_CANT_UNWIND = 0xFFFF
+
+
+def _Write4Bytes(output_file, val):
+ """Writes a 32 bit unsigned integer to the given output file."""
+  output_file.write(struct.pack('<L', val))
+
+
+def _Write2Bytes(output_file, val):
+ """Writes a 16 bit unsigned integer to the given output file."""
+  output_file.write(struct.pack('<H', val))
+
+
+def _FindRuleForRegister(cfi_row, reg):
+ """Returns the postfix expression as string for a given register.
+
+  The breakpad CFI row format specifies rules for unwinding each register in
+  postfix expression form, separated by spaces. Each rule starts with the
+  register name and a colon. E.g.: "CFI R1: <rule> R2: <rule>".
+ """
+ out = []
+ found_register = False
+ for part in cfi_row:
+ if found_register:
+ if part[-1] == ':':
+ break
+ out.append(part)
+ elif part == reg + ':':
+ found_register = True
+ return ' '.join(out)
+
+
+def _GetCfaAndRaOffset(cfi_row):
+ """Returns a tuple with 2 numbers (cfa_offset, ra_offset).
+
+  Returns the correct values if the rule matches the predefined criteria, and
+  (0, 0) otherwise. The criterion for the CFA rule is the postfix form
+  "SP <val> +" and for the RA rule the postfix form "CFA -<val> + ^".
+ """
+ cfa_offset = 0
+ ra_offset = 0
+ cfa_rule = _FindRuleForRegister(cfi_row, _CFA_REG)
+ ra_rule = _FindRuleForRegister(cfi_row, _RA_REG)
+ if cfa_rule and re.match(r'sp [0-9]+ \+', cfa_rule):
+ cfa_offset = int(cfa_rule.split()[1], 10)
+ if ra_rule:
+ if not re.match(r'.cfa -[0-9]+ \+ \^', ra_rule):
+ return (0, 0)
+ ra_offset = -1 * int(ra_rule.split()[1], 10)
+ return (cfa_offset, ra_offset)
+
+
+def _GetAllCfiRows(symbol_file):
+ """Returns parsed CFI data from given symbol_file.
+
+ Each entry in the cfi data dictionary returned is a map from function start
+ address to array of function rows, starting with FUNCTION type, followed by
+ one or more CFI rows.
+ """
+ cfi_data = {}
+ current_func = []
+ for line in symbol_file:
+ if 'STACK CFI' not in line:
+ continue
+
+ parts = line.split()
+ data = {}
+ if parts[2] == 'INIT':
+ # Add the previous function to the output
+ if len(current_func) > 1:
+ cfi_data[current_func[0][_ADDR_ENTRY]] = current_func
+ current_func = []
+
+ # The function line is of format "STACK CFI INIT <addr> <length> ..."
+ data[_ADDR_ENTRY] = int(parts[3], 16)
+ data[_LENGTH_ENTRY] = int(parts[4], 16)
+
+ # Condition C1: Skip if length is large.
+ if data[_LENGTH_ENTRY] == 0 or data[_LENGTH_ENTRY] > 0xffff:
+ continue # Skip the current function.
+ else:
+ # The current function is skipped.
+ if len(current_func) == 0:
+ continue
+
+ # The CFI row is of format "STACK CFI <addr> .cfa: <expr> .ra: <expr> ..."
+ data[_ADDR_ENTRY] = int(parts[2], 16)
+ (data[_CFA_REG], data[_RA_REG]) = _GetCfaAndRaOffset(parts)
+
+ # Condition C2 and C3: Skip based on limits on offsets.
+ if data[_CFA_REG] == 0 or data[_RA_REG] >= 16 or data[_CFA_REG] > 0xffff:
+ current_func = []
+ continue
+ assert data[_CFA_REG] % 4 == 0
+ # Since we skipped functions with code size larger than 0xffff, we should
+ # have no function offset larger than the same value.
+ assert data[_ADDR_ENTRY] - current_func[0][_ADDR_ENTRY] < 0xffff
+
+ if data[_ADDR_ENTRY] == 0:
+ # Skip current function, delete all previous entries.
+ current_func = []
+ continue
+ assert data[_ADDR_ENTRY] % 2 == 0
+ current_func.append(data)
+
+ # Condition C4: Skip function without CFI rows.
+ if len(current_func) > 1:
+ cfi_data[current_func[0][_ADDR_ENTRY]] = current_func
+ return cfi_data
+
+
+def _WriteCfiData(cfi_data, out_file):
+ """Writes the CFI data in defined format to out_file."""
+ # Stores the final data that will be written to UNW_DATA table, in order
+ # with 2 byte items.
+ unw_data = []
+
+  # Represent all the CFI data of a function as a set of numbers and map them
+  # to an index in |unw_data|. This index is later written to the UNW_INDEX
+  # table for each function. This map is used to find the index of the data
+  # for functions.
+ data_to_index = {}
+ # Store mapping between the functions to the index.
+ func_addr_to_index = {}
+ previous_func_end = 0
+ for addr, function in sorted(cfi_data.iteritems()):
+    # Add an empty function entry when function CFIs are missing between 2
+    # functions.
+ if previous_func_end != 0 and addr - previous_func_end > 4:
+ func_addr_to_index[previous_func_end + 2] = _CANT_UNWIND
+ previous_func_end = addr + cfi_data[addr][0][_LENGTH_ENTRY]
+
+ assert len(function) > 1
+ func_data_arr = []
+ func_data = 0
+ # The first row contains the function address and length. The rest of the
+ # rows have CFI data. Create function data array as given in the format.
+ for row in function[1:]:
+ addr_offset = row[_ADDR_ENTRY] - addr
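+      # Pack the CFA offset (a multiple of 4, so its low 2 bits are free) and
+      # RA offset / 4 (always < 4 here) into a single 16-bit value.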
+ cfa_offset = (row[_CFA_REG]) | (row[_RA_REG] / 4)
+
+ func_data_arr.append(addr_offset)
+ func_data_arr.append(cfa_offset)
+
+ # Consider all the rows in the data as one large integer and add it as a key
+ # to the |data_to_index|.
+ for data in func_data_arr:
+ func_data = (func_data << 16) | data
+
+ row_count = len(func_data_arr) / 2
+ if func_data not in data_to_index:
+ # When data is not found, create a new index = len(unw_data), and write
+ # the data to |unw_data|.
+ index = len(unw_data)
+ data_to_index[func_data] = index
+ unw_data.append(row_count)
+ for row in func_data_arr:
+ unw_data.append(row)
+ else:
+ # If the data was found, then use the same index for the function.
+ index = data_to_index[func_data]
+ assert row_count == unw_data[index]
+ func_addr_to_index[addr] = data_to_index[func_data]
+
+  # Mark the end of the last function entry.
+ func_addr_to_index[previous_func_end + 2] = _CANT_UNWIND
+
+  # Write the size of the UNW_INDEX table in bytes.
+ _Write4Bytes(out_file, len(func_addr_to_index) * 6)
+
+  # Write the UNW_INDEX table: first the list of addresses, then the indices.
+ sorted_unw_index = sorted(func_addr_to_index.iteritems())
+ for addr, index in sorted_unw_index:
+ _Write4Bytes(out_file, addr)
+ for addr, index in sorted_unw_index:
+ _Write2Bytes(out_file, index)
+
+ # Write the UNW_DATA table.
+ for data in unw_data:
+ _Write2Bytes(out_file, data)
+
+
+def _ParseCfiData(sym_file, output_path):
+ with open(sym_file, 'r') as f:
+ cfi_data = _GetAllCfiRows(f)
+
+ with open(output_path, 'wb') as out_file:
+ _WriteCfiData(cfi_data, out_file)
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument(
+ '--input_path', required=True,
+ help='The input path of the unstripped binary')
+ parser.add_argument(
+ '--output_path', required=True,
+ help='The path of the output file')
+ parser.add_argument(
+ '--dump_syms_path', required=True,
+ help='The path of the dump_syms binary')
+
+ args = parser.parse_args()
+
+ with tempfile.NamedTemporaryFile() as sym_file:
+ out = subprocess.call(
+ ['./' +args.dump_syms_path, args.input_path], stdout=sym_file)
+ assert not out
+ sym_file.flush()
+ _ParseCfiData(sym_file.name, args.output_path)
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/chromium/build/android/gyp/extract_unwind_tables_tests.py b/chromium/build/android/gyp/extract_unwind_tables_tests.py
new file mode 100755
index 00000000000..02c70eb049b
--- /dev/null
+++ b/chromium/build/android/gyp/extract_unwind_tables_tests.py
@@ -0,0 +1,121 @@
+#!/usr/bin/env python
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for extract_unwind_tables.py
+
+This test suite contains various tests for extracting CFI tables from breakpad
+symbol files.
+"""
+
+import optparse
+import os
+import struct
+import sys
+import tempfile
+import unittest
+
+import extract_unwind_tables
+
+sys.path.append(os.path.join(os.path.dirname(__file__), "gyp"))
+from util import build_utils
+
+
+class TestExtractUnwindTables(unittest.TestCase):
+ def testExtractCfi(self):
+ with tempfile.NamedTemporaryFile() as input_file, \
+ tempfile.NamedTemporaryFile() as output_file:
+ input_file.write("""
+MODULE Linux arm CDE12FE1DF2B37A9C6560B4CBEE056420 lib_chrome.so
+INFO CODE_ID E12FE1CD2BDFA937C6560B4CBEE05642
+FILE 0 ../../base/allocator/allocator_check.cc
+FILE 1 ../../base/allocator/allocator_extension.cc
+FILE 2 ../../base/allocator/allocator_shim.cc
+FUNC 1adcb60 54 0 i2d_name_canon
+1adcb60 1a 509 17054
+3b94c70 2 69 40
+PUBLIC e17001 0 assist_ranker::(anonymous namespace)::FakePredict::Initialize()
+PUBLIC e17005 0 (anonymous namespace)::FileDeleter(base::File)
+STACK CFI INIT e17000 4 .cfa: sp 0 + .ra: lr
+STACK CFI INIT 0 4 .cfa: sp 0 + .ra: lr
+STACK CFI 2 .cfa: sp 4 +
+STACK CFI 4 .cfa: sp 12 + .ra: .cfa -8 + ^ r7: .cfa -12 + ^
+STACK CFI 6 .cfa: sp 16 +
+STACK CFI INIT e1a96e 20 .cfa: sp 0 + .ra: lr
+STACK CFI e1a970 .cfa: sp 4 +
+STACK CFI e1a972 .cfa: sp 12 + .ra: .cfa -8 + ^ r7: .cfa -12 + ^
+STACK CFI e1a974 .cfa: sp 16 +
+STACK CFI INIT e1a1e4 b0 .cfa: sp 0 + .ra: lr
+STACK CFI e1a1e6 .cfa: sp 16 + .ra: .cfa -4 + ^ r4: .cfa -16 + ^ r5: .cfa -12 +
+STACK CFI e1a1e8 .cfa: sp 80 +
+STACK CFI INIT 0 4 .cfa: sp 0 + .ra: lr
+STACK CFI INIT 3b92e24 3c .cfa: sp 0 + .ra: lr
+STACK CFI 3b92e4c .cfa: sp 16 + .ra: .cfa -12 + ^
+STACK CFI INIT e17004 0 .cfa: sp 0 + .ra: lr
+STACK CFI e17004 2 .cfa: sp 0 + .ra: lr
+STACK CFI INIT 3b92e70 38 .cfa: sp 0 + .ra: lr
+STACK CFI 3b92e74 .cfa: sp 8 + .ra: .cfa -4 + ^ r4: .cfa -8 + ^
+STACK CFI 3b92e90 .cfa: sp 0 + .ra: .ra r4: r4
+STACK CFI INIT 3b93114 6c .cfa: sp 0 + .ra: lr
+STACK CFI 3b93118 .cfa: r7 16 + .ra: .cfa -4 + ^
+STACK CFI INIT 3b92114 6c .cfa: sp 0 + .ra: lr
+STACK CFI 3b92118 .cfa: r7 16 + .ra: .cfa -20 + ^
+STACK CFI INIT 3b93214 fffff .cfa: sp 0 + .ra: lr
+STACK CFI 3b93218 .cfa: r7 16 + .ra: .cfa -4 + ^
+""")
+ input_file.flush()
+ extract_unwind_tables._ParseCfiData(input_file.name, output_file.name)
+
+ expected_cfi_data = {
+ 0xe1a1e4 : [0x2, 0x11, 0x4, 0x50],
+ 0xe1a296 : [],
+ 0xe1a96e : [0x2, 0x4, 0x4, 0xe, 0x6, 0x10],
+ 0xe1a990 : [],
+ 0x3b92e24: [0x28, 0x13],
+ 0x3b92e62: [],
+ }
+ expected_function_count = len(expected_cfi_data)
+
+ actual_output = []
+ with open(output_file.name, 'rb') as f:
+ while True:
+ read = f.read(2)
+ if not read:
+ break
+ actual_output.append(struct.unpack('H', read)[0])
+
+ # First value is size of unw_index table.
+ unw_index_size = actual_output[1] << 16 | actual_output[0]
+ # Each function index is 6 bytes data.
+ self.assertEqual(expected_function_count * 6, unw_index_size)
+ # |actual_output| is in blocks of 2 bytes. Skip first 4 bytes representing
+ # size.
+ unw_index_start = 2
+ unw_index_addr_end = unw_index_start + expected_function_count * 2
+ unw_index_end = unw_index_addr_end + expected_function_count
+ unw_index_addr_col = actual_output[unw_index_start : unw_index_addr_end]
+ unw_index_index_col = actual_output[unw_index_addr_end : unw_index_end]
+
+ unw_data_start = unw_index_end
+ unw_data = actual_output[unw_data_start:]
+
+ for func_iter in range(0, expected_function_count):
+ func_addr = (unw_index_addr_col[func_iter * 2 + 1] << 16 |
+ unw_index_addr_col[func_iter * 2])
+ index = unw_index_index_col[func_iter]
+ # If index is CANT_UNWIND then invalid function.
+ if index == 0xFFFF:
+ self.assertEqual(expected_cfi_data[func_addr], [])
+ continue
+
+ func_start = index + 1
+ func_end = func_start + unw_data[index] * 2
+ self.assertEquals(
+ len(expected_cfi_data[func_addr]), func_end - func_start)
+ func_cfi = unw_data[func_start : func_end]
+ self.assertEqual(expected_cfi_data[func_addr], func_cfi)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/chromium/build/android/gyp/generate_split_manifest.py b/chromium/build/android/gyp/generate_split_manifest.py
deleted file mode 100755
index daa8f67c1b3..00000000000
--- a/chromium/build/android/gyp/generate_split_manifest.py
+++ /dev/null
@@ -1,96 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-"""Creates an AndroidManifest.xml for an APK split.
-
-Given the manifest file for the main APK, generates an AndroidManifest.xml with
-the value required for a Split APK (package, versionCode, etc).
-"""
-
-import optparse
-import xml.etree.ElementTree
-
-from util import build_utils
-
-MANIFEST_TEMPLATE = """<?xml version="1.0" encoding="utf-8"?>
-<manifest
- xmlns:android="http://schemas.android.com/apk/res/android"
- package="%(package)s"
- split="%(split)s">
- <uses-sdk android:minSdkVersion="21" />
- <application android:hasCode="%(has_code)s">
- </application>
-</manifest>
-"""
-
-def ParseArgs():
- """Parses command line options.
-
- Returns:
- An options object as from optparse.OptionsParser.parse_args()
- """
- parser = optparse.OptionParser()
- build_utils.AddDepfileOption(parser)
- parser.add_option('--main-manifest', help='The main manifest of the app')
- parser.add_option('--out-manifest', help='The output manifest')
- parser.add_option('--split', help='The name of the split')
- parser.add_option(
- '--has-code',
- action='store_true',
- default=False,
- help='Whether the split will contain a .dex file')
-
- (options, args) = parser.parse_args()
-
- if args:
- parser.error('No positional arguments should be given.')
-
- # Check that required options have been provided.
- required_options = ('main_manifest', 'out_manifest', 'split')
- build_utils.CheckOptions(options, parser, required=required_options)
-
- return options
-
-
-def Build(main_manifest, split, has_code):
- """Builds a split manifest based on the manifest of the main APK.
-
- Args:
- main_manifest: the XML manifest of the main APK as a string
- split: the name of the split as a string
- has_code: whether this split APK will contain .dex files
-
- Returns:
- The XML split manifest as a string
- """
-
- doc = xml.etree.ElementTree.fromstring(main_manifest)
- package = doc.get('package')
-
- return MANIFEST_TEMPLATE % {
- 'package': package,
- 'split': split.replace('-', '_'),
- 'has_code': str(has_code).lower()
- }
-
-
-def main():
- options = ParseArgs()
- main_manifest = file(options.main_manifest).read()
- split_manifest = Build(
- main_manifest,
- options.split,
- options.has_code)
-
- with file(options.out_manifest, 'w') as f:
- f.write(split_manifest)
-
- if options.depfile:
- deps = [options.main_manifest]
- build_utils.WriteDepfile(options.depfile, options.out_manifest, deps)
-
-
-if __name__ == '__main__':
- main()
diff --git a/chromium/build/android/gyp/javac.py b/chromium/build/android/gyp/javac.py
index f78a2848d8c..ae215642057 100755
--- a/chromium/build/android/gyp/javac.py
+++ b/chromium/build/android/gyp/javac.py
@@ -10,6 +10,7 @@ import os
import shutil
import re
import sys
+import zipfile
from util import build_utils
from util import md5_check
@@ -23,24 +24,14 @@ import colorama
ERRORPRONE_WARNINGS_TO_TURN_OFF = [
# TODO(crbug.com/801210): Follow steps in bug.
'SynchronizeOnNonFinalField',
- # TODO(crbug.com/801261): Follow steps in bug
- 'ArgumentSelectionDefectChecker',
- # TODO(crbug.com/801268): Follow steps in bug.
- 'NarrowingCompoundAssignment',
# TODO(crbug.com/802073): Follow steps in bug.
'TypeParameterUnusedInFormals',
- # TODO(crbug.com/802075): Follow steps in bug.
- 'ReferenceEquality',
# TODO(crbug.com/803484): Follow steps in bug.
'CatchFail',
# TODO(crbug.com/803485): Follow steps in bug.
'JUnitAmbiguousTestClass',
- # TODO(crbug.com/803486): Follow steps in bug.
- 'AssertionFailureIgnored',
# TODO(crbug.com/803589): Follow steps in bug.
'MissingFail',
- # TODO(crbug.com/803625): Follow steps in bug.
- 'StaticGuardedByInstance',
# Android platform default is always UTF-8.
# https://developer.android.com/reference/java/nio/charset/Charset.html#defaultCharset()
'DefaultCharset',
@@ -96,10 +87,15 @@ ERRORPRONE_WARNINGS_TO_TURN_OFF = [
ERRORPRONE_WARNINGS_TO_ERROR = [
# Add warnings to this after fixing/suppressing all instances in our codebase.
+ 'ArgumentSelectionDefectChecker',
+ 'AssertionFailureIgnored',
'FloatingPointLiteralPrecision',
'JavaLangClash',
'MissingOverride',
+ 'NarrowingCompoundAssignment',
'ParameterName',
+ 'ReferenceEquality',
+ 'StaticGuardedByInstance',
'StaticQualifiedUsingExpression',
'UseCorrectAssertInTests',
]
@@ -253,6 +249,22 @@ def _WriteInfoFile(info_path, info_data, srcjar_files):
info_file.write('{},{}\n'.format(fully_qualified_name, path))
+def _FullJavaNameFromClassFilePath(path):
+ # Input: base/android/java/src/org/chromium/Foo.class
+ # Output: base.android.java.src.org.chromium.Foo
+ if not path.endswith('.class'):
+ return ''
+ path = os.path.splitext(path)[0]
+ parts = []
+ while path:
+ # Use split to be platform independent.
+ head, tail = os.path.split(path)
+ path = head
+ parts.append(tail)
+ parts.reverse() # Package comes first
+ return '.'.join(parts)
+
+
def _CreateInfoFile(java_files, options, srcjar_files):
"""Writes a .jar.info file.
@@ -281,7 +293,21 @@ def _CreateInfoFile(java_files, options, srcjar_files):
# Collect all the info files for transitive dependencies of the apk.
if options.apk_jar_info_path:
for jar_path in options.full_classpath:
- info_data.update(_ParseInfoFile(jar_path + '.info'))
+ # android_java_prebuilt adds jar files in the src directory (relative to
+ # the output directory, usually ../../third_party/example.jar).
+ # android_aar_prebuilt collects jar files in the aar file and uses the
+ # java_prebuilt rule to generate gen/example/classes.jar files.
+ # We scan these prebuilt jars to parse each class path for the FQN. This
+ # allows us to later map these classes back to their respective src
+ # directories.
+ if jar_path.startswith('..') or jar_path.endswith('classes.jar'):
+ with zipfile.ZipFile(jar_path) as zip_info:
+ for path in zip_info.namelist():
+ fully_qualified_name = _FullJavaNameFromClassFilePath(path)
+ if fully_qualified_name:
+ info_data[fully_qualified_name] = jar_path
+ else:
+ info_data.update(_ParseInfoFile(jar_path + '.info'))
_WriteInfoFile(options.apk_jar_info_path, info_data, srcjar_files)
diff --git a/chromium/build/android/gyp/main_dex_list.py b/chromium/build/android/gyp/main_dex_list.py
index e18422e958a..bd7e86e549c 100755
--- a/chromium/build/android/gyp/main_dex_list.py
+++ b/chromium/build/android/gyp/main_dex_list.py
@@ -9,8 +9,10 @@ import json
import os
import sys
import tempfile
+import zipfile
from util import build_utils
+from util import proguard_util
sys.path.append(os.path.abspath(os.path.join(
os.path.dirname(__file__), os.pardir)))
@@ -29,19 +31,14 @@ def main(args):
'main dex.')
parser.add_argument('--main-dex-list-path', required=True,
help='The main dex list file to generate.')
- parser.add_argument('--enabled-configurations',
- help='The build configurations for which a main dex list'
- ' should be generated.')
- parser.add_argument('--configuration-name',
- help='The current build configuration.')
- parser.add_argument('--multidex-configuration-path',
- help='A JSON file containing multidex build '
- 'configuration.')
parser.add_argument('--inputs',
help='JARs for which a main dex list should be '
'generated.')
parser.add_argument('--proguard-path', required=True,
help='Path to the proguard executable.')
+ parser.add_argument('--negative-main-dex-globs',
+ help='GN-list of globs of .class names (e.g. org/chromium/foo/Bar.class) '
+ 'that will fail the build if they match files in the main dex.')
parser.add_argument('paths', nargs='*', default=[],
help='JARs for which a main dex list should be '
@@ -49,29 +46,22 @@ def main(args):
args = parser.parse_args(build_utils.ExpandFileArgs(args))
- if args.multidex_configuration_path:
- with open(args.multidex_configuration_path) as multidex_config_file:
- multidex_config = json.loads(multidex_config_file.read())
-
- if not multidex_config.get('enabled', False):
- return 0
-
if args.inputs:
args.paths.extend(build_utils.ParseGnList(args.inputs))
+ if args.negative_main_dex_globs:
+ args.negative_main_dex_globs = build_utils.ParseGnList(
+ args.negative_main_dex_globs)
shrinked_android_jar = os.path.abspath(
os.path.join(args.android_sdk_tools, 'lib', 'shrinkedAndroid.jar'))
dx_jar = os.path.abspath(
os.path.join(args.android_sdk_tools, 'lib', 'dx.jar'))
- rules_file = os.path.abspath(
- os.path.join(args.android_sdk_tools, 'mainDexClasses.rules'))
proguard_cmd = [
'java', '-jar', args.proguard_path,
'-forceprocessing',
'-dontwarn', '-dontoptimize', '-dontobfuscate', '-dontpreverify',
'-libraryjars', shrinked_android_jar,
- '-include', rules_file,
]
for m in args.main_dex_rules_paths:
proguard_cmd.extend(['-include', m])
@@ -79,13 +69,16 @@ def main(args):
main_dex_list_cmd = [
'java', '-cp', dx_jar,
'com.android.multidex.MainDexListBuilder',
+ # This workaround significantly increases main dex size and doesn't seem to
+ # be needed by Chrome. See comment in the source:
+ # https://android.googlesource.com/platform/dalvik/+/master/dx/src/com/android/multidex/MainDexListBuilder.java
+ '--disable-annotation-resolution-workaround',
]
input_paths = list(args.paths)
input_paths += [
shrinked_android_jar,
dx_jar,
- rules_file,
]
input_paths += args.main_dex_rules_paths
@@ -93,6 +86,8 @@ def main(args):
proguard_cmd,
main_dex_list_cmd,
]
+ if args.negative_main_dex_globs:
+ input_strings += args.negative_main_dex_globs
output_paths = [
args.main_dex_list_path,
@@ -100,7 +95,8 @@ def main(args):
build_utils.CallAndWriteDepfileIfStale(
lambda: _OnStaleMd5(proguard_cmd, main_dex_list_cmd, args.paths,
- args.main_dex_list_path),
+ args.main_dex_list_path,
+ args.negative_main_dex_globs),
args,
input_paths=input_paths,
input_strings=input_strings,
@@ -109,21 +105,60 @@ def main(args):
return 0
-def _OnStaleMd5(proguard_cmd, main_dex_list_cmd, paths, main_dex_list_path):
+def _CheckForUnwanted(kept_classes, proguard_cmd, negative_main_dex_globs):
+ # Check if ProGuard kept any unwanted classes.
+ found_unwanted_classes = sorted(
+ p for p in kept_classes
+ if build_utils.MatchesGlob(p, negative_main_dex_globs))
+
+ if found_unwanted_classes:
+ first_class = found_unwanted_classes[0].replace(
+ '.class', '').replace('/', '.')
+ proguard_cmd += ['-whyareyoukeeping', 'class', first_class, '{}']
+ output = build_utils.CheckOutput(
+ proguard_cmd, print_stderr=False,
+ stdout_filter=proguard_util.ProguardOutputFilter())
+ raise Exception(
+ ('Found classes that should not be in the main dex:\n {}\n\n'
+ 'Here is the -whyareyoukeeping output for {}: \n{}').format(
+ '\n '.join(found_unwanted_classes), first_class, output))
+
+
+def _OnStaleMd5(proguard_cmd, main_dex_list_cmd, paths, main_dex_list_path,
+ negative_main_dex_globs):
paths_arg = ':'.join(paths)
main_dex_list = ''
try:
with tempfile.NamedTemporaryFile(suffix='.jar') as temp_jar:
+ # Step 1: Use ProGuard to find all @MainDex code, and all code reachable
+ # from @MainDex code (recursive).
proguard_cmd += [
'-injars', paths_arg,
'-outjars', temp_jar.name
]
build_utils.CheckOutput(proguard_cmd, print_stderr=False)
+ # Record the classes kept by ProGuard. Not used by the build, but useful
+ # for debugging what classes are kept by ProGuard vs. MainDexListBuilder.
+ with zipfile.ZipFile(temp_jar.name) as z:
+ kept_classes = [p for p in z.namelist() if p.endswith('.class')]
+ with open(main_dex_list_path + '.partial', 'w') as f:
+ f.write('\n'.join(kept_classes) + '\n')
+
+ if negative_main_dex_globs:
+ # Perform assertions before MainDexListBuilder because:
+ # a) MainDexListBuilder is not recursive, so being included by it isn't
+ # a huge deal.
+ # b) Errors are much more actionable.
+ _CheckForUnwanted(kept_classes, proguard_cmd, negative_main_dex_globs)
+
+ # Step 2: Expand inclusion list to all classes referenced by the .class
+ # files of kept classes (non-recursive).
main_dex_list_cmd += [
temp_jar.name, paths_arg
]
main_dex_list = build_utils.CheckOutput(main_dex_list_cmd)
+
except build_utils.CalledProcessError as e:
if 'output jar is empty' in e.output:
pass
diff --git a/chromium/build/android/gyp/prepare_resources.py b/chromium/build/android/gyp/prepare_resources.py
new file mode 100755
index 00000000000..fee7932e6df
--- /dev/null
+++ b/chromium/build/android/gyp/prepare_resources.py
@@ -0,0 +1,285 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Process Android resource directories to generate .resources.zip, R.txt and
+.srcjar files."""
+
+import argparse
+import collections
+import os
+import re
+import shutil
+import sys
+
+import generate_v14_compatible_resources
+
+from util import build_utils
+from util import resource_utils
+
+
+def _ParseArgs(args):
+ """Parses command line options.
+
+ Returns:
+ An options object as from argparse.ArgumentParser.parse_args()
+ """
+ parser, input_opts, output_opts = resource_utils.ResourceArgsParser()
+
+ input_opts.add_argument('--resource-dirs',
+ default='[]',
+ help='A list of input directories containing resources '
+ 'for this target.')
+
+ input_opts.add_argument(
+ '--shared-resources',
+ action='store_true',
+ help='Make resources shareable by generating an onResourcesLoaded() '
+ 'method in the R.java source file.')
+
+ input_opts.add_argument('--custom-package',
+ help='Optional Java package for main R.java.')
+
+ input_opts.add_argument(
+ '--android-manifest',
+ help='Optional AndroidManifest.xml path. Only used to extract a package '
+ 'name for R.java if a --custom-package is not provided.')
+
+ output_opts.add_argument(
+ '--resource-zip-out',
+ help='Path to a zip archive containing all resources from '
+ '--resource-dirs, merged into a single directory tree. This will '
+ 'also include auto-generated v14-compatible resources unless '
+ '--v14-skip is used.')
+
+ output_opts.add_argument('--srcjar-out',
+ help='Path to .srcjar to contain the generated R.java.')
+
+ output_opts.add_argument('--r-text-out',
+ help='Path to store the generated R.txt file.')
+
+ input_opts.add_argument(
+ '--v14-skip',
+ action="store_true",
+ help='Do not generate nor verify v14 resources.')
+
+ options = parser.parse_args(args)
+
+ resource_utils.HandleCommonOptions(options)
+
+ options.resource_dirs = build_utils.ParseGnList(options.resource_dirs)
+
+ return options
+
+
+def _GenerateGlobs(pattern):
+ # This function processes the aapt ignore assets pattern into a list of globs
+ # to be used to exclude files on the python side. It removes the '!', which is
+  # used by aapt to mean 'not chatty', so it does not print a message when a
+  # file is ignored (we don't print anyway, so it is not required). This
+  # function does not handle the <dir> and <file> prefixes used by aapt, which
+  # are assumed not to be included in the pattern string.
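+  # E.g. (illustrative pattern): '!.svn:!*.scc:*~' -> ['.svn', '*.scc', '*~'].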
+ return pattern.replace('!', '').split(':')
+
+
+def _ZipResources(resource_dirs, zip_path, ignore_pattern):
+ # Python zipfile does not provide a way to replace a file (it just writes
+ # another file with the same name). So, first collect all the files to put
+ # in the zip (with proper overriding), and then zip them.
+  # ignore_pattern is a ':'-delimited list of globs used to ignore files that
+  # should not be part of the final resource zip.
+ files_to_zip = dict()
+ globs = _GenerateGlobs(ignore_pattern)
+ for d in resource_dirs:
+ for root, _, files in os.walk(d):
+ for f in files:
+ archive_path = f
+ parent_dir = os.path.relpath(root, d)
+ if parent_dir != '.':
+ archive_path = os.path.join(parent_dir, f)
+ path = os.path.join(root, f)
+ if build_utils.MatchesGlob(archive_path, globs):
+ continue
+ files_to_zip[archive_path] = path
+ build_utils.DoZip(files_to_zip.iteritems(), zip_path)
+
+
+def _GenerateRTxt(options, dep_subdirs, gen_dir):
+ """Generate R.txt file.
+
+ Args:
+ options: The command-line options tuple.
+ dep_subdirs: List of directories containing extracted dependency resources.
+    gen_dir: Directory where the aapt-generated files will go. In particular
+ the output file is always generated as |{gen_dir}/R.txt|.
+ """
+ # NOTE: This uses aapt rather than aapt2 because 'aapt2 compile' does not
+ # support the --output-text-symbols option yet (https://crbug.com/820460).
+ package_command = [options.aapt_path,
+ 'package',
+ '-m',
+ '-M', resource_utils.EMPTY_ANDROID_MANIFEST_PATH,
+ '--no-crunch',
+ '--auto-add-overlay',
+ '--no-version-vectors',
+ '-I', options.android_sdk_jar,
+ '--output-text-symbols', gen_dir,
+ '-J', gen_dir, # Required for R.txt generation.
+ '--ignore-assets', build_utils.AAPT_IGNORE_PATTERN]
+
+ # Adding all dependencies as sources is necessary for @type/foo references
+ # to symbols within dependencies to resolve. However, it has the side-effect
+ # that all Java symbols from dependencies are copied into the new R.java.
+ # E.g.: It enables an arguably incorrect usage of
+ # "mypackage.R.id.lib_symbol" where "libpackage.R.id.lib_symbol" would be
+ # more correct. This is just how Android works.
+ for d in dep_subdirs:
+ package_command += ['-S', d]
+
+ for d in options.resource_dirs:
+ package_command += ['-S', d]
+
+ # Only creates an R.txt
+ build_utils.CheckOutput(
+ package_command, print_stdout=False, print_stderr=False)
+
+
+def _GenerateResourcesZip(output_resource_zip, input_resource_dirs,
+ v14_skip, temp_dir):
+ """Generate a .resources.zip file fron a list of input resource dirs.
+
+ Args:
+ output_resource_zip: Path to the output .resources.zip file.
+ input_resource_dirs: A list of input resource directories.
+    v14_skip: If False, then v14-compatible resources will also be
+ generated in |{temp_dir}/v14| and added to the final zip.
+ temp_dir: Path to temporary directory.
+ """
+ if not v14_skip:
+ # Generate v14-compatible resources in temp_dir.
+ v14_dir = os.path.join(temp_dir, 'v14')
+ build_utils.MakeDirectory(v14_dir)
+
+ for resource_dir in input_resource_dirs:
+ generate_v14_compatible_resources.GenerateV14Resources(
+ resource_dir,
+ v14_dir)
+
+ input_resource_dirs.append(v14_dir)
+
+ _ZipResources(input_resource_dirs, output_resource_zip,
+ build_utils.AAPT_IGNORE_PATTERN)
+
+
+def _OnStaleMd5(options):
+ with resource_utils.BuildContext() as build:
+ if options.r_text_in:
+ r_txt_path = options.r_text_in
+ else:
+ # Extract dependencies to resolve @foo/type references into
+ # dependent packages.
+ dep_subdirs = resource_utils.ExtractDeps(options.dependencies_res_zips,
+ build.deps_dir)
+
+ _GenerateRTxt(options, dep_subdirs, build.gen_dir)
+ r_txt_path = build.r_txt_path
+
+ # 'aapt' doesn't generate any R.txt file if res/ was empty.
+ if not os.path.exists(r_txt_path):
+ build_utils.Touch(r_txt_path)
+
+ if options.r_text_out:
+ shutil.copyfile(r_txt_path, options.r_text_out)
+
+ if options.srcjar_out:
+ package = options.custom_package
+ if not package and options.android_manifest:
+ package = resource_utils.ExtractPackageFromManifest(
+ options.android_manifest)
+
+ # Don't create a .java file for the current resource target when no
+ # package name was provided (either by manifest or build rules).
+ if package:
+ # All resource IDs should be non-final here, but the
+ # onResourcesLoaded() method should only be generated if
+ # --shared-resources is used.
+ rjava_build_options = resource_utils.RJavaBuildOptions()
+ rjava_build_options.ExportAllResources()
+ rjava_build_options.ExportAllStyleables()
+ if options.shared_resources:
+ rjava_build_options.GenerateOnResourcesLoaded()
+
+ resource_utils.CreateRJavaFiles(
+ build.srcjar_dir, package, r_txt_path,
+ options.extra_res_packages,
+ options.extra_r_text_files,
+ rjava_build_options)
+
+ build_utils.ZipDir(options.srcjar_out, build.srcjar_dir)
+
+ if options.resource_zip_out:
+ _GenerateResourcesZip(options.resource_zip_out, options.resource_dirs,
+ options.v14_skip, build.temp_dir)
+
+
+def main(args):
+ args = build_utils.ExpandFileArgs(args)
+ options = _ParseArgs(args)
+
+ # Order of these must match order specified in GN so that the correct one
+ # appears first in the depfile.
+ possible_output_paths = [
+ options.resource_zip_out,
+ options.r_text_out,
+ options.srcjar_out,
+ ]
+ output_paths = [x for x in possible_output_paths if x]
+
+ # List python deps in input_strings rather than input_paths since their
+ # contents do not change what gets written to the depfile.
+ input_strings = options.extra_res_packages + [
+ options.custom_package,
+ options.shared_resources,
+ options.v14_skip,
+ ]
+
+ possible_input_paths = [
+ options.aapt_path,
+ options.android_manifest,
+ options.android_sdk_jar,
+ ]
+ input_paths = [x for x in possible_input_paths if x]
+ input_paths.extend(options.dependencies_res_zips)
+ input_paths.extend(options.extra_r_text_files)
+
+ # Resource files aren't explicitly listed in GN. Listing them in the depfile
+ # ensures the target will be marked stale when resource files are removed.
+ depfile_deps = []
+ resource_names = []
+ for resource_dir in options.resource_dirs:
+ for resource_file in build_utils.FindInDirectory(resource_dir, '*'):
+ # Don't list the empty .keep file in depfile. Since it doesn't end up
+ # included in the .zip, it can lead to -w 'dupbuild=err' ninja errors
+ # if ever moved.
+ if not resource_file.endswith(os.path.join('empty', '.keep')):
+ input_paths.append(resource_file)
+ depfile_deps.append(resource_file)
+ resource_names.append(os.path.relpath(resource_file, resource_dir))
+
+ # Resource filenames matter to the output, so add them to strings as well.
+ # This matters if a file is renamed but not changed (http://crbug.com/597126).
+ input_strings.extend(sorted(resource_names))
+
+ build_utils.CallAndWriteDepfileIfStale(
+ lambda: _OnStaleMd5(options),
+ options,
+ input_paths=input_paths,
+ input_strings=input_strings,
+ output_paths=output_paths,
+ depfile_deps=depfile_deps)
+
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
diff --git a/chromium/build/android/gyp/process_resources.py b/chromium/build/android/gyp/process_resources.py
deleted file mode 100755
index 602cea6c896..00000000000
--- a/chromium/build/android/gyp/process_resources.py
+++ /dev/null
@@ -1,995 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Process Android resources to generate R.java, and prepare for packaging.
-
-This will crunch images with aapt2 and generate v14 compatible resources
-(see generate_v14_compatible_resources.py).
-"""
-
-import codecs
-import collections
-import multiprocessing.pool
-import optparse
-import os
-import re
-import shutil
-import subprocess
-import sys
-import zipfile
-from xml.etree import ElementTree
-
-
-import generate_v14_compatible_resources
-
-from util import build_utils
-
-_SOURCE_ROOT = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(
- __file__))))
-# Import jinja2 from third_party/jinja2
-sys.path.insert(1, os.path.join(_SOURCE_ROOT, 'third_party'))
-from jinja2 import Template # pylint: disable=F0401
-
-
-_EMPTY_ANDROID_MANIFEST_PATH = os.path.join(
- _SOURCE_ROOT, 'build', 'android', 'AndroidManifest.xml')
-
-
-# Represents a line from a R.txt file.
-TextSymbolsEntry = collections.namedtuple('RTextEntry',
- ('java_type', 'resource_type', 'name', 'value'))
-
-
-# A variation of this lists also exists in:
-# //base/android/java/src/org/chromium/base/LocaleUtils.java
-_CHROME_TO_ANDROID_LOCALE_MAP = {
- 'en-GB': 'en-rGB',
- 'en-US': 'en-rUS',
- 'es-419': 'es-rUS',
- 'fil': 'tl',
- 'he': 'iw',
- 'id': 'in',
- 'pt-PT': 'pt-rPT',
- 'pt-BR': 'pt-rBR',
- 'yi': 'ji',
- 'zh-CN': 'zh-rCN',
- 'zh-TW': 'zh-rTW',
-}
-
-# List is generated from the chrome_apk.apk_intermediates.ap_ via:
-# unzip -l $FILE_AP_ | cut -c31- | grep res/draw | cut -d'/' -f 2 | sort \
-# | uniq | grep -- -tvdpi- | cut -c10-
-# and then manually sorted.
-# Note that we can't just do a cross-product of dimensions because the filenames
-# become too big and aapt fails to create the files.
-# This leaves all default drawables (mdpi) in the main apk. Android gets upset
-# though if any drawables are missing from the default drawables/ directory.
-_DENSITY_SPLITS = {
- 'hdpi': (
- 'hdpi-v4', # Order matters for output file names.
- 'ldrtl-hdpi-v4',
- 'sw600dp-hdpi-v13',
- 'ldrtl-hdpi-v17',
- 'ldrtl-sw600dp-hdpi-v17',
- 'hdpi-v21',
- ),
- 'xhdpi': (
- 'xhdpi-v4',
- 'ldrtl-xhdpi-v4',
- 'sw600dp-xhdpi-v13',
- 'ldrtl-xhdpi-v17',
- 'ldrtl-sw600dp-xhdpi-v17',
- 'xhdpi-v21',
- ),
- 'xxhdpi': (
- 'xxhdpi-v4',
- 'ldrtl-xxhdpi-v4',
- 'sw600dp-xxhdpi-v13',
- 'ldrtl-xxhdpi-v17',
- 'ldrtl-sw600dp-xxhdpi-v17',
- 'xxhdpi-v21',
- ),
- 'xxxhdpi': (
- 'xxxhdpi-v4',
- 'ldrtl-xxxhdpi-v4',
- 'sw600dp-xxxhdpi-v13',
- 'ldrtl-xxxhdpi-v17',
- 'ldrtl-sw600dp-xxxhdpi-v17',
- 'xxxhdpi-v21',
- ),
- 'tvdpi': (
- 'tvdpi-v4',
- 'sw600dp-tvdpi-v13',
- 'ldrtl-sw600dp-tvdpi-v17',
- ),
-}
-
-# Pngs that we shouldn't convert to webp. Please add rationale when updating.
-_PNG_WEBP_BLACKLIST_PATTERN = re.compile('|'.join([
- # Crashes on Galaxy S5 running L (https://crbug.com/807059).
- r'.*star_gray\.png',
- # Android requires pngs for 9-patch images.
- r'.*\.9\.png',
- # Daydream requires pngs for icon files.
- r'.*daydream_icon_.*\.png']))
-
-
-class _ResourceWhitelist(object):
- def __init__(self, entries=None):
- self._entries = None
- if entries:
- self._entries = set(self._Key(x) for x in entries)
-
- def __contains__(self, entry):
- return self._entries is None or self._Key(entry) in self._entries
-
- @staticmethod
- def _Key(entry):
- # Whitelists should only care about the name of the resource rather than the
- # resource ID (since the whitelist is from another compilation unit, the
- # resource IDs may not match).
- return (entry.java_type, entry.resource_type, entry.name)
-
-
-def _ParseArgs(args):
- """Parses command line options.
-
- Returns:
- An options object as from optparse.OptionsParser.parse_args()
- """
- parser = optparse.OptionParser()
- build_utils.AddDepfileOption(parser)
-
- parser.add_option('--android-sdk-jar',
- help='the path to android jar file.')
- parser.add_option('--aapt-path',
- help='path to the Android aapt tool')
- parser.add_option('--non-constant-id', action='store_true')
-
- parser.add_option('--android-manifest', help='AndroidManifest.xml path')
- parser.add_option('--custom-package', help='Java package for R.java')
- parser.add_option(
- '--shared-resources',
- action='store_true',
- help='Make a resource package that can be loaded by a different'
- 'application at runtime to access the package\'s resources.')
- parser.add_option(
- '--app-as-shared-lib',
- action='store_true',
- help='Make a resource package that can be loaded as shared library.')
- parser.add_option(
- '--shared-resources-whitelist',
- help='An R.txt file acting as a whitelist for resources that should be '
- 'non-final and have their package ID changed at runtime in R.java. If no '
- 'whitelist is provided, then everything is whitelisted.')
-
- parser.add_option('--resource-dirs',
- default='[]',
- help='Directories containing resources of this target.')
- parser.add_option('--dependencies-res-zips',
- help='Resources from dependents.')
-
- parser.add_option('--resource-zip-out',
- help='Path for output zipped resources.')
-
- parser.add_option('--srcjar-out',
- help='Path to srcjar to contain generated R.java.')
- parser.add_option('--r-text-out',
- help='Path to store the generated R.txt file.')
- parser.add_option('--r-text-in',
- help='Path to pre-existing R.txt for these resources. '
- 'Resource names from it will be used to generate R.java '
- 'instead of aapt-generated R.txt.')
-
- parser.add_option('--proguard-file',
- help='Path to proguard.txt generated file')
- parser.add_option('--proguard-file-main-dex',
- help='Path to proguard.txt generated file for main dex')
-
- parser.add_option(
- '--v14-skip',
- action="store_true",
- help='Do not generate nor verify v14 resources')
-
- parser.add_option(
- '--extra-res-packages',
- help='Additional package names to generate R.java files for')
- parser.add_option(
- '--extra-r-text-files',
- help='For each additional package, the R.txt file should contain a '
- 'list of resources to be included in the R.java file in the format '
- 'generated by aapt')
-
- parser.add_option('--support-zh-hk', action='store_true',
- help='Use zh-rTW resources for zh-rHK.')
-
- parser.add_option('--stamp', help='File to touch on success')
-
- parser.add_option('--debuggable',
- action='store_true',
- help='Whether to add android:debuggable="true"')
- parser.add_option('--version-code', help='Version code for apk.')
- parser.add_option('--version-name', help='Version name for apk.')
- parser.add_option('--no-compress', help='disables compression for the '
- 'given comma separated list of extensions')
- parser.add_option(
- '--create-density-splits',
- action='store_true',
- help='Enables density splits')
- parser.add_option('--language-splits',
- default='[]',
- help='GN list of languages to create splits for')
- parser.add_option('--locale-whitelist',
- default='[]',
- help='GN list of languages to include. All other language '
- 'configs will be stripped out. List may include '
- 'a combination of Android locales or Chrome locales.')
- parser.add_option('--apk-path',
- help='Path to output (partial) apk.')
- parser.add_option('--exclude-xxxhdpi', action='store_true',
- help='Do not include xxxhdpi drawables.')
- parser.add_option('--xxxhdpi-whitelist',
- default='[]',
- help='GN list of globs that say which xxxhdpi images to '
- 'include even when --exclude-xxxhdpi is set.')
- parser.add_option('--png-to-webp', action='store_true',
- help='Convert png files to webp format.')
- parser.add_option('--webp-binary', default='',
- help='Path to the cwebp binary.')
- parser.add_option('--no-xml-namespaces',
- action='store_true',
- help='Whether to strip xml namespaces from processed xml '
- 'resources')
-
- options, positional_args = parser.parse_args(args)
-
- if positional_args:
- parser.error('No positional arguments should be given.')
-
- # Check that required options have been provided.
- required_options = (
- 'android_sdk_jar',
- 'aapt_path',
- 'dependencies_res_zips',
- )
- build_utils.CheckOptions(options, parser, required=required_options)
-
- options.resource_dirs = build_utils.ParseGnList(options.resource_dirs)
- options.dependencies_res_zips = (
- build_utils.ParseGnList(options.dependencies_res_zips))
-
- options.language_splits = build_utils.ParseGnList(options.language_splits)
- options.locale_whitelist = build_utils.ParseGnList(options.locale_whitelist)
- options.xxxhdpi_whitelist = build_utils.ParseGnList(options.xxxhdpi_whitelist)
-
- # Don't use [] as default value since some script explicitly pass "".
- if options.extra_res_packages:
- options.extra_res_packages = (
- build_utils.ParseGnList(options.extra_res_packages))
- else:
- options.extra_res_packages = []
-
- if options.extra_r_text_files:
- options.extra_r_text_files = (
- build_utils.ParseGnList(options.extra_r_text_files))
- else:
- options.extra_r_text_files = []
-
- return options
-
-
-def _CreateRJavaFiles(srcjar_dir, main_r_txt_file, packages, r_txt_files,
- shared_resources, non_constant_id, whitelist_r_txt_file, is_apk):
- assert len(packages) == len(r_txt_files), 'Need one R.txt file per package'
-
- # Map of (resource_type, name) -> Entry.
- # Contains the correct values for resources.
- all_resources = {}
- for entry in _ParseTextSymbolsFile(main_r_txt_file):
- entry = entry._replace(value=_FixPackageIds(entry.value))
- all_resources[(entry.resource_type, entry.name)] = entry
-
- if whitelist_r_txt_file:
- whitelisted_resources = _ResourceWhitelist(
- _ParseTextSymbolsFile(whitelist_r_txt_file))
- else:
- whitelisted_resources = _ResourceWhitelist()
-
- # Map of package_name->resource_type->entry
- resources_by_package = (
- collections.defaultdict(lambda: collections.defaultdict(list)))
- # Build the R.java files using each package's R.txt file, but replacing
- # each entry's placeholder value with correct values from all_resources.
- for package, r_txt_file in zip(packages, r_txt_files):
- if package in resources_by_package:
- raise Exception(('Package name "%s" appeared twice. All '
- 'android_resources() targets must use unique package '
- 'names, or no package name at all.') % package)
- resources_by_type = resources_by_package[package]
- # The sub-R.txt files have the wrong values at this point. Read them to
- # figure out which entries belong to them, but use the values from the
- # main R.txt file.
- for entry in _ParseTextSymbolsFile(r_txt_file):
- entry = all_resources.get((entry.resource_type, entry.name))
- # For most cases missing entry here is an error. It means that some
- # library claims to have or depend on a resource that isn't included into
- # the APK. There is one notable exception: Google Play Services (GMS).
- # GMS is shipped as a bunch of AARs. One of them - basement - contains
- # R.txt with ids of all resources, but most of the resources are in the
- # other AARs. However, all other AARs reference their resources via
- # basement's R.java so the latter must contain all ids that are in its
- # R.txt. Most targets depend on only a subset of GMS AARs so some
- # resources are missing, which is okay because the code that references
- # them is missing too. We can't get an id for a resource that isn't here
- # so the only solution is to skip the resource entry entirely.
- #
- # We can verify that all entries referenced in the code were generated
- # correctly by running Proguard on the APK: it will report missing
- # fields.
- if entry:
- resources_by_type[entry.resource_type].append(entry)
-
- for package, resources_by_type in resources_by_package.iteritems():
- package_r_java_dir = os.path.join(srcjar_dir, *package.split('.'))
- build_utils.MakeDirectory(package_r_java_dir)
- package_r_java_path = os.path.join(package_r_java_dir, 'R.java')
- java_file_contents = _CreateRJavaFile(package, resources_by_type,
- shared_resources, non_constant_id, whitelisted_resources, is_apk)
- with open(package_r_java_path, 'w') as f:
- f.write(java_file_contents)
-
-
-def _ParseTextSymbolsFile(path):
- """Given an R.txt file, returns a list of TextSymbolsEntry."""
- ret = []
- with open(path) as f:
- for line in f:
- m = re.match(r'(int(?:\[\])?) (\w+) (\w+) (.+)$', line)
- if not m:
- raise Exception('Unexpected line in R.txt: %s' % line)
- java_type, resource_type, name, value = m.groups()
- ret.append(TextSymbolsEntry(java_type, resource_type, name, value))
- return ret
-
-
-def _FixPackageIds(resource_value):
- # Resource IDs for resources belonging to regular APKs have their first byte
- # as 0x7f (package id). However with webview, since it is not a regular apk
- # but used as a shared library, aapt is passed the --shared-resources flag
- # which changes some of the package ids to 0x02 and 0x00. This function just
- # normalises all package ids to 0x7f, which the generated code in R.java
- # changes to the correct package id at runtime.
- # resource_value is a string with either, a single value '0x12345678', or an
- # array of values like '{ 0xfedcba98, 0x01234567, 0x56789abc }'
- return re.sub(r'0x(?!01)\d\d', r'0x7f', resource_value)
-
-
-def _CreateRJavaFile(package, resources_by_type, shared_resources,
- non_constant_id, whitelisted_resources, is_apk):
- """Generates the contents of a R.java file."""
- final_resources_by_type = collections.defaultdict(list)
- non_final_resources_by_type = collections.defaultdict(list)
- if shared_resources or non_constant_id:
- for res_type, resources in resources_by_type.iteritems():
- for entry in resources:
- # Entries in stylable that are not int[] are not actually resource ids
- # but constants. If we are creating an apk there is no reason for them
- # to be non-final. However for libraries, they may be clobbered later on
- # and thus should remain non-final. This is regardless of the
- # whitelisting rules (since they are not actually resources).
- if entry.resource_type == 'styleable' and entry.java_type != 'int[]':
- if is_apk:
- final_resources_by_type[res_type].append(entry)
- else:
- non_final_resources_by_type[res_type].append(entry)
- elif entry in whitelisted_resources:
- non_final_resources_by_type[res_type].append(entry)
- else:
- final_resources_by_type[res_type].append(entry)
- else:
- final_resources_by_type = resources_by_type
-
- # Keep these assignments all on one line to make diffing against regular
- # aapt-generated files easier.
- create_id = ('{{ e.resource_type }}.{{ e.name }} ^= packageIdTransform;')
- create_id_arr = ('{{ e.resource_type }}.{{ e.name }}[i] ^='
- ' packageIdTransform;')
- # Here we diverge from what aapt does. Because we have so many
- # resources, the onResourcesLoaded method was exceeding the 64KB limit that
- # Java imposes. For this reason we split onResourcesLoaded into different
- # methods for each resource type.
- template = Template("""/* AUTO-GENERATED FILE. DO NOT MODIFY. */
-
-package {{ package }};
-
-public final class R {
- private static boolean sResourcesDidLoad;
- {% for resource_type in resource_types %}
- public static final class {{ resource_type }} {
- {% for e in final_resources[resource_type] %}
- public static final {{ e.java_type }} {{ e.name }} = {{ e.value }};
- {% endfor %}
- {% for e in non_final_resources[resource_type] %}
- public static {{ e.java_type }} {{ e.name }} = {{ e.value }};
- {% endfor %}
- }
- {% endfor %}
- {% if shared_resources %}
- public static void onResourcesLoaded(int packageId) {
- assert !sResourcesDidLoad;
- sResourcesDidLoad = true;
- int packageIdTransform = (packageId ^ 0x7f) << 24;
- {% for resource_type in resource_types %}
- onResourcesLoaded{{ resource_type|title }}(packageIdTransform);
- {% for e in non_final_resources[resource_type] %}
- {% if e.java_type == 'int[]' %}
- for(int i = 0; i < {{ e.resource_type }}.{{ e.name }}.length; ++i) {
- """ + create_id_arr + """
- }
- {% endif %}
- {% endfor %}
- {% endfor %}
- }
- {% for res_type in resource_types %}
- private static void onResourcesLoaded{{ res_type|title }} (
- int packageIdTransform) {
- {% for e in non_final_resources[res_type] %}
- {% if res_type != 'styleable' and e.java_type != 'int[]' %}
- """ + create_id + """
- {% endif %}
- {% endfor %}
- }
- {% endfor %}
- {% endif %}
-}
-""", trim_blocks=True, lstrip_blocks=True)
-
- return template.render(package=package,
- resource_types=sorted(resources_by_type),
- shared_resources=shared_resources,
- final_resources=final_resources_by_type,
- non_final_resources=non_final_resources_by_type)
-
-
-def _GenerateGlobs(pattern):
- # This function processes the aapt ignore assets pattern into a list of globs
- # to be used to exclude files on the python side. It removes the '!', which is
- # used by aapt to mean 'not chatty' so it does not output if the file is
- # ignored (we dont output anyways, so it is not required). This function does
- # not handle the <dir> and <file> prefixes used by aapt and are assumed not to
- # be included in the pattern string.
- return pattern.replace('!', '').split(':')
-
-
-def _ZipResources(resource_dirs, zip_path, ignore_pattern):
- # Python zipfile does not provide a way to replace a file (it just writes
- # another file with the same name). So, first collect all the files to put
- # in the zip (with proper overriding), and then zip them.
- # ignore_pattern is a string of ':' delimited list of globs used to ignore
- # files that should not be part of the final resource zip.
- files_to_zip = dict()
- globs = _GenerateGlobs(ignore_pattern)
- for d in resource_dirs:
- for root, _, files in os.walk(d):
- for f in files:
- archive_path = f
- parent_dir = os.path.relpath(root, d)
- if parent_dir != '.':
- archive_path = os.path.join(parent_dir, f)
- path = os.path.join(root, f)
- if build_utils.MatchesGlob(archive_path, globs):
- continue
- files_to_zip[archive_path] = path
- build_utils.DoZip(files_to_zip.iteritems(), zip_path)
-
-def _SortZip(original_path, sorted_path):
- with zipfile.ZipFile(sorted_path, 'w') as sorted_zip, \
- zipfile.ZipFile(original_path, 'r') as original_zip:
- for info in sorted(original_zip.infolist(), key=lambda i: i.filename):
- sorted_zip.writestr(info, original_zip.read(info))
-
-
-
-def _DuplicateZhResources(resource_dirs):
- for resource_dir in resource_dirs:
- # We use zh-TW resources for zh-HK (if we have zh-TW resources).
- for path in build_utils.IterFiles(resource_dir):
- if 'zh-rTW' in path:
- hk_path = path.replace('zh-rTW', 'zh-rHK')
- build_utils.MakeDirectory(os.path.dirname(hk_path))
- shutil.copyfile(path, hk_path)
-
-def _ExtractPackageFromManifest(manifest_path):
- doc = ElementTree.parse(manifest_path)
- return doc.getroot().get('package')
-
-
-def _ToAaptLocales(locale_whitelist, support_zh_hk):
- """Converts the list of Chrome locales to aapt config locales."""
- ret = set()
- for locale in locale_whitelist:
- locale = _CHROME_TO_ANDROID_LOCALE_MAP.get(locale, locale)
- if locale is None or ('-' in locale and '-r' not in locale):
- raise Exception('_CHROME_TO_ANDROID_LOCALE_MAP needs updating.'
- ' Found: %s' % locale)
- ret.add(locale)
- # Always keep non-regional fall-backs.
- language = locale.split('-')[0]
- ret.add(language)
-
- # We don't actually support zh-HK in Chrome on Android, but we mimic the
- # native side behavior where we use zh-TW resources when the locale is set to
- # zh-HK. See https://crbug.com/780847.
- if support_zh_hk:
- assert not any('HK' in l for l in locale_whitelist), (
- 'Remove special logic if zh-HK is now supported (crbug.com/780847).')
- ret.add('zh-rHK')
- return sorted(ret)
-
-
-def _MoveImagesToNonMdpiFolders(res_root):
- """Move images from drawable-*-mdpi-* folders to drawable-* folders.
-
- Why? http://crbug.com/289843
- """
- for src_dir_name in os.listdir(res_root):
- src_components = src_dir_name.split('-')
- if src_components[0] != 'drawable' or 'mdpi' not in src_components:
- continue
- src_dir = os.path.join(res_root, src_dir_name)
- if not os.path.isdir(src_dir):
- continue
- dst_components = [c for c in src_components if c != 'mdpi']
- assert dst_components != src_components
- dst_dir_name = '-'.join(dst_components)
- dst_dir = os.path.join(res_root, dst_dir_name)
- build_utils.MakeDirectory(dst_dir)
- for src_file_name in os.listdir(src_dir):
- if not os.path.splitext(src_file_name)[1] in ('.png', '.webp'):
- continue
- src_file = os.path.join(src_dir, src_file_name)
- dst_file = os.path.join(dst_dir, src_file_name)
- assert not os.path.lexists(dst_file)
- shutil.move(src_file, dst_file)
-
-
-def _GenerateDensitySplitPaths(apk_path):
- for density, config in _DENSITY_SPLITS.iteritems():
- src_path = '%s_%s' % (apk_path, '_'.join(config))
- dst_path = '%s_%s' % (apk_path, density)
- yield src_path, dst_path
-
-
-def _GenerateLanguageSplitOutputPaths(apk_path, languages):
- for lang in languages:
- yield '%s_%s' % (apk_path, lang)
-
-
-def _RenameDensitySplits(apk_path):
- """Renames all density splits to have shorter / predictable names."""
- for src_path, dst_path in _GenerateDensitySplitPaths(apk_path):
- shutil.move(src_path, dst_path)
-
-
-def _CheckForMissedConfigs(apk_path, check_density, languages):
- """Raises an exception if apk_path contains any unexpected configs."""
- triggers = []
- if check_density:
- triggers.extend(re.compile('-%s' % density) for density in _DENSITY_SPLITS)
- if languages:
- triggers.extend(re.compile(r'-%s\b' % lang) for lang in languages)
- with zipfile.ZipFile(apk_path) as main_apk_zip:
- for name in main_apk_zip.namelist():
- for trigger in triggers:
- if trigger.search(name) and not 'mipmap-' in name:
- raise Exception(('Found config in main apk that should have been ' +
- 'put into a split: %s\nYou need to update ' +
- 'package_resources.py to include this new ' +
- 'config (trigger=%s)') % (name, trigger.pattern))
-
-
-def _CreateLinkApkArgs(options):
- link_command = [
- options.aapt_path + '2',
- 'link',
- '--version-code', options.version_code,
- '--version-name', options.version_name,
- '--auto-add-overlay',
- '--no-version-vectors',
- '-I', options.android_sdk_jar,
- '-o', options.apk_path,
- ]
-
- if options.proguard_file:
- link_command += ['--proguard', options.proguard_file]
- if options.proguard_file_main_dex:
- link_command += ['--proguard-main-dex', options.proguard_file_main_dex]
-
- if options.no_compress:
- for ext in options.no_compress.split(','):
- link_command += ['-0', ext]
-
- if options.shared_resources:
- link_command.append('--shared-lib')
-
- if options.create_density_splits:
- for config in _DENSITY_SPLITS.itervalues():
- link_command.extend(('--split', ','.join(config)))
-
- if options.language_splits:
- for lang in options.language_splits:
- link_command.extend(('--split', lang))
-
- if options.locale_whitelist:
- aapt_locales = _ToAaptLocales(
- options.locale_whitelist, options.support_zh_hk)
- link_command += ['-c', ','.join(aapt_locales)]
-
- if options.no_xml_namespaces:
- link_command.append('--no-xml-namespaces')
-
- return link_command
-
-
-def _ExtractVersionFromSdk(aapt_path, sdk_path):
- output = subprocess.check_output([aapt_path, 'dump', 'badging', sdk_path])
- version_code = re.search(r"versionCode='(.*?)'", output).group(1)
- version_name = re.search(r"versionName='(.*?)'", output).group(1)
- return version_code, version_name,
-
-
-def _FixManifest(options, temp_dir):
- debug_manifest_path = os.path.join(temp_dir, 'AndroidManifest.xml')
- _ANDROID_NAMESPACE = 'http://schemas.android.com/apk/res/android'
- _TOOLS_NAMESPACE = 'http://schemas.android.com/tools'
- ElementTree.register_namespace('android', _ANDROID_NAMESPACE)
- ElementTree.register_namespace('tools', _TOOLS_NAMESPACE)
- original_manifest = ElementTree.parse(options.android_manifest)
-
- version_code, version_name = _ExtractVersionFromSdk(
- options.aapt_path, options.android_sdk_jar)
-
- # ElementTree.find does not work if the required tag is the root.
- if original_manifest.getroot().tag == 'manifest':
- manifest_node = original_manifest.getroot()
- else:
- manifest_node = original_manifest.find('manifest')
-
- manifest_node.set('platformBuildVersionCode', version_code)
- manifest_node.set('platformBuildVersionName', version_name)
-
- if options.debuggable:
- app_node = original_manifest.find('application')
- app_node.set('{%s}%s' % (_ANDROID_NAMESPACE, 'debuggable'), 'true')
-
- with open(debug_manifest_path, 'w') as debug_manifest:
- debug_manifest.write(ElementTree.tostring(
- original_manifest.getroot(), encoding='UTF-8'))
-
- return debug_manifest_path
-
-
-def _ResourceNameFromPath(path):
- return os.path.splitext(os.path.basename(path))[0]
-
-
-def _CreateKeepPredicate(resource_dirs, exclude_xxxhdpi, xxxhdpi_whitelist):
- if not exclude_xxxhdpi:
- # Do not extract dotfiles (e.g. ".gitkeep"). aapt ignores them anyways.
- return lambda path: os.path.basename(path)[0] != '.'
-
- # Returns False only for xxxhdpi non-mipmap, non-whitelisted drawables.
- naive_predicate = lambda path: (
- not re.search(r'[/-]xxxhdpi[/-]', path) or
- re.search(r'[/-]mipmap[/-]', path) or
- build_utils.MatchesGlob(path, xxxhdpi_whitelist))
-
- # Build a set of all non-xxxhdpi drawables to ensure that we never exclude any
- # xxxhdpi drawable that does not exist in other densities.
- non_xxxhdpi_drawables = set()
- for resource_dir in resource_dirs:
- for path in build_utils.IterFiles(resource_dir):
- if re.search(r'[/-]drawable[/-]', path) and naive_predicate(path):
- non_xxxhdpi_drawables.add(_ResourceNameFromPath(path))
-
- return lambda path: (naive_predicate(path) or
- _ResourceNameFromPath(path) not in non_xxxhdpi_drawables)
-
-
-def _ConvertToWebP(webp_binary, png_files):
- pool = multiprocessing.pool.ThreadPool(10)
- def convert_image(png_path):
- root = os.path.splitext(png_path)[0]
- webp_path = root + '.webp'
- args = [webp_binary, png_path, '-mt', '-quiet', '-m', '6', '-q', '100',
- '-lossless', '-o', webp_path]
- subprocess.check_call(args)
- os.remove(png_path)
-
- pool.map(convert_image, [f for f in png_files
- if not _PNG_WEBP_BLACKLIST_PATTERN.match(f)])
- pool.close()
- pool.join()
-
-
-def _CompileDeps(aapt_path, dep_subdirs, temp_dir):
- partials_dir = os.path.join(temp_dir, 'partials')
- build_utils.MakeDirectory(partials_dir)
- partial_compile_command = [
- aapt_path + '2',
- 'compile',
- # TODO(wnwen): Turn this on once aapt2 forces 9-patch to be crunched.
- # '--no-crunch',
- ]
- pool = multiprocessing.pool.ThreadPool(10)
- def compile_partial(directory):
- dirname = os.path.basename(directory)
- partial_path = os.path.join(partials_dir, dirname + '.zip')
- compile_command = (partial_compile_command +
- ['--dir', directory, '-o', partial_path])
- build_utils.CheckOutput(compile_command)
-
- # Sorting the files in the partial ensures deterministic output from the
- # aapt2 link step which uses order of files in the partial.
- sorted_partial_path = os.path.join(partials_dir, dirname + '.sorted.zip')
- _SortZip(partial_path, sorted_partial_path)
-
- return sorted_partial_path
-
- partials = pool.map(compile_partial, dep_subdirs)
- pool.close()
- pool.join()
- return partials
-
-
-def _PackageApk(options, dep_subdirs, temp_dir, gen_dir, r_txt_path):
- _DuplicateZhResources(dep_subdirs)
-
- keep_predicate = _CreateKeepPredicate(
- dep_subdirs, options.exclude_xxxhdpi, options.xxxhdpi_whitelist)
- png_paths = []
- for directory in dep_subdirs:
- for f in build_utils.IterFiles(directory):
- if not keep_predicate(f):
- os.remove(f)
- elif f.endswith('.png'):
- png_paths.append(f)
- if png_paths and options.png_to_webp:
- _ConvertToWebP(options.webp_binary, png_paths)
- for directory in dep_subdirs:
- _MoveImagesToNonMdpiFolders(directory)
-
- link_command = _CreateLinkApkArgs(options)
- link_command += ['--output-text-symbols', r_txt_path]
- link_command += ['--java', gen_dir]
-
- fixed_manifest = _FixManifest(options, temp_dir)
- link_command += ['--manifest', fixed_manifest]
-
- partials = _CompileDeps(options.aapt_path, dep_subdirs, temp_dir)
- for partial in partials:
- link_command += ['-R', partial]
-
- # Creates a .zip with AndroidManifest.xml, resources.arsc, res/*
- # Also creates R.txt
- build_utils.CheckOutput(
- link_command, print_stdout=False, print_stderr=False)
-
- if options.create_density_splits or options.language_splits:
- _CheckForMissedConfigs(options.apk_path, options.create_density_splits,
- options.language_splits)
-
- if options.create_density_splits:
- _RenameDensitySplits(options.apk_path)
-
-
-# _PackageLibrary uses aapt rather than aapt2 because aapt2 compile does not
-# support outputting an R.txt file.
-def _PackageLibrary(options, dep_subdirs, temp_dir, gen_dir):
- v14_dir = os.path.join(temp_dir, 'v14')
- build_utils.MakeDirectory(v14_dir)
-
- # Generate R.java. This R.java contains non-final constants and is used only
- # while compiling the library jar (e.g. chromium_content.jar). When building
- # an apk, a new R.java file with the correct resource -> ID mappings will be
- # generated by merging the resources from all libraries and the main apk
- # project.
- package_command = [options.aapt_path,
- 'package',
- '-m',
- '-M', _EMPTY_ANDROID_MANIFEST_PATH,
- '--no-crunch',
- '--auto-add-overlay',
- '--no-version-vectors',
- '-I', options.android_sdk_jar,
- '--output-text-symbols', gen_dir,
- '-J', gen_dir, # Required for R.txt generation.
- '--ignore-assets', build_utils.AAPT_IGNORE_PATTERN]
-
- # Adding all dependencies as sources is necessary for @type/foo references
- # to symbols within dependencies to resolve. However, it has the side-effect
- # that all Java symbols from dependencies are copied into the new R.java.
- # E.g.: It enables an arguably incorrect usage of
- # "mypackage.R.id.lib_symbol" where "libpackage.R.id.lib_symbol" would be
- # more correct. This is just how Android works.
- for d in dep_subdirs:
- package_command += ['-S', d]
-
- input_resource_dirs = options.resource_dirs
-
- for d in input_resource_dirs:
- package_command += ['-S', d]
-
- if not options.v14_skip:
- for resource_dir in input_resource_dirs:
- generate_v14_compatible_resources.GenerateV14Resources(
- resource_dir,
- v14_dir)
-
- # This is the list of directories with resources to put in the final .zip
- zip_resource_dirs = input_resource_dirs + [v14_dir]
-
- if options.resource_zip_out:
- _ZipResources(zip_resource_dirs, options.resource_zip_out,
- build_utils.AAPT_IGNORE_PATTERN)
-
- # Only creates an R.txt
- build_utils.CheckOutput(
- package_command, print_stdout=False, print_stderr=False)
-
-
-def _CreateRTxtAndSrcJar(options, r_txt_path, srcjar_dir):
- # When an empty res/ directory is passed, aapt does not write an R.txt.
- if not os.path.exists(r_txt_path):
- build_utils.Touch(r_txt_path)
-
- if options.r_text_in:
- r_txt_path = options.r_text_in
-
- packages = list(options.extra_res_packages)
- r_txt_files = list(options.extra_r_text_files)
-
- cur_package = options.custom_package
- if not options.custom_package and options.android_manifest:
- cur_package = _ExtractPackageFromManifest(options.android_manifest)
-
- # Don't create a .java file for the current resource target when:
- # - no package name was provided (either by manifest or build rules),
- # - there was already a dependent android_resources() with the same
- # package (occurs mostly when an apk target and resources target share
- # an AndroidManifest.xml)
- if cur_package and cur_package not in packages:
- packages.append(cur_package)
- r_txt_files.append(r_txt_path)
-
- if packages:
- shared_resources = options.shared_resources or options.app_as_shared_lib
- _CreateRJavaFiles(srcjar_dir, r_txt_path, packages, r_txt_files,
- shared_resources, options.non_constant_id,
- options.shared_resources_whitelist, bool(options.apk_path))
-
- if options.srcjar_out:
- build_utils.ZipDir(options.srcjar_out, srcjar_dir)
-
- if options.r_text_out:
- shutil.copyfile(r_txt_path, options.r_text_out)
-
-
-def _ExtractDeps(dep_zips, deps_dir):
- dep_subdirs = []
- for z in dep_zips:
- subdir = os.path.join(deps_dir, os.path.basename(z))
- if os.path.exists(subdir):
- raise Exception('Resource zip name conflict: ' + os.path.basename(z))
- build_utils.ExtractAll(z, path=subdir)
- dep_subdirs.append(subdir)
- return dep_subdirs
-
-
-def _OnStaleMd5(options):
- with build_utils.TempDir() as temp_dir:
- deps_dir = os.path.join(temp_dir, 'deps')
- build_utils.MakeDirectory(deps_dir)
- gen_dir = os.path.join(temp_dir, 'gen')
- build_utils.MakeDirectory(gen_dir)
- r_txt_path = os.path.join(gen_dir, 'R.txt')
- srcjar_dir = os.path.join(temp_dir, 'java')
-
- dep_subdirs = _ExtractDeps(options.dependencies_res_zips, deps_dir)
-
- if options.apk_path:
- _PackageApk(options, dep_subdirs, temp_dir, gen_dir, r_txt_path)
- else:
- _PackageLibrary(options, dep_subdirs, temp_dir, gen_dir)
-
- _CreateRTxtAndSrcJar(options, r_txt_path, srcjar_dir)
-
-
-def main(args):
- args = build_utils.ExpandFileArgs(args)
- options = _ParseArgs(args)
-
- # Order of these must match order specified in GN so that the correct one
- # appears first in the depfile.
- possible_output_paths = [
- options.apk_path,
- options.resource_zip_out,
- options.r_text_out,
- options.srcjar_out,
- options.proguard_file,
- options.proguard_file_main_dex,
- ]
- output_paths = [x for x in possible_output_paths if x]
-
- if options.apk_path and options.create_density_splits:
- for _, dst_path in _GenerateDensitySplitPaths(options.apk_path):
- output_paths.append(dst_path)
- if options.apk_path and options.language_splits:
- output_paths.extend(
- _GenerateLanguageSplitOutputPaths(options.apk_path,
- options.language_splits))
-
- # List python deps in input_strings rather than input_paths since the contents
- # of them does not change what gets written to the depsfile.
- input_strings = options.extra_res_packages + [
- options.app_as_shared_lib,
- options.custom_package,
- options.non_constant_id,
- options.shared_resources,
- options.v14_skip,
- options.exclude_xxxhdpi,
- options.xxxhdpi_whitelist,
- str(options.debuggable),
- str(options.png_to_webp),
- str(options.support_zh_hk),
- str(options.no_xml_namespaces),
- ]
-
- if options.apk_path:
- input_strings.extend(_CreateLinkApkArgs(options))
-
- possible_input_paths = [
- options.aapt_path,
- options.android_manifest,
- options.android_sdk_jar,
- options.shared_resources_whitelist,
- ]
- input_paths = [x for x in possible_input_paths if x]
- input_paths.extend(options.dependencies_res_zips)
- input_paths.extend(options.extra_r_text_files)
-
- if options.webp_binary:
- input_paths.append(options.webp_binary)
-
- # Resource files aren't explicitly listed in GN. Listing them in the depfile
- # ensures the target will be marked stale when resource files are removed.
- depfile_deps = []
- resource_names = []
- for resource_dir in options.resource_dirs:
- for resource_file in build_utils.FindInDirectory(resource_dir, '*'):
- # Don't list the empty .keep file in depfile. Since it doesn't end up
- # included in the .zip, it can lead to -w 'dupbuild=err' ninja errors
- # if ever moved.
- if not resource_file.endswith(os.path.join('empty', '.keep')):
- input_paths.append(resource_file)
- depfile_deps.append(resource_file)
- resource_names.append(os.path.relpath(resource_file, resource_dir))
-
- # Resource filenames matter to the output, so add them to strings as well.
- # This matters if a file is renamed but not changed (http://crbug.com/597126).
- input_strings.extend(sorted(resource_names))
-
- build_utils.CallAndWriteDepfileIfStale(
- lambda: _OnStaleMd5(options),
- options,
- input_paths=input_paths,
- input_strings=input_strings,
- output_paths=output_paths,
- depfile_deps=depfile_deps)
-
-
-if __name__ == '__main__':
- main(sys.argv[1:])
diff --git a/chromium/build/android/gyp/proguard.py b/chromium/build/android/gyp/proguard.py
index 77cff8300b9..5b3d7014226 100755
--- a/chromium/build/android/gyp/proguard.py
+++ b/chromium/build/android/gyp/proguard.py
@@ -13,6 +13,7 @@ from util import proguard_util
_DANGEROUS_OPTIMIZATIONS = [
+ "class/unboxing/enum",
# See crbug.com/625992
"code/allocation/variable",
# See crbug.com/625994
diff --git a/chromium/build/android/gyp/util/build_utils.py b/chromium/build/android/gyp/util/build_utils.py
index dec818ee17e..17442ba10c4 100644
--- a/chromium/build/android/gyp/util/build_utils.py
+++ b/chromium/build/android/gyp/util/build_utils.py
@@ -3,6 +3,7 @@
# found in the LICENSE file.
import ast
+import collections
import contextlib
import fnmatch
import json
@@ -395,37 +396,28 @@ def PrintBigWarning(message):
def GetSortedTransitiveDependencies(top, deps_func):
"""Gets the list of all transitive dependencies in sorted order.
- There should be no cycles in the dependency graph.
+ There should be no cycles in the dependency graph (crashes if cycles exist).
Args:
- top: a list of the top level nodes
- deps_func: A function that takes a node and returns its direct dependencies.
+ top: A list of the top level nodes
+ deps_func: A function that takes a node and returns a list of its direct
+ dependencies.
Returns:
A list of all transitive dependencies of nodes in top, in order (a node will
appear in the list at a higher index than all of its dependencies).
"""
- def Node(dep):
- return (dep, deps_func(dep))
-
- # First: find all deps
- unchecked_deps = list(top)
- all_deps = set(top)
- while unchecked_deps:
- dep = unchecked_deps.pop()
- new_deps = deps_func(dep).difference(all_deps)
- unchecked_deps.extend(new_deps)
- all_deps = all_deps.union(new_deps)
-
- # Then: simple, slow topological sort.
- sorted_deps = []
- unsorted_deps = dict(map(Node, all_deps))
- while unsorted_deps:
- for library, dependencies in unsorted_deps.items():
- if not dependencies.intersection(unsorted_deps.keys()):
- sorted_deps.append(library)
- del unsorted_deps[library]
-
- return sorted_deps
+ # Find all deps depth-first, maintaining original order in the case of ties.
+ deps_map = collections.OrderedDict()
+ def discover(nodes):
+ for node in nodes:
+ if node in deps_map:
+ continue
+ deps = deps_func(node)
+ discover(deps)
+ deps_map[node] = deps
+
+ discover(top)
+ return deps_map.keys()
def GetPythonDependencies():
diff --git a/chromium/build/android/gyp/util/build_utils_test.py b/chromium/build/android/gyp/util/build_utils_test.py
new file mode 100755
index 00000000000..bcc892f39b3
--- /dev/null
+++ b/chromium/build/android/gyp/util/build_utils_test.py
@@ -0,0 +1,44 @@
+#!/usr/bin/env python
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import unittest
+
+import build_utils # pylint: disable=W0403
+
+_DEPS = collections.OrderedDict()
+_DEPS['a'] = []
+_DEPS['b'] = []
+_DEPS['c'] = ['a']
+_DEPS['d'] = ['a']
+_DEPS['e'] = ['f']
+_DEPS['f'] = ['a', 'd']
+_DEPS['g'] = []
+_DEPS['h'] = ['d', 'b', 'f']
+_DEPS['i'] = ['f']
+
+
+class BuildUtilsTest(unittest.TestCase):
+ def testGetSortedTransitiveDependencies_all(self):
+ TOP = _DEPS.keys()
+ EXPECTED = ['a', 'b', 'c', 'd', 'f', 'e', 'g', 'h', 'i']
+ actual = build_utils.GetSortedTransitiveDependencies(TOP, _DEPS.get)
+ self.assertEqual(EXPECTED, actual)
+
+ def testGetSortedTransitiveDependencies_leaves(self):
+ TOP = ['c', 'e', 'g', 'h', 'i']
+ EXPECTED = ['a', 'c', 'd', 'f', 'e', 'g', 'b', 'h', 'i']
+ actual = build_utils.GetSortedTransitiveDependencies(TOP, _DEPS.get)
+ self.assertEqual(EXPECTED, actual)
+
+ def testGetSortedTransitiveDependencies_leavesReverse(self):
+ TOP = ['i', 'h', 'g', 'e', 'c']
+ EXPECTED = ['a', 'd', 'f', 'i', 'b', 'h', 'g', 'e', 'c']
+ actual = build_utils.GetSortedTransitiveDependencies(TOP, _DEPS.get)
+ self.assertEqual(EXPECTED, actual)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/chromium/build/android/gyp/util/proguard_util.py b/chromium/build/android/gyp/util/proguard_util.py
index 171c35bfd80..fc7d2a82aba 100644
--- a/chromium/build/android/gyp/util/proguard_util.py
+++ b/chromium/build/android/gyp/util/proguard_util.py
@@ -8,7 +8,7 @@ import time
from util import build_utils
-class _ProguardOutputFilter(object):
+class ProguardOutputFilter(object):
"""ProGuard outputs boring stuff to stdout (proguard version, jar path, etc)
as well as interesting stuff (notes, warnings, etc). If stdout is entirely
boring, this class suppresses the output.
@@ -16,7 +16,7 @@ class _ProguardOutputFilter(object):
IGNORE_RE = re.compile(
r'Pro.*version|Note:|Reading|Preparing|Printing|ProgramClass:|Searching|'
- r'jar \[|\d+ class path entries checked|.*:.*(?:MANIFEST\.MF|\.empty)')
+ r'jar \[|\d+ class path entries checked')
def __init__(self):
self._last_line_ignored = False
@@ -128,8 +128,11 @@ class ProguardCmdBuilder(object):
for optimization in self._disabled_optimizations:
cmd += [ '-optimizations', '!' + optimization ]
+ # Filter to just .class files to avoid warnings about multiple inputs having
+ # the same files in META-INF/.
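+ # For example (hypothetical paths), injars of ['a.jar', 'b.jar'] become the
+ # single argument 'a.jar(**.class):b.jar(**.class)'.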
cmd += [
- '-injars', ':'.join(self._injars)
+ '-injars',
+ ':'.join('{}(**.class)'.format(x) for x in self._injars)
]
for config_file in self._configs:
@@ -211,8 +214,8 @@ class ProguardCmdBuilder(object):
stdout_filter = None
stderr_filter = None
if not self._verbose:
- stdout_filter = _ProguardOutputFilter()
- stderr_filter = _ProguardOutputFilter()
+ stdout_filter = ProguardOutputFilter()
+ stderr_filter = ProguardOutputFilter()
start_time = time.time()
build_utils.CheckOutput(self._cmd, print_stdout=True,
print_stderr=True,
diff --git a/chromium/build/android/gyp/util/resource_utils.py b/chromium/build/android/gyp/util/resource_utils.py
new file mode 100644
index 00000000000..3775e3dcf27
--- /dev/null
+++ b/chromium/build/android/gyp/util/resource_utils.py
@@ -0,0 +1,469 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import collections
+import contextlib
+import os
+import re
+import shutil
+import sys
+import tempfile
+from xml.etree import ElementTree
+
+import util.build_utils as build_utils
+
+_SOURCE_ROOT = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(
+ os.path.dirname(__file__)))))
+# Import jinja2 from third_party/jinja2
+sys.path.insert(1, os.path.join(_SOURCE_ROOT, 'third_party'))
+from jinja2 import Template # pylint: disable=F0401
+
+
+EMPTY_ANDROID_MANIFEST_PATH = os.path.join(
+ _SOURCE_ROOT, 'build', 'android', 'AndroidManifest.xml')
+
+
+# Represents a line from a R.txt file.
+_TextSymbolEntry = collections.namedtuple('RTextEntry',
+ ('java_type', 'resource_type', 'name', 'value'))
+
+
+def _ParseTextSymbolsFile(path, fix_package_ids=False):
+ """Given an R.txt file, returns a list of _TextSymbolEntry.
+
+ Args:
+ path: Input file path.
+ fix_package_ids: If True, all package IDs read from the file
+ will be fixed to 0x7f.
+ Returns:
+ A list of _TextSymbolEntry instances.
+ Raises:
+ Exception: An unexpected line was detected in the input.
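+
+ A typical R.txt line looks like (example values):
+ int drawable icon 0x7f020001
+ or, for styleable arrays:
+ int[] styleable FooBar { 0x7f010001, 0x7f010002 }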
+ """
+ ret = []
+ with open(path) as f:
+ for line in f:
+ m = re.match(r'(int(?:\[\])?) (\w+) (\w+) (.+)$', line)
+ if not m:
+ raise Exception('Unexpected line in R.txt: %s' % line)
+ java_type, resource_type, name, value = m.groups()
+ if fix_package_ids:
+ value = _FixPackageIds(value)
+ ret.append(_TextSymbolEntry(java_type, resource_type, name, value))
+ return ret
+
+
+def _FixPackageIds(resource_value):
+ # Resource IDs for resources belonging to regular APKs have their first byte
+ # as 0x7f (package id). However with webview, since it is not a regular apk
+ # but used as a shared library, aapt is passed the --shared-resources flag
+ # which changes some of the package ids to 0x02 and 0x00. This function just
+ # normalises all package ids to 0x7f, which the generated code in R.java
+ # changes to the correct package id at runtime.
+ # resource_value is a string with either a single value '0x12345678', or an
+ # array of values like '{ 0xfedcba98, 0x01234567, 0x56789abc }'
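+ # For example (made-up IDs), '{ 0x02010203, 0x00010203, 0x7f010203 }' becomes
+ # '{ 0x7f010203, 0x7f010203, 0x7f010203 }', while a framework ID such as
+ # '0x01050042' is left untouched because of the (?!01) look-ahead.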
+ return re.sub(r'0x(?!01)\d\d', r'0x7f', resource_value)
+
+
+def _GetRTxtResourceNames(r_txt_path):
+ """Parse an R.txt file and extract the set of resource names from it."""
+ result = set()
+ for entry in _ParseTextSymbolsFile(r_txt_path):
+ result.add(entry.name)
+ return result
+
+
+class RJavaBuildOptions:
+ """A class used to model the various ways to build an R.java file.
+
+ This is used to control which resource ID variables will be final or
+ non-final, and whether an onResourcesLoaded() method will be generated
+ to adjust the non-final ones, when the corresponding library is loaded
+ at runtime.
+
+ Note that by default, all resources are final, and there is no
+ method generated, which corresponds to calling ExportNoResources().
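+
+ For example, a shared-resources build (mirroring how the resource scripts
+ in this directory configure this class) would use:
+ build_options = RJavaBuildOptions()
+ build_options.ExportAllResources()
+ build_options.ExportAllStyleables()
+ build_options.GenerateOnResourcesLoaded()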
+ """
+ def __init__(self):
+ self.has_constant_ids = True
+ self.resources_whitelist = None
+ self.has_on_resources_loaded = False
+ self.export_const_styleable = False
+
+ def ExportNoResources(self):
+ """Make all resource IDs final, and don't generate a method."""
+ self.has_constant_ids = True
+ self.resources_whitelist = None
+ self.has_on_resources_loaded = False
+ self.export_const_styleable = False
+
+ def ExportAllResources(self):
+ """Make all resource IDs non-final in the R.java file."""
+ self.has_constant_ids = False
+ self.resources_whitelist = None
+
+ def ExportSomeResources(self, r_txt_file_path):
+ """Only select specific resource IDs to be non-final.
+
+ Args:
+ r_txt_file_path: The path to an R.txt file. All resources named
+ in it will be non-final in the generated R.java file; all others
+ will be final.
+ """
+ self.has_constant_ids = True
+ self.resources_whitelist = _GetRTxtResourceNames(r_txt_file_path)
+
+ def ExportAllStyleables(self):
+ """Make all styleable constants non-final, even non-resources ones.
+
+ Resources that are styleable but not of int[] type are not actually
+ resource IDs but constants. By default they are always final. Call this
+ method to make them non-final anyway in the final R.java file.
+ """
+ self.export_const_styleable = True
+
+ def GenerateOnResourcesLoaded(self):
+ """Generate an onResourcesLoaded() method.
+
+ This Java method will be called by the framework when the corresponding
+ library (which includes the R.java source file) is loaded at runtime.
+ This corresponds to the --shared-resources
+ or --app-as-shared-lib flags of 'aapt package'.
+ """
+ self.has_on_resources_loaded = True
+
+ def _IsResourceFinal(self, entry):
+ """Determines whether a resource should be final or not.
+
+ Args:
+ entry: A _TextSymbolEntry instance.
+ Returns:
+ True iff the corresponding entry should be final.
+ """
+ if entry.resource_type == 'styleable' and entry.java_type != 'int[]':
+ # A styleable constant may be exported as non-final after all.
+ return not self.export_const_styleable
+ elif not self.has_constant_ids:
+ # Every resource is non-final
+ return False
+ elif not self.resources_whitelist:
+ # No whitelist means all resource IDs stay final.
+ return True
+ else:
+ # Otherwise, only resources named in the whitelist are non-final.
+ return entry.name not in self.resources_whitelist
+
+
+def CreateRJavaFiles(srcjar_dir, package, main_r_txt_file,
+ extra_res_packages, extra_r_txt_files,
+ rjava_build_options):
+ """Create all R.java files for a set of packages and R.txt files.
+
+ Args:
+ srcjar_dir: The top-level output directory for the generated files.
+ package: Top-level package name.
+ main_r_txt_file: The main R.txt file containing the valid values
+ of _all_ resource IDs.
+ extra_res_packages: A list of extra package names.
+ extra_r_txt_files: A list of extra R.txt files. One per item in
+ |extra_res_packages|. Note that all resource IDs in them will be ignored,
+ and replaced by the values extracted from |main_r_txt_file|.
+ rjava_build_options: An RJavaBuildOptions instance that controls how
+ exactly the R.java file is generated.
+ Raises:
+ Exception: If a package name appears more than once in |extra_res_packages|.
+ """
+ assert len(extra_res_packages) == len(extra_r_txt_files), \
+ 'Need one R.txt file per package'
+
+ packages = list(extra_res_packages)
+ r_txt_files = list(extra_r_txt_files)
+
+ if package not in packages:
+ # Sometimes, an apk target and a resources target share the same
+ # AndroidManifest.xml and thus |package| will already be in |packages|.
+ packages.append(package)
+ r_txt_files.append(main_r_txt_file)
+
+ # Map of (resource_type, name) -> Entry.
+ # Contains the correct values for resources.
+ all_resources = {}
+ for entry in _ParseTextSymbolsFile(main_r_txt_file, fix_package_ids=True):
+ all_resources[(entry.resource_type, entry.name)] = entry
+
+ # Map of package_name->resource_type->entry
+ resources_by_package = (
+ collections.defaultdict(lambda: collections.defaultdict(list)))
+ # Build the R.java files using each package's R.txt file, but replacing
+ # each entry's placeholder value with correct values from all_resources.
+ for package, r_txt_file in zip(packages, r_txt_files):
+ if package in resources_by_package:
+ raise Exception(('Package name "%s" appeared twice. All '
+ 'android_resources() targets must use unique package '
+ 'names, or no package name at all.') % package)
+ resources_by_type = resources_by_package[package]
+ # The sub-R.txt files have the wrong values at this point. Read them to
+ # figure out which entries belong to them, but use the values from the
+ # main R.txt file.
+ for entry in _ParseTextSymbolsFile(r_txt_file):
+ entry = all_resources.get((entry.resource_type, entry.name))
+ # For most cases missing entry here is an error. It means that some
+ # library claims to have or depend on a resource that isn't included into
+ # the APK. There is one notable exception: Google Play Services (GMS).
+ # GMS is shipped as a bunch of AARs. One of them - basement - contains
+ # R.txt with ids of all resources, but most of the resources are in the
+ # other AARs. However, all other AARs reference their resources via
+ # basement's R.java so the latter must contain all ids that are in its
+ # R.txt. Most targets depend on only a subset of GMS AARs so some
+ # resources are missing, which is okay because the code that references
+ # them is missing too. We can't get an id for a resource that isn't here
+ # so the only solution is to skip the resource entry entirely.
+ #
+ # We can verify that all entries referenced in the code were generated
+ # correctly by running Proguard on the APK: it will report missing
+ # fields.
+ if entry:
+ resources_by_type[entry.resource_type].append(entry)
+
+ for package, resources_by_type in resources_by_package.iteritems():
+ _CreateRJavaSourceFile(srcjar_dir, package, resources_by_type,
+ rjava_build_options)
+
+
+def _CreateRJavaSourceFile(srcjar_dir, package, resources_by_type,
+ rjava_build_options):
+ """Generates an R.java source file."""
+ package_r_java_dir = os.path.join(srcjar_dir, *package.split('.'))
+ build_utils.MakeDirectory(package_r_java_dir)
+ package_r_java_path = os.path.join(package_r_java_dir, 'R.java')
+ java_file_contents = _RenderRJavaSource(package, resources_by_type,
+ rjava_build_options)
+ with open(package_r_java_path, 'w') as f:
+ f.write(java_file_contents)
+
+
+ # Resource IDs inside resource arrays are sorted. Application resource IDs
+ # start with 0x7f, while system resource IDs start with 0x01, so system
+ # resource IDs always come first in the array. This function finds the index
+ # of the first non-system resource ID, which is used for package ID rewriting
+ # (system resource IDs must not be rewritten).
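+ # For example (made-up values), an entry whose value is
+ # '{ 0x0101021c, 0x010102c9, 0x7f010001 }' yields index 2, so only the last
+ # element would be rewritten by onResourcesLoaded().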
+def _GetNonSystemIndex(entry):
+ """Get the index of the first application resource ID within a resource
+ array."""
+ res_ids = re.findall(r'0x[0-9a-f]{8}', entry.value)
+ for i, res_id in enumerate(res_ids):
+ if res_id.startswith('0x7f'):
+ return i
+ return len(res_ids)
+
+
+def _RenderRJavaSource(package, resources_by_type, rjava_build_options):
+ """Render an R.java source file. See _CreateRJaveSourceFile for args info."""
+ final_resources_by_type = collections.defaultdict(list)
+ non_final_resources_by_type = collections.defaultdict(list)
+ for res_type, resources in resources_by_type.iteritems():
+ for entry in resources:
+ # Entries in styleable that are not int[] are not actually resource IDs
+ # but constants.
+ if rjava_build_options._IsResourceFinal(entry):
+ final_resources_by_type[res_type].append(entry)
+ else:
+ non_final_resources_by_type[res_type].append(entry)
+
+ # Keep these assignments all on one line to make diffing against regular
+ # aapt-generated files easier.
+ create_id = ('{{ e.resource_type }}.{{ e.name }} ^= packageIdTransform;')
+ create_id_arr = ('{{ e.resource_type }}.{{ e.name }}[i] ^='
+ ' packageIdTransform;')
+ for_loop_condition = ('int i = {{ startIndex(e) }}; i < '
+ '{{ e.resource_type }}.{{ e.name }}.length; ++i')
+
+ # Here we diverge from what aapt does. Because we have so many
+ # resources, the onResourcesLoaded method was exceeding the 64KB limit that
+ # Java imposes. For this reason we split onResourcesLoaded into different
+ # methods for each resource type.
+ template = Template("""/* AUTO-GENERATED FILE. DO NOT MODIFY. */
+
+package {{ package }};
+
+public final class R {
+ private static boolean sResourcesDidLoad;
+ {% for resource_type in resource_types %}
+ public static final class {{ resource_type }} {
+ {% for e in final_resources[resource_type] %}
+ public static final {{ e.java_type }} {{ e.name }} = {{ e.value }};
+ {% endfor %}
+ {% for e in non_final_resources[resource_type] %}
+ public static {{ e.java_type }} {{ e.name }} = {{ e.value }};
+ {% endfor %}
+ }
+ {% endfor %}
+ {% if has_on_resources_loaded %}
+ public static void onResourcesLoaded(int packageId) {
+ assert !sResourcesDidLoad;
+ sResourcesDidLoad = true;
+ int packageIdTransform = (packageId ^ 0x7f) << 24;
+ {% for resource_type in resource_types %}
+ onResourcesLoaded{{ resource_type|title }}(packageIdTransform);
+ {% for e in non_final_resources[resource_type] %}
+ {% if e.java_type == 'int[]' %}
+ for(""" + for_loop_condition + """) {
+ """ + create_id_arr + """
+ }
+ {% endif %}
+ {% endfor %}
+ {% endfor %}
+ }
+ {% for res_type in resource_types %}
+ private static void onResourcesLoaded{{ res_type|title }} (
+ int packageIdTransform) {
+ {% for e in non_final_resources[res_type] %}
+ {% if res_type != 'styleable' and e.java_type != 'int[]' %}
+ """ + create_id + """
+ {% endif %}
+ {% endfor %}
+ }
+ {% endfor %}
+ {% endif %}
+}
+""", trim_blocks=True, lstrip_blocks=True)
+
+ return template.render(
+ package=package,
+ resource_types=sorted(resources_by_type),
+ has_on_resources_loaded=rjava_build_options.has_on_resources_loaded,
+ final_resources=final_resources_by_type,
+ non_final_resources=non_final_resources_by_type,
+ startIndex=_GetNonSystemIndex)
+
+
+def ExtractPackageFromManifest(manifest_path):
+ """Extract package name from Android manifest file."""
+ doc = ElementTree.parse(manifest_path)
+ return doc.getroot().get('package')
+
+
+def ExtractDeps(dep_zips, deps_dir):
+ """Extract a list of resource dependency zip files.
+
+ Args:
+    dep_zips: A list of zip file paths, each of which will be extracted to
+      a subdirectory of |deps_dir| named after the zip file (e.g.
+      '/some/path/foo.zip' -> '{deps_dir}/foo.zip/').
+    deps_dir: Top-level extraction directory.
+  Returns:
+    The list of all extraction sub-directory paths (each under |deps_dir|).
+ Raises:
+ Exception: If a sub-directory already exists with the same name before
+ extraction.
+ """
+ dep_subdirs = []
+ for z in dep_zips:
+ subdir = os.path.join(deps_dir, os.path.basename(z))
+ if os.path.exists(subdir):
+ raise Exception('Resource zip name conflict: ' + os.path.basename(z))
+ build_utils.ExtractAll(z, path=subdir)
+ dep_subdirs.append(subdir)
+ return dep_subdirs
+
+
+class _ResourceBuildContext(object):
+ """A temporary directory for packaging and compiling Android resources."""
+ def __init__(self):
+ """Initialized the context."""
+ # The top-level temporary directory.
+ self.temp_dir = tempfile.mkdtemp()
+    # A location to store resources extracted from dependency zip files.
+ self.deps_dir = os.path.join(self.temp_dir, 'deps')
+ os.mkdir(self.deps_dir)
+ # A location to place aapt-generated files.
+ self.gen_dir = os.path.join(self.temp_dir, 'gen')
+ os.mkdir(self.gen_dir)
+ # Location of the generated R.txt file.
+ self.r_txt_path = os.path.join(self.gen_dir, 'R.txt')
+ # A location to place generated R.java files.
+ self.srcjar_dir = os.path.join(self.temp_dir, 'java')
+ os.mkdir(self.srcjar_dir)
+
+ def Close(self):
+ """Close the context and destroy all temporary files."""
+ shutil.rmtree(self.temp_dir)
+
+
+@contextlib.contextmanager
+def BuildContext():
+ """Generator for a _ResourceBuildContext instance."""
+  context = _ResourceBuildContext()
+  try:
+    yield context
+  finally:
+    context.Close()
+
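+# Minimal usage sketch (illustrative only; names like |options| are assumed to
+# come from the caller):
+#
+#   with BuildContext() as build:
+#     dep_subdirs = ExtractDeps(options.dependencies_res_zips, build.deps_dir)
+#     # ... run aapt, then read build.r_txt_path and build.srcjar_dir ...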
+
+def ResourceArgsParser():
+ """Create an argparse.ArgumentParser instance with common argument groups.
+
+ Returns:
+ A tuple of (parser, in_group, out_group) corresponding to the parser
+ instance, and the input and output argument groups for it, respectively.
+ """
+ parser = argparse.ArgumentParser(description=__doc__)
+
+ input_opts = parser.add_argument_group('Input options')
+ output_opts = parser.add_argument_group('Output options')
+
+ build_utils.AddDepfileOption(output_opts)
+
+ input_opts.add_argument('--android-sdk-jar', required=True,
+ help='Path to the android.jar file.')
+
+ input_opts.add_argument('--aapt-path', required=True,
+ help='Path to the Android aapt tool')
+
+ input_opts.add_argument('--dependencies-res-zips', required=True,
+ help='Resources zip archives from dependents. Required to '
+ 'resolve @type/foo references into dependent '
+ 'libraries.')
+
+ input_opts.add_argument(
+ '--r-text-in',
+ help='Path to pre-existing R.txt. Its resource IDs override those found '
+ 'in the aapt-generated R.txt when generating R.java.')
+
+ input_opts.add_argument(
+ '--extra-res-packages',
+ help='Additional package names to generate R.java files for.')
+
+ input_opts.add_argument(
+ '--extra-r-text-files',
+ help='For each additional package, the R.txt file should contain a '
+ 'list of resources to be included in the R.java file in the format '
+ 'generated by aapt.')
+
+ return (parser, input_opts, output_opts)
+
+
+def HandleCommonOptions(options):
+ """Handle common command-line options after parsing.
+
+ Args:
+ options: the result of parse_args() on the parser returned by
+ ResourceArgsParser(). This function updates a few common fields.
+ """
+ options.dependencies_res_zips = (
+ build_utils.ParseGnList(options.dependencies_res_zips))
+
+  # Don't use [] as the default value since some scripts explicitly pass "".
+ if options.extra_res_packages:
+ options.extra_res_packages = (
+ build_utils.ParseGnList(options.extra_res_packages))
+ else:
+ options.extra_res_packages = []
+
+ if options.extra_r_text_files:
+ options.extra_r_text_files = (
+ build_utils.ParseGnList(options.extra_r_text_files))
+ else:
+ options.extra_r_text_files = []
diff --git a/chromium/build/android/gyp/write_build_config.py b/chromium/build/android/gyp/write_build_config.py
index f1d8dbe683a..d69385827e4 100755
--- a/chromium/build/android/gyp/write_build_config.py
+++ b/chromium/build/android/gyp/write_build_config.py
@@ -24,6 +24,465 @@ Note: If paths to input files are passed in this way, it is important that:
2. Either (a) or (b)
a. inputs/deps ensure that the action runs whenever one of the files changes
b. the files are added to the action's depfile
+
+NOTE: All paths within .build_config files are relative to $CHROMIUM_OUTPUT_DIR.
+
+This is a technical note describing the format of .build_config files.
+Please keep it updated when changing this script. For extraction and
+visualization instructions, see build/android/docs/build_config.md
+
+------------- BEGIN_MARKDOWN ---------------------------------------------------
+The .build_config file format
+===
+
+# Introduction
+
+This document tries to explain the format of `.build_config` generated during
+the Android build of Chromium. For a higher-level explanation of these files,
+please read
+[build/android/docs/build_config.md](build/android/docs/build_config.md).
+
+# The `deps_info` top-level dictionary:
+
+All `.build_config` files have a required `'deps_info'` key, whose value is a
+dictionary describing the target and its dependencies. The latter has the
+following required keys:
+
+## Required keys in `deps_info`:
+
+* `deps_info['type']`: The target type as a string.
+
+  The following types are recognized by both the internal GN build rules and
+  the build scripts:
+
+ * [java_binary](#target_java_binary)
+ * [java_annotation_processor](#target_java_annotation_processor)
+ * [junit_binary](#target_junit_binary)
+ * [java_library](#target_java_library)
+ * [android_assets](#target_android_assets)
+ * [android_resources](#target_android_resources)
+ * [android_apk](#target_android_apk)
+ * [dist_jar](#target_dist_jar)
+ * [dist_aar](#target_dist_aar)
+ * [resource_rewriter](#target_resource_rewriter)
+ * [group](#target_group)
+
+ See later sections for more details of some of these.
+
+* `deps_info['path']`: Path to the target's `.build_config` file.
+
+* `deps_info['name']`: Nothing more than the basename of `deps_info['path']`
+at the moment.
+
+* `deps_info['deps_configs']`: List of paths to the `.build_config` files of
+all *direct* dependencies of the current target.
+
+ NOTE: Because the `.build_config` of a given target is always generated
+ after the `.build_config` of its dependencies, the `write_build_config.py`
+ script can use chains of `deps_configs` to compute transitive dependencies
+ for each target when needed.
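+
+For illustration, a minimal sketch (the path and values are hypothetical) of
+reading these required keys from Python, assuming the JSON layout that
+`write_build_config.py` emits:
+
+    import json
+
+    with open('gen/foo/foo.build_config') as f:
+      config = json.load(f)
+
+    deps_info = config['deps_info']
+    print(deps_info['type'])          # e.g. 'java_library'
+    print(deps_info['deps_configs'])  # direct dependencies only
+    # Transitive dependencies can be recovered by following the
+    # 'deps_configs' chains of each dependency's own .build_config file.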
+
+## Optional keys in `deps_info`:
+
+The following keys will only appear in the `.build_config` files of certain
+target types:
+
+* `deps_info['requires_android']`: True to indicate that the corresponding
+code uses Android-specific APIs, and thus cannot run on the host within a
+regular JVM. May only appear in Java-related targets.
+
+* `deps_info['supports_android']`:
+May appear in Java-related targets, and indicates that
+the corresponding code doesn't use Java APIs that are not available on
+Android. As such it may run either on the host or on an Android device.
+
+* `deps_info['assets']`:
+Only seen for the [`android_assets`](#target_android_assets) type. See below.
+
+* `deps_info['package_name']`: Java package name associated with this target.
+
+ NOTE: For `android_resources` targets,
+ this is the package name for the corresponding R class. For `android_apk`
+ targets, this is the corresponding package name. This does *not* appear for
+ other target types.
+
+* `deps_info['android_manifest']`:
+Path to an AndroidManifest.xml file related to the current target.
+
+# Top-level `resources` dictionary:
+
+This dictionary only appears for a few target types that can contain or
+relate to Android resources (e.g. `android_resources` or `android_apk`):
+
+* `resources['dependency_zips']`:
+List of `deps_info['resources_zip']` entries for all `android_resources`
+dependencies for the current target.
+
+* `resources['extra_package_names']`:
+Always empty for `android_resources` types. Otherwise,
+the list of `deps_info['package_name']` entries for all `android_resources`
+dependencies for the current target. Computed automatically by
+`write_build_config.py`.
+
+* `resources['extra_r_text_files']`:
+Always empty for `android_resources` types. Otherwise, the list of
+`deps_info['r_text']` entries for all `android_resources` dependencies for
+the current target. Computed automatically.
+
+
+# `.build_config` target types description:
+
+## <a name="target_group">Target type `group`</a>:
+
+This type corresponds to a simple target that is only used to group
+dependencies. It matches the `java_group()` GN template. Its only top-level
+`deps_info` keys are `supports_android` (always True), and `deps_configs`.
+
+
+## <a name="target_android_resources">Target type `android_resources`</a>:
+
+This type corresponds to targets that are used to group Android resource files.
+For example, all `android_resources` dependencies of an `android_apk` will
+end up packaged into the final APK by the build system.
+
+It uses the following keys:
+
+* `deps_info['resource_dirs']`:
+List of paths to the source directories containing the resources for this
+target. This key is optional, because some targets can refer to prebuilt
+`.aar` archives.
+
+
+* `deps_info['resources_zip']`:
+*Required*. Path to the `.resources.zip` file that contains all raw/uncompiled
+resource files for this target (but no `R.txt`, `R.java` or `R.class` files).
+
+ If `deps_info['resource_dirs']` is missing, this must point to a prebuilt
+ `.aar` archive containing resources. Otherwise, this will point to a
+ zip archive generated at build time, wrapping the content of
+ `deps_info['resource_dirs']` into a single zip file.
+
+* `deps_info['package_name']`:
+Java package name that the R class for this target belongs to.
+
+* `deps_info['android_manifest']`:
+Optional. Path to the top-level Android manifest file associated with these
+resources (if not provided, an empty manifest will be used to generate R.txt).
+
+* `deps_info['r_text']`:
+Provide the path to the `R.txt` file that describes the resources wrapped by
+this target. Normally this file is generated from the content of the resource
+directories or zip file, but some targets can provide their own `R.txt` file
+if they want.
+
+* `deps_info['srcjar_path']`:
+Path to the `.srcjar` file that contains the auto-generated `R.java` source
+file corresponding to the content of `deps_info['r_text']`. This is *always*
+generated from the content of `deps_info['r_text']` by the
+`build/android/gyp/process_resources.py` script.
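+
+For illustration only (all values below are hypothetical), the `deps_info`
+section of an `android_resources` target could look like the following when
+loaded into Python:
+
+    deps_info = {
+        'type': 'android_resources',
+        'resources_zip': 'resource_zips/foo/foo_resources.resources.zip',
+        'package_name': 'org.chromium.foo',
+        'r_text': 'gen/foo/foo_resources_R.txt',
+        'srcjar_path': 'gen/foo/foo_resources.srcjar',
+    }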
+
+
+## <a name="target_android_assets">Target type `android_assets`</a>:
+
+This type corresponds to targets used to group Android assets, i.e. arbitrary
+files that will be placed under `//assets/` within the final APK.
+
+These use an `deps_info['assets']` key to hold a dictionary of values related
+to assets covered by this target.
+
+* `assets['sources']`:
+The list of all asset source paths for this target. Each source path can
+use an optional `:<zipPath>` suffix, where `<zipPath>` is the final location
+of the assets (relative to `//assets/`) within the APK.
+
+* `assets['outputs']`:
+Optional. Some of the sources might be renamed before being stored in the
+final //assets/ sub-directory. When this happens, this contains a list of
+all renamed output file paths.
+
+ NOTE: When not empty, the first items of `assets['sources']` must match
+ every item in this list. Extra sources correspond to non-renamed sources.
+
+ NOTE: This comes from the `asset_renaming_destinations` parameter for the
+ `android_assets()` GN template.
+
+* `assets['disable_compression']`:
+Optional. Will be True to indicate that these assets should be stored
+uncompressed in the final APK. For example, this is necessary for locale
+.pak files used by the System WebView feature.
+
+* `assets['treat_as_locale_paks']`:
+Optional. Will be True to indicate that these assets are locale `.pak` files
+(containing localized strings for C++). These are later processed to generate
+a special `.build_config.java` source file, listing all supported locales in
+the current build.
+
+
+## <a name="target_java_library">Target type `java_library`</a>:
+
+This type is used to describe target that wrap Java bytecode, either created
+by compiling sources, or providing them with a prebuilt jar.
+
+* `deps_info['unprocessed_jar_path']`:
+Path to the original .jar file for this target, before any kind of processing
+through Proguard or other tools. For most targets this is generated
+from sources, with a name like `$target_name.javac.jar`. However, when using
+a prebuilt jar, this will point to the source archive directly.
+
+* `deps_info['jar_path']`:
+Path to a file that is the result of processing
+`deps_info['unprocessed_jar_path']` with various tools.
+
+* `deps_info['interface_jar_path']`:
+Path to the interface jar generated for this library. This corresponds to
+a jar file that only contains declarations. Generated by running the `ijar`
+tool on `deps_info['jar_path']`.
+
+* `deps_info['dex_path']`:
+Path to the `.dex` file generated for this target, from `deps_info['jar_path']`
+unless this comes from a prebuilt `.aar` archive.
+
+* `deps_info['is_prebuilt']`:
+True to indicate that this target corresponds to a prebuilt `.jar` file.
+In this case, `deps_info['unprocessed_jar_path']` will point to the source
+`.jar` file. Otherwise, it will point to a build-generated file.
+
+* `deps_info['java_sources_file']`:
+Path to a single `.sources` file listing all the Java sources that were used
+to generate the library (simple text format, one `.java` path per line).
+
+* `deps_info['owned_resource_dirs']`:
+List of all resource directories belonging to all resource dependencies for
+this target.
+
+* `deps_info['owned_resource_zips']`:
+List of all resource zip files belonging to all resource dependencies for this
+target.
+
+* `deps_info['javac']`:
+A dictionary containing information about the way the sources in this library
+are compiled. Appears also on other Java-related targets. See the [dedicated
+section about this](#dict_javac) below for details.
+
+* `deps_info['javac_full_classpath']`:
+The classpath used when performing bytecode processing. Essentially the
+collection of all `deps_info['unprocessed_jar_path']` entries for the target
+and all its dependencies.
+
+* `deps_info['javac_full_interface_classpath']`:
+The classpath used when using the errorprone compiler.
+
+* `deps_info['proguard_enabled']`:
+True to indicate that ProGuard processing is enabled for this target.
+
+* `deps_info['proguard_configs']`:
+A list of paths to ProGuard configuration files related to this library.
+
+* `deps_info['extra_classpath_jars']`:
+For some Java related types, a list of extra `.jar` files to use at build time
+but not at runtime.
+
+## <a name="target_java_binary">Target type `java_binary`</a>:
+
+This type corresponds to a Java binary, which is nothing more than a
+`java_library` target that also provides a main class name. It thus inherits
+all entries from the `java_library` type, and adds:
+
+* `deps_info['main_class']`:
+Name of the main Java class that serves as an entry point for the binary.
+
+* `deps_info['java_runtime_classpath']`:
+The classpath used when running a Java or Android binary. Essentially the
+collection of all `deps_info['jar_path']` entries for the target and all its
+dependencies.
+
+
+## <a name="target_junit_binary">Target type `junit_binary`</a>:
+
+A target type for JUnit-specific binaries. Identical to
+[`java_binary`](#target_java_binary) in the context of `.build_config` files,
+except the name.
+
+
+## <a name="target_java_annotation_processor">Target type \
+`java_annotation_processor`</a>:
+
+A target type for Java annotation processors. Identical to
+[`java_binary`](#target_java_binary) in the context of `.build_config` files,
+except for the name, and that it requires a `deps_info['main_class']` entry.
+
+
+## <a name="target_android_apk">Target type `android_apk`</a>:
+
+Corresponds to an Android APK. Inherits from the
+[`java_binary`](#target_java_binary) type and adds:
+
+* `deps_info['apk_path']`:
+Path to the raw, unsigned, APK generated by this target.
+
+* `deps_info['incremental_apk_path']`:
+Path to the raw, unsigned, incremental APK generated by this target.
+
+* `deps_info['incremental_install_json_path']`:
+Path to the JSON file with per-apk details for incremental install.
+See `build/android/gyp/incremental/write_installer_json.py` for more
+details about its content.
+
+* `deps_info['non_native_packed_relocations']`:
+A string that is either "True" or "False" (why a string?). True to indicate
+that this uses packed relocations that may not be supported by the target
+Android system for this build (this generally requires the Chromium linker
+to be used to load the native libraries).
+
+* `deps_info['dist_jar']['all_interface_jars']`:
+For `android_apk` and `dist_jar` targets, a list of all interface jar files
+that will be merged into the final `.jar` file for distribution.
+
+* `deps_info['final_dex']['dependency_dex_files']`:
+The list of paths to all `deps_info['dex_path']` entries for all library
+dependencies for this APK.
+
+* `native['libraries']`
+List of native libraries for the primary ABI to be embedded in this APK.
+E.g. [ "libchrome.so" ] (i.e. this doesn't include any ABI sub-directory
+prefix).
+
+* `native['java_libraries_list']`
+The same list as `native['libraries']` as a string holding a Java source
+fragment, e.g. `"{\"chrome\"}"`, without any `lib` prefix, and `.so`
+suffix (as expected by `System.loadLibrary()`).
+
+* `native['second_abi_libraries']`
+List of native libraries for the secondary ABI to be embedded in this APK.
+Empty if only a single ABI is supported.
+
+* `native['secondary_abi_java_libraries_list']`
+The same list as `native['second_abi_libraries']` as a Java source string.
+
+* `assets`
+A list of assets stored compressed in the APK. Each entry has the format
+`<source-path>:<destination-path>`, where `<source-path>` is relative to
+`$CHROMIUM_OUTPUT_DIR`, and `<destination-path>` is relative to `//assets/`
+within the APK.
+
+NOTE: Not to be confused with the `deps_info['assets']` dictionary that
+belongs to `android_assets` targets only.
+
+* `uncompressed_assets`
+A list of uncompressed assets stored in the APK. Each entry has the format
+`<source-path>:<destination-path>` too.
+
+* `compressed_locales_java_list`
+A string holding a Java source fragment that gives the list of locales stored
+compressed in the `//assets/` directory. E.g. `"{\"am\",\"ar\",\"en-US\"}"`.
+Note that the files will be stored with the `.pak` extension (e.g.
+`//assets/en-US.pak`).
+
+* `uncompressed_locales_java_list`
+A string holding a Java source fragment that gives the list of locales stored
+uncompressed in the `//assets/stored-locales/` directory. These are used for
+the System WebView feature only. Note that the files will be stored with the
+`.pak` extension (e.g. `//assets/stored-locales/en-US.pak`).
+
+* `extra_android_manifests`
+A list of `deps_configs['android_manifest']` entries, for all resource
+dependencies for this target. I.e. a list of paths to manifest files for
+all the resources in this APK. These will be merged with the root manifest
+file to generate the final one used to build the APK.
+
+* `java_resources_jars`
+This is a list of `.jar` files whose *Java* resources should be included in
+the final APK. For example, this is used to copy the `.res` files from the
+EMMA Coverage tool. The copy will omit any `.class` file and the top-level
+`//meta-inf/` directory from the input jars. Everything else will be copied
+into the final APK as-is.
+
+NOTE: This has nothing to do with *Android* resources.
+
+* `jni['all_source']`
+The list of all `deps_info['java_sources_file']` entries for all library
+dependencies for this APK. Note: this is a list of files, where each file
+contains a list of Java source files. This is used for JNI registration.
+
+* `deps_info['proguard_all_configs']`:
+The collection of all `deps_info['proguard_configs']` values from this target
+and all its dependencies.
+
+* `deps_info['proguard_all_extra_jars']`:
+The collection of all `deps_info['extra_classpath_jars']` values from all
+dependencies.
+
+## <a name="target_dist_aar">Target type `dist_aar`</a>:
+
+This type corresponds to a target used to generate an `.aar` archive for
+distribution. The archive's content is determined by the target's dependencies.
+
+This always has the following entries:
+
+ * `deps_info['supports_android']` (always True).
+ * `deps_info['requires_android']` (always True).
+ * `deps_info['proguard_configs']` (optional).
+
+
+## <a name="target_dist_jar">Target type `dist_jar`</a>:
+
+This type is similar to [`dist_aar`](#target_dist_aar) but is not
+Android-specific, and used to create a `.jar` file that can be later
+redistributed.
+
+This always has the following entries:
+
+ * `deps_info['proguard_enabled']` (False by default).
+ * `deps_info['proguard_configs']` (optional).
+ * `deps_info['supports_android']` (True by default).
+ * `deps_info['requires_android']` (False by default).
+
+
+
+## <a name="target_resource_rewriter">Target type `resource_rewriter`</a>:
+
+The ResourceRewriter Java class is in charge of rewriting resource IDs at
+runtime, for the benefit of the System WebView feature. This is a special
+target type for it.
+
+Its `.build_config` only keeps a list of dependencies in its
+`deps_info['deps_configs']` key.
+
+## <a name="dict_javac">The `deps_info['javac']` dictionary</a>:
+
+This dictionary appears in Java-related targets (e.g. `java_library`,
+`android_apk` and others), and contains information related to the compilation
+of Java sources, class files, and jars.
+
+* `javac['srcjars']`
+For `java_library` targets, this is the list of all `deps_info['srcjar_path']`
+from all resource dependencies for the current target (and these contain
+corresponding R.java source files). For other target types, this is an empty
+list.
+
+* `javac['resource_packages']`
+For `java_library` targets, this is the list of package names for all resource
+dependencies for the current target. Order must match the one from
+`javac['srcjars']`. For other target types, this key does not exist.
+
+* `javac['classpath']`
+The classpath used to compile this target when annotation processors are
+present.
+
+* `javac['interface_classpath']`
+The classpath used to compile this target when annotation processors are
+not present. These are also always used to know when a target needs to be
+rebuilt.
+
+* `javac['processor_classpath']`
+The classpath listing the jars used for annotation processors. I.e. sent as
+`-processorpath` when invoking `javac`.
+
+* `javac['processor_classes']`
+The list of annotation processor main classes. I.e. sent as `-processor` when
+invoking `javac`.
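+
+For illustration, a sketch (not the actual build code) of how these keys could
+map onto a `javac` command line:
+
+    cmd = ['javac', '-classpath', ':'.join(javac['classpath'])]
+    if javac['processor_classes']:
+      cmd += ['-processorpath', ':'.join(javac['processor_classpath'])]
+      cmd += ['-processor', ','.join(javac['processor_classes'])]
+    # ...followed by the source files, including those listed in
+    # javac['srcjars'] once extracted.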
+
+--------------- END_MARKDOWN ---------------------------------------------------
"""
import itertools
@@ -41,6 +500,25 @@ _ROOT_TYPES = ('android_apk', 'java_binary',
_RESOURCE_TYPES = ('android_assets', 'android_resources')
+def _ExtractMarkdownDocumentation(input_text):
+ """Extract Markdown documentation from a list of input strings lines.
+
+ This generates a list of strings extracted from |input_text|, by looking
+ for '-- BEGIN_MARKDOWN --' and '-- END_MARKDOWN --' line markers."""
+ in_markdown = False
+ result = []
+ for line in input_text.splitlines():
+ if in_markdown:
+ if '-- END_MARKDOWN --' in line:
+ in_markdown = False
+ else:
+ result.append(line)
+ else:
+ if '-- BEGIN_MARKDOWN --' in line:
+ in_markdown = True
+
+ return result
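+
+# Note: _ExtractMarkdownDocumentation(__doc__) returns only the lines between
+# the BEGIN_MARKDOWN and END_MARKDOWN markers of this module's docstring,
+# which is what --generate-markdown-format-doc prints (see below).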
+
class AndroidManifest(object):
def __init__(self, path):
self.path = path
@@ -84,7 +562,7 @@ def DepsOfType(wanted_type, configs):
def GetAllDepsConfigsInOrder(deps_config_paths):
def GetDeps(path):
- return set(GetDepConfig(path)['deps_configs'])
+ return GetDepConfig(path)['deps_configs']
return build_utils.GetSortedTransitiveDependencies(deps_config_paths, GetDeps)
@@ -348,10 +826,21 @@ def main(argv):
parser.add_option('--fail',
help='GN-list of error message lines to fail with.')
+ parser.add_option('--generate-markdown-format-doc', action='store_true',
+ help='Dump the Markdown .build_config format documentation '
+ 'then exit immediately.')
+
options, args = parser.parse_args(argv)
if args:
parser.error('No positional arguments should be given.')
+
+ if options.generate_markdown_format_doc:
+ doc_lines = _ExtractMarkdownDocumentation(__doc__)
+ for line in doc_lines:
+ print(line)
+ return 0
+
if options.fail:
parser.error('\n'.join(build_utils.ParseGnList(options.fail)))
diff --git a/chromium/build/android/gyp/write_ordered_libraries.py b/chromium/build/android/gyp/write_ordered_libraries.py
index e5a23bf6d05..70186c3f839 100755
--- a/chromium/build/android/gyp/write_ordered_libraries.py
+++ b/chromium/build/android/gyp/write_ordered_libraries.py
@@ -48,11 +48,11 @@ def CallReadElf(library_or_executable):
def GetDependencies(library_or_executable):
elf = CallReadElf(library_or_executable)
- deps = set()
+ deps = []
for l in _library_re.findall(elf):
p = _library_path_map.get(l)
if p is not None:
- deps.add(p)
+ deps.append(p)
return deps