summaryrefslogtreecommitdiffstats
path: root/chromium/build/android
diff options
context:
space:
mode:
Diffstat (limited to 'chromium/build/android')
-rw-r--r--chromium/build/android/BUILD.gn10
-rw-r--r--chromium/build/android/PRESUBMIT.py62
-rwxr-xr-xchromium/build/android/apk_operations.py107
-rw-r--r--chromium/build/android/bytecode/BUILD.gn3
-rw-r--r--chromium/build/android/bytecode/java/org/chromium/bytecode/ByteCodeProcessor.java21
-rw-r--r--chromium/build/android/bytecode/java/org/chromium/bytecode/ConstantPoolReferenceReader.java140
-rw-r--r--chromium/build/android/bytecode/java/org/chromium/bytecode/SplitCompatClassAdapter.java167
-rw-r--r--chromium/build/android/constant_pool_refs_to_keep_rules.py223
-rw-r--r--chromium/build/android/constant_pool_refs_to_keep_rules_test.py91
-rw-r--r--chromium/build/android/devil_chromium.json9
-rw-r--r--chromium/build/android/docs/README.md1
-rw-r--r--chromium/build/android/docs/class_verification_failures.md6
-rw-r--r--chromium/build/android/docs/coverage.md2
-rw-r--r--chromium/build/android/docs/java_toolchain.md152
-rwxr-xr-xchromium/build/android/generate_jacoco_report.py2
-rwxr-xr-xchromium/build/android/gradle/generate_gradle.py13
-rw-r--r--chromium/build/android/gradle/root.jinja2
-rwxr-xr-xchromium/build/android/gyp/bundletool.py4
-rwxr-xr-xchromium/build/android/gyp/bytecode_processor.py14
-rwxr-xr-xchromium/build/android/gyp/compile_resources.py91
-rwxr-xr-xchromium/build/android/gyp/create_app_bundle.py17
-rw-r--r--chromium/build/android/gyp/create_bundle_wrapper_script.pydeps1
-rwxr-xr-xchromium/build/android/gyp/create_java_binary_script.py16
-rwxr-xr-xchromium/build/android/gyp/desugar.py7
-rwxr-xr-xchromium/build/android/gyp/dex.py187
-rwxr-xr-xchromium/build/android/gyp/dexsplitter.py2
-rwxr-xr-xchromium/build/android/gyp/dist_aar.py40
-rw-r--r--chromium/build/android/gyp/dist_aar.pydeps1
-rwxr-xr-xchromium/build/android/gyp/filter_zip.py6
-rwxr-xr-xchromium/build/android/gyp/generate_linker_version_script.py7
-rwxr-xr-xchromium/build/android/gyp/jacoco_instr.py152
-rwxr-xr-xchromium/build/android/gyp/java_cpp_enum.py2
-rwxr-xr-xchromium/build/android/gyp/java_cpp_enum_tests.py2
-rwxr-xr-xchromium/build/android/gyp/javac.py7
-rwxr-xr-xchromium/build/android/gyp/lint.py3
-rwxr-xr-xchromium/build/android/gyp/main_dex_list.py111
-rwxr-xr-xchromium/build/android/gyp/merge_manifest.py2
-rwxr-xr-xchromium/build/android/gyp/prepare_resources.py3
-rwxr-xr-xchromium/build/android/gyp/proguard.py40
-rw-r--r--chromium/build/android/gyp/util/build_utils.py69
-rw-r--r--chromium/build/android/gyp/util/md5_check.py71
-rwxr-xr-xchromium/build/android/gyp/util/md5_check_test.py44
-rwxr-xr-xchromium/build/android/gyp/write_build_config.py23
-rw-r--r--chromium/build/android/incremental_install/BUILD.gn2
-rwxr-xr-xchromium/build/android/incremental_install/write_installer_json.py29
-rwxr-xr-xchromium/build/android/list_class_verification_failures.py11
-rw-r--r--chromium/build/android/pylib/base/environment_factory.py14
-rw-r--r--chromium/build/android/pylib/constants/__init__.py5
-rw-r--r--chromium/build/android/pylib/gtest/gtest_test_instance.py1
-rw-r--r--chromium/build/android/pylib/instrumentation/instrumentation_test_instance.py39
-rwxr-xr-xchromium/build/android/pylib/instrumentation/instrumentation_test_instance_test.py36
-rw-r--r--chromium/build/android/pylib/local/device/local_device_gtest_run.py64
-rw-r--r--chromium/build/android/pylib/local/device/local_device_instrumentation_test_run.py80
-rw-r--r--chromium/build/android/pylib/local/emulator/avd.py496
-rw-r--r--chromium/build/android/pylib/local/emulator/local_emulator_environment.py131
-rw-r--r--chromium/build/android/pylib/local/emulator/proto/__init__.py3
-rw-r--r--chromium/build/android/pylib/local/emulator/proto/avd.proto35
-rw-r--r--chromium/build/android/pylib/local/emulator/proto/avd_pb2.py218
-rw-r--r--chromium/build/android/pylib/local/machine/local_machine_junit_test_run.py12
-rw-r--r--chromium/build/android/pylib/results/flakiness_dashboard/json_results_generator.py6
-rw-r--r--chromium/build/android/pylib/utils/simpleperf.py11
-rwxr-xr-xchromium/build/android/resource_sizes.py45
-rwxr-xr-xchromium/build/android/test_runner.py32
-rw-r--r--chromium/build/android/test_runner.pydeps4
-rwxr-xr-xchromium/build/android/update_verification.py3
65 files changed, 1905 insertions, 1305 deletions
diff --git a/chromium/build/android/BUILD.gn b/chromium/build/android/BUILD.gn
index e2588265ec9..486fc1c2701 100644
--- a/chromium/build/android/BUILD.gn
+++ b/chromium/build/android/BUILD.gn
@@ -90,11 +90,15 @@ python_library("test_runner_py") {
":devil_chromium_py",
]
if (build_with_chromium) {
+ data += [
+ "//third_party/android_build_tools/bundletool/bundletool-all-0.10.3.jar",
+ "//tools/android/avd/proto/",
+ ]
data_deps +=
[ "//third_party/android_platform/development/scripts:stack_py" ]
- }
- if (is_asan) {
- data_deps += [ "//tools/android/asan/third_party:asan_device_setup" ]
+ if (is_asan) {
+ data_deps += [ "//tools/android/asan/third_party:asan_device_setup" ]
+ }
}
# Proguard is needed only when using apks (rather than native executables).
diff --git a/chromium/build/android/PRESUBMIT.py b/chromium/build/android/PRESUBMIT.py
index 32ccfac5047..8eb0792f624 100644
--- a/chromium/build/android/PRESUBMIT.py
+++ b/chromium/build/android/PRESUBMIT.py
@@ -10,8 +10,6 @@ details on the presubmit API built into depot_tools.
def CommonChecks(input_api, output_api):
- output = []
-
build_android_dir = input_api.PresubmitLocalPath()
def J(*dirs):
@@ -19,32 +17,38 @@ def CommonChecks(input_api, output_api):
return input_api.os_path.join(build_android_dir, *dirs)
build_pys = [
+ r'gn/.*\.py$',
r'gyp/.*\.py$',
- r'gn/.*\.py',
]
- output.extend(input_api.canned_checks.RunPylint(
- input_api,
- output_api,
- pylintrc='pylintrc',
- black_list=build_pys,
- extra_paths_list=[
- J(),
- J('gyp'),
- J('buildbot'),
- J('..', 'util', 'lib', 'common'),
- J('..', '..', 'third_party', 'catapult', 'common', 'py_trace_event'),
- J('..', '..', 'third_party', 'catapult', 'common', 'py_utils'),
- J('..', '..', 'third_party', 'catapult', 'devil'),
- J('..', '..', 'third_party', 'catapult', 'tracing'),
- J('..', '..', 'third_party', 'depot_tools'),
- J('..', '..', 'third_party', 'colorama', 'src'),
- J('..', '..', 'third_party', 'pymock'),
- ]))
- output.extend(input_api.canned_checks.RunPylint(
- input_api,
- output_api,
- white_list=build_pys,
- extra_paths_list=[J('gyp'), J('gn')]))
+ tests = []
+ tests.extend(
+ input_api.canned_checks.GetPylint(
+ input_api,
+ output_api,
+ pylintrc='pylintrc',
+ black_list=[
+ r'.*_pb2\.py',
+ ] + build_pys,
+ extra_paths_list=[
+ J(),
+ J('gyp'),
+ J('buildbot'),
+ J('..', 'util', 'lib', 'common'),
+ J('..', '..', 'third_party', 'catapult', 'common',
+ 'py_trace_event'),
+ J('..', '..', 'third_party', 'catapult', 'common', 'py_utils'),
+ J('..', '..', 'third_party', 'catapult', 'devil'),
+ J('..', '..', 'third_party', 'catapult', 'tracing'),
+ J('..', '..', 'third_party', 'depot_tools'),
+ J('..', '..', 'third_party', 'colorama', 'src'),
+ J('..', '..', 'third_party', 'pymock'),
+ ]))
+ tests.extend(
+ input_api.canned_checks.GetPylint(
+ input_api,
+ output_api,
+ white_list=build_pys,
+ extra_paths_list=[J('gyp'), J('gn')]))
# Disabled due to http://crbug.com/410936
#output.extend(input_api.canned_checks.RunUnitTestsInDirectory(
@@ -55,8 +59,8 @@ def CommonChecks(input_api, output_api):
'PYTHONPATH': build_android_dir,
'PYTHONDONTWRITEBYTECODE': '1',
})
- output.extend(
- input_api.canned_checks.RunUnitTests(
+ tests.extend(
+ input_api.canned_checks.GetUnitTests(
input_api,
output_api,
unit_tests=[
@@ -88,7 +92,7 @@ def CommonChecks(input_api, output_api):
],
env=pylib_test_env))
- return output
+ return input_api.RunTests(tests)
def CheckChangeOnUpload(input_api, output_api):
diff --git a/chromium/build/android/apk_operations.py b/chromium/build/android/apk_operations.py
index c6efb549f68..9df4897a196 100755
--- a/chromium/build/android/apk_operations.py
+++ b/chromium/build/android/apk_operations.py
@@ -18,7 +18,6 @@ import posixpath
import random
import re
import shlex
-import shutil
import sys
import tempfile
import textwrap
@@ -129,8 +128,6 @@ def _GenerateBundleApks(info,
def _InstallBundle(devices, bundle_apks, package_name, command_line_flags_file,
modules, fake_modules):
- # Path to push fake modules for Chrome to pick up.
- MODULES_SRC_DIRECTORY_PATH = '/data/local/tmp/modules'
# Path Chrome creates after validating fake modules. This needs to be cleared
# for pushed fake modules to be picked up.
SPLITCOMPAT_PATH = '/data/data/' + package_name + '/files/splitcompat'
@@ -151,98 +148,24 @@ def _InstallBundle(devices, bundle_apks, package_name, command_line_flags_file,
else:
logging.info('Skipped removing nonexistent %s', SPLITCOMPAT_PATH)
- def InstallFakeModules(device):
- try:
- temp_path = tempfile.mkdtemp()
-
- if not fake_modules:
- # Push empty temp_path to clear folder on device and update the cache.
- device.PushChangedFiles([(temp_path, MODULES_SRC_DIRECTORY_PATH)],
- delete_device_stale=True)
- return
-
- # Device-spec JSON is needed, so create that first.
- device_spec_filename = os.path.join(temp_path, 'device_spec.json')
- get_device_spec_cmd_args = [
- 'get-device-spec', '--adb=' + adb_wrapper.AdbWrapper.GetAdbPath(),
- '--device-id=' + device.serial, '--output=' + device_spec_filename
- ]
- bundletool.RunBundleTool(get_device_spec_cmd_args)
-
- # Extract fake modules to temp directory. For now, installation
- # requires running 'bundletool extract-apks'. Unfortunately, this leads
- # to unneeded compression of module files.
- extract_apks_cmd_args = [
- 'extract-apks', '--apks=' + bundle_apks,
- '--device-spec=' + device_spec_filename,
- '--modules=' + ','.join(fake_modules), '--output-dir=' + temp_path
- ]
- bundletool.RunBundleTool(extract_apks_cmd_args)
-
- # Push fake modules, with renames.
- fake_module_apks = set()
- for fake_module in fake_modules:
- found_master = False
-
- for filename in os.listdir(temp_path):
- # If file matches expected format, rename it to follow conventions
- # required by splitcompatting.
- match = re.match(r'%s-([a-z_0-9]+)\.apk' % fake_module, filename)
- local_path = os.path.join(temp_path, filename)
-
- if not match:
- continue
-
- module_suffix = match.group(1)
- remote = os.path.join(
- temp_path, '%s.config.%s.apk' % (fake_module, module_suffix))
- # Check if filename matches a master apk.
- if 'master' in module_suffix:
- if found_master:
- raise Exception('Expect 1 master apk file for %s' % fake_module)
- found_master = True
- remote = os.path.join(temp_path, '%s.apk' % fake_module)
-
- os.rename(local_path, remote)
- fake_module_apks.add(os.path.basename(remote))
-
- # Files that weren't renamed should not be pushed, remove from temp_path.
- for filename in os.listdir(temp_path):
- if filename not in fake_module_apks:
- os.remove(os.path.join(temp_path, filename))
-
- device.PushChangedFiles([(temp_path, MODULES_SRC_DIRECTORY_PATH)],
- delete_device_stale=True)
-
- finally:
- shutil.rmtree(temp_path, ignore_errors=True)
-
def Install(device):
ClearFakeModules(device)
- if fake_modules:
+ if fake_modules and ShouldWarnFakeFeatureModuleInstallFlag(device):
# Print warning if command line is not set up for fake modules.
- if ShouldWarnFakeFeatureModuleInstallFlag(device):
- msg = ('Command line has no %s: Fake modules will be ignored.' %
- FAKE_FEATURE_MODULE_INSTALL)
- print(_Colorize(msg, colorama.Fore.YELLOW + colorama.Style.BRIGHT))
-
- InstallFakeModules(device)
-
- # NOTE: For now, installation requires running 'bundletool install-apks'.
- # TODO(digit): Add proper support for bundles to devil instead, then use it.
- install_cmd_args = [
- 'install-apks', '--apks=' + bundle_apks, '--allow-downgrade',
- '--adb=' + adb_wrapper.AdbWrapper.GetAdbPath(),
- '--device-id=' + device.serial
- ]
- if modules:
- install_cmd_args += ['--modules=' + ','.join(modules)]
- bundletool.RunBundleTool(install_cmd_args)
+ msg = ('Command line has no %s: Fake modules will be ignored.' %
+ FAKE_FEATURE_MODULE_INSTALL)
+ print(_Colorize(msg, colorama.Fore.YELLOW + colorama.Style.BRIGHT))
+
+ device.Install(
+ bundle_apks,
+ modules=modules,
+ fake_modules=fake_modules,
+ allow_downgrade=True)
# Basic checks for |modules| and |fake_modules|.
# * |fake_modules| cannot include 'base'.
# * If |fake_modules| is given, ensure |modules| includes 'base'.
- # * They must be disjoint.
+ # * They must be disjoint (checked by device.Install).
modules_set = set(modules) if modules else set()
fake_modules_set = set(fake_modules) if fake_modules else set()
if BASE_MODULE in fake_modules_set:
@@ -250,8 +173,6 @@ def _InstallBundle(devices, bundle_apks, package_name, command_line_flags_file,
if fake_modules_set and BASE_MODULE not in modules_set:
raise Exception(
'\'-f FAKE\' must be accompanied by \'-m {}\''.format(BASE_MODULE))
- if fake_modules_set.intersection(modules_set):
- raise Exception('\'-m\' and \'-f\' entries must be disjoint.')
logging.info('Installing bundle.')
device_utils.DeviceUtils.parallel(devices).pMap(Install)
@@ -781,7 +702,11 @@ def _RunLogcat(device, package_name, mapping_path, verbose):
try:
logcat_processor.ProcessLine(line, fast)
except:
- sys.stderr.write('Failed to process line: ' + line)
+ sys.stderr.write('Failed to process line: ' + line + '\n')
+ # Skip stack trace for the common case of the adb server being
+ # restarted.
+ if 'unexpected EOF' in line:
+ sys.exit(1)
raise
if fast and nonce in line:
fast = False
diff --git a/chromium/build/android/bytecode/BUILD.gn b/chromium/build/android/bytecode/BUILD.gn
index 51fff21f18e..88517537f42 100644
--- a/chromium/build/android/bytecode/BUILD.gn
+++ b/chromium/build/android/bytecode/BUILD.gn
@@ -7,15 +7,12 @@ import("//build/config/android/rules.gni")
assert(current_toolchain == default_toolchain)
java_binary("java_bytecode_rewriter") {
- jacoco_never_instrument = true
java_files = [
"java/org/chromium/bytecode/AssertionEnablerClassAdapter.java",
"java/org/chromium/bytecode/ByteCodeProcessor.java",
"java/org/chromium/bytecode/ClassPathValidator.java",
- "java/org/chromium/bytecode/ConstantPoolReferenceReader.java",
"java/org/chromium/bytecode/CustomClassLoaderClassWriter.java",
"java/org/chromium/bytecode/CustomResourcesClassAdapter.java",
- "java/org/chromium/bytecode/SplitCompatClassAdapter.java",
"java/org/chromium/bytecode/ThreadAssertionClassAdapter.java",
"java/org/chromium/bytecode/TypeUtils.java",
]
diff --git a/chromium/build/android/bytecode/java/org/chromium/bytecode/ByteCodeProcessor.java b/chromium/build/android/bytecode/java/org/chromium/bytecode/ByteCodeProcessor.java
index 56b84845c72..4a56332c061 100644
--- a/chromium/build/android/bytecode/java/org/chromium/bytecode/ByteCodeProcessor.java
+++ b/chromium/build/android/bytecode/java/org/chromium/bytecode/ByteCodeProcessor.java
@@ -63,8 +63,6 @@ class ByteCodeProcessor {
private static ClassLoader sDirectClassPathClassLoader;
private static ClassLoader sFullClassPathClassLoader;
private static Set<String> sFullClassPathJarPaths;
- private static String sGenerateClassDepsPath;
- private static Set<String> sSplitCompatClassNames;
private static ClassPathValidator sValidator;
private static class EntryDataPair {
@@ -138,10 +136,6 @@ class ByteCodeProcessor {
chain = new CustomResourcesClassAdapter(
chain, reader.getClassName(), reader.getSuperName(), sFullClassPathClassLoader);
}
- if (!sSplitCompatClassNames.isEmpty()) {
- chain = new SplitCompatClassAdapter(
- chain, sSplitCompatClassNames, sFullClassPathClassLoader);
- }
reader.accept(chain, 0);
byte[] patchedByteCode = writer.toByteArray();
return EntryDataPair.create(entry.getName(), patchedByteCode);
@@ -262,7 +256,6 @@ class ByteCodeProcessor {
sShouldUseCustomResources = args[currIndex++].equals("--enable-custom-resources");
sShouldUseThreadAnnotations = args[currIndex++].equals("--enable-thread-annotations");
sShouldCheckClassPath = args[currIndex++].equals("--enable-check-class-path");
- sGenerateClassDepsPath = args[currIndex++];
int sdkJarsLength = Integer.parseInt(args[currIndex++]);
List<String> sdkJarPaths =
Arrays.asList(Arrays.copyOfRange(args, currIndex, currIndex + sdkJarsLength));
@@ -277,13 +270,6 @@ class ByteCodeProcessor {
currIndex += directJarsLength;
sDirectClassPathClassLoader = loadJars(directClassPathJarPaths);
- // Load list of class names that need to be fixed.
- int splitCompatClassNamesLength = Integer.parseInt(args[currIndex++]);
- sSplitCompatClassNames = new HashSet<>();
- sSplitCompatClassNames.addAll(Arrays.asList(
- Arrays.copyOfRange(args, currIndex, currIndex + splitCompatClassNamesLength)));
- currIndex += splitCompatClassNamesLength;
-
// Load all jars that are on the classpath for the input jar for analyzing class hierarchy.
sFullClassPathJarPaths = new HashSet<>();
sFullClassPathJarPaths.clear();
@@ -292,13 +278,6 @@ class ByteCodeProcessor {
sFullClassPathJarPaths.addAll(
Arrays.asList(Arrays.copyOfRange(args, currIndex, args.length)));
- // Write list of references from Java class constant pools to specified output file
- // sGenerateClassDepsPath. This is needed for keep rule generation for async DFMs.
- if (!sGenerateClassDepsPath.isEmpty()) {
- ConstantPoolReferenceReader.writeConstantPoolRefsToFile(
- sFullClassPathJarPaths, sGenerateClassDepsPath);
- }
-
sFullClassPathClassLoader = loadJars(sFullClassPathJarPaths);
sFullClassPathJarPaths.removeAll(directClassPathJarPaths);
diff --git a/chromium/build/android/bytecode/java/org/chromium/bytecode/ConstantPoolReferenceReader.java b/chromium/build/android/bytecode/java/org/chromium/bytecode/ConstantPoolReferenceReader.java
deleted file mode 100644
index e2cbd39785d..00000000000
--- a/chromium/build/android/bytecode/java/org/chromium/bytecode/ConstantPoolReferenceReader.java
+++ /dev/null
@@ -1,140 +0,0 @@
-// Copyright 2019 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package org.chromium.bytecode;
-
-import org.objectweb.asm.ClassReader;
-
-import java.io.BufferedInputStream;
-import java.io.BufferedWriter;
-import java.io.ByteArrayOutputStream;
-import java.io.FileInputStream;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.Arrays;
-import java.util.HashSet;
-import java.util.Set;
-import java.util.zip.ZipEntry;
-import java.util.zip.ZipInputStream;
-
-/**
- * Compiles list of references from all Java .class files in given jar paths by
- * reading from the constant pool, and writes this list to an output file.
- * This list is used for keep rule generation for maintaining compatibility between
- * async DFMs and synchronous modules.
- */
-public class ConstantPoolReferenceReader {
- private static final String CLASS_FILE_SUFFIX = ".class";
- private static final int BUFFER_SIZE = 16384;
-
- // Constants representing Java constant pool tags
- // See https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.4
- private static final int FIELD_REF_TAG = 9;
- private static final int METHOD_REF_TAG = 10;
- private static final int INTERFACE_METHOD_REF_TAG = 11;
-
- private static byte[] readAllBytes(InputStream inputStream) throws IOException {
- ByteArrayOutputStream buffer = new ByteArrayOutputStream();
- int numRead = 0;
- byte[] data = new byte[BUFFER_SIZE];
- while ((numRead = inputStream.read(data, 0, data.length)) != -1) {
- buffer.write(data, 0, numRead);
- }
- return buffer.toByteArray();
- }
-
- /**
- * Given a set of paths, generates references used to produce Proguard keep rules
- * necessary for asynchronous DFMs.
- * It reads all references stored in constant pools of Java classes from
- * the specified jar paths and writes them to an output file.
- * References written to the specified file can be converted to a
- * corresponding set of Proguard keep rules using the
- * constant_pool_refs_to_keep_rules.py script.
- *
- * @param jarPaths Set of paths specifying Java files to read constant pool
- * references from.
- * @param outputFilePath File path to write output to.
- */
- public static void writeConstantPoolRefsToFile(Set<String> jarPaths, String outputFilePath) {
- HashSet<String> classReferences = new HashSet<>();
-
- for (String jarPath : jarPaths) {
- try (ZipInputStream inputStream = new ZipInputStream(
- new BufferedInputStream(new FileInputStream(jarPath)))) {
- ZipEntry entry;
- while ((entry = inputStream.getNextEntry()) != null) {
- if (entry.isDirectory() || !entry.getName().endsWith(CLASS_FILE_SUFFIX)) {
- continue;
- }
- byte[] data = readAllBytes(inputStream);
- ClassReader reader = new ClassReader(data);
- classReferences.addAll(collectConstantPoolClassReferences(reader));
- }
- } catch (IOException e) {
- throw new RuntimeException(e);
- }
- }
-
- try {
- BufferedWriter writer = new BufferedWriter(new FileWriter(outputFilePath));
- for (String ref : classReferences) {
- writer.append(ref);
- writer.append("\n");
- }
- writer.close();
- } catch (IOException e) {
- throw new RuntimeException(e);
- }
- }
-
- /**
- * Given a ClassReader, return a set of all super classes, implemented interfaces and
- * members by reading from the associated class's constant pool.
- *
- * @param classReader .class file interface for reading the constant pool.
- */
- private static Set<String> collectConstantPoolClassReferences(ClassReader classReader) {
- char[] charBuffer = new char[classReader.getMaxStringLength()];
- HashSet<String> classReferences = new HashSet<>();
-
- classReferences.add(classReader.getSuperName());
- classReferences.addAll(Arrays.asList(classReader.getInterfaces()));
-
- // According to the Java spec, the constant pool is indexed from 1 to constant_pool_count -
- // 1. See https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.4
- StringBuilder refInfoString = new StringBuilder();
- for (int i = 1; i < classReader.getItemCount(); i++) {
- int offset = classReader.getItem(i);
- if (offset <= 0) {
- continue;
- }
- int constantType = classReader.readByte(offset - 1);
- if (offset > 0
- && (constantType == METHOD_REF_TAG || constantType == FIELD_REF_TAG
- || constantType == INTERFACE_METHOD_REF_TAG)) {
- // Read the corresponding class ref and member info from the constant pool.
- int classIndex = classReader.readUnsignedShort(offset);
- int classStartIndex = classReader.getItem(classIndex);
- // Class index is a 2-byte quantity, nameAndTypeIndex is stored sequentially after.
- int nameAndTypeIndex = classReader.readUnsignedShort(offset + 2);
- int nameAndTypeStartIndex = classReader.getItem(nameAndTypeIndex);
-
- // Get member's containing class's name, member's name, and member's details (type,
- // return type, and argument types).
- refInfoString.append(classReader.readUTF8(classStartIndex, charBuffer));
- refInfoString.append(",");
- refInfoString.append(classReader.readUTF8(nameAndTypeStartIndex, charBuffer));
- refInfoString.append(",");
- refInfoString.append(classReader.readUTF8(nameAndTypeStartIndex + 2, charBuffer));
-
- classReferences.add(refInfoString.toString());
- refInfoString.setLength(0);
- }
- }
-
- return classReferences;
- }
-} \ No newline at end of file
diff --git a/chromium/build/android/bytecode/java/org/chromium/bytecode/SplitCompatClassAdapter.java b/chromium/build/android/bytecode/java/org/chromium/bytecode/SplitCompatClassAdapter.java
deleted file mode 100644
index 17c271642c0..00000000000
--- a/chromium/build/android/bytecode/java/org/chromium/bytecode/SplitCompatClassAdapter.java
+++ /dev/null
@@ -1,167 +0,0 @@
-// Copyright 2019 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package org.chromium.bytecode;
-
-import static org.objectweb.asm.Opcodes.ACC_PROTECTED;
-import static org.objectweb.asm.Opcodes.ALOAD;
-import static org.objectweb.asm.Opcodes.INVOKEINTERFACE;
-import static org.objectweb.asm.Opcodes.INVOKESPECIAL;
-import static org.objectweb.asm.Opcodes.INVOKESTATIC;
-import static org.objectweb.asm.Opcodes.RETURN;
-
-import static org.chromium.bytecode.TypeUtils.CONTEXT;
-import static org.chromium.bytecode.TypeUtils.VOID;
-
-import org.objectweb.asm.ClassVisitor;
-import org.objectweb.asm.MethodVisitor;
-import org.objectweb.asm.Opcodes;
-
-import java.util.Set;
-
-/**
- * A ClassVisitor for injecting ModuleInstaller.initActivity(activity) method call
- * into Activity's attachBaseContext() method. The goal is to eventually invoke
- * SplitCompat.install() method if running with the binary that has bundle support
- * enabled. This needs to happen for activities that were not built with SplitCompat
- * support.
- */
-class SplitCompatClassAdapter extends ClassVisitor {
- private static final String ANDROID_APP_ACTIVITY_CLASS_NAME = "android/app/Activity";
- private static final String ATTACH_BASE_CONTEXT_METHOD_NAME = "attachBaseContext";
- private static final String ATTACH_BASE_CONTEXT_DESCRIPTOR =
- TypeUtils.getMethodDescriptor(VOID, CONTEXT);
-
- private static final String MODULE_INSTALLER_CLASS_NAME =
- "org/chromium/components/module_installer/ModuleInstaller";
- private static final String GET_INSTANCE_METHOD_NAME = "getInstance";
- private static final String GET_INSTANCE_DESCRIPTOR =
- TypeUtils.getMethodDescriptor(MODULE_INSTALLER_CLASS_NAME);
- private static final String INIT_ACTIVITY_METHOD_NAME = "initActivity";
- private static final String INIT_ACTIVITY_DESCRIPTOR =
- TypeUtils.getMethodDescriptor(VOID, CONTEXT);
-
- private boolean mShouldTransform;
-
- private Set<String> mClassNames;
-
- private ClassLoader mClassLoader;
-
- /**
- * Creates instance of SplitCompatClassAdapter.
- *
- * @param visitor
- * @param classNames Names of classes into which the attachBaseContext method will be
- * injected. Currently, we'll only consider classes for bytecode rewriting only if
- * they inherit directly from android.app.Activity & not already contain
- * attachBaseContext method.
- * @param classLoader
- */
- SplitCompatClassAdapter(ClassVisitor visitor, Set<String> classNames, ClassLoader classLoader) {
- super(Opcodes.ASM5, visitor);
-
- mShouldTransform = false;
- mClassNames = classNames;
- mClassLoader = classLoader;
- }
-
- @Override
- public void visit(int version, int access, String name, String signature, String superName,
- String[] interfaces) {
- super.visit(version, access, name, signature, superName, interfaces);
-
- if (mClassNames.contains(name)) {
- if (!isSubclassOfActivity(name)) {
- throw new RuntimeException(name
- + " should be transformed but does not inherit from android.app.Activity");
- }
-
- mShouldTransform = true;
- }
- }
-
- @Override
- public MethodVisitor visitMethod(
- int access, String name, String descriptor, String signature, String[] exceptions) {
- // Check if current method matches attachBaseContext & we're supposed to emit code - if so,
- // fail.
- if (mShouldTransform && name.equals(ATTACH_BASE_CONTEXT_METHOD_NAME)) {
- throw new RuntimeException(ATTACH_BASE_CONTEXT_METHOD_NAME + " method already exists");
- }
-
- return super.visitMethod(access, name, descriptor, signature, exceptions);
- }
-
- @Override
- public void visitEnd() {
- if (mShouldTransform) {
- // If we reached this place, it means we're rewriting a class that inherits from
- // Activity and there was no exception thrown due to existence of attachBaseContext
- // method - emit code.
- emitAttachBaseContext();
- }
-
- super.visitEnd();
- }
-
- /**
- * Generates:
- *
- * <pre>
- * protected void attachBaseContext(Context base) {
- * super.attachBaseContext(base);
- * ModuleInstaller.getInstance().initActivity(this);
- * }
- * </pre>
- */
- private void emitAttachBaseContext() {
- MethodVisitor mv = super.visitMethod(ACC_PROTECTED, ATTACH_BASE_CONTEXT_METHOD_NAME,
- ATTACH_BASE_CONTEXT_DESCRIPTOR, null, null);
- mv.visitCode();
- // Push "this" on stack.
- mv.visitVarInsn(ALOAD, 0);
- // Push first method parameter on stack (Context).
- mv.visitVarInsn(ALOAD, 1);
- // Pop argument from stack (Context).
- // Pop target object from stack ("this").
- // Calls attachBaseContext.
- mv.visitMethodInsn(INVOKESPECIAL, ANDROID_APP_ACTIVITY_CLASS_NAME,
- ATTACH_BASE_CONTEXT_METHOD_NAME, ATTACH_BASE_CONTEXT_DESCRIPTOR, false);
- // Push return value on stack (ModuleInstaller).
- // Calls getInstance.
- mv.visitMethodInsn(INVOKESTATIC, MODULE_INSTALLER_CLASS_NAME, GET_INSTANCE_METHOD_NAME,
- GET_INSTANCE_DESCRIPTOR, true);
- // Push "this" on stack.
- mv.visitVarInsn(ALOAD, 0);
- // Pop argument from stack ("this").
- // Pop target object from stack (ModuleInstaller).
- // Calls initActivity.
- mv.visitMethodInsn(INVOKEINTERFACE, MODULE_INSTALLER_CLASS_NAME, INIT_ACTIVITY_METHOD_NAME,
- INIT_ACTIVITY_DESCRIPTOR, true);
- mv.visitInsn(RETURN);
- // Max stack size = 2 (Only push at most 2 before popping).
- // Max locals = 2 ("this" and 1 parameter).
- mv.visitMaxs(2, 2);
- mv.visitEnd();
- }
-
- /**
- * Checks whether passed in class inherits from android.app.Activity.
- * @param name Name of the class to be checked.
- * @return true if class inherits from android.app.Activity, false otherwise.
- */
- private boolean isSubclassOfActivity(String name) {
- Class<?> activityClass = loadClass(ANDROID_APP_ACTIVITY_CLASS_NAME);
- Class<?> candidateClass = loadClass(name);
- return activityClass.isAssignableFrom(candidateClass);
- }
-
- private Class<?> loadClass(String className) {
- try {
- return mClassLoader.loadClass(className.replace('/', '.'));
- } catch (ClassNotFoundException e) {
- throw new RuntimeException(e);
- }
- }
-}
diff --git a/chromium/build/android/constant_pool_refs_to_keep_rules.py b/chromium/build/android/constant_pool_refs_to_keep_rules.py
deleted file mode 100644
index b2452d7261e..00000000000
--- a/chromium/build/android/constant_pool_refs_to_keep_rules.py
+++ /dev/null
@@ -1,223 +0,0 @@
-# Copyright 2019 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-This script is used to convert a list of references to corresponding ProGuard
-keep rules, for the purposes of maintaining compatibility between async DFMs
-and synchronously proguarded modules.
-This script take an input file generated from
-//build/android/bytecode/java/org/chromium/bytecode/ByteCodeProcessor.java
-during the build phase of an async module.
-"""
-
-from collections import defaultdict
-import argparse
-import sys
-
-# Classes in _IGNORED_PACKAGES do not need explicit keep rules because they are
-# system APIs and are already included in ProGuard configs.
-_IGNORED_PACKAGES = ['java', 'android', 'org.w3c', 'org.xml', 'dalvik']
-
-# Classes in _WHITELIST_PACKAGES are support libraries compiled into chrome
-# that must bypass the _IGNORED_PACKAGES.
-_WHITELIST_PACKAGES = ['android.support']
-
-# TODO(https://crbug.com/968769): Filter may be too broad.
-# Classes in _DFM_FEATURES will be excluded from "keep all members" rule.
-_DFM_FEATURES = [
- 'org.chromium.chrome.autofill_assistant', 'org.chromium.chrome.tab_ui',
- 'org.chromium.chrome.browser.tasks.tab_management', 'org.chromium.chrome.vr'
-]
-
-# Mapping for translating Java bytecode type identifiers to source code type
-# identifiers.
-_TYPE_IDENTIFIER_MAP = {
- 'V': 'void',
- 'Z': 'boolean',
- 'B': 'byte',
- 'S': 'short',
- 'C': 'char',
- 'I': 'int',
- 'J': 'long',
- 'F': 'float',
- 'D': 'double',
-}
-
-
-# Translates DEX TypeDescriptor of the first type found in a given string to
-# its source code type identifier, as described in
-# https://source.android.com/devices/tech/dalvik/dex-format#typedescriptor,
-# and returns the translated type and the starting index of the next type
-# (if present).
-def translate_single_type(typedesc):
- array_count = 0
- translated = ''
- next_index = 0
-
- # In the constant pool, fully qualified names (prefixed by 'L') have a
- # trailing ';' if they are describing the type/return type of a symbol,
- # or the type of arguments passed to a symbol. TypeDescriptor representing
- # primitive types do not have trailing ';'s in any circumstances.
- for i, c in enumerate(typedesc):
- if c == '[':
- array_count += 1
- continue
- if c == 'L':
- # Fully qualified names have no trailing ';' if they are describing the
- # containing class of a reference.
- next_index = typedesc.find(';')
- if next_index == -1:
- next_index = len(typedesc)
- translated = typedesc[i + 1:next_index]
- break
- else:
- translated = _TYPE_IDENTIFIER_MAP[c]
- next_index = i
- break
-
- translated += '[]' * array_count
- return translated, next_index + 1
-
-
-# Convert string of method argument types read from constant pool to
-# corresponding list of srouce code type identifiers.
-def parse_args_list(args_list):
- parsed_args = []
- start_index = 0
-
- while start_index < len(args_list):
- args_list = args_list[start_index:]
- translated_arg, start_index = translate_single_type(args_list)
- parsed_args.append(translated_arg)
-
- return parsed_args
-
-
-def add_to_refs(class_name, keep_entry, dep_refs):
- # Add entry to class's keep rule if entry is not the empty string
- if class_name in dep_refs and keep_entry:
- dep_refs[class_name].append(keep_entry)
- else:
- dep_refs[class_name] = [keep_entry]
-
-
-def should_include_class_path(class_path):
- """ Check whether a class_path should be added as keep rule.
- Conditions:
- - Class is auto-generated (Lambdas/Nested, for example $)
- - Class is not in a DFM Module
- - Class is not in a black/white listed package
- """
- nested_class = '$' in class_path
- not_in_dfm = all(not class_path.startswith(f) for f in _DFM_FEATURES)
- allowed_packages = not (any(
- class_path.startswith(p)
- for p in _IGNORED_PACKAGES) and all(not class_path.startswith(p)
- for p in _WHITELIST_PACKAGES))
- return nested_class or (not_in_dfm and allowed_packages)
-
-
-def main(argv):
- dep_refs = defaultdict(list)
- extended_and_implemented_classes = set()
-
- parser = argparse.ArgumentParser()
- parser.add_argument(
- '--input-file',
- required=True,
- help='Path to constant pool reference output.')
- parser.add_argument(
- '--output-file',
- required=True,
- help='Path to write corresponding keep rules to')
- args = parser.parse_args(argv[1:])
-
- with open(args.input_file, 'r') as constant_pool_refs:
- for line in constant_pool_refs:
- line = line.rstrip().replace('/', '.')
- # Ignore any references specified by the list of
- # _IGNORED_PACKAGES and not in _WHITELIST_PACKAGES.
- if (any(line.startswith(p) for p in _IGNORED_PACKAGES)
- and all(not line.startswith(p) for p in _WHITELIST_PACKAGES)):
- continue
-
- reflist = line.split(',')
-
- # Lines denoting super classes and implemented interface references do
- # not contain additional information and thus have reflist size 1.
- # Store these as a separate set as they require full keep rules.
- if len(reflist) == 1:
- extended_and_implemented_classes.add(reflist[0])
- continue
-
- class_name = reflist[0]
- member_name = reflist[1]
- member_info = reflist[2]
- keep_entry = ''
-
- # When testing with the VR module, all class names read from constant
- # pool output that were prefixed with '[' matched references to the
- # overridden clone() method of the Object class. These seem to correspond
- # to Java enum types defined within classes.
- # It is not entirely clear whether or not this always represents
- # an enum, why enums would be represented as such in the constant pool,
- # or how we should go about keeping these references. For the moment,
- # ignoring these references does not impact compatibility between
- # modules.
- if class_name.startswith('['):
- continue
-
- # Ignore R(esources) files that are from the same module.
- if ('$' in class_name
- and any(class_name.startswith(f) for f in _DFM_FEATURES)):
- continue
-
- # If member_info starts with '(', member is a method, otherwise member
- # is a field.
- # Format keep entries as per ProGuard documentation
- # guardsquare.com/en/products/proguard/manual/usage#classspecification.
- if member_info.startswith('('):
- args_list, return_type = member_info.split(')')
- args_list = parse_args_list(args_list[1:])
- if member_name == '<init>':
- # No return type specified for constructors.
- return_type = ''
- else:
- return_type = translate_single_type(return_type)[0]
-
- # Include types of function arguments.
- for arg_type in args_list:
- if should_include_class_path(arg_type):
- extended_and_implemented_classes.add(arg_type)
-
- # Include the actual class when it's a constructor.
- if member_name == '<init>':
- if should_include_class_path(class_name):
- extended_and_implemented_classes.add(class_name)
- continue
-
- keep_entry = '%s %s(%s);' % (return_type, member_name,
- ', '.join(args_list))
- else:
- keep_entry = '%s %s;' % (translate_single_type(member_info)[0],
- member_name)
-
- dep_refs[class_name].append(keep_entry)
-
- with open(args.output_file, 'w') as keep_rules:
- # Write super classes and implemented interfaces to keep rules.
- for super_class in sorted(extended_and_implemented_classes):
- keep_rules.write('-keep class %s { *; }\n' % (super_class.rstrip()))
- keep_rules.write('\n')
- # Write all other class references to keep rules.
- for c in sorted(dep_refs.iterkeys()):
- if c in extended_and_implemented_classes:
- continue
- class_keeps = '\n '.join(dep_refs[c])
- keep_rules.write('-keep class %s {\n %s\n}\n' % (c, class_keeps))
- keep_rules.write('\n')
-
-
-if __name__ == '__main__':
- main(sys.argv)
diff --git a/chromium/build/android/constant_pool_refs_to_keep_rules_test.py b/chromium/build/android/constant_pool_refs_to_keep_rules_test.py
deleted file mode 100644
index 6f281e26738..00000000000
--- a/chromium/build/android/constant_pool_refs_to_keep_rules_test.py
+++ /dev/null
@@ -1,91 +0,0 @@
-# Copyright 2019 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import unittest
-import re
-import os
-
-
-class TestProguardRuleGeneration(unittest.TestCase):
- """
- This script is used to test a ProGuard keep rules for the purposes
- of maintaining compatibility between async DFMs and synchronously
- proguarded modules.
-
- The rules are often generated by constant_pool_refs_to_keep_rules.py
-
- This test can be run manually. Example:
- python build/android/constant_pool_refs_to_keep_rules_test.py -v
- """
-
- # Make sure this variable is set accordingly.
- # It should point to a proguard file.
- PROGUARD_FILE_PATH = os.path.join(
- os.path.dirname(__file__),
- "../../chrome/android/features/tab_ui/proguard_async.flags")
-
- def test_TabUI_HasRules(self):
- """
- Ensures that a few of the rules used in tabs_ui module are included.
- Although this is far from 100% deterministic, these rules are
- created by code that exercise different parts of the rule generation code.
- """
-
- rules = set()
- with open(self.PROGUARD_FILE_PATH, 'r') as proguard_rules:
- for line in proguard_rules:
- if line.startswith('-keep'):
- rule = re.search('class (.+?) {', line).group(1)
- rules.add(rule)
-
- # The following rules test most of the use cases for
- # rules that can be added automatically.
- self.assertIn('org.chromium.ui.modelutil.PropertyModel', rules)
- self.assertIn('org.chromium.ui.modelutil.PropertyModel', rules)
- self.assertIn('org.chromium.ui.modelutil.PropertyKey', rules)
- self.assertIn('org.chromium.chrome.browser.toolbar.ToolbarManager', rules)
- self.assertIn('org.chromium.base.Supplier', rules)
- self.assertIn('android.support.v7.widget.helper.ItemTouchHelper', rules)
- self.assertIn(
- 'android.support.v7.widget.helper.ItemTouchHelper$SimpleCallback',
- rules)
- self.assertIn('android.support.v7.widget.helper.ItemTouchHelper$Callback',
- rules)
- self.assertIn('android.support.v4.content.ContextCompat', rules)
- self.assertIn('android.support.v7.widget.GridLayoutManager', rules)
- self.assertIn('android.support.v4.content.res.ResourcesCompat', rules)
- self.assertIn(
- 'org.chromium.chrome.browser.tasks.tabgroup.TabGroupModelFilter', rules)
- self.assertIn('android.support.v7.widget.RecyclerView$ViewHolder', rules)
- self.assertIn('android.support.v7.widget.RecyclerView', rules)
- self.assertIn('org.chromium.ui.modelutil.SimpleRecyclerViewMcpBase', rules)
- self.assertIn('org.chromium.ui.modelutil.RecyclerViewAdapter', rules)
-
- # The following rules need to be added manually.
- self.assertNotIn(
- 'org.chromium.chrome.browser.fullscreen.ChromeFullscreenManager' +
- '$FullscreenListener$$CC', rules)
- self.assertNotIn(
- 'org.chromium.chrome.browser.widget.bottomsheet.BottomSheet' +
- '$BottomSheetContent$$CC', rules)
- self.assertNotIn('org.chromium.ui.widget.RoundedCornerImageView', rules)
- self.assertNotIn(
- 'android.support.v4.graphics.drawable.RoundedBitmapDrawable', rules)
-
- def test_TabUI_HasNoDuplicateRules(self):
- """
- Ensures that there are no duplicate keep rules
- """
-
- rules = set()
- with open(self.PROGUARD_FILE_PATH, 'r') as proguard_rules:
- for line in proguard_rules:
- if line.startswith('-keep'):
- rule = re.search('class (.+?) {', line).group(1)
- self.assertNotIn(rule, rules)
- rules.add(rule)
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/chromium/build/android/devil_chromium.json b/chromium/build/android/devil_chromium.json
index 6cb7608c9ee..4734deccf34 100644
--- a/chromium/build/android/devil_chromium.json
+++ b/chromium/build/android/devil_chromium.json
@@ -125,6 +125,15 @@
]
}
}
+ },
+ "bundletool": {
+ "file_info": {
+ "default": {
+ "local_paths": [
+ "../../third_party/android_build_tools/bundletool/bundletool-all-0.10.3.jar"
+ ]
+ }
+ }
}
}
}
diff --git a/chromium/build/android/docs/README.md b/chromium/build/android/docs/README.md
index b6f0a6e9c24..cecccf22f2a 100644
--- a/chromium/build/android/docs/README.md
+++ b/chromium/build/android/docs/README.md
@@ -3,6 +3,7 @@
* [android_app_bundles.md](android_app_bundles.md)
* [build_config.md](build_config.md)
* [coverage.md](coverage.md)
+* [java_toolchain.md](java_toolchain.md)
* [lint.md](lint.md)
* [life_of_a_resource.md](life_of_a_resource.md)
* [../incremental_install/README.md](../incremental_install/README.md)
diff --git a/chromium/build/android/docs/class_verification_failures.md b/chromium/build/android/docs/class_verification_failures.md
index bf9a8a2952c..36ee97ee712 100644
--- a/chromium/build/android/docs/class_verification_failures.md
+++ b/chromium/build/android/docs/class_verification_failures.md
@@ -127,7 +127,7 @@ look as follows:
* These need to exist in a separate class so that Android framework can successfully verify
* classes without encountering the new APIs.
*/
-@DoNotInline
+@VerifiesOnOMR1
@TargetApi(Build.VERSION_CODES.O_MR1)
public class ApiHelperForOMR1 {
private ApiHelperForOMR1() {}
@@ -136,8 +136,8 @@ public class ApiHelperForOMR1 {
}
```
-* `@DoNotInline`: this is a chromium-defined annotation to tell proguard (and
- similar tools) not to inline this class or its methods (since that would
+* `@VerifiesOnO_MR1`: this is a chromium-defined annotation to tell proguard
+ (and similar tools) not to inline this class or its methods (since that would
defeat the point of out-of-lining!)
* `@TargetApi(Build.VERSION_CODES.O_MR1)`: this tells Android Lint it's OK to
use OMR1 APIs since this class is only used on OMR1 and above. Substitute
diff --git a/chromium/build/android/docs/coverage.md b/chromium/build/android/docs/coverage.md
index 6ea548aed78..35cd80988a1 100644
--- a/chromium/build/android/docs/coverage.md
+++ b/chromium/build/android/docs/coverage.md
@@ -17,7 +17,7 @@ class files and runtime **.exec** files. Then we need to process them using the
```gn
target_os = "android"
- jacoco_coverage = true
+ use_jacoco_coverage = true
```
Now when building, pre-instrumented files will be created in the build directory.
diff --git a/chromium/build/android/docs/java_toolchain.md b/chromium/build/android/docs/java_toolchain.md
new file mode 100644
index 00000000000..fed7654b526
--- /dev/null
+++ b/chromium/build/android/docs/java_toolchain.md
@@ -0,0 +1,152 @@
+# Building Dex
+
+This doc aims to describe the Chrome build process that takes a set of `.java`
+files and turns them into a `classes.dex` file.
+
+[TOC]
+
+## Core GN Target Types
+
+The following have `supports_android` and `requires_android` set to false by
+default:
+* `java_library()`: Compiles `.java` -> `.jar`
+* `java_prebuilt()`: Imports a prebuilt `.jar` file.
+
+The following have `supports_android` and `requires_android` set to true. They
+also have a default `jar_excluded_patterns` set (more on that later):
+* `android_library()`
+* `android_java_prebuilt()`
+
+All targets names must end with "_java" so that the build system can distinguish
+them from non-java targets (or [other variations](https://cs.chromium.org/chromium/src/build/config/android/internal_rules.gni?rcl=ec2c17d7b4e424e060c3c7972842af87343526a1&l=20)).
+
+## Step 1a: Compile with javac
+
+This step is the only step that does not apply to prebuilt targets.
+
+* All `.java` files in a target are compiled by `javac` into `.class` files.
+ * This includes `.java` files that live within `.srcjar` files, referenced
+ through `srcjar_deps`.
+* The `classpath` used when compiling a target is comprised of `.jar` files of
+ its deps.
+ * When deps are library targets, the Step 1 `.jar` file is used.
+ * When deps are prebuilt targets, the original `.jar` file is used.
+ * All `.jar` processing done in subsequent steps does not impact compilation
+ classpath.
+* `.class` files are zipped into an output `.jar` file.
+* There is **no support** for incremental compilation at this level.
+ * If one source file changes within a library, then the entire library is
+ recompiled.
+ * Prefer smaller targets to avoid slow compiles.
+
+## Step 1b: Compile with ErrorProne
+
+This step can be disabled via GN arg: `use_errorprone_java_compiler = false`
+
+* Concurrently with step 1a: [ErrorProne] compiles java files and checks for bug
+ patterns, including some [custom to Chromium][ep_plugins].
+* ErrorProne used to replace step 1a, but was changed to a concurrent step after
+ being identified as being slower.
+
+[ErrorProne]: https://errorprone.info/
+[ep_plugins]: /tools/android/errorprone_plugin/
+
+## Step 2: Creating an .interface.jar
+
+This step happens in parallel with subsequent steps.
+
+* `//third_party/ijar` converts the `.jar` into an `.interface.jar`, which is a
+ copy of the input with all non-public symbols and function bodies removed.
+* Dependant targets use `.interface.jar` files to skip having to be rebuilt
+ when only private implementation details change.
+
+## Step 3: Bytecode Processing
+
+* `//build/android/bytecode` runs on the compiled `.jar` in order to:
+ * Enable Java assertions (when dcheck is enabled).
+ * Assert that libraries have properly declared `deps`.
+
+## Step 4: Desugaring
+
+This step happens only when targets have `supports_android = true`.
+
+* `//third_party/bazel/desugar` converts certain Java 8 constructs, such as
+ lambdas and default interface methods, into constructs that are compatible
+ with Java 7.
+
+## Step 5: Filtering
+
+This step happens only when targets that have `jar_excluded_patterns` or
+`jar_included_patterns` set (e.g. all `android_` targets).
+
+* Remove `.class` files that match the filters from the `.jar`. These `.class`
+ files are generally those that are re-created with different implementations
+ further on in the build process.
+ * E.g.: `R.class` files - a part of [Android Resources].
+ * E.g.: `GEN_JNI.class` - a part of our [JNI] glue.
+ * E.g.: `AppHooksImpl.class` - how `chrome_java` wires up different
+ implementations for [non-public builds][apphooks].
+
+[JNI]: /base/android/jni_generator/README.md
+[Android Resources]: life_of_a_resource.md
+[apphooks]: /chrome/android/java/src/org/chromium/chrome/browser/AppHooksImpl.java
+
+## Step 6: Instrumentation
+
+This step happens only when this GN arg is set: `use_jacoco_coverage = true`
+
+* [Jacoco] adds instrumentation hooks to methods.
+
+[Jacoco]: https://www.eclemma.org/jacoco/
+
+## Step 7: Copy to lib.java
+
+* The `.jar` is copied into `$root_build_dir/lib.java` (under target-specific
+ subdirectories) so that it will be included by bot archive steps.
+ * These `.jar` files are the ones used when running `java_binary` and
+ `junit_binary` targets.
+
+## Step 8: Per-Library Dexing
+
+This step happens only when targets have `supports_android = true`.
+
+* [d8] converts `.jar` files contain `.class` files into `.dex.jar` files
+ containing `.dex` files.
+* Dexing is incremental - it will reuse dex'ed classes from a previous build if
+ the corresponding `.class` file is unchanged.
+* These per-library `.dex.jar` files are used directly by [incremental install],
+ and are inputs to the Apk step when `enable_proguard = false`.
+ * Even when `is_java_debug = false`, many apk targets do not enable ProGuard
+ (e.g. unit tests).
+
+[d8]: https://developer.android.com/studio/command-line/d8
+[incremental install]: /build/android/incremental_install/README.md
+
+## Step 9: Apk / Bundle Module Compile
+
+* Each `android_apk` and `android_bundle_module` template has a nested
+ `java_library` target. The nested library includes final copies of files
+ stripped out by prior filtering steps. These files include:
+ * Final `R.java` files, created by `compile_resources.py`.
+ * Final `GEN_JNI.java` for JNI glue.
+ * `BuildConfig.java` and `NativeLibraries.java` (//base dependencies).
+
+## Step 10: Final Dexing
+
+When `is_java_debug = true`:
+* [d8] merges all library `.dex.jar` files into a final `.dex.zip`.
+
+When `is_java_debug = false`:
+* [R8] performs whole-program optimization on all library `lib.java` `.jar`
+ files and outputs a final `.dex.zip`.
+ * For App Bundles, R8 creates a single `.dex.zip` with the code from all
+ modules.
+
+[R8]: https://r8.googlesource.com/r8
+
+## Step 11: Bundle Module Dex Splitting
+
+This step happens only when `is_java_debug = false`.
+
+* [dexsplitter.py] splits the single `.dex.zip` into per-module `.dex.zip`
+ files.
diff --git a/chromium/build/android/generate_jacoco_report.py b/chromium/build/android/generate_jacoco_report.py
index 927f520ee9c..e857b2def62 100755
--- a/chromium/build/android/generate_jacoco_report.py
+++ b/chromium/build/android/generate_jacoco_report.py
@@ -130,7 +130,7 @@ def main():
for f in sources_json_files:
with open(f, 'r') as json_file:
data = json.load(json_file)
- class_files.append(data['input_path'])
+ class_files.extend(data['input_path'])
source_dirs.extend(data['source_dirs'])
# Fix source directories as direct parent of Java packages.
diff --git a/chromium/build/android/gradle/generate_gradle.py b/chromium/build/android/gradle/generate_gradle.py
index 08be3e99238..3b8b3c29445 100755
--- a/chromium/build/android/gradle/generate_gradle.py
+++ b/chromium/build/android/gradle/generate_gradle.py
@@ -45,6 +45,7 @@ _CMAKE_FILE = 'CMakeLists.txt'
_MODULE_ALL = '_all'
_SRC_INTERNAL = os.path.join(
os.path.dirname(host_paths.DIR_SOURCE_ROOT), 'src-internal')
+_INSTRUMENTATION_TARGET_SUFFIX = '_test_apk__test_apk__apk'
_DEFAULT_TARGETS = [
'//android_webview/test/embedded_test_server:aw_net_test_support_apk',
@@ -733,8 +734,8 @@ def _CombineTestEntries(entries):
android_test_entries = collections.defaultdict(list)
for entry in entries:
target_name = entry.GnTarget()
- if (target_name.endswith('_test_apk__apk') and
- 'apk_under_test' in entry.Gradle()):
+ if (target_name.endswith(_INSTRUMENTATION_TARGET_SUFFIX)
+ and 'apk_under_test' in entry.Gradle()):
apk_name = entry.Gradle()['apk_under_test']
android_test_entries[apk_name].append(entry)
else:
@@ -848,8 +849,10 @@ def main():
targets = _QueryForAllGnTargets(output_dir)
else:
assert not args.native_targets, 'Native editing requires --all.'
- targets = [re.sub(r'_test_apk$', '_test_apk__apk', t)
- for t in targets_from_args]
+ targets = [
+ re.sub(r'_test_apk$', _INSTRUMENTATION_TARGET_SUFFIX, t)
+ for t in targets_from_args
+ ]
# Necessary after "gn clean"
if not os.path.exists(os.path.join(output_dir, 'build_vars.txt')):
_RunGnGen(output_dir)
@@ -884,7 +887,7 @@ def main():
main_entries = [
e for e in main_entries
if (e.GetType() in BASE_TYPES or e.GnTarget() in targets_from_args
- or e.GnTarget().endswith('_test_apk__apk'))
+ or e.GnTarget().endswith(_INSTRUMENTATION_TARGET_SUFFIX))
]
if args.split_projects:
diff --git a/chromium/build/android/gradle/root.jinja b/chromium/build/android/gradle/root.jinja
index 3895fc43111..38d7387573d 100644
--- a/chromium/build/android/gradle/root.jinja
+++ b/chromium/build/android/gradle/root.jinja
@@ -10,7 +10,7 @@ buildscript {
}
dependencies {
{% if channel == 'canary' %}
- classpath "com.android.tools.build:gradle:3.6.0-alpha03"
+ classpath "com.android.tools.build:gradle:3.6.0-alpha10"
{% elif channel == 'beta' %}
classpath "com.android.tools.build:gradle:3.1.0-beta4"
{% else %}
diff --git a/chromium/build/android/gyp/bundletool.py b/chromium/build/android/gyp/bundletool.py
index 2201cc12373..65be46e31dd 100755
--- a/chromium/build/android/gyp/bundletool.py
+++ b/chromium/build/android/gyp/bundletool.py
@@ -13,6 +13,8 @@ import os
import subprocess
import sys
+from util import build_utils
+
# Assume this is stored under build/android/gyp/
BUNDLETOOL_DIR = os.path.abspath(os.path.join(
__file__, '..', '..', '..', '..', 'third_party', 'android_build_tools',
@@ -24,7 +26,7 @@ BUNDLETOOL_JAR_PATH = os.path.join(
BUNDLETOOL_DIR, 'bundletool-all-%s.jar' % BUNDLETOOL_VERSION)
def RunBundleTool(args):
- args = ['java', '-jar', BUNDLETOOL_JAR_PATH] + args
+ args = [build_utils.JAVA_PATH, '-jar', BUNDLETOOL_JAR_PATH] + args
logging.debug(' '.join(args))
subprocess.check_call(args)
diff --git a/chromium/build/android/gyp/bytecode_processor.py b/chromium/build/android/gyp/bytecode_processor.py
index adff1625056..76775d3958a 100755
--- a/chromium/build/android/gyp/bytecode_processor.py
+++ b/chromium/build/android/gyp/bytecode_processor.py
@@ -36,12 +36,6 @@ def main(argv):
_AddSwitch(parser, '--enable-assert')
_AddSwitch(parser, '--enable-thread-annotations')
_AddSwitch(parser, '--enable-check-class-path')
- parser.add_argument('--enable-class-deps-output', default='')
- parser.add_argument(
- '--split-compat-class-names',
- action='append',
- default=[],
- help='Names of classes that need to be made SplitCompat-enabled.')
args = parser.parse_args(argv)
sdk_jars = build_utils.ParseGnList(args.sdk_classpath_jars)
@@ -54,9 +48,6 @@ def main(argv):
for a in args.extra_jars:
extra_classpath_jars.extend(build_utils.ParseGnList(a))
- split_compat_class_names = build_utils.ParseGnList(
- args.split_compat_class_names)
-
if args.verbose:
verbose = '--verbose'
else:
@@ -66,11 +57,8 @@ def main(argv):
args.script, args.input_jar, args.output_jar, verbose, args.is_prebuilt,
args.enable_assert, args.enable_custom_resources,
args.enable_thread_annotations, args.enable_check_class_path,
- args.enable_class_deps_output,
str(len(sdk_jars))
- ] + sdk_jars + [str(len(direct_jars))] + direct_jars + [
- str(len(split_compat_class_names))
- ] + split_compat_class_names + extra_classpath_jars)
+ ] + sdk_jars + [str(len(direct_jars))] + direct_jars + extra_classpath_jars)
subprocess.check_call(cmd)
diff --git a/chromium/build/android/gyp/compile_resources.py b/chromium/build/android/gyp/compile_resources.py
index 95b8c2aa0d7..4ac6d64bf18 100755
--- a/chromium/build/android/gyp/compile_resources.py
+++ b/chromium/build/android/gyp/compile_resources.py
@@ -23,6 +23,7 @@ import shutil
import subprocess
import sys
import tempfile
+import textwrap
import zipfile
from xml.etree import ElementTree
@@ -50,13 +51,6 @@ _PNG_WEBP_BLACKLIST_PATTERN = re.compile('|'.join([
r'.*daydream_icon_.*\.png']))
-def _ListToDictionary(lst, separator):
- """Splits each element of the passed-in |lst| using |separator| and creates
- dictionary treating first element of the split as the key and second as the
- value."""
- return dict(item.split(separator, 1) for item in lst)
-
-
def _ParseArgs(args):
"""Parses command line options.
@@ -100,17 +94,13 @@ def _ParseArgs(args):
input_opts.add_argument(
'--package-id',
- help='Custom package ID for resources (instead of 0x7f). Cannot be used '
- 'with --shared-resources.')
-
- input_opts.add_argument(
- '--package-name-to-id-mapping',
- help='List containing mapping from package name to package IDs that will '
- 'be assigned.')
+ type=int,
+ help='Decimal integer representing custom package ID for resources '
+ '(instead of 127==0x7f). Cannot be used with --shared-resources.')
input_opts.add_argument(
'--package-name',
- help='Package name that will be used to determine package ID.')
+ help='Package name that will be used to create R class.')
input_opts.add_argument(
'--rename-manifest-package', help='Package name to force AAPT to use.')
@@ -266,11 +256,8 @@ def _ParseArgs(args):
parser.error(
'--resources-path-map-out-path requires --short-resource-paths')
- if options.package_name_to_id_mapping:
- package_names_list = build_utils.ParseGnList(
- options.package_name_to_id_mapping)
- options.package_name_to_id_mapping = _ListToDictionary(
- package_names_list, '=')
+ if options.package_id and options.shared_resources:
+ parser.error('--package-id and --shared-resources are mutually exclusive')
return options
@@ -423,19 +410,6 @@ def _MoveImagesToNonMdpiFolders(res_root):
return renamed_paths
-def _PackageIdFromOptions(options):
- package_id = None
- if options.package_id:
- package_id = options.package_id
- if options.package_name:
- package_id = options.package_name_to_id_mapping.get(options.package_name)
- if package_id is None:
- raise Exception(
- 'Package name %s is not present in package_name_to_id_mapping.' %
- options.package_name)
- return package_id
-
-
def _FixManifest(options, temp_dir):
"""Fix the APK's AndroidManifest.xml.
@@ -684,6 +658,8 @@ def _PackageApk(options, build):
Args:
options: The command-line options.
build: BuildContext object.
+ Returns:
+ The manifest package name for the APK.
"""
dep_subdirs = resource_utils.ExtractDeps(options.dependencies_res_zips,
build.deps_dir)
@@ -751,9 +727,12 @@ def _PackageApk(options, build):
if options.no_xml_namespaces:
link_command.append('--no-xml-namespaces')
- package_id = _PackageIdFromOptions(options)
- if package_id is not None:
- link_command += ['--package-id', package_id, '--allow-reserved-package-id']
+ if options.package_id:
+ link_command += [
+ '--package-id',
+ hex(options.package_id),
+ '--allow-reserved-package-id',
+ ]
fixed_manifest, desired_manifest_package_name = _FixManifest(
options, build.temp_dir)
@@ -787,6 +766,19 @@ def _PackageApk(options, build):
build_utils.CheckOutput(link_command, print_stdout=False, print_stderr=False)
+ if options.proguard_file and (options.shared_resources
+ or options.app_as_shared_lib):
+ # Make sure the R class associated with the manifest package does not have
+ # its onResourcesLoaded method obfuscated or removed, so that the framework
+ # can call it in the case where the APK is being loaded as a library.
+ with open(build.proguard_path, 'a') as proguard_file:
+ keep_rule = '''
+ -keep class {package}.R {{
+ public static void onResourcesLoaded(int);
+ }}
+ '''.format(package=desired_manifest_package_name)
+ proguard_file.write(textwrap.dedent(keep_rule))
+
if options.proto_path and options.arsc_path:
build_utils.CheckOutput([
options.aapt2_path, 'convert', '-o', build.arsc_path, build.proto_path
@@ -799,6 +791,8 @@ def _PackageApk(options, build):
_OptimizeApk(build.optimized_arsc_path, options, build.temp_dir,
build.arsc_path, build.r_txt_path)
+ return desired_manifest_package_name
+
def _OptimizeApk(output, options, temp_dir, unoptimized_path, r_txt_path):
"""Optimize intermediate .ap_ file with aapt2.
@@ -921,7 +915,7 @@ def main(args):
build_utils.MakeDirectory(debug_temp_resources_dir)
with resource_utils.BuildContext(debug_temp_resources_dir) as build:
- _PackageApk(options, build)
+ manifest_package_name = _PackageApk(options, build)
# If --shared-resources-whitelist is used, the all resources listed in
# the corresponding R.txt file will be non-final, and an onResourcesLoaded()
@@ -953,19 +947,26 @@ def main(args):
custom_root_package_name = options.package_name
grandparent_custom_package_name = options.r_java_root_package_name
+ if options.shared_resources or options.app_as_shared_lib:
+ package_for_library = manifest_package_name
+ else:
+ package_for_library = None
+
resource_utils.CreateRJavaFiles(
- build.srcjar_dir, None, build.r_txt_path, options.extra_res_packages,
- options.extra_r_text_files, rjava_build_options, options.srcjar_out,
- custom_root_package_name, grandparent_custom_package_name,
- options.extra_main_r_text_files)
+ build.srcjar_dir, package_for_library, build.r_txt_path,
+ options.extra_res_packages, options.extra_r_text_files,
+ rjava_build_options, options.srcjar_out, custom_root_package_name,
+ grandparent_custom_package_name, options.extra_main_r_text_files)
build_utils.ZipDir(build.srcjar_path, build.srcjar_dir)
# Sanity check that the created resources have the expected package ID.
- expected_id = _PackageIdFromOptions(options)
- if expected_id is None:
- expected_id = '0x00' if options.shared_resources else '0x7f'
- expected_id = int(expected_id, 16)
+ if options.package_id:
+ expected_id = options.package_id
+ elif options.shared_resources:
+ expected_id = 0
+ else:
+ expected_id = 127 # == '0x7f'.
_, package_id = resource_utils.ExtractArscPackage(
options.aapt2_path,
build.arsc_path if options.arsc_path else build.proto_path)
diff --git a/chromium/build/android/gyp/create_app_bundle.py b/chromium/build/android/gyp/create_app_bundle.py
index eeb665bb41b..ae3f4662b1b 100755
--- a/chromium/build/android/gyp/create_app_bundle.py
+++ b/chromium/build/android/gyp/create_app_bundle.py
@@ -411,12 +411,17 @@ def main(args):
with open(tmp_bundle_config, 'w') as f:
f.write(bundle_config)
- cmd_args = ['java', '-jar', bundletool.BUNDLETOOL_JAR_PATH, 'build-bundle']
- cmd_args += ['--modules=%s' % ','.join(module_zips)]
- cmd_args += ['--output=%s' % tmp_unsigned_bundle]
- cmd_args += ['--config=%s' % tmp_bundle_config]
-
- build_utils.CheckOutput(cmd_args, print_stdout=True, print_stderr=True)
+ cmd_args = [
+ build_utils.JAVA_PATH, '-jar', bundletool.BUNDLETOOL_JAR_PATH,
+ 'build-bundle', '--modules=' + ','.join(module_zips),
+ '--output=' + tmp_unsigned_bundle, '--config=' + tmp_bundle_config
+ ]
+
+ build_utils.CheckOutput(
+ cmd_args,
+ print_stdout=True,
+ print_stderr=True,
+ stderr_filter=build_utils.FilterReflectiveAccessJavaWarnings)
if options.keystore_path:
# NOTE: As stated by the public documentation, apksigner cannot be used
diff --git a/chromium/build/android/gyp/create_bundle_wrapper_script.pydeps b/chromium/build/android/gyp/create_bundle_wrapper_script.pydeps
index fb35bc02ea1..a83e696a26d 100644
--- a/chromium/build/android/gyp/create_bundle_wrapper_script.pydeps
+++ b/chromium/build/android/gyp/create_bundle_wrapper_script.pydeps
@@ -37,6 +37,7 @@
../../../third_party/catapult/devil/devil/android/sdk/aapt.py
../../../third_party/catapult/devil/devil/android/sdk/adb_wrapper.py
../../../third_party/catapult/devil/devil/android/sdk/build_tools.py
+../../../third_party/catapult/devil/devil/android/sdk/bundletool.py
../../../third_party/catapult/devil/devil/android/sdk/intent.py
../../../third_party/catapult/devil/devil/android/sdk/keyevent.py
../../../third_party/catapult/devil/devil/android/sdk/split_select.py
diff --git a/chromium/build/android/gyp/create_java_binary_script.py b/chromium/build/android/gyp/create_java_binary_script.py
index 4469381c7c0..7ea0efb22d8 100755
--- a/chromium/build/android/gyp/create_java_binary_script.py
+++ b/chromium/build/android/gyp/create_java_binary_script.py
@@ -37,13 +37,14 @@ if os.getcwd() != self_dir:
offset = os.path.relpath(self_dir, os.getcwd())
classpath = [os.path.join(offset, p) for p in classpath]
bootclasspath = [os.path.join(offset, p) for p in bootclasspath]
-java_cmd = ["java"]
-# This is a simple argparser for jvm and jar arguments.
+java_cmd = ['java']
+# This is a simple argparser for jvm, jar, and classpath arguments.
parser = argparse.ArgumentParser()
parser.add_argument('--jar-args')
parser.add_argument('--jvm-args')
-
+parser.add_argument('--classpath')
known_args, unknown_args = parser.parse_known_args(sys.argv[1:])
+
if known_args.jvm_args:
jvm_arguments = known_args.jvm_args.strip('"').split()
java_cmd.extend(jvm_arguments)
@@ -54,14 +55,17 @@ if known_args.jar_args:
else:
jar_arguments = unknown_args
+if known_args.classpath:
+ classpath += [known_args.classpath]
+
{noverify_flag}
if bootclasspath:
- java_cmd.append("-Xbootclasspath/p:" + ":".join(bootclasspath))
+ java_cmd.append('-Xbootclasspath/p:' + ':'.join(bootclasspath))
java_cmd.extend(
- ["-classpath", ":".join(classpath), "-enableassertions", \"{main_class}\"])
+ ['-classpath', ':'.join(classpath), '-enableassertions', \"{main_class}\"])
java_cmd.extend(extra_program_args)
java_cmd.extend(jar_arguments)
-os.execvp("java", java_cmd)
+os.execvp('java', java_cmd)
"""
def main(argv):
diff --git a/chromium/build/android/gyp/desugar.py b/chromium/build/android/gyp/desugar.py
index b9d04059e55..407b8f2c59c 100755
--- a/chromium/build/android/gyp/desugar.py
+++ b/chromium/build/android/gyp/desugar.py
@@ -31,7 +31,7 @@ def main():
options.classpath = build_utils.ParseGnList(options.classpath)
cmd = [
- 'java',
+ build_utils.JAVA_PATH,
'-jar',
options.desugar_jar,
'--input',
@@ -46,7 +46,10 @@ def main():
cmd += ['--bootclasspath_entry', path]
for path in options.classpath:
cmd += ['--classpath_entry', path]
- build_utils.CheckOutput(cmd, print_stdout=False)
+ build_utils.CheckOutput(
+ cmd,
+ print_stdout=False,
+ stderr_filter=build_utils.FilterReflectiveAccessJavaWarnings)
if options.depfile:
build_utils.WriteDepfile(
diff --git a/chromium/build/android/gyp/dex.py b/chromium/build/android/gyp/dex.py
index a2e17b4e282..043a08ab272 100755
--- a/chromium/build/android/gyp/dex.py
+++ b/chromium/build/android/gyp/dex.py
@@ -27,7 +27,27 @@ def _ParseArgs(args):
build_utils.AddDepfileOption(parser)
parser.add_argument('--output', required=True, help='Dex output path.')
- parser.add_argument('--input-list', help='GN-list of additional input paths.')
+ parser.add_argument(
+ '--class-inputs',
+ action='append',
+ help='GN-list of .jars with .class files.')
+ parser.add_argument(
+ '--class-inputs-filearg',
+ action='append',
+ help='GN-list of .jars with .class files (added to depfile).')
+ parser.add_argument(
+ '--dex-inputs', action='append', help='GN-list of .jars with .dex files.')
+ parser.add_argument(
+ '--dex-inputs-filearg',
+ action='append',
+ help='GN-list of .jars with .dex files (added to depfile).')
+ parser.add_argument(
+ '--incremental-dir',
+ help='Path of directory to put intermediate dex files.')
+ parser.add_argument(
+ '--merge-incrementals',
+ action='store_true',
+ help='Combine all per-class .dex files into a single classes.dex')
parser.add_argument(
'--main-dex-list-path',
help='File containing a list of the classes to include in the main dex.')
@@ -35,7 +55,7 @@ def _ParseArgs(args):
'--multi-dex',
action='store_true',
help='Allow multiple dex files within output.')
- parser.add_argument('--d8-jar-path', required=True, help='Path to D8 jar.')
+ parser.add_argument('--r8-jar-path', required=True, help='Path to R8 jar.')
parser.add_argument(
'--release',
action='store_true',
@@ -44,7 +64,6 @@ def _ParseArgs(args):
'main dex and keeps all line number information, and then some.')
parser.add_argument(
'--min-api', help='Minimum Android API level compatibility.')
- parser.add_argument('inputs', nargs='*', help='Input .jar files.')
group = parser.add_argument_group('Dexlayout')
group.add_argument(
@@ -79,8 +98,12 @@ def _ParseArgs(args):
if options.main_dex_list_path and not options.multi_dex:
parser.error('--main-dex-list-path is unused if multidex is not enabled')
- if options.input_list:
- options.inputs += build_utils.ParseGnList(options.input_list)
+ options.class_inputs = build_utils.ParseGnList(options.class_inputs)
+ options.class_inputs_filearg = build_utils.ParseGnList(
+ options.class_inputs_filearg)
+ options.dex_inputs = build_utils.ParseGnList(options.dex_inputs)
+ options.dex_inputs_filearg = build_utils.ParseGnList(
+ options.dex_inputs_filearg)
return options
@@ -249,48 +272,156 @@ def _PerformDexlayout(tmp_dir, tmp_dex_output, options):
return final_output
-def _PerformDexing(options):
- dex_cmd = ['java', '-jar', options.d8_jar_path, '--no-desugaring']
- if options.multi_dex and options.main_dex_list_path:
- dex_cmd += ['--main-dex-list', options.main_dex_list_path]
- if options.release:
- dex_cmd += ['--release']
- if options.min_api:
- dex_cmd += ['--min-api', options.min_api]
+def _CreateFinalDex(options, d8_inputs, tmp_dir, dex_cmd):
+ tmp_dex_output = os.path.join(tmp_dir, 'tmp_dex_output.zip')
+ if (options.merge_incrementals or options.output.endswith('.dex')
+ or not all(f.endswith('.dex') for f in d8_inputs)):
+ if options.multi_dex and options.main_dex_list_path:
+ # Provides a list of classes that should be included in the main dex file.
+ dex_cmd = dex_cmd + ['--main-dex-list', options.main_dex_list_path]
- with build_utils.TempDir() as tmp_dir:
tmp_dex_dir = os.path.join(tmp_dir, 'tmp_dex_dir')
os.mkdir(tmp_dex_dir)
- _RunD8(dex_cmd, options.inputs, tmp_dex_dir)
+ _RunD8(dex_cmd, d8_inputs, tmp_dex_dir)
+ logging.info('Performed dex merging')
+
dex_files = [os.path.join(tmp_dex_dir, f) for f in os.listdir(tmp_dex_dir)]
- if not options.output.endswith('.dex'):
- tmp_dex_output = os.path.join(tmp_dir, 'tmp_dex_output.zip')
- _ZipAligned(sorted(dex_files), tmp_dex_output)
- else:
- # Output to a .dex file.
+ if options.output.endswith('.dex'):
if len(dex_files) > 1:
raise Exception('%d files created, expected 1' % len(dex_files))
tmp_dex_output = dex_files[0]
+ else:
+ _ZipAligned(sorted(dex_files), tmp_dex_output)
+ else:
+ # Skip dexmerger. Just put all incrementals into the .jar individually.
+ _ZipAligned(sorted(d8_inputs), tmp_dex_output)
+ logging.info('Quick-zipped %d files', len(d8_inputs))
- if options.dexlayout_profile:
- tmp_dex_output = _PerformDexlayout(tmp_dir, tmp_dex_output, options)
+ if options.dexlayout_profile:
+ tmp_dex_output = _PerformDexlayout(tmp_dir, tmp_dex_output, options)
+
+ # The dex file is complete and can be moved out of tmp_dir.
+ shutil.move(tmp_dex_output, options.output)
+
+
+def _IntermediateDexFilePathsFromInputJars(class_inputs, incremental_dir):
+ """Returns a list of all intermediate dex file paths."""
+ dex_files = []
+ for jar in class_inputs:
+ with zipfile.ZipFile(jar, 'r') as z:
+ for subpath in z.namelist():
+ if subpath.endswith('.class'):
+ subpath = subpath[:-5] + 'dex'
+ dex_files.append(os.path.join(incremental_dir, subpath))
+ return dex_files
+
+
+def _DeleteStaleIncrementalDexFiles(dex_dir, dex_files):
+ """Deletes intermediate .dex files that are no longer needed."""
+ all_files = build_utils.FindInDirectory(dex_dir)
+ desired_files = set(dex_files)
+ for path in all_files:
+ if path not in desired_files:
+ os.unlink(path)
+
+
+def _ExtractClassFiles(changes, tmp_dir, class_inputs):
+ classes_list = []
+ for jar in class_inputs:
+ if changes:
+ changed_class_list = set(changes.IterChangedSubpaths(jar))
+ predicate = lambda x: x in changed_class_list and x.endswith('.class')
+ else:
+ predicate = lambda x: x.endswith('.class')
+
+ classes_list.extend(
+ build_utils.ExtractAll(jar, path=tmp_dir, predicate=predicate))
+ return classes_list
+
+
+def _CreateIntermediateDexFiles(changes, options, tmp_dir, dex_cmd):
+ # Create temporary directory for classes to be extracted to.
+ tmp_extract_dir = os.path.join(tmp_dir, 'tmp_extract_dir')
+ os.mkdir(tmp_extract_dir)
+
+ # Check whether changes were to a non-jar file, requiring full re-dex.
+ # E.g. r8.jar updated.
+ rebuild_all = changes.HasStringChanges() or not all(
+ p.endswith('.jar') for p in changes.IterChangedPaths())
+
+ if rebuild_all:
+ changes = None
+ class_files = _ExtractClassFiles(changes, tmp_extract_dir,
+ options.class_inputs)
+ logging.info('Extracted class files: %d', len(class_files))
+
+ # If the only change is deleting a file, class_files will be empty.
+ if class_files:
+ # Dex necessary classes into intermediate dex files.
+ dex_cmd = dex_cmd + ['--intermediate', '--file-per-class']
+ _RunD8(dex_cmd, class_files, options.incremental_dir)
+ logging.info('Dexed class files.')
+
+
+def _OnStaleMd5(changes, options, final_dex_inputs, dex_cmd):
+ logging.info('_OnStaleMd5')
+ with build_utils.TempDir() as tmp_dir:
+ if options.incremental_dir:
+ # Create directory for all intermediate dex files.
+ if not os.path.exists(options.incremental_dir):
+ os.makedirs(options.incremental_dir)
- # The dex file is complete and can be moved out of tmp_dir.
- shutil.move(tmp_dex_output, options.output)
+ _DeleteStaleIncrementalDexFiles(options.incremental_dir, final_dex_inputs)
+ logging.info('Stale files deleted')
+ _CreateIntermediateDexFiles(changes, options, tmp_dir, dex_cmd)
+
+ _CreateFinalDex(options, final_dex_inputs, tmp_dir, dex_cmd)
+ logging.info('Dex finished for: %s', options.output)
def main(args):
+ logging.basicConfig(
+ level=logging.INFO if os.environ.get('DEX_DEBUG') else logging.WARNING,
+ format='%(levelname).1s %(relativeCreated)6d %(message)s')
options = _ParseArgs(args)
- input_paths = list(options.inputs)
+ options.class_inputs += options.class_inputs_filearg
+ options.dex_inputs += options.dex_inputs_filearg
+
+ input_paths = options.class_inputs + options.dex_inputs
if options.multi_dex and options.main_dex_list_path:
input_paths.append(options.main_dex_list_path)
+ input_paths.append(options.r8_jar_path)
+
+ output_paths = [options.output]
+
+ if options.incremental_dir:
+ final_dex_inputs = _IntermediateDexFilePathsFromInputJars(
+ options.class_inputs, options.incremental_dir)
+ output_paths += final_dex_inputs
+ else:
+ final_dex_inputs = list(options.class_inputs)
+ final_dex_inputs += options.dex_inputs
- _PerformDexing(options)
+ dex_cmd = [
+ build_utils.JAVA_PATH, '-jar', options.r8_jar_path, 'd8',
+ '--no-desugaring'
+ ]
+ if options.release:
+ dex_cmd += ['--release']
+ if options.min_api:
+ dex_cmd += ['--min-api', options.min_api]
- build_utils.WriteDepfile(
- options.depfile, options.output, input_paths, add_pydeps=False)
+ build_utils.CallAndWriteDepfileIfStale(
+ lambda changes: _OnStaleMd5(changes, options, final_dex_inputs, dex_cmd),
+ options,
+ depfile_deps=options.class_inputs_filearg + options.dex_inputs_filearg,
+ output_paths=output_paths,
+ input_paths=input_paths,
+ input_strings=dex_cmd + [bool(options.incremental_dir)],
+ pass_changes=True,
+ track_subpaths_whitelist=options.class_inputs)
if __name__ == '__main__':
diff --git a/chromium/build/android/gyp/dexsplitter.py b/chromium/build/android/gyp/dexsplitter.py
index 926d2cdd502..7bbc066f076 100755
--- a/chromium/build/android/gyp/dexsplitter.py
+++ b/chromium/build/android/gyp/dexsplitter.py
@@ -49,7 +49,7 @@ def _ParseOptions(args):
def _RunDexsplitter(options, output_dir):
cmd = [
- 'java',
+ build_utils.JAVA_PATH,
'-jar',
options.r8_path,
'dexsplitter',
diff --git a/chromium/build/android/gyp/dist_aar.py b/chromium/build/android/gyp/dist_aar.py
index ed823f18b7b..a74037af07a 100755
--- a/chromium/build/android/gyp/dist_aar.py
+++ b/chromium/build/android/gyp/dist_aar.py
@@ -14,16 +14,19 @@ import sys
import tempfile
import zipfile
+from filter_zip import CreatePathTransform
from util import build_utils
_ANDROID_BUILD_DIR = os.path.dirname(os.path.dirname(__file__))
-def _MergeRTxt(r_paths):
+def _MergeRTxt(r_paths, include_globs):
"""Merging the given R.txt files and returns them as a string."""
all_lines = set()
for r_path in r_paths:
+ if include_globs and not build_utils.MatchesGlob(r_path, include_globs):
+ continue
with open(r_path) as f:
all_lines.update(f.readlines())
return ''.join(sorted(all_lines))
@@ -39,18 +42,21 @@ def _MergeProguardConfigs(proguard_configs):
return '\n'.join(ret)
-def _AddResources(aar_zip, resource_zips):
+def _AddResources(aar_zip, resource_zips, include_globs):
"""Adds all resource zips to the given aar_zip.
   Ensures all res/values/* files have unique names by appending a suffix to
   them.
"""
for i, path in enumerate(resource_zips):
+ if include_globs and not build_utils.MatchesGlob(path, include_globs):
+ continue
with zipfile.ZipFile(path) as res_zip:
for info in res_zip.infolist():
data = res_zip.read(info)
dirname, basename = posixpath.split(info.filename)
if 'values' in dirname:
- basename = '{}_{}'.format(basename, i)
+ root, ext = os.path.splitext(basename)
+ basename = '{}_{}{}'.format(root, i, ext)
info.filename = posixpath.join(dirname, basename)
info.filename = posixpath.join('res', info.filename)
aar_zip.writestr(info, data)
@@ -77,6 +83,15 @@ def main(args):
'ABI must be specified.')
parser.add_argument('--abi',
help='ABI (e.g. armeabi-v7a) for native libraries.')
+ parser.add_argument(
+ '--jar-excluded-globs',
+ help='GN-list of globs for paths to exclude in jar.')
+ parser.add_argument(
+ '--jar-included-globs',
+ help='GN-list of globs for paths to include in jar.')
+ parser.add_argument(
+ '--resource-included-globs',
+ help='GN-list of globs for paths to include in R.txt and resources zips.')
options = parser.parse_args(args)
@@ -89,6 +104,12 @@ def main(args):
options.r_text_files = build_utils.ParseGnList(options.r_text_files)
options.proguard_configs = build_utils.ParseGnList(options.proguard_configs)
options.native_libraries = build_utils.ParseGnList(options.native_libraries)
+ options.jar_excluded_globs = build_utils.ParseGnList(
+ options.jar_excluded_globs)
+ options.jar_included_globs = build_utils.ParseGnList(
+ options.jar_included_globs)
+ options.resource_included_globs = build_utils.ParseGnList(
+ options.resource_included_globs)
with tempfile.NamedTemporaryFile(delete=False) as staging_file:
try:
@@ -96,12 +117,18 @@ def main(args):
build_utils.AddToZipHermetic(
z, 'AndroidManifest.xml', src_path=options.android_manifest)
+ path_transform = CreatePathTransform(options.jar_excluded_globs,
+ options.jar_included_globs, [])
with tempfile.NamedTemporaryFile() as jar_file:
- build_utils.MergeZips(jar_file.name, options.jars)
+ build_utils.MergeZips(
+ jar_file.name, options.jars, path_transform=path_transform)
build_utils.AddToZipHermetic(z, 'classes.jar', src_path=jar_file.name)
build_utils.AddToZipHermetic(
- z, 'R.txt', data=_MergeRTxt(options.r_text_files))
+ z,
+ 'R.txt',
+ data=_MergeRTxt(options.r_text_files,
+ options.resource_included_globs))
build_utils.AddToZipHermetic(z, 'public.txt', data='')
if options.proguard_configs:
@@ -109,7 +136,8 @@ def main(args):
z, 'proguard.txt',
data=_MergeProguardConfigs(options.proguard_configs))
- _AddResources(z, options.dependencies_res_zips)
+ _AddResources(z, options.dependencies_res_zips,
+ options.resource_included_globs)
for native_library in options.native_libraries:
libname = os.path.basename(native_library)
diff --git a/chromium/build/android/gyp/dist_aar.pydeps b/chromium/build/android/gyp/dist_aar.pydeps
index da5ea8da23d..d4f9aae9b34 100644
--- a/chromium/build/android/gyp/dist_aar.pydeps
+++ b/chromium/build/android/gyp/dist_aar.pydeps
@@ -2,6 +2,7 @@
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/dist_aar.pydeps build/android/gyp/dist_aar.py
../../gn_helpers.py
dist_aar.py
+filter_zip.py
util/__init__.py
util/build_utils.py
util/md5_check.py
diff --git a/chromium/build/android/gyp/filter_zip.py b/chromium/build/android/gyp/filter_zip.py
index 2182042df52..6f854191254 100755
--- a/chromium/build/android/gyp/filter_zip.py
+++ b/chromium/build/android/gyp/filter_zip.py
@@ -18,7 +18,7 @@ _RESOURCE_CLASSES = [
]
-def _CreatePathTransform(exclude_globs, include_globs,
+def CreatePathTransform(exclude_globs, include_globs,
strip_resource_classes_for):
exclude_globs = list(exclude_globs or [])
if strip_resource_classes_for:
@@ -60,8 +60,8 @@ def main():
args.strip_resource_classes_for = build_utils.ParseGnList(
args.strip_resource_classes_for)
- path_transform = _CreatePathTransform(
- args.exclude_globs, args.include_globs, args.strip_resource_classes_for)
+ path_transform = CreatePathTransform(args.exclude_globs, args.include_globs,
+ args.strip_resource_classes_for)
with build_utils.AtomicOutput(args.output) as f:
build_utils.MergeZips(
f.name, [args.input], path_transform=path_transform)
diff --git a/chromium/build/android/gyp/generate_linker_version_script.py b/chromium/build/android/gyp/generate_linker_version_script.py
index 0b6c2ef6968..526d636197b 100755
--- a/chromium/build/android/gyp/generate_linker_version_script.py
+++ b/chromium/build/android/gyp/generate_linker_version_script.py
@@ -42,6 +42,10 @@ def main():
dest='whitelists',
help='Path to an input file containing a whitelist of extra symbols to '
'export, one symbol per line. Multiple files may be specified.')
+ parser.add_argument(
+ '--export-feature-registrations',
+ action='store_true',
+ help='Export JNI_OnLoad_* methods')
options = parser.parse_args()
# JNI_OnLoad is always exported.
@@ -52,6 +56,9 @@ def main():
if options.export_java_symbols:
symbol_list.append('Java_*')
+ if options.export_feature_registrations:
+ symbol_list.append('JNI_OnLoad_*')
+
for whitelist in options.whitelists:
with open(whitelist, 'rt') as f:
for line in f:
diff --git a/chromium/build/android/gyp/jacoco_instr.py b/chromium/build/android/gyp/jacoco_instr.py
index 4deea439559..9f4f55f551d 100755
--- a/chromium/build/android/gyp/jacoco_instr.py
+++ b/chromium/build/android/gyp/jacoco_instr.py
@@ -6,7 +6,7 @@
"""Instruments classes and jar files.
-This script corresponds to the 'jacoco_instr' action in the java build process.
+This script corresponds to the 'jacoco_instr' action in the Java build process.
Depending on whether jacoco_instrument is set, the 'jacoco_instr' action will
call the instrument command which accepts a jar and instruments it using
jacococli.jar.
@@ -21,6 +21,7 @@ import os
import shutil
import sys
import tempfile
+import zipfile
from util import build_utils
@@ -53,6 +54,9 @@ def _AddArguments(parser):
help='File containing newline-separated .java paths')
parser.add_argument(
'--jacococli-jar', required=True, help='Path to jacococli.jar.')
+ parser.add_argument(
+ '--files-to-instrument',
+ help='Path to a file containing which source files are affected.')
def _GetSourceDirsFromSourceFiles(source_files):
@@ -96,49 +100,145 @@ def _CreateSourcesJsonFile(source_dirs, input_path, sources_json_file,
data = {}
data['source_dirs'] = relative_sources
- data['input_path'] = os.path.abspath(input_path)
+ data['input_path'] = []
+ if input_path:
+ data['input_path'].append(os.path.abspath(input_path))
with open(sources_json_file, 'w') as f:
json.dump(data, f)
-def _RunInstrumentCommand(parser):
- """Instruments jar files using Jacoco.
+def _GetAffectedClasses(jar_file, source_files):
+  """Gets the classes in a jar that are affected by the given source files.
Args:
- parser: ArgumentParser object.
+    jar_file: The jar file from which to list all members.
+ source_files: The list of affected source files.
Returns:
- An exit code.
+ A tuple of affected classes and unaffected members.
"""
- args = parser.parse_args()
+ with zipfile.ZipFile(jar_file) as f:
+ members = f.namelist()
- temp_dir = tempfile.mkdtemp()
- try:
- cmd = [
- 'java', '-jar', args.jacococli_jar, 'instrument', args.input_path,
- '--dest', temp_dir
- ]
+ affected_classes = []
+ unaffected_members = []
- build_utils.CheckOutput(cmd)
+ for member in members:
+ if not member.endswith('.class'):
+ unaffected_members.append(member)
+ continue
- jars = os.listdir(temp_dir)
- if len(jars) != 1:
- print('Error: multiple output files in: %s' % (temp_dir))
- return 1
+ is_affected = False
+ index = member.find('$')
+ if index == -1:
+ index = member.find('.class')
+ for source_file in source_files:
+ if source_file.endswith(member[:index] + '.java'):
+ affected_classes.append(member)
+ is_affected = True
+ break
+ if not is_affected:
+ unaffected_members.append(member)
+
+ return affected_classes, unaffected_members
+
+
+def _InstrumentWholeJar(instrument_cmd, input_path, output_path, temp_dir):
+ """Instruments input jar to output_path.
+
+ Args:
+ instrument_cmd: JaCoCo instrument command.
+ input_path: The input path to non-instrumented jar.
+ output_path: The output path to instrumented jar.
+ temp_dir: The temporary directory.
+ """
+ instrument_cmd.extend([input_path, '--dest', temp_dir])
+
+ build_utils.CheckOutput(instrument_cmd)
+
+ jars = os.listdir(temp_dir)
+ if len(jars) != 1:
+ raise Exception('Error: multiple output files: %s' % jars)
+
+ # Delete output_path first to avoid modifying input_path in the case where
+ # input_path is a hardlink to output_path. http://crbug.com/571642
+ if os.path.exists(output_path):
+ os.unlink(output_path)
+ shutil.move(os.path.join(temp_dir, jars[0]), output_path)
+
+
+def _InstrumentClassFiles(instrument_cmd, input_path, output_path, temp_dir,
+ affected_source_files):
+ """Instruments affected class files from input jar.
+
+ Args:
+ instrument_cmd: JaCoCo instrument command.
+ input_path: The input path to non-instrumented jar.
+ output_path: The output path to instrumented jar.
+ temp_dir: The temporary directory.
+ affected_source_files: The affected source file paths to input jar.
+ """
+ affected_classes, unaffected_members = _GetAffectedClasses(
+ input_path, affected_source_files)
- # Delete output_path first to avoid modifying input_path in the case where
- # input_path is a hardlink to output_path. http://crbug.com/571642
- if os.path.exists(args.output_path):
- os.unlink(args.output_path)
- shutil.move(os.path.join(temp_dir, jars[0]), args.output_path)
- finally:
- shutil.rmtree(temp_dir)
+ # Extract affected class files.
+ with zipfile.ZipFile(input_path) as f:
+ f.extractall(temp_dir, affected_classes)
+
+ instrumented_dir = os.path.join(temp_dir, 'instrumented')
+
+ # Instrument extracted class files.
+ instrument_cmd.extend([temp_dir, '--dest', instrumented_dir])
+ build_utils.CheckOutput(instrument_cmd)
+
+ # Extract unaffected members to instrumented_dir.
+ with zipfile.ZipFile(input_path) as f:
+ f.extractall(instrumented_dir, unaffected_members)
+
+ # Zip all files to output_path
+ build_utils.ZipDir(output_path, instrumented_dir)
+
+
+def _RunInstrumentCommand(parser):
+ """Instruments class or Jar files using JaCoCo.
+
+ Args:
+ parser: ArgumentParser object.
+
+ Returns:
+ An exit code.
+ """
+ args = parser.parse_args()
source_files = []
if args.java_sources_file:
source_files.extend(build_utils.ReadSourcesList(args.java_sources_file))
- source_dirs = _GetSourceDirsFromSourceFiles(source_files)
+ with build_utils.TempDir() as temp_dir:
+ instrument_cmd = [
+ build_utils.JAVA_PATH, '-jar', args.jacococli_jar, 'instrument'
+ ]
+
+ if not args.files_to_instrument:
+ _InstrumentWholeJar(instrument_cmd, args.input_path, args.output_path,
+ temp_dir)
+ else:
+ affected_files = build_utils.ReadSourcesList(args.files_to_instrument)
+ source_set = set(source_files)
+ affected_source_files = [f for f in affected_files if f in source_set]
+
+      # Copy input_path to output_path and return if no source files are
+      # affected.
+ if not affected_source_files:
+ shutil.copyfile(args.input_path, args.output_path)
+ # Create a dummy sources_json_file.
+ _CreateSourcesJsonFile([], None, args.sources_json_file,
+ build_utils.DIR_SOURCE_ROOT)
+ return 0
+ else:
+ _InstrumentClassFiles(instrument_cmd, args.input_path, args.output_path,
+ temp_dir, affected_source_files)
+
+ source_dirs = _GetSourceDirsFromSourceFiles(source_files)
# TODO(GYP): In GN, we are passed the list of sources, detecting source
# directories, then walking them to re-establish the list of sources.
# This can obviously be simplified!
diff --git a/chromium/build/android/gyp/java_cpp_enum.py b/chromium/build/android/gyp/java_cpp_enum.py
index bacc8e3d46d..502e0715647 100755
--- a/chromium/build/android/gyp/java_cpp_enum.py
+++ b/chromium/build/android/gyp/java_cpp_enum.py
@@ -351,7 +351,7 @@ def GenerateOutput(source_path, enum_definition):
package ${PACKAGE};
-import android.support.annotation.IntDef;
+import androidx.annotation.IntDef;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
diff --git a/chromium/build/android/gyp/java_cpp_enum_tests.py b/chromium/build/android/gyp/java_cpp_enum_tests.py
index 5717047c7a3..08ef3b8773b 100755
--- a/chromium/build/android/gyp/java_cpp_enum_tests.py
+++ b/chromium/build/android/gyp/java_cpp_enum_tests.py
@@ -42,7 +42,7 @@ class TestPreprocess(unittest.TestCase):
package some.package;
-import android.support.annotation.IntDef;
+import androidx.annotation.IntDef;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
diff --git a/chromium/build/android/gyp/javac.py b/chromium/build/android/gyp/javac.py
index e8f27d562c2..9745a340373 100755
--- a/chromium/build/android/gyp/javac.py
+++ b/chromium/build/android/gyp/javac.py
@@ -501,7 +501,7 @@ def _ParseOptions(argv):
def main(argv):
logging.basicConfig(
- level=logging.INFO if os.environ.get('_JAVAC_DEBUG') else logging.WARNING,
+ level=logging.INFO if os.environ.get('JAVAC_DEBUG') else logging.WARNING,
format='%(levelname).1s %(relativeCreated)6d %(message)s')
colorama.init()
@@ -591,7 +591,7 @@ def main(argv):
options.jar_path + '.info',
]
- input_strings = javac_cmd + classpath
+ input_strings = javac_cmd + classpath + java_files
if options.jar_info_exclude_globs:
input_strings.append(options.jar_info_exclude_globs)
build_utils.CallAndWriteDepfileIfStale(
@@ -600,8 +600,7 @@ def main(argv):
depfile_deps=depfile_deps,
input_paths=input_paths,
input_strings=input_strings,
- output_paths=output_paths,
- add_pydeps=False)
+ output_paths=output_paths)
logging.info('Script complete: %s', __file__)
diff --git a/chromium/build/android/gyp/lint.py b/chromium/build/android/gyp/lint.py
index 605a478ec85..dfb9d08975b 100755
--- a/chromium/build/android/gyp/lint.py
+++ b/chromium/build/android/gyp/lint.py
@@ -439,8 +439,7 @@ def main():
input_paths=input_paths,
input_strings=input_strings,
output_paths=output_paths,
- depfile_deps=classpath,
- add_pydeps=False)
+ depfile_deps=classpath)
if __name__ == '__main__':
diff --git a/chromium/build/android/gyp/main_dex_list.py b/chromium/build/android/gyp/main_dex_list.py
index d610982699c..b75f8ee6ff4 100755
--- a/chromium/build/android/gyp/main_dex_list.py
+++ b/chromium/build/android/gyp/main_dex_list.py
@@ -5,8 +5,6 @@
# found in the LICENSE file.
import argparse
-import json
-import os
import sys
import tempfile
import zipfile
@@ -14,7 +12,7 @@ import zipfile
from util import build_utils
-def main(args):
+def _ParseArgs():
parser = argparse.ArgumentParser()
build_utils.AddDepfileOption(parser)
parser.add_argument('--shrinked-android-path', required=True,
@@ -28,33 +26,36 @@ def main(args):
'main dex.')
parser.add_argument('--main-dex-list-path', required=True,
help='The main dex list file to generate.')
- parser.add_argument('--inputs',
- help='JARs for which a main dex list should be '
- 'generated.')
+ parser.add_argument(
+ '--class-inputs',
+ action='append',
+ help='GN-list of .jars with .class files.')
+ parser.add_argument(
+ '--class-inputs-filearg',
+ action='append',
+ help='GN-list of .jars with .class files (added to depfile).')
parser.add_argument(
'--r8-path', required=True, help='Path to the r8 executable.')
parser.add_argument('--negative-main-dex-globs',
help='GN-list of globs of .class names (e.g. org/chromium/foo/Bar.class) '
'that will fail the build if they match files in the main dex.')
- parser.add_argument('paths', nargs='*', default=[],
- help='JARs for which a main dex list should be '
- 'generated.')
-
- args = parser.parse_args(build_utils.ExpandFileArgs(args))
+ args = parser.parse_args(build_utils.ExpandFileArgs(sys.argv[1:]))
- depfile_deps = []
- if args.inputs:
- args.inputs = build_utils.ParseGnList(args.inputs)
- depfile_deps = args.inputs
- args.paths.extend(args.inputs)
+ args.class_inputs = build_utils.ParseGnList(args.class_inputs)
+ args.class_inputs_filearg = build_utils.ParseGnList(args.class_inputs_filearg)
+ args.class_inputs += args.class_inputs_filearg
if args.negative_main_dex_globs:
args.negative_main_dex_globs = build_utils.ParseGnList(
args.negative_main_dex_globs)
+ return args
+
+def main():
+ args = _ParseArgs()
proguard_cmd = [
- 'java',
+ build_utils.JAVA_PATH,
'-jar',
args.r8_path,
'--classfile',
@@ -73,60 +74,16 @@ def main(args):
'-dontpreverify',
]
- main_dex_list_cmd = [
- 'java', '-cp', args.dx_path,
- 'com.android.multidex.MainDexListBuilder',
- # This workaround significantly increases main dex size and doesn't seem to
- # be needed by Chrome. See comment in the source:
- # https://android.googlesource.com/platform/dalvik/+/master/dx/src/com/android/multidex/MainDexListBuilder.java
- '--disable-annotation-resolution-workaround',
- ]
-
- input_paths = list(args.paths)
- input_paths += [
- args.shrinked_android_path,
- args.dx_path,
- ]
- input_paths += args.main_dex_rules_paths
-
- input_strings = [
- proguard_cmd,
- main_dex_list_cmd,
- ]
-
if args.negative_main_dex_globs:
- input_strings += args.negative_main_dex_globs
for glob in args.negative_main_dex_globs:
      # Globs come with 1 asterisk, but we want 2 to match subpackages.
proguard_flags.append('-checkdiscard class ' +
glob.replace('*', '**').replace('/', '.'))
- output_paths = [
- args.main_dex_list_path,
- ]
-
- def _LineLengthHelperForOnStaleMd5():
- _OnStaleMd5(proguard_cmd, proguard_flags, main_dex_list_cmd, args.paths,
- args.main_dex_list_path)
-
- build_utils.CallAndWriteDepfileIfStale(
- _LineLengthHelperForOnStaleMd5,
- args,
- input_paths=input_paths,
- input_strings=input_strings,
- output_paths=output_paths,
- depfile_deps=depfile_deps,
- add_pydeps=False)
-
- return 0
-
-
-def _OnStaleMd5(proguard_cmd, proguard_flags, main_dex_list_cmd, paths,
- main_dex_list_path):
main_dex_list = ''
try:
with tempfile.NamedTemporaryFile(suffix='.jar') as temp_jar:
- # Step 1: Use ProGuard to find all @MainDex code, and all code reachable
+ # Step 1: Use R8 to find all @MainDex code, and all code reachable
# from @MainDex code (recursive).
proguard_cmd += ['--output', temp_jar.name]
with tempfile.NamedTemporaryFile() as proguard_flags_file:
@@ -134,7 +91,7 @@ def _OnStaleMd5(proguard_cmd, proguard_flags, main_dex_list_cmd, paths,
proguard_flags_file.write(flag + '\n')
proguard_flags_file.flush()
proguard_cmd += ['--pg-conf', proguard_flags_file.name]
- for injar in paths:
+ for injar in args.class_inputs:
proguard_cmd.append(injar)
build_utils.CheckOutput(proguard_cmd, print_stderr=False)
@@ -142,12 +99,23 @@ def _OnStaleMd5(proguard_cmd, proguard_flags, main_dex_list_cmd, paths,
# for debugging what classes are kept by ProGuard vs. MainDexListBuilder.
with zipfile.ZipFile(temp_jar.name) as z:
kept_classes = [p for p in z.namelist() if p.endswith('.class')]
- with open(main_dex_list_path + '.partial', 'w') as f:
+ with open(args.main_dex_list_path + '.partial', 'w') as f:
f.write('\n'.join(kept_classes) + '\n')
# Step 2: Expand inclusion list to all classes referenced by the .class
# files of kept classes (non-recursive).
- main_dex_list_cmd += [temp_jar.name, ':'.join(paths)]
+ main_dex_list_cmd = [
+ build_utils.JAVA_PATH,
+ '-cp',
+ args.dx_path,
+ 'com.android.multidex.MainDexListBuilder',
+ # This workaround increases main dex size and does not seem to
+ # be needed by Chrome. See comment in the source:
+ # https://android.googlesource.com/platform/dalvik/+/master/dx/src/com/android/multidex/MainDexListBuilder.java
+ '--disable-annotation-resolution-workaround',
+ temp_jar.name,
+ ':'.join(args.class_inputs)
+ ]
main_dex_list = build_utils.CheckOutput(main_dex_list_cmd)
except build_utils.CalledProcessError as e:
@@ -158,9 +126,16 @@ def _OnStaleMd5(proguard_cmd, proguard_flags, main_dex_list_cmd, paths,
else:
raise
- with open(main_dex_list_path, 'w') as main_dex_list_file:
- main_dex_list_file.write(main_dex_list)
+ with build_utils.AtomicOutput(args.main_dex_list_path) as f:
+ f.write(main_dex_list)
+
+ if args.depfile:
+ build_utils.WriteDepfile(
+ args.depfile,
+ args.main_dex_list_path,
+ inputs=args.class_inputs_filearg,
+ add_pydeps=False)
if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
+ main()
diff --git a/chromium/build/android/gyp/merge_manifest.py b/chromium/build/android/gyp/merge_manifest.py
index 5680ad94028..61840bf6a1a 100755
--- a/chromium/build/android/gyp/merge_manifest.py
+++ b/chromium/build/android/gyp/merge_manifest.py
@@ -92,7 +92,7 @@ def main(argv):
with build_utils.AtomicOutput(args.output) as output:
cmd = [
- 'java',
+ build_utils.JAVA_PATH,
'-cp',
classpath,
_MANIFEST_MERGER_MAIN_CLASS,
diff --git a/chromium/build/android/gyp/prepare_resources.py b/chromium/build/android/gyp/prepare_resources.py
index 6147f1a087f..cb0a2446544 100755
--- a/chromium/build/android/gyp/prepare_resources.py
+++ b/chromium/build/android/gyp/prepare_resources.py
@@ -295,8 +295,7 @@ def main(args):
input_paths=input_paths,
input_strings=input_strings,
output_paths=output_paths,
- depfile_deps=depfile_deps,
- add_pydeps=False)
+ depfile_deps=depfile_deps)
if __name__ == '__main__':
diff --git a/chromium/build/android/gyp/proguard.py b/chromium/build/android/gyp/proguard.py
index 3284341100d..fc9364471bd 100755
--- a/chromium/build/android/gyp/proguard.py
+++ b/chromium/build/android/gyp/proguard.py
@@ -14,6 +14,18 @@ import zipfile
from util import build_utils
from util import diff_utils
+_API_LEVEL_VERSION_CODE = [
+ (21, 'L'),
+ (22, 'LollipopMR1'),
+ (23, 'M'),
+ (24, 'N'),
+ (25, 'NMR1'),
+ (26, 'O'),
+ (27, 'OMR1'),
+ (28, 'P'),
+ (29, 'Q'),
+]
+
class _ProguardOutputFilter(object):
"""ProGuard outputs boring stuff to stdout (ProGuard version, jar path, etc)
@@ -99,8 +111,7 @@ def _ParseOptions():
parser.add_argument(
'--verbose', '-v', action='store_true', help='Print all ProGuard output')
parser.add_argument(
- '--repackage-classes',
- help='Unique package name given to an asynchronously proguarded module')
+ '--repackage-classes', help='Package all optimized classes are put in.')
parser.add_argument(
'--disable-outlining',
action='store_true',
@@ -123,9 +134,6 @@ def _ParseOptions():
options.extra_mapping_output_paths = build_utils.ParseGnList(
options.extra_mapping_output_paths)
- if options.apply_mapping:
- options.apply_mapping = os.path.abspath(options.apply_mapping)
-
return options
@@ -168,7 +176,7 @@ def _OptimizeWithR8(options,
os.mkdir(tmp_output)
cmd = [
- 'java',
+ build_utils.JAVA_PATH,
'-jar',
options.r8_path,
'--no-desugaring',
@@ -238,7 +246,7 @@ def _OptimizeWithProguard(options,
if options.proguard_path.endswith('.jar'):
cmd = [
- 'java', '-jar', options.proguard_path, '-include',
+ build_utils.JAVA_PATH, '-jar', options.proguard_path, '-include',
combined_proguard_configs_path
]
else:
@@ -319,9 +327,23 @@ def _CreateDynamicConfig(options):
}""" % options.min_api)
if options.apply_mapping:
- ret.append("-applymapping '%s'" % options.apply_mapping)
+ ret.append("-applymapping '%s'" % os.path.abspath(options.apply_mapping))
if options.repackage_classes:
ret.append("-repackageclasses '%s'" % options.repackage_classes)
+
+ _min_api = int(options.min_api) if options.min_api else 0
+ for api_level, version_code in _API_LEVEL_VERSION_CODE:
+ annotation_name = 'org.chromium.base.annotations.VerifiesOn' + version_code
+ if api_level > _min_api:
+ ret.append('-keep @interface %s' % annotation_name)
+ ret.append("""\
+-keep,allowobfuscation,allowoptimization @%s class ** {
+ <methods>;
+}""" % annotation_name)
+ ret.append("""\
+-keepclassmembers,allowobfuscation,allowoptimization class ** {
+ @%s <methods>;
+}""" % annotation_name)
return '\n'.join(ret)
@@ -387,7 +409,7 @@ def main():
inputs = options.proguard_configs + options.input_paths + libraries
if options.apply_mapping:
- inputs += options.apply_mapping
+ inputs.append(options.apply_mapping)
build_utils.WriteDepfile(
options.depfile, options.output_path, inputs=inputs, add_pydeps=False)
diff --git a/chromium/build/android/gyp/util/build_utils.py b/chromium/build/android/gyp/util/build_utils.py
index ecb41a3fe85..ca7f0c165b6 100644
--- a/chromium/build/android/gyp/util/build_utils.py
+++ b/chromium/build/android/gyp/util/build_utils.py
@@ -33,6 +33,8 @@ import gn_helpers
DIR_SOURCE_ROOT = os.environ.get('CHECKOUT_SOURCE_ROOT',
os.path.abspath(os.path.join(os.path.dirname(__file__),
os.pardir, os.pardir, os.pardir, os.pardir)))
+JAVA_PATH = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'jdk', 'current',
+ 'bin', 'java')
try:
string_types = basestring
@@ -70,7 +72,7 @@ def Touch(path, fail_if_missing=False):
os.utime(path, None)
-def FindInDirectory(directory, filename_filter):
+def FindInDirectory(directory, filename_filter='*'):
files = []
for root, _dirnames, filenames in os.walk(directory):
matched_files = fnmatch.filter(filenames, filename_filter)
@@ -206,6 +208,26 @@ def FilterLines(output, filter_string):
line for line in output.splitlines() if not re_filter.search(line))
+def FilterReflectiveAccessJavaWarnings(output):
+ """Filters out warnings about illegal reflective access operation.
+
+ These warnings were introduced in Java 9, and generally mean that dependencies
+ need to be updated.
+ """
+ # WARNING: An illegal reflective access operation has occurred
+ # WARNING: Illegal reflective access by ...
+ # WARNING: Please consider reporting this to the maintainers of ...
+ # WARNING: Use --illegal-access=warn to enable warnings of further ...
+ # WARNING: All illegal access operations will be denied in a future release
+ return FilterLines(
+ output, r'WARNING: ('
+ 'An illegal reflective|'
+ 'Illegal reflective access|'
+ 'Please consider reporting this to|'
+ 'Use --illegal-access=warn|'
+ 'All illegal access operations)')
+
+
# This can be used in most cases like subprocess.check_output(). The output,
# particularly when the command fails, better highlights the command's failure.
# If the command fails, raises a build_utils.CalledProcessError.
@@ -511,7 +533,7 @@ def _ComputePythonDependencies():
src/. The paths will be relative to the current directory.
"""
_ForceLazyModulesToLoad()
- module_paths = (m.__file__ for m in sys.modules.itervalues()
+ module_paths = (m.__file__ for m in sys.modules.values()
if m is not None and hasattr(m, '__file__'))
abs_module_paths = map(os.path.abspath, module_paths)
@@ -634,15 +656,20 @@ def ReadSourcesList(sources_list_file_name):
return [file_name.strip() for file_name in f]
-def CallAndWriteDepfileIfStale(function, options, record_path=None,
- input_paths=None, input_strings=None,
- output_paths=None, force=False,
- pass_changes=False, depfile_deps=None,
- add_pydeps=True):
+def CallAndWriteDepfileIfStale(on_stale_md5,
+ options,
+ record_path=None,
+ input_paths=None,
+ input_strings=None,
+ output_paths=None,
+ force=False,
+ pass_changes=False,
+ track_subpaths_whitelist=None,
+ depfile_deps=None):
"""Wraps md5_check.CallAndRecordIfStale() and writes a depfile if applicable.
Depfiles are automatically added to output_paths when present in the |options|
- argument. They are then created after |function| is called.
+ argument. They are then created after |on_stale_md5| is called.
By default, only python dependencies are added to the depfile. If there are
other input paths that are not captured by GN deps, then they should be listed
@@ -656,21 +683,7 @@ def CallAndWriteDepfileIfStale(function, options, record_path=None,
input_strings = list(input_strings or [])
output_paths = list(output_paths or [])
- python_deps = None
- if hasattr(options, 'depfile') and options.depfile:
- python_deps = _ComputePythonDependencies()
- input_paths += python_deps
- output_paths += [options.depfile]
-
- def on_stale_md5(changes):
- args = (changes,) if pass_changes else ()
- function(*args)
- if python_deps is not None:
- all_depfile_deps = list(python_deps) if add_pydeps else []
- if depfile_deps:
- all_depfile_deps.extend(depfile_deps)
- WriteDepfile(options.depfile, output_paths[0], all_depfile_deps,
- add_pydeps=False)
+ input_paths += _ComputePythonDependencies()
md5_check.CallAndRecordIfStale(
on_stale_md5,
@@ -679,4 +692,12 @@ def CallAndWriteDepfileIfStale(function, options, record_path=None,
input_strings=input_strings,
output_paths=output_paths,
force=force,
- pass_changes=True)
+ pass_changes=pass_changes,
+ track_subpaths_whitelist=track_subpaths_whitelist)
+
+ # Write depfile even when inputs have not changed to ensure build correctness
+ # on bots that build with & without patch, and the patch changes the depfile
+ # location.
+ if hasattr(options, 'depfile') and options.depfile:
+ WriteDepfile(
+ options.depfile, output_paths[0], depfile_deps, add_pydeps=False)
diff --git a/chromium/build/android/gyp/util/md5_check.py b/chromium/build/android/gyp/util/md5_check.py
index 9a15ee6e75a..0ad6f1b4003 100644
--- a/chromium/build/android/gyp/util/md5_check.py
+++ b/chromium/build/android/gyp/util/md5_check.py
@@ -20,9 +20,14 @@ PRINT_EXPLANATIONS = int(os.environ.get('PRINT_BUILD_EXPLANATIONS', 0))
_FORCE_REBUILD = int(os.environ.get('FORCE_REBUILD', 0))
-def CallAndRecordIfStale(
- function, record_path=None, input_paths=None, input_strings=None,
- output_paths=None, force=False, pass_changes=False):
+def CallAndRecordIfStale(function,
+ record_path=None,
+ input_paths=None,
+ input_strings=None,
+ output_paths=None,
+ force=False,
+ pass_changes=False,
+ track_subpaths_whitelist=None):
"""Calls function if outputs are stale.
Outputs are considered stale if:
@@ -43,6 +48,8 @@ def CallAndRecordIfStale(
force: Whether to treat outputs as missing regardless of whether they
actually are.
pass_changes: Whether to pass a Changes instance to |function|.
+ track_subpaths_whitelist: Relevant only when pass_changes=True. List of .zip
+ files from |input_paths| to make subpath information available for.
"""
assert record_path or output_paths
input_paths = input_paths or []
@@ -57,12 +64,15 @@ def CallAndRecordIfStale(
new_metadata = _Metadata(track_entries=pass_changes or PRINT_EXPLANATIONS)
new_metadata.AddStrings(input_strings)
+ zip_whitelist = set(track_subpaths_whitelist or [])
for path in input_paths:
- if _IsZipFile(path):
+ # It's faster to md5 an entire zip file than it is to just locate & hash
+ # its central directory (which is what this used to do).
+ if path in zip_whitelist:
entries = _ExtractZipEntries(path)
new_metadata.AddZipFile(path, entries)
else:
- new_metadata.AddFile(path, _Md5ForPath(path))
+ new_metadata.AddFile(path, _ComputeTagForPath(path))
old_metadata = None
force = force or _FORCE_REBUILD
@@ -106,19 +116,20 @@ class Changes(object):
def HasChanges(self):
"""Returns whether any changes exist."""
- return (self.force or
- not self.old_metadata or
- self.old_metadata.StringsMd5() != self.new_metadata.StringsMd5() or
- self.old_metadata.FilesMd5() != self.new_metadata.FilesMd5())
+ return (self.HasStringChanges()
+ or self.old_metadata.FilesMd5() != self.new_metadata.FilesMd5())
+
+ def HasStringChanges(self):
+ """Returns whether string metadata changed."""
+ return (self.force or not self.old_metadata
+ or self.old_metadata.StringsMd5() != self.new_metadata.StringsMd5())
def AddedOrModifiedOnly(self):
"""Returns whether the only changes were from added or modified (sub)files.
No missing outputs, no removed paths/subpaths.
"""
- if (self.force or
- not self.old_metadata or
- self.old_metadata.StringsMd5() != self.new_metadata.StringsMd5()):
+ if self.HasStringChanges():
return False
if any(self.IterRemovedPaths()):
return False
@@ -368,27 +379,15 @@ class _Metadata(object):
return (entry['path'] for entry in subentries)
-def _UpdateMd5ForFile(md5, path, block_size=2**16):
- with open(path, 'rb') as infile:
- while True:
- data = infile.read(block_size)
- if not data:
- break
- md5.update(data)
-
-
-def _UpdateMd5ForDirectory(md5, dir_path):
- for root, _, files in os.walk(dir_path):
- for f in files:
- _UpdateMd5ForFile(md5, os.path.join(root, f))
-
-
-def _Md5ForPath(path):
+def _ComputeTagForPath(path):
+ stat = os.stat(path)
+ if stat.st_size > 1 * 1024 * 1024:
+ # Fall back to mtime for large files so that md5_check does not take too long
+ # to run.
+ return stat.st_mtime
md5 = hashlib.md5()
- if os.path.isdir(path):
- _UpdateMd5ForDirectory(md5, path)
- else:
- _UpdateMd5ForFile(md5, path)
+ with open(path, 'rb') as f:
+ md5.update(f.read())
return md5.hexdigest()
@@ -400,14 +399,6 @@ def _ComputeInlineMd5(iterable):
return md5.hexdigest()
-def _IsZipFile(path):
- """Returns whether to treat the given file as a zip file."""
- # ijar doesn't set the CRC32 field.
- if path.endswith('.interface.jar'):
- return False
- return path[-4:] in ('.zip', '.apk', '.jar') or path.endswith('.srcjar')
-
-
def _ExtractZipEntries(path):
"""Returns a list of (path, CRC32) of all files within |path|."""
entries = []
diff --git a/chromium/build/android/gyp/util/md5_check_test.py b/chromium/build/android/gyp/util/md5_check_test.py
index 41e9d3c248c..cba7a6a354a 100755
--- a/chromium/build/android/gyp/util/md5_check_test.py
+++ b/chromium/build/android/gyp/util/md5_check_test.py
@@ -36,12 +36,18 @@ class TestMd5Check(unittest.TestCase):
# Test out empty zip file to start.
_WriteZipFile(input_file2.name, [])
input_files = [input_file1.name, input_file2.name]
+ zip_paths = [input_file2.name]
record_path = tempfile.NamedTemporaryFile(suffix='.stamp')
- def CheckCallAndRecord(should_call, message, force=False,
- outputs_specified=False, outputs_missing=False,
- expected_changes=None, added_or_modified_only=None):
+ def CheckCallAndRecord(should_call,
+ message,
+ force=False,
+ outputs_specified=False,
+ outputs_missing=False,
+ expected_changes=None,
+ added_or_modified_only=None,
+ track_subentries=False):
output_paths = None
if outputs_specified:
output_file1 = tempfile.NamedTemporaryFile()
@@ -66,7 +72,8 @@ class TestMd5Check(unittest.TestCase):
input_strings=input_strings,
output_paths=output_paths,
force=force,
- pass_changes=(expected_changes or added_or_modified_only) is not None)
+ pass_changes=(expected_changes or added_or_modified_only) is not None,
+ track_subpaths_whitelist=zip_paths if track_subentries else None)
self.assertEqual(should_call, self.called, message)
if expected_changes:
description = self.changes.DescribeDifference()
@@ -81,6 +88,9 @@ class TestMd5Check(unittest.TestCase):
expected_changes='Previous stamp file not found.',
added_or_modified_only=False)
CheckCallAndRecord(False, 'should not call when nothing changed')
+ input_files = input_files[::-1]
+ CheckCallAndRecord(False, 'reordering of inputs shouldn\'t trigger call')
+
CheckCallAndRecord(False, 'should not call when nothing changed #2',
outputs_specified=True, outputs_missing=False)
CheckCallAndRecord(True, 'should call when output missing',
@@ -97,9 +107,6 @@ class TestMd5Check(unittest.TestCase):
expected_changes='*Modified: %s' % input_file1.name,
added_or_modified_only=True)
- input_files = input_files[::-1]
- CheckCallAndRecord(False, 'reordering of inputs shouldn\'t trigger call')
-
input_files = input_files[:1]
CheckCallAndRecord(True, 'removing file should trigger call',
expected_changes='*Removed: %s' % input_file1.name,
@@ -129,16 +136,21 @@ class TestMd5Check(unittest.TestCase):
added_or_modified_only=False)
_WriteZipFile(input_file2.name, [('path/1.txt', '1')])
- CheckCallAndRecord(True, 'added subpath should trigger call',
- expected_changes='*Modified: %s*Subpath added: %s' % (
- input_file2.name, 'path/1.txt'),
- added_or_modified_only=True)
+ CheckCallAndRecord(
+ True,
+ 'added subpath should trigger call',
+ expected_changes='*Modified: %s*Subpath added: %s' % (input_file2.name,
+ 'path/1.txt'),
+ added_or_modified_only=True,
+ track_subentries=True)
_WriteZipFile(input_file2.name, [('path/1.txt', '2')])
- CheckCallAndRecord(True, 'changed subpath should trigger call',
- expected_changes='*Modified: %s*Subpath modified: %s' % (
- input_file2.name, 'path/1.txt'),
- added_or_modified_only=True)
- CheckCallAndRecord(False, 'should not call when nothing changed')
+ CheckCallAndRecord(
+ True,
+ 'changed subpath should trigger call',
+ expected_changes='*Modified: %s*Subpath modified: %s' %
+ (input_file2.name, 'path/1.txt'),
+ added_or_modified_only=True,
+ track_subentries=True)
_WriteZipFile(input_file2.name, [])
CheckCallAndRecord(True, 'removed subpath should trigger call',
diff --git a/chromium/build/android/gyp/write_build_config.py b/chromium/build/android/gyp/write_build_config.py
index 3d842a3fc5e..b98bd4fa4a3 100755
--- a/chromium/build/android/gyp/write_build_config.py
+++ b/chromium/build/android/gyp/write_build_config.py
@@ -348,9 +348,9 @@ that will be merged into the final `.jar` file for distribution.
Path to the final classes.dex file (or classes.zip in case of multi-dex)
for this APK.
-* `deps_info['final_dex']['dependency_dex_files']`:
-The list of paths to all `deps_info['dex_path']` entries for all library
-dependencies for this APK.
+* `deps_info['final_dex']['all_dex_files']`:
+The list of paths to all `deps_info['dex_path']` entries for all libraries
+that comprise this APK. Valid only for debug builds.
* `native['libraries']`
List of native libraries for the primary ABI to be embedded in this APK.
@@ -1191,6 +1191,9 @@ def main(argv):
raise Exception('Not all deps support the Android platform: '
+ str(deps_not_support_android))
+ if is_apk_or_module_target:
+ all_dex_files = [c['dex_path'] for c in all_library_deps]
+
if is_java_target:
# Classpath values filled in below (after applying tested_apk_config).
config['javac'] = {}
@@ -1200,6 +1203,8 @@ def main(argv):
deps_info['interface_jar_path'] = options.interface_jar_path
if options.dex_path:
deps_info['dex_path'] = options.dex_path
+ if is_apk_or_module_target:
+ all_dex_files.append(options.dex_path)
if options.type == 'android_apk':
deps_info['apk_path'] = options.apk_path
deps_info['incremental_apk_path'] = options.incremental_apk_path
@@ -1318,9 +1323,6 @@ def main(argv):
if options.res_size_info:
config['deps_info']['res_size_info'] = options.res_size_info
- if is_apk_or_module_target:
- deps_dex_files = [c['dex_path'] for c in all_library_deps]
-
if options.type == 'group':
if options.extra_classpath_jars:
# These are .jars to add to javac classpath but not to runtime classpath.
@@ -1590,8 +1592,9 @@ def main(argv):
# within proguard.py. Move the logic for the proguard case to here.
tested_apk_library_deps = tested_apk_deps.All('java_library')
tested_apk_deps_dex_files = [c['dex_path'] for c in tested_apk_library_deps]
- deps_dex_files = [
- p for p in deps_dex_files if not p in tested_apk_deps_dex_files]
+ all_dex_files = [
+ p for p in all_dex_files if not p in tested_apk_deps_dex_files
+ ]
if options.type in ('android_apk', 'dist_aar', 'dist_jar',
'android_app_bundle_module', 'android_app_bundle'):
@@ -1605,7 +1608,7 @@ def main(argv):
dex_config = config['final_dex']
dex_config['path'] = options.final_dex_path
if is_apk_or_module_target:
- dex_config['dependency_dex_files'] = deps_dex_files
+ dex_config['all_dex_files'] = all_dex_files
if is_java_target:
config['javac']['classpath'] = javac_classpath
@@ -1723,7 +1726,7 @@ def main(argv):
RemoveObjDups(config, base, 'deps_info', 'javac_full_classpath')
RemoveObjDups(config, base, 'deps_info', 'javac_full_interface_classpath')
RemoveObjDups(config, base, 'deps_info', 'jni', 'all_source')
- RemoveObjDups(config, base, 'final_dex', 'dependency_dex_files')
+ RemoveObjDups(config, base, 'final_dex', 'all_dex_files')
RemoveObjDups(config, base, 'extra_android_manifests')
build_utils.WriteJson(config, options.build_config, only_if_changed=True)
diff --git a/chromium/build/android/incremental_install/BUILD.gn b/chromium/build/android/incremental_install/BUILD.gn
index fb75d793345..934ce2cf61d 100644
--- a/chromium/build/android/incremental_install/BUILD.gn
+++ b/chromium/build/android/incremental_install/BUILD.gn
@@ -6,7 +6,7 @@ import("//build/config/android/rules.gni")
android_library("bootstrap_java") {
# Use .dex rather than .dex.jar to be usable by package_apk().
- dex_path = "$target_gen_dir/bootstrap.dex"
+ dex_path = "$target_out_dir/bootstrap.dex"
java_files = [
"java/org/chromium/incrementalinstall/BootstrapApplication.java",
"java/org/chromium/incrementalinstall/BootstrapInstrumentation.java",
diff --git a/chromium/build/android/incremental_install/write_installer_json.py b/chromium/build/android/incremental_install/write_installer_json.py
index 75bd6d1aab7..df6cfdf734c 100755
--- a/chromium/build/android/incremental_install/write_installer_json.py
+++ b/chromium/build/android/incremental_install/write_installer_json.py
@@ -31,19 +31,13 @@ def _ParseArgs(args):
default=[],
help='A glob matching the apk splits. '
'Can be specified multiple times.')
- parser.add_argument('--native-libs-list',
- action='append',
- default=[],
- help='GN-list of paths to native libraries relative to '
- 'output directory. Can be repeated.')
- parser.add_argument('--dex-file',
- action='append',
- default=[],
- dest='dex_files',
- help='.dex file to include relative to output directory. '
- 'Can be repeated')
- parser.add_argument('--dex-file-list',
- help='GN-list of dex paths relative to output directory.')
+ parser.add_argument(
+ '--native-libs',
+ action='append',
+ help='GN-list of paths to native libraries relative to '
+ 'output directory. Can be repeated.')
+ parser.add_argument(
+ '--dex-files', help='GN-list of dex paths relative to output directory.')
parser.add_argument('--show-proguard-warning',
action='store_true',
default=False,
@@ -52,11 +46,8 @@ def _ParseArgs(args):
help='Prints the given message and exits.')
options = parser.parse_args(args)
- options.dex_files += build_utils.ParseGnList(options.dex_file_list)
- all_libs = []
- for gn_list in options.native_libs_list:
- all_libs.extend(build_utils.ParseGnList(gn_list))
- options.native_libs_list = all_libs
+ options.dex_files = build_utils.ParseGnList(options.dex_files)
+ options.native_libs = build_utils.ParseGnList(options.native_libs)
return options
@@ -65,7 +56,7 @@ def main(args):
data = {
'apk_path': options.apk_path,
- 'native_libs': options.native_libs_list,
+ 'native_libs': options.native_libs,
'dex_files': options.dex_files,
'dont_even_try': options.dont_even_try,
'show_proguard_warning': options.show_proguard_warning,
diff --git a/chromium/build/android/list_class_verification_failures.py b/chromium/build/android/list_class_verification_failures.py
index cfcb2ac6896..da7d0a8c484 100755
--- a/chromium/build/android/list_class_verification_failures.py
+++ b/chromium/build/android/list_class_verification_failures.py
@@ -121,10 +121,11 @@ def _AdbOatDumpForPackage(device, package_name, out_file):
"""Runs oatdump on the device."""
# Get the path to the odex file.
odex_file = PathToDexForPlatformVersion(device, package_name)
- device.RunShellCommand(['oatdump',
- '--oat-file=' + odex_file,
- '--output=' + out_file],
- timeout=120, shell=True, check_return=True)
+ device.RunShellCommand(
+ ['oatdump', '--oat-file=' + odex_file, '--output=' + out_file],
+ timeout=420,
+ shell=True,
+ check_return=True)
class JavaClass(object):
@@ -211,7 +212,7 @@ def RealMain(mapping, device_arg, package, status, hide_summary, workdir):
device.adb) as file_on_device:
_AdbOatDumpForPackage(device, package, file_on_device.name)
file_on_host = os.path.join(workdir, 'out.dump')
- device.PullFile(file_on_device.name, file_on_host)
+ device.PullFile(file_on_device.name, file_on_host, timeout=220)
proguard_mappings = (_ParseMappingFile(mapping) if mapping else None)
with open(file_on_host, 'r') as f:
java_classes = ListClassesAndVerificationStatus(f, proguard_mappings)
diff --git a/chromium/build/android/pylib/base/environment_factory.py b/chromium/build/android/pylib/base/environment_factory.py
index 4d3727444f4..2b402ab9854 100644
--- a/chromium/build/android/pylib/base/environment_factory.py
+++ b/chromium/build/android/pylib/base/environment_factory.py
@@ -4,14 +4,24 @@
from pylib import constants
from pylib.local.device import local_device_environment
-from pylib.local.emulator import local_emulator_environment
from pylib.local.machine import local_machine_environment
+try:
+ # local_emulator_environment depends on //tools.
+ # If a client pulls in the //build subtree but not the //tools
+ # one, fail at emulator environment creation time.
+ from pylib.local.emulator import local_emulator_environment
+except ImportError:
+ local_emulator_environment = None
+
+
def CreateEnvironment(args, output_manager, error_func):
if args.environment == 'local':
if args.command not in constants.LOCAL_MACHINE_TESTS:
- if args.avd_name:
+ if args.avd_config:
+ if not local_emulator_environment:
+ error_func('emulator environment requested but not available.')
return local_emulator_environment.LocalEmulatorEnvironment(
args, output_manager, error_func)
return local_device_environment.LocalDeviceEnvironment(
diff --git a/chromium/build/android/pylib/constants/__init__.py b/chromium/build/android/pylib/constants/__init__.py
index aff42e3e961..5daecb6d6ed 100644
--- a/chromium/build/android/pylib/constants/__init__.py
+++ b/chromium/build/android/pylib/constants/__init__.py
@@ -79,6 +79,11 @@ PACKAGE_INFO.update({
chrome.PackageInfo('org.chromium.webview_ui_test',
'org.chromium.webview_ui_test.WebViewUiTestActivity',
'webview-command-line', None),
+ 'weblayer_browsertests':
+ chrome.PackageInfo(
+ 'org.chromium.weblayer_browsertests_apk',
+ 'org.chromium.weblayer_browsertests_apk.WebLayerBrowserTestsActivity',
+ 'chrome-native-tests-command-line', None),
})
diff --git a/chromium/build/android/pylib/gtest/gtest_test_instance.py b/chromium/build/android/pylib/gtest/gtest_test_instance.py
index 2b2c5e7f7e9..634ab7220a2 100644
--- a/chromium/build/android/pylib/gtest/gtest_test_instance.py
+++ b/chromium/build/android/pylib/gtest/gtest_test_instance.py
@@ -26,6 +26,7 @@ BROWSER_TEST_SUITES = [
'android_browsertests',
'components_browsertests',
'content_browsertests',
+ 'weblayer_browsertests',
]
RUN_IN_SUB_THREAD_TEST_SUITES = [
diff --git a/chromium/build/android/pylib/instrumentation/instrumentation_test_instance.py b/chromium/build/android/pylib/instrumentation/instrumentation_test_instance.py
index 09114225cb1..bc2a9a31d50 100644
--- a/chromium/build/android/pylib/instrumentation/instrumentation_test_instance.py
+++ b/chromium/build/android/pylib/instrumentation/instrumentation_test_instance.py
@@ -446,6 +446,8 @@ class InstrumentationTestInstance(test_instance.TestInstance):
self._additional_apks = []
self._apk_under_test = None
self._apk_under_test_incremental_install_json = None
+ self._modules = None
+ self._fake_modules = None
self._package_info = None
self._suite = None
self._test_apk = None
@@ -506,7 +508,8 @@ class InstrumentationTestInstance(test_instance.TestInstance):
def _initializeApkAttributes(self, args, error_func):
if args.apk_under_test:
apk_under_test_path = args.apk_under_test
- if not args.apk_under_test.endswith('.apk'):
+ if (not args.apk_under_test.endswith('.apk')
+ and not args.apk_under_test.endswith('.apks')):
apk_under_test_path = os.path.join(
constants.GetOutDirectory(), constants.SDK_BUILD_APKS_DIR,
'%s.apk' % args.apk_under_test)
@@ -520,24 +523,20 @@ class InstrumentationTestInstance(test_instance.TestInstance):
self._apk_under_test = apk_helper.ToHelper(apk_under_test_path)
- if args.test_apk.endswith('.apk'):
- self._suite = os.path.splitext(os.path.basename(args.test_apk))[0]
- test_apk_path = args.test_apk
- self._test_apk = apk_helper.ToHelper(args.test_apk)
- else:
- self._suite = args.test_apk
+ test_apk_path = args.test_apk
+ if not os.path.exists(test_apk_path):
test_apk_path = os.path.join(
constants.GetOutDirectory(), constants.SDK_BUILD_APKS_DIR,
'%s.apk' % args.test_apk)
-
- # TODO(jbudorick): Move the realpath up to the argument parser once
- # APK-by-name is no longer supported.
- test_apk_path = os.path.realpath(test_apk_path)
+ # TODO(jbudorick): Move the realpath up to the argument parser once
+ # APK-by-name is no longer supported.
+ test_apk_path = os.path.realpath(test_apk_path)
if not os.path.exists(test_apk_path):
error_func('Unable to find test APK: %s' % test_apk_path)
self._test_apk = apk_helper.ToHelper(test_apk_path)
+ self._suite = os.path.splitext(os.path.basename(args.test_apk))[0]
self._apk_under_test_incremental_install_json = (
args.apk_under_test_incremental_install_json)
@@ -548,13 +547,14 @@ class InstrumentationTestInstance(test_instance.TestInstance):
assert self._suite.endswith('_incremental')
self._suite = self._suite[:-len('_incremental')]
+ self._modules = args.modules
+ self._fake_modules = args.fake_modules
+
self._test_jar = args.test_jar
self._test_support_apk = apk_helper.ToHelper(os.path.join(
constants.GetOutDirectory(), constants.SDK_BUILD_TEST_JAVALIB_DIR,
'%sSupport.apk' % self._suite))
- if not os.path.exists(self._test_apk.path):
- error_func('Unable to find test APK: %s' % self._test_apk.path)
if not self._test_jar:
logging.warning('Test jar not specified. Test runner will not have '
'Java annotation info available. May not handle test '
@@ -655,7 +655,10 @@ class InstrumentationTestInstance(test_instance.TestInstance):
with open(args.device_flags_file) as device_flags_file:
stripped_lines = (l.strip() for l in device_flags_file)
self._flags.extend(flag for flag in stripped_lines if flag)
- if args.strict_mode and args.strict_mode != 'off':
+ if args.strict_mode and args.strict_mode != 'off' and (
+ # TODO(yliuyliu): Turn on strict mode for coverage once
+ # crbug/1006397 is fixed.
+ not args.coverage_dir):
self._flags.append('--strict-mode=' + args.strict_mode)
def _initializeDriverAttributes(self):
@@ -717,6 +720,14 @@ class InstrumentationTestInstance(test_instance.TestInstance):
return self._apk_under_test_incremental_install_json
@property
+ def modules(self):
+ return self._modules
+
+ @property
+ def fake_modules(self):
+ return self._fake_modules
+
+ @property
def coverage_directory(self):
return self._coverage_directory
diff --git a/chromium/build/android/pylib/instrumentation/instrumentation_test_instance_test.py b/chromium/build/android/pylib/instrumentation/instrumentation_test_instance_test.py
index 78446d15272..5176dcd82fe 100755
--- a/chromium/build/android/pylib/instrumentation/instrumentation_test_instance_test.py
+++ b/chromium/build/android/pylib/instrumentation/instrumentation_test_instance_test.py
@@ -40,21 +40,20 @@ class InstrumentationTestInstanceTest(unittest.TestCase):
return instrumentation_test_instance.InstrumentationTestInstance(
mock.MagicMock(), mock.MagicMock(), lambda s: None)
- _FlagAttributesArgs = collections.namedtuple(
- '_FlagAttributesArgs',
- [
- 'command_line_flags',
- 'device_flags_file',
- 'strict_mode',
- 'use_apk_under_test_flags_file'
- ])
-
- def createFlagAttributesArgs(
- self, command_line_flags=None, device_flags_file=None,
- strict_mode=None, use_apk_under_test_flags_file=False):
- return self._FlagAttributesArgs(
- command_line_flags, device_flags_file, strict_mode,
- use_apk_under_test_flags_file)
+ _FlagAttributesArgs = collections.namedtuple('_FlagAttributesArgs', [
+ 'command_line_flags', 'device_flags_file', 'strict_mode',
+ 'use_apk_under_test_flags_file', 'coverage_dir'
+ ])
+
+ def createFlagAttributesArgs(self,
+ command_line_flags=None,
+ device_flags_file=None,
+ strict_mode=None,
+ use_apk_under_test_flags_file=False,
+ coverage_dir=None):
+ return self._FlagAttributesArgs(command_line_flags, device_flags_file,
+ strict_mode, use_apk_under_test_flags_file,
+ coverage_dir)
def test_initializeFlagAttributes_commandLineFlags(self):
o = self.createTestInstance()
@@ -78,6 +77,13 @@ class InstrumentationTestInstanceTest(unittest.TestCase):
o._initializeFlagAttributes(args)
self.assertEquals(o._flags, ['--enable-test-intents', '--strict-mode=on'])
+ def test_initializeFlagAttributes_strictModeOn_coverageOn(self):
+ o = self.createTestInstance()
+ args = self.createFlagAttributesArgs(
+ strict_mode='on', coverage_dir='/coverage/dir')
+ o._initializeFlagAttributes(args)
+ self.assertEquals(o._flags, ['--enable-test-intents'])
+
def test_initializeFlagAttributes_strictModeOff(self):
o = self.createTestInstance()
args = self.createFlagAttributesArgs(strict_mode='off')
diff --git a/chromium/build/android/pylib/local/device/local_device_gtest_run.py b/chromium/build/android/pylib/local/device/local_device_gtest_run.py
index 5044cdf1247..605b826c1a0 100644
--- a/chromium/build/android/pylib/local/device/local_device_gtest_run.py
+++ b/chromium/build/android/pylib/local/device/local_device_gtest_run.py
@@ -10,11 +10,13 @@ import posixpath
import shutil
import time
+from devil import base_error
from devil.android import crash_handler
from devil.android import device_errors
from devil.android import device_temp_file
from devil.android import logcat_monitor
from devil.android import ports
+from devil.android.sdk import version_codes
from devil.utils import reraiser_thread
from incremental_install import installer
from pylib import constants
@@ -35,6 +37,8 @@ _EXTRA_COMMAND_LINE_FILE = (
'org.chromium.native_test.NativeTest.CommandLineFile')
_EXTRA_COMMAND_LINE_FLAGS = (
'org.chromium.native_test.NativeTest.CommandLineFlags')
+_EXTRA_COVERAGE_DEVICE_FILE = (
+ 'org.chromium.native_test.NativeTest.CoverageDeviceFile')
_EXTRA_STDOUT_FILE = (
'org.chromium.native_test.NativeTestInstrumentationTestRunner'
'.StdoutFile')
@@ -102,6 +106,24 @@ def _ExtractTestsFromFilter(gtest_filter):
return patterns
+def _PullCoverageFile(device, coverage_device_file, output_dir):
+ """Pulls coverage file on device to host directory.
+
+ Args:
+ device: The working device.
+ coverage_device_file: The temporary coverage file on device.
+ output_dir: The output directory on host.
+ """
+ try:
+ if not os.path.exists(output_dir):
+ os.makedirs(output_dir)
+ device.PullFile(coverage_device_file.name, output_dir)
+ except (OSError, base_error.BaseError) as e:
+ logging.warning('Failed to handle coverage data after tests: %s', e)
+ finally:
+ coverage_device_file.close()
+
+
class _ApkDelegate(object):
def __init__(self, test_instance, tool):
self._activity = test_instance.activity
@@ -116,6 +138,7 @@ class _ApkDelegate(object):
self._extras = test_instance.extras
self._wait_for_java_debugger = test_instance.wait_for_java_debugger
self._tool = tool
+ self._coverage_dir = test_instance.coverage_dir
def GetTestDataRoot(self, device):
# pylint: disable=no-self-use
@@ -138,6 +161,15 @@ class _ApkDelegate(object):
def Run(self, test, device, flags=None, **kwargs):
extras = dict(self._extras)
+ device_api = device.build_version_sdk
+
+ if self._coverage_dir and device_api >= version_codes.LOLLIPOP:
+ coverage_device_file = device_temp_file.DeviceTempFile(
+ device.adb,
+ suffix='.profraw',
+ prefix=self._suite,
+ dir=device.GetExternalStoragePath())
+ extras[_EXTRA_COVERAGE_DEVICE_FILE] = coverage_device_file.name
if ('timeout' in kwargs
and gtest_test_instance.EXTRA_SHARD_NANO_TIMEOUT not in extras):
@@ -193,6 +225,10 @@ class _ApkDelegate(object):
except Exception:
device.ForceStop(self._package)
raise
+ finally:
+ if self._coverage_dir and device_api >= version_codes.LOLLIPOP:
+ _PullCoverageFile(device, coverage_device_file, self._coverage_dir)
+
# TODO(jbudorick): Remove this after resolving crbug.com/726880
if device.PathExists(stdout_file.name):
logging.info('%s size on device: %s', stdout_file.name,
@@ -218,13 +254,18 @@ class _ApkDelegate(object):
class _ExeDelegate(object):
- def __init__(self, tr, dist_dir, tool):
- self._host_dist_dir = dist_dir
- self._exe_file_name = os.path.basename(dist_dir)[:-len('__dist')]
+
+ def __init__(self, tr, test_instance, tool):
+ self._host_dist_dir = test_instance.exe_dist_dir
+ self._exe_file_name = os.path.basename(
+ test_instance.exe_dist_dir)[:-len('__dist')]
self._device_dist_dir = posixpath.join(
- constants.TEST_EXECUTABLE_DIR, os.path.basename(dist_dir))
+ constants.TEST_EXECUTABLE_DIR,
+ os.path.basename(test_instance.exe_dist_dir))
self._test_run = tr
self._tool = tool
+ self._coverage_dir = test_instance.coverage_dir
+ self._suite = test_instance.suite
def GetTestDataRoot(self, device):
# pylint: disable=no-self-use
@@ -261,6 +302,14 @@ class _ExeDelegate(object):
'LD_LIBRARY_PATH': self._device_dist_dir
}
+ if self._coverage_dir:
+ coverage_device_file = device_temp_file.DeviceTempFile(
+ device.adb,
+ suffix='.profraw',
+ prefix=self._suite,
+ dir=device.GetExternalStoragePath())
+ env['LLVM_PROFILE_FILE'] = coverage_device_file.name
+
if self._tool != 'asan':
env['UBSAN_OPTIONS'] = constants.UBSAN_OPTIONS
@@ -276,6 +325,10 @@ class _ExeDelegate(object):
# fine from the test runner's perspective; thus check_return=False.
output = device.RunShellCommand(
cmd, cwd=cwd, env=env, check_return=False, large_output=True, **kwargs)
+
+ if self._coverage_dir:
+ _PullCoverageFile(device, coverage_device_file, self._coverage_dir)
+
return output
def PullAppFiles(self, device, files, directory):
@@ -296,8 +349,7 @@ class LocalDeviceGtestRun(local_device_test_run.LocalDeviceTestRun):
if self._test_instance.apk:
self._delegate = _ApkDelegate(self._test_instance, env.tool)
elif self._test_instance.exe_dist_dir:
- self._delegate = _ExeDelegate(self, self._test_instance.exe_dist_dir,
- self._env.tool)
+ self._delegate = _ExeDelegate(self, self._test_instance, self._env.tool)
if self._test_instance.isolated_script_test_perf_output:
self._test_perf_output_filenames = _GenerateSequentialFileNames(
self._test_instance.isolated_script_test_perf_output)
diff --git a/chromium/build/android/pylib/local/device/local_device_instrumentation_test_run.py b/chromium/build/android/pylib/local/device/local_device_instrumentation_test_run.py
index 18914e9aa13..25e47a0159d 100644
--- a/chromium/build/android/pylib/local/device/local_device_instrumentation_test_run.py
+++ b/chromium/build/android/pylib/local/device/local_device_instrumentation_test_run.py
@@ -190,32 +190,33 @@ class LocalDeviceInstrumentationTestRun(
steps.append(use_webview_provider)
- def install_helper(apk, permissions):
+ def install_helper(apk, modules=None, fake_modules=None,
+ permissions=None):
+
@instrumentation_tracing.no_tracing
- @trace_event.traced("apk_path")
- def install_helper_internal(d, apk_path=apk.path):
+ @trace_event.traced
+ def install_helper_internal(d, apk_path=None):
# pylint: disable=unused-argument
- d.Install(apk, permissions=permissions)
+ logging.info('Start Installing %s', apk.path)
+ d.Install(
+ apk,
+ modules=modules,
+ fake_modules=fake_modules,
+ permissions=permissions)
+ logging.info('Finished Installing %s', apk.path)
+
return install_helper_internal
def incremental_install_helper(apk, json_path, permissions):
- @trace_event.traced("apk_path")
- def incremental_install_helper_internal(d, apk_path=apk.path):
+
+ @trace_event.traced
+ def incremental_install_helper_internal(d, apk_path=None):
# pylint: disable=unused-argument
+ logging.info('Start Incremental Installing %s', apk.path)
installer.Install(d, json_path, apk=apk, permissions=permissions)
- return incremental_install_helper_internal
+ logging.info('Finished Incremental Installing %s', apk.path)
- if self._test_instance.apk_under_test:
- permissions = self._test_instance.apk_under_test.GetPermissions()
- if self._test_instance.apk_under_test_incremental_install_json:
- steps.append(incremental_install_helper(
- self._test_instance.apk_under_test,
- self._test_instance.
- apk_under_test_incremental_install_json,
- permissions))
- else:
- steps.append(install_helper(self._test_instance.apk_under_test,
- permissions))
+ return incremental_install_helper_internal
permissions = self._test_instance.test_apk.GetPermissions()
if self._test_instance.test_apk_incremental_install_json:
@@ -225,11 +226,29 @@ class LocalDeviceInstrumentationTestRun(
test_apk_incremental_install_json,
permissions))
else:
- steps.append(install_helper(self._test_instance.test_apk,
- permissions))
+ steps.append(
+ install_helper(
+ self._test_instance.test_apk, permissions=permissions))
+
+ steps.extend(
+ install_helper(apk) for apk in self._test_instance.additional_apks)
- steps.extend(install_helper(apk, None)
- for apk in self._test_instance.additional_apks)
+ # The apk under test needs to be installed last since installing other
+ # apks after will unintentionally clear the fake module directory.
+ # TODO(wnwen): Make this more robust, fix crbug.com/1010954.
+ if self._test_instance.apk_under_test:
+ permissions = self._test_instance.apk_under_test.GetPermissions()
+ if self._test_instance.apk_under_test_incremental_install_json:
+ steps.append(
+ incremental_install_helper(
+ self._test_instance.apk_under_test,
+ self._test_instance.apk_under_test_incremental_install_json,
+ permissions))
+ else:
+ steps.append(
+ install_helper(self._test_instance.apk_under_test,
+ self._test_instance.modules,
+ self._test_instance.fake_modules, permissions))
@trace_event.traced
def set_debug_app(dev):
@@ -282,9 +301,9 @@ class LocalDeviceInstrumentationTestRun(
host_device_tuples_substituted = [
(h, local_device_test_run.SubstituteDeviceRoot(d, device_root))
for h, d in host_device_tuples]
- logging.info('instrumentation data deps:')
+ logging.info('Pushing data dependencies.')
for h, d in host_device_tuples_substituted:
- logging.info('%r -> %r', h, d)
+ logging.debug(' %r -> %r', h, d)
dev.PushChangedFiles(host_device_tuples_substituted,
delete_device_stale=True)
if not host_device_tuples_substituted:
@@ -541,6 +560,7 @@ class LocalDeviceInstrumentationTestRun(
with ui_capture_dir:
with self._env.output_manager.ArchivedTempfile(
stream_name, 'logcat') as logcat_file:
+ logmon = None
try:
with logcat_monitor.LogcatMonitor(
device.adb,
@@ -555,7 +575,8 @@ class LocalDeviceInstrumentationTestRun(
output = device.StartInstrumentation(
target, raw=True, extras=extras, timeout=timeout, retries=0)
finally:
- logmon.Close()
+ if logmon:
+ logmon.Close()
if logcat_file.Link():
logging.info('Logcat saved to %s', logcat_file.Link())
@@ -589,13 +610,12 @@ class LocalDeviceInstrumentationTestRun(
def handle_coverage_data():
if self._test_instance.coverage_directory:
try:
+ if not os.path.exists(self._test_instance.coverage_directory):
+ os.makedirs(self._test_instance.coverage_directory)
device.PullFile(coverage_device_file,
self._test_instance.coverage_directory)
- device.RunShellCommand(
- 'rm -f %s' % posixpath.join(coverage_directory, '*'),
- check_return=True,
- shell=True)
- except base_error.BaseError as e:
+ device.RemovePath(coverage_device_file, True)
+ except (OSError, base_error.BaseError) as e:
logging.warning('Failed to handle coverage data after tests: %s', e)
def handle_render_test_data():
diff --git a/chromium/build/android/pylib/local/emulator/avd.py b/chromium/build/android/pylib/local/emulator/avd.py
new file mode 100644
index 00000000000..fab9061e90e
--- /dev/null
+++ b/chromium/build/android/pylib/local/emulator/avd.py
@@ -0,0 +1,496 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import contextlib
+import json
+import logging
+import os
+import socket
+import stat
+import subprocess
+import textwrap
+import threading
+
+from google.protobuf import text_format # pylint: disable=import-error
+
+from devil.android import device_utils
+from devil.android.sdk import adb_wrapper
+from devil.utils import cmd_helper
+from devil.utils import timeout_retry
+from py_utils import tempfile_ext
+from pylib import constants
+from pylib.local.emulator.proto import avd_pb2
+
+_ALL_PACKAGES = object()
+_DEFAULT_AVDMANAGER_PATH = os.path.join(constants.ANDROID_SDK_ROOT, 'tools',
+ 'bin', 'avdmanager')
+
+
+class AvdException(Exception):
+ """Raised when this module has a problem interacting with an AVD."""
+
+ def __init__(self, summary, command=None, stdout=None, stderr=None):
+ message_parts = [summary]
+ if command:
+ message_parts.append(' command: %s' % ' '.join(command))
+ if stdout:
+ message_parts.append(' stdout:')
+ message_parts.extend(' %s' % line for line in stdout.splitlines())
+ if stderr:
+ message_parts.append(' stderr:')
+ message_parts.extend(' %s' % line for line in stderr.splitlines())
+
+ super(AvdException, self).__init__('\n'.join(message_parts))
+
+
+def _Load(avd_proto_path):
+ """Loads an Avd proto from a textpb file at the given path.
+
+ Should not be called outside of this module.
+
+ Args:
+ avd_proto_path: path to a textpb file containing an Avd message.
+ """
+ with open(avd_proto_path) as avd_proto_file:
+ return text_format.Merge(avd_proto_file.read(), avd_pb2.Avd())
+
+
+class _AvdManagerAgent(object):
+ """Private utility for interacting with avdmanager."""
+
+ def __init__(self, avd_home, sdk_root):
+ """Create an _AvdManagerAgent.
+
+ Args:
+ avd_home: path to ANDROID_AVD_HOME directory.
+ Typically something like /path/to/dir/.android/avd
+ sdk_root: path to SDK root directory.
+ """
+ self._avd_home = avd_home
+ self._sdk_root = sdk_root
+
+ self._env = dict(os.environ)
+
+ # avdmanager, like many tools that have evolved from `android`
+ # (http://bit.ly/2m9JiTx), uses toolsdir to find the SDK root.
+ # Pass avdmanager a fake directory under the directory in which
+ # we install the system images s.t. avdmanager can find the
+ # system images.
+ fake_tools_dir = os.path.join(self._sdk_root, 'non-existent-tools')
+ self._env.update({
+ 'ANDROID_AVD_HOME':
+ self._avd_home,
+ 'AVDMANAGER_OPTS':
+ '-Dcom.android.sdkmanager.toolsdir=%s' % fake_tools_dir,
+ })
+
+ def Create(self, avd_name, system_image, force=False):
+ """Call `avdmanager create`.
+
+ Args:
+ avd_name: name of the AVD to create.
+ system_image: system image to use for the AVD.
+ force: whether to force creation, overwriting any existing
+ AVD with the same name.
+ """
+ create_cmd = [
+ _DEFAULT_AVDMANAGER_PATH,
+ '-v',
+ 'create',
+ 'avd',
+ '-n',
+ avd_name,
+ '-k',
+ system_image,
+ ]
+ if force:
+ create_cmd += ['--force']
+
+ create_proc = cmd_helper.Popen(
+ create_cmd,
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ env=self._env)
+ output, error = create_proc.communicate(input='\n')
+ if create_proc.returncode != 0:
+ raise AvdException(
+ 'AVD creation failed',
+ command=create_cmd,
+ stdout=output,
+ stderr=error)
+
+ for line in output.splitlines():
+ logging.info(' %s', line)
+
+ def Delete(self, avd_name):
+ """Call `avdmanager delete`.
+
+ Args:
+ avd_name: name of the AVD to delete.
+ """
+ delete_cmd = [
+ _DEFAULT_AVDMANAGER_PATH,
+ '-v',
+ 'delete',
+ 'avd',
+ '-n',
+ avd_name,
+ ]
+ try:
+ for line in cmd_helper.IterCmdOutputLines(delete_cmd, env=self._env):
+ logging.info(' %s', line)
+ except subprocess.CalledProcessError as e:
+ raise AvdException('AVD deletion failed: %s' % str(e), command=delete_cmd)
+
+
+class AvdConfig(object):
+ """Represents a particular AVD configuration.
+
+ This class supports creation, installation, and execution of an AVD
+ from a given Avd proto message, as defined in
+ //build/android/pylib/local/emulator/proto/avd.proto.
+ """
+
+ def __init__(self, avd_proto_path):
+ """Create an AvdConfig object.
+
+ Args:
+ avd_proto_path: path to a textpb file containing an Avd message.
+ """
+ self._config = _Load(avd_proto_path)
+
+ self._emulator_home = os.path.join(constants.DIR_SOURCE_ROOT,
+ self._config.avd_package.dest_path)
+ self._emulator_sdk_root = os.path.join(
+ constants.DIR_SOURCE_ROOT, self._config.emulator_package.dest_path)
+ self._emulator_path = os.path.join(self._emulator_sdk_root, 'emulator',
+ 'emulator')
+
+ self._initialized = False
+ self._initializer_lock = threading.Lock()
+
+ def Create(self,
+ force=False,
+ snapshot=False,
+ keep=False,
+ cipd_json_output=None):
+ """Create an instance of the AVD CIPD package.
+
+ This method:
+ - installs the requisite system image
+ - creates the AVD
+ - modifies the AVD's ini files to support running chromium tests
+ in chromium infrastructure
+ - optionally starts & stops the AVD for snapshotting (default no)
+ - creates and uploads an instance of the AVD CIPD package
+ - optionally deletes the AVD (default yes)
+
+ Args:
+ force: bool indicating whether to force create the AVD.
+ snapshot: bool indicating whether to snapshot the AVD before creating
+ the CIPD package.
+ keep: bool indicating whether to keep the AVD after creating
+ the CIPD package.
+ cipd_json_output: string path to pass to `cipd create` via -json-output.
+ """
+ logging.info('Installing required packages.')
+ self.Install(packages=[
+ self._config.emulator_package,
+ self._config.system_image_package,
+ ])
+
+ android_avd_home = os.path.join(self._emulator_home, 'avd')
+
+ if not os.path.exists(android_avd_home):
+ os.makedirs(android_avd_home)
+
+ avd_manager = _AvdManagerAgent(
+ avd_home=android_avd_home, sdk_root=self._emulator_sdk_root)
+
+ logging.info('Creating AVD.')
+ avd_manager.Create(
+ avd_name=self._config.avd_name,
+ system_image=self._config.system_image_name,
+ force=force)
+
+ try:
+ logging.info('Modifying AVD configuration.')
+
+ # Clear out any previous configuration or state from this AVD.
+ root_ini = os.path.join(android_avd_home,
+ '%s.ini' % self._config.avd_name)
+ avd_dir = os.path.join(android_avd_home, '%s.avd' % self._config.avd_name)
+ config_ini = os.path.join(avd_dir, 'config.ini')
+
+ with open(root_ini, 'a') as root_ini_file:
+ root_ini_file.write('path.rel=avd/%s.avd\n' % self._config.avd_name)
+
+ with open(config_ini, 'a') as config_ini_file:
+ config_ini_file.write(
+ textwrap.dedent("""\
+ disk.dataPartition.size=4G
+ hw.lcd.density=160
+ hw.lcd.height=960
+ hw.lcd.width=480
+ """))
+
+ # Start & stop the AVD.
+ self._Initialize()
+ instance = _AvdInstance(self._emulator_path, self._config.avd_name,
+ self._emulator_home)
+ instance.Start(read_only=False, snapshot_save=snapshot)
+ device_utils.DeviceUtils(instance.serial).WaitUntilFullyBooted(
+ timeout=180, retries=0)
+ instance.Stop()
+
+ # The multiinstance lock file seems to interfere with the emulator's
+ # operation in some circumstances (beyond the obvious -read-only ones),
+ # and there seems to be no mechanism by which it gets closed or deleted.
+ # See https://bit.ly/2pWQTH7 for context.
+ multiInstanceLockFile = os.path.join(avd_dir, 'multiinstance.lock')
+ if os.path.exists(multiInstanceLockFile):
+ os.unlink(multiInstanceLockFile)
+
+ package_def_content = {
+ 'package':
+ self._config.avd_package.package_name,
+ 'root':
+ self._emulator_home,
+ 'install_mode':
+ 'copy',
+ 'data': [
+ {
+ 'dir': os.path.relpath(avd_dir, self._emulator_home)
+ },
+ {
+ 'file': os.path.relpath(root_ini, self._emulator_home)
+ },
+ ],
+ }
+
+ logging.info('Creating AVD CIPD package.')
+ logging.debug('ensure file content: %s',
+ json.dumps(package_def_content, indent=2))
+
+ with tempfile_ext.TemporaryFileName(suffix='.json') as package_def_path:
+ with open(package_def_path, 'w') as package_def_file:
+ json.dump(package_def_content, package_def_file)
+
+ logging.info(' %s', self._config.avd_package.package_name)
+ cipd_create_cmd = [
+ 'cipd',
+ 'create',
+ '-pkg-def',
+ package_def_path,
+ ]
+ if cipd_json_output:
+ cipd_create_cmd.extend([
+ '-json-output',
+ cipd_json_output,
+ ])
+ try:
+ for line in cmd_helper.IterCmdOutputLines(cipd_create_cmd):
+ logging.info(' %s', line)
+ except subprocess.CalledProcessError as e:
+ raise AvdException(
+ 'CIPD package creation failed: %s' % str(e),
+ command=cipd_create_cmd)
+
+ finally:
+ if not keep:
+ logging.info('Deleting AVD.')
+ avd_manager.Delete(avd_name=self._config.avd_name)
+
+ def Install(self, packages=_ALL_PACKAGES):
+ """Installs the requested CIPD packages.
+
+ Returns: None
+ Raises: AvdException on failure to install.
+ """
+ pkgs_by_dir = {}
+ if packages is _ALL_PACKAGES:
+ packages = [
+ self._config.avd_package,
+ self._config.emulator_package,
+ self._config.system_image_package,
+ ]
+ for pkg in packages:
+ if not pkg.dest_path in pkgs_by_dir:
+ pkgs_by_dir[pkg.dest_path] = []
+ pkgs_by_dir[pkg.dest_path].append(pkg)
+
+ for pkg_dir, pkgs in pkgs_by_dir.iteritems():
+ logging.info('Installing packages in %s', pkg_dir)
+ cipd_root = os.path.join(constants.DIR_SOURCE_ROOT, pkg_dir)
+ if not os.path.exists(cipd_root):
+ os.makedirs(cipd_root)
+ ensure_path = os.path.join(cipd_root, '.ensure')
+ with open(ensure_path, 'w') as ensure_file:
+ # Make CIPD ensure that all files are present, even if
+ # it thinks the package is installed.
+ ensure_file.write('$ParanoidMode CheckPresence\n\n')
+ for pkg in pkgs:
+ ensure_file.write('%s %s\n' % (pkg.package_name, pkg.version))
+ logging.info(' %s %s', pkg.package_name, pkg.version)
+ ensure_cmd = [
+ 'cipd',
+ 'ensure',
+ '-ensure-file',
+ ensure_path,
+ '-root',
+ cipd_root,
+ ]
+ try:
+ for line in cmd_helper.IterCmdOutputLines(ensure_cmd):
+ logging.info(' %s', line)
+ except subprocess.CalledProcessError as e:
+ raise AvdException(
+ 'Failed to install CIPD package %s: %s' % (pkg.package_name,
+ str(e)),
+ command=ensure_cmd)
+
+ # The emulator requires that some files are writable.
+ for dirname, _, filenames in os.walk(self._emulator_home):
+ for f in filenames:
+ path = os.path.join(dirname, f)
+ mode = os.lstat(path).st_mode
+ if mode & stat.S_IRUSR:
+ mode = mode | stat.S_IWUSR
+ os.chmod(path, mode)
+
+ def _Initialize(self):
+ if self._initialized:
+ return
+
+ with self._initializer_lock:
+ if self._initialized:
+ return
+
+ # Emulator start-up looks for the adb daemon. Make sure it's running.
+ adb_wrapper.AdbWrapper.StartServer()
+
+ # Emulator start-up tries to check for the SDK root by looking for
+ # platforms/ and platform-tools/. Ensure they exist.
+ # See http://bit.ly/2YAkyFE for context.
+ required_dirs = [
+ os.path.join(self._emulator_sdk_root, 'platforms'),
+ os.path.join(self._emulator_sdk_root, 'platform-tools'),
+ ]
+ for d in required_dirs:
+ if not os.path.exists(d):
+ os.makedirs(d)
+
+ def CreateInstance(self):
+ """Creates an AVD instance without starting it.
+
+ Returns:
+ An _AvdInstance.
+ """
+ self._Initialize()
+ return _AvdInstance(self._emulator_path, self._config.avd_name,
+ self._emulator_home)
+
+ def StartInstance(self):
+ """Starts an AVD instance.
+
+ Returns:
+ An _AvdInstance.
+ """
+ instance = self.CreateInstance()
+ instance.Start()
+ return instance
+
+
+class _AvdInstance(object):
+ """Represents a single running instance of an AVD.
+
+ This class should only be created directly by AvdConfig.StartInstance,
+ but its other methods can be freely called.
+ """
+
+ def __init__(self, emulator_path, avd_name, emulator_home):
+ """Create an _AvdInstance object.
+
+ Args:
+ emulator_path: path to the emulator binary.
+ avd_name: name of the AVD to run.
+ emulator_home: path to the emulator home directory.
+ """
+ self._avd_name = avd_name
+ self._emulator_home = emulator_home
+ self._emulator_path = emulator_path
+ self._emulator_proc = None
+ self._emulator_serial = None
+ self._sink = None
+
+ def __str__(self):
+ return '%s|%s' % (self._avd_name, (self._emulator_serial or id(self)))
+
+ def Start(self, read_only=True, snapshot_save=False, window=False):
+ """Starts the emulator running an instance of the given AVD."""
+ with tempfile_ext.TemporaryFileName() as socket_path, (contextlib.closing(
+ socket.socket(socket.AF_UNIX))) as sock:
+ sock.bind(socket_path)
+ emulator_cmd = [
+ self._emulator_path,
+ '-avd',
+ self._avd_name,
+ '-report-console',
+ 'unix:%s' % socket_path,
+ ]
+ if read_only:
+ emulator_cmd.append('-read-only')
+ if not snapshot_save:
+ emulator_cmd.append('-no-snapshot-save')
+ emulator_env = {}
+ if self._emulator_home:
+ emulator_env['ANDROID_EMULATOR_HOME'] = self._emulator_home
+ if window:
+ if 'DISPLAY' in os.environ:
+ emulator_env['DISPLAY'] = os.environ.get('DISPLAY')
+ else:
+ raise AvdException('Emulator failed to start: DISPLAY not defined')
+ else:
+ emulator_cmd.append('-no-window')
+ sock.listen(1)
+
+ logging.info('Starting emulator.')
+
+ # TODO(jbudorick): Add support for logging emulator stdout & stderr at
+ # higher logging levels.
+ self._sink = open('/dev/null', 'w')
+ self._emulator_proc = cmd_helper.Popen(
+ emulator_cmd, stdout=self._sink, stderr=self._sink, env=emulator_env)
+
+ # Waits for the emulator to report its serial as requested via
+ # -report-console. See http://bit.ly/2lK3L18 for more.
+ def listen_for_serial(s):
+ logging.info('Waiting for connection from emulator.')
+ with contextlib.closing(s.accept()[0]) as conn:
+ val = conn.recv(1024)
+ return 'emulator-%d' % int(val)
+
+ try:
+ self._emulator_serial = timeout_retry.Run(
+ listen_for_serial, timeout=30, retries=0, args=[sock])
+ logging.info('%s started', self._emulator_serial)
+ except Exception as e:
+ self.Stop()
+ raise AvdException('Emulator failed to start: %s' % str(e))
+
+ def Stop(self):
+ """Stops the emulator process."""
+ if self._emulator_proc:
+ if self._emulator_proc.poll() is None:
+ self._emulator_proc.terminate()
+ self._emulator_proc.wait()
+ self._emulator_proc = None
+ if self._sink:
+ self._sink.close()
+ self._sink = None
+
+ @property
+ def serial(self):
+ return self._emulator_serial
diff --git a/chromium/build/android/pylib/local/emulator/local_emulator_environment.py b/chromium/build/android/pylib/local/emulator/local_emulator_environment.py
index cd81cf9c3a7..22470c035e6 100644
--- a/chromium/build/android/pylib/local/emulator/local_emulator_environment.py
+++ b/chromium/build/android/pylib/local/emulator/local_emulator_environment.py
@@ -2,20 +2,14 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-import contextlib
import logging
-import os
-import socket
-import stat
-from py_utils import tempfile_ext
-
-from devil.android.sdk import adb_wrapper
-from devil.utils import cmd_helper
-from devil.utils import timeout_retry
-
-from pylib import constants
+from devil.utils import parallelizer
from pylib.local.device import local_device_environment
+from pylib.local.emulator import avd
+
+# Mirroring https://bit.ly/2OjuxcS#23
+_MAX_ANDROID_EMULATORS = 16
class LocalEmulatorEnvironment(local_device_environment.LocalDeviceEnvironment):
@@ -23,99 +17,52 @@ class LocalEmulatorEnvironment(local_device_environment.LocalDeviceEnvironment):
def __init__(self, args, output_manager, error_func):
super(LocalEmulatorEnvironment, self).__init__(args, output_manager,
error_func)
- self._avd_name = args.avd_name
- self._emulator_home = (args.emulator_home
- or os.path.expanduser(os.path.join('~', '.android')))
-
- root_ini = os.path.join(self._emulator_home, 'avd',
- '%s.ini' % self._avd_name)
- if not os.path.exists(root_ini):
- error_func('Unable to find configuration for AVD %s at %s' %
- (self._avd_name, root_ini))
-
- self._emulator_path = os.path.join(constants.ANDROID_SDK_ROOT, 'emulator',
- 'emulator')
- if not os.path.exists(self._emulator_path):
- error_func('%s does not exist.' % self._emulator_path)
-
- self._emulator_proc = None
- self._emulator_serial = None
+ self._avd_config = avd.AvdConfig(args.avd_config)
+ if args.emulator_count < 1:
+ error_func('--emulator-count must be >= 1')
+ elif args.emulator_count >= _MAX_ANDROID_EMULATORS:
+ logging.warning('--emulator-count capped at 16.')
+ self._emulator_count = min(_MAX_ANDROID_EMULATORS, args.emulator_count)
+ self._emulator_window = args.emulator_window
+ self._emulator_instances = []
+ self._device_serials = []
#override
def SetUp(self):
- # Emulator start-up looks for the adb daemon. Make sure it's running.
- adb_wrapper.AdbWrapper.StartServer()
+ self._avd_config.Install()
- # Emulator start-up tries to check for the SDK root by looking for
- # platforms/ and platform-tools/. Ensure they exist.
- # See http://bit.ly/2YAkyFE for context.
- required_dirs = [
- os.path.join(constants.ANDROID_SDK_ROOT, 'platforms'),
- os.path.join(constants.ANDROID_SDK_ROOT, 'platform-tools'),
+ emulator_instances = [
+ self._avd_config.CreateInstance() for _ in range(self._emulator_count)
]
- for d in required_dirs:
- if not os.path.exists(d):
- os.makedirs(d)
- # The emulator requires that some files are writable.
- for dirname, _, filenames in os.walk(self._emulator_home):
- for f in filenames:
- path = os.path.join(dirname, f)
- if (os.lstat(path).st_mode &
- (stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO) == stat.S_IRUSR):
- os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)
+ def start_emulator_instance(e):
+ try:
+ e.Start(window=self._emulator_window)
+ return e
+ except avd.AvdException:
+ logging.exception('Failed to start emulator instance.')
+ return None
+
+ parallel_emulators = parallelizer.SyncParallelizer(emulator_instances)
+ self._emulator_instances = [
+ emu
+ for emu in parallel_emulators.pMap(start_emulator_instance).pGet(None)
+ if emu is not None
+ ]
+ self._device_serials = [e.serial for e in self._emulator_instances]
- self._emulator_proc, self._emulator_serial = self._StartInstance()
+ if not self._emulator_instances:
+ raise Exception('Failed to start any instances of the emulator.')
+ elif len(self._emulator_instances) < self._emulator_count:
+ logging.warning(
+ 'Running with fewer emulator instances than requested (%d vs %d)',
+ len(self._emulator_instances), self._emulator_count)
- logging.info('Emulator serial: %s', self._emulator_serial)
- self._device_serials = [self._emulator_serial]
super(LocalEmulatorEnvironment, self).SetUp()
- def _StartInstance(self):
- """Starts an AVD instance.
-
- Returns:
- A (Popen, str) 2-tuple that includes the process and serial.
- """
- # Start up the AVD.
- with tempfile_ext.TemporaryFileName() as socket_path, (contextlib.closing(
- socket.socket(socket.AF_UNIX))) as sock:
- sock.bind(socket_path)
- emulator_cmd = [
- self._emulator_path,
- '-avd',
- self._avd_name,
- '-report-console',
- 'unix:%s' % socket_path,
- '-read-only',
- '-no-window',
- ]
- emulator_env = {}
- if self._emulator_home:
- emulator_env['ANDROID_EMULATOR_HOME'] = self._emulator_home
- sock.listen(1)
- emulator_proc = cmd_helper.Popen(emulator_cmd, env=emulator_env)
-
- def listen_for_serial(s):
- logging.info('Waiting for connection from emulator.')
- with contextlib.closing(s.accept()[0]) as conn:
- val = conn.recv(1024)
- return 'emulator-%d' % int(val)
-
- try:
- emulator_serial = timeout_retry.Run(
- listen_for_serial, timeout=30, retries=0, args=[sock])
- except Exception:
- emulator_proc.terminate()
- raise
-
- return (emulator_proc, emulator_serial)
-
#override
def TearDown(self):
try:
super(LocalEmulatorEnvironment, self).TearDown()
finally:
- if self._emulator_proc:
- self._emulator_proc.terminate()
- self._emulator_proc.wait()
+ parallelizer.SyncParallelizer(self._emulator_instances).Stop()
diff --git a/chromium/build/android/pylib/local/emulator/proto/__init__.py b/chromium/build/android/pylib/local/emulator/proto/__init__.py
new file mode 100644
index 00000000000..4a12e35c925
--- /dev/null
+++ b/chromium/build/android/pylib/local/emulator/proto/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/chromium/build/android/pylib/local/emulator/proto/avd.proto b/chromium/build/android/pylib/local/emulator/proto/avd.proto
new file mode 100644
index 00000000000..adf5cb76469
--- /dev/null
+++ b/chromium/build/android/pylib/local/emulator/proto/avd.proto
@@ -0,0 +1,35 @@
+
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+syntax = "proto3";
+
+package tools.android.avd.proto;
+
+message CIPDPackage {
+ // CIPD package name.
+ string package_name = 1;
+ // CIPD package version to use.
+ // Ignored when creating AVD packages.
+ string version = 2;
+ // Path into which the package should be installed.
+ // src-relative.
+ string dest_path = 3;
+}
+
+message Avd {
+ // The emulator to use in running the AVD.
+ CIPDPackage emulator_package = 1;
+
+ // The system image to use.
+ CIPDPackage system_image_package = 2;
+ // The name of the system image to use, as reported by sdkmanager.
+ string system_image_name = 3;
+
+ // The AVD to create or use.
+ // (Only the package_name is used during AVD creation.)
+ CIPDPackage avd_package = 4;
+ // The name of the AVD to create or use.
+ string avd_name = 5;
+}
diff --git a/chromium/build/android/pylib/local/emulator/proto/avd_pb2.py b/chromium/build/android/pylib/local/emulator/proto/avd_pb2.py
new file mode 100644
index 00000000000..c264e6d17fe
--- /dev/null
+++ b/chromium/build/android/pylib/local/emulator/proto/avd_pb2.py
@@ -0,0 +1,218 @@
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: avd.proto
+
+import sys
+_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode('latin1'))
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf import descriptor_pb2
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name='avd.proto',
+ package='tools.android.avd.proto',
+ syntax='proto3',
+ serialized_pb=_b(
+ '\n\tavd.proto\x12\x17tools.android.avd.proto\"G\n\x0b\x43IPDPackage\x12\x14\n\x0cpackage_name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12\x11\n\tdest_path\x18\x03 \x01(\t\"\xf1\x01\n\x03\x41vd\x12>\n\x10\x65mulator_package\x18\x01 \x01(\x0b\x32$.tools.android.avd.proto.CIPDPackage\x12\x42\n\x14system_image_package\x18\x02 \x01(\x0b\x32$.tools.android.avd.proto.CIPDPackage\x12\x19\n\x11system_image_name\x18\x03 \x01(\t\x12\x39\n\x0b\x61vd_package\x18\x04 \x01(\x0b\x32$.tools.android.avd.proto.CIPDPackage\x12\x10\n\x08\x61vd_name\x18\x05 \x01(\tb\x06proto3'
+ ))
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+_CIPDPACKAGE = _descriptor.Descriptor(
+ name='CIPDPackage',
+ full_name='tools.android.avd.proto.CIPDPackage',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='package_name',
+ full_name='tools.android.avd.proto.CIPDPackage.package_name',
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode('utf-8'),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='version',
+ full_name='tools.android.avd.proto.CIPDPackage.version',
+ index=1,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode('utf-8'),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='dest_path',
+ full_name='tools.android.avd.proto.CIPDPackage.dest_path',
+ index=2,
+ number=3,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode('utf-8'),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=38,
+ serialized_end=109,
+)
+
+_AVD = _descriptor.Descriptor(
+ name='Avd',
+ full_name='tools.android.avd.proto.Avd',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='emulator_package',
+ full_name='tools.android.avd.proto.Avd.emulator_package',
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='system_image_package',
+ full_name='tools.android.avd.proto.Avd.system_image_package',
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='system_image_name',
+ full_name='tools.android.avd.proto.Avd.system_image_name',
+ index=2,
+ number=3,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode('utf-8'),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='avd_package',
+ full_name='tools.android.avd.proto.Avd.avd_package',
+ index=3,
+ number=4,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='avd_name',
+ full_name='tools.android.avd.proto.Avd.avd_name',
+ index=4,
+ number=5,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode('utf-8'),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=112,
+ serialized_end=353,
+)
+
+_AVD.fields_by_name['emulator_package'].message_type = _CIPDPACKAGE
+_AVD.fields_by_name['system_image_package'].message_type = _CIPDPACKAGE
+_AVD.fields_by_name['avd_package'].message_type = _CIPDPACKAGE
+DESCRIPTOR.message_types_by_name['CIPDPackage'] = _CIPDPACKAGE
+DESCRIPTOR.message_types_by_name['Avd'] = _AVD
+
+CIPDPackage = _reflection.GeneratedProtocolMessageType(
+ 'CIPDPackage',
+ (_message.Message, ),
+ dict(
+ DESCRIPTOR=_CIPDPACKAGE,
+ __module__='avd_pb2'
+ # @@protoc_insertion_point(class_scope:tools.android.avd.proto.CIPDPackage)
+ ))
+_sym_db.RegisterMessage(CIPDPackage)
+
+Avd = _reflection.GeneratedProtocolMessageType(
+ 'Avd',
+ (_message.Message, ),
+ dict(
+ DESCRIPTOR=_AVD,
+ __module__='avd_pb2'
+ # @@protoc_insertion_point(class_scope:tools.android.avd.proto.Avd)
+ ))
+_sym_db.RegisterMessage(Avd)
+
+# @@protoc_insertion_point(module_scope)
diff --git a/chromium/build/android/pylib/local/machine/local_machine_junit_test_run.py b/chromium/build/android/pylib/local/machine/local_machine_junit_test_run.py
index 312bf9c6ff9..dab18e32000 100644
--- a/chromium/build/android/pylib/local/machine/local_machine_junit_test_run.py
+++ b/chromium/build/android/pylib/local/machine/local_machine_junit_test_run.py
@@ -5,6 +5,7 @@
import json
import logging
import os
+import zipfile
from devil.utils import cmd_helper
from pylib import constants
@@ -31,7 +32,6 @@ class LocalMachineJunitTestRun(test_run.TestRun):
def RunTests(self, results):
with tempfile_ext.NamedTemporaryDirectory() as temp_dir:
json_file_path = os.path.join(temp_dir, 'results.json')
-
java_script = os.path.join(
constants.GetOutDirectory(), 'bin', 'helper',
self._test_instance.suite)
@@ -55,8 +55,6 @@ class LocalMachineJunitTestRun(test_run.TestRun):
self._test_instance.robolectric_runtime_deps_dir,
'-Ddir.source.root=%s' % constants.DIR_SOURCE_ROOT,
'-Drobolectric.resourcesMode=binary',
- '-Dchromium.robolectric.resource.ap_=%s' %
- self._test_instance.resource_apk
]
if logging.getLogger().isEnabledFor(logging.INFO):
@@ -90,6 +88,14 @@ class LocalMachineJunitTestRun(test_run.TestRun):
if jvm_args:
command.extend(['--jvm-args', '"%s"' % ' '.join(jvm_args)])
+ # Create properties file for Robolectric test runners so they can find the
+ # binary resources.
+ properties_jar_path = os.path.join(temp_dir, 'properties.jar')
+ with zipfile.ZipFile(properties_jar_path, 'w') as z:
+ z.writestr('com/android/tools/test_config.properties',
+ 'android_resource_apk=%s' % self._test_instance.resource_apk)
+ command.extend(['--classpath', properties_jar_path])
+
cmd_helper.RunCmd(command)
try:
with open(json_file_path, 'r') as f:
diff --git a/chromium/build/android/pylib/results/flakiness_dashboard/json_results_generator.py b/chromium/build/android/pylib/results/flakiness_dashboard/json_results_generator.py
index 5e5f83f2a27..b2e542bd2a6 100644
--- a/chromium/build/android/pylib/results/flakiness_dashboard/json_results_generator.py
+++ b/chromium/build/android/pylib/results/flakiness_dashboard/json_results_generator.py
@@ -301,7 +301,7 @@ class JSONResultsGeneratorBase(object):
"JSON upload failed, %d: '%s'", response.code, response.read())
else:
_log.error('JSON upload failed; no response returned')
- except Exception, err: # pylint: disable=broad-except
+ except Exception as err: # pylint: disable=broad-except
_log.error('Upload failed: %s', err)
return
@@ -385,12 +385,12 @@ class JSONResultsGeneratorBase(object):
# FIXME: We should talk to the network via a Host object.
results_file = urllib2.urlopen(results_file_url)
old_results = results_file.read()
- except urllib2.HTTPError, http_error:
+ except urllib2.HTTPError as http_error:
# A non-4xx status code means the bot is hosed for some reason
# and we can't grab the results.json file off of it.
if http_error.code < 400 and http_error.code >= 500:
error = http_error
- except urllib2.URLError, url_error:
+ except urllib2.URLError as url_error:
error = url_error
# pylint: enable=redefined-variable-type
diff --git a/chromium/build/android/pylib/utils/simpleperf.py b/chromium/build/android/pylib/utils/simpleperf.py
index be259d621f2..b3ba00e6c22 100644
--- a/chromium/build/android/pylib/utils/simpleperf.py
+++ b/chromium/build/android/pylib/utils/simpleperf.py
@@ -12,6 +12,7 @@ import tempfile
from devil import devil_env
from devil.android import device_signal
from devil.android.sdk import version_codes
+from pylib import constants
def _ProcessType(proc):
@@ -246,13 +247,13 @@ def ConvertSimpleperfToPprof(simpleperf_out_path, build_directory,
# Run the script to annotate symbols and convert from simpleperf format to
# pprof format.
- llvm_symbolizer_path = devil_env.config.LocalPath('llvm-symbolizer')
pprof_converter_script = os.path.join(
script_dir, 'pprof_proto_generator.py')
- pprof_converter_cmd = [sys.executable, pprof_converter_script,
- '-i', simpleperf_out_path,
- '-o', os.path.abspath(pprof_out_path),
- '--addr2line', llvm_symbolizer_path]
+ pprof_converter_cmd = [
+ sys.executable, pprof_converter_script, '-i', simpleperf_out_path, '-o',
+ os.path.abspath(pprof_out_path), '--ndk_path',
+ constants.ANDROID_NDK_ROOT
+ ]
subprocess.check_output(pprof_converter_cmd, stderr=subprocess.STDOUT,
cwd=processing_dir)
finally:
diff --git a/chromium/build/android/resource_sizes.py b/chromium/build/android/resource_sizes.py
index 2067f51d0ca..a48a951d88d 100755
--- a/chromium/build/android/resource_sizes.py
+++ b/chromium/build/android/resource_sizes.py
@@ -80,7 +80,6 @@ _READELF_SIZES_METRICS = {
'.dynsym', '.dynstr', '.dynamic', '.shstrtab', '.got', '.plt',
'.got.plt', '.hash', '.gnu.hash'
],
- 'bss': ['.bss', '.bss.rel.ro'],
'other': [
'.init_array', '.preinit_array', '.ctors', '.fini_array', '.comment',
'.note.gnu.gold-version', '.note.crashpad.info', '.note.android.ident',
@@ -104,12 +103,17 @@ def _RunReadelf(so_path, options, tool_prefix=''):
def _ExtractLibSectionSizesFromApk(apk_path, lib_path, tool_prefix):
with Unzip(apk_path, filename=lib_path) as extracted_lib_path:
grouped_section_sizes = collections.defaultdict(int)
- section_sizes = _CreateSectionNameSizeMap(extracted_lib_path, tool_prefix)
+ no_bits_section_sizes, section_sizes = _CreateSectionNameSizeMap(
+ extracted_lib_path, tool_prefix)
for group_name, section_names in _READELF_SIZES_METRICS.iteritems():
for section_name in section_names:
if section_name in section_sizes:
grouped_section_sizes[group_name] += section_sizes.pop(section_name)
+ # Consider all NOBITS sections as .bss.
+ grouped_section_sizes['bss'] = sum(
+ v for v in no_bits_section_sizes.itervalues())
+
# Group any unknown section headers into the "other" group.
for section_header, section_size in section_sizes.iteritems():
sys.stderr.write('Unknown elf section header: %s\n' % section_header)
@@ -121,12 +125,14 @@ def _ExtractLibSectionSizesFromApk(apk_path, lib_path, tool_prefix):
def _CreateSectionNameSizeMap(so_path, tool_prefix):
stdout = _RunReadelf(so_path, ['-S', '--wide'], tool_prefix)
section_sizes = {}
+ no_bits_section_sizes = {}
# Matches [ 2] .hash HASH 00000000006681f0 0001f0 003154 04 A 3 0 8
for match in re.finditer(r'\[[\s\d]+\] (\..*)$', stdout, re.MULTILINE):
items = match.group(1).split()
- section_sizes[items[0]] = int(items[4], 16)
+ target = no_bits_section_sizes if items[1] == 'NOBITS' else section_sizes
+ target[items[0]] = int(items[4], 16)
- return section_sizes
+ return no_bits_section_sizes, section_sizes
def _ParseManifestAttributes(apk_path):
@@ -210,6 +216,22 @@ def _RunAaptDumpResources(apk_path):
return output
+def _ReportDfmSizes(zip_obj, report_func):
+ sizes = collections.defaultdict(int)
+ for info in zip_obj.infolist():
+ # Looks for paths like splits/vr-master.apk, splits/vr-hi.apk.
+ name_parts = info.filename.split('/')
+ if name_parts[0] == 'splits' and len(name_parts) == 2:
+ name_parts = name_parts[1].split('-')
+ if len(name_parts) == 2:
+ module_name, config_name = name_parts
+ if module_name != 'base' and config_name[:-4] in ('master', 'hi'):
+ sizes[module_name] += info.file_size
+
+ for module_name, size in sorted(sizes.iteritems()):
+ report_func('DFM_' + module_name, 'Size with hindi', size, 'bytes')
+
+
class _FileGroup(object):
"""Represents a category that apk files can fall into."""
@@ -353,6 +375,7 @@ def _DoApkAnalysis(apk_filename, apks_path, tool_prefix, out_dir, report_func):
with zipfile.ZipFile(apks_path) as z:
hindi_apk_info = z.getinfo('splits/base-hi.apk')
total_apk_size += hindi_apk_info.file_size
+ _ReportDfmSizes(z, report_func)
total_install_size = total_apk_size
total_install_size_android_go = total_apk_size
@@ -435,14 +458,20 @@ def _DoApkAnalysis(apk_filename, apks_path, tool_prefix, out_dir, report_func):
# file size. Also gets rid of compression.
normalized_apk_size -= native_code.ComputeZippedSize()
normalized_apk_size += native_code_unaligned_size
+ # Normalized dex size: Size within the zip + size on disk for Android Go
+ # devices running Android O (which ~= uncompressed dex size).
+ # Use a constant compression factor to account for fluctuations.
+ normalized_apk_size -= java_code.ComputeZippedSize()
+ normalized_apk_size += java_code.ComputeUncompressedSize()
# Unaligned size should be ~= uncompressed size or something is wrong.
# As of now, padding_fraction ~= .007
padding_fraction = -_PercentageDifference(
native_code.ComputeUncompressedSize(), native_code_unaligned_size)
- assert 0 <= padding_fraction < .02, 'Padding was: {}'.format(padding_fraction)
- # Normalized dex size: size within the zip + size on disk for Android Go
- # devices (which ~= uncompressed dex size).
- normalized_apk_size += java_code.ComputeUncompressedSize()
+ assert 0 <= padding_fraction < .02, (
+ 'Padding was: {} (file_size={}, sections_sum={})'.format(
+ padding_fraction, native_code.ComputeUncompressedSize(),
+ native_code_unaligned_size))
+
if apks_path:
# Locale normalization not needed when measuring only one locale.
# E.g. a change that adds 300 chars of unstranslated strings would cause the
diff --git a/chromium/build/android/test_runner.py b/chromium/build/android/test_runner.py
index 443eb26221d..0f6cc5021d4 100755
--- a/chromium/build/android/test_runner.py
+++ b/chromium/build/android/test_runner.py
@@ -310,13 +310,21 @@ def AddEmulatorOptions(parser):
parser = parser.add_argument_group('emulator arguments')
parser.add_argument(
- '--avd-name',
- help='Run and manage the lifetime of an AVD with the given name.')
- parser.add_argument(
- '--emulator-home',
+ '--avd-config',
type=os.path.realpath,
- help='Emulator home directory '
- '(see ANDROID_EMULATOR_HOME: http://bit.ly/2K32oEy)')
+ help='Path to the avd config textpb. '
+ '(See //tools/android/avd/proto/ for message definition'
+ ' and existing textpb files.)')
+ parser.add_argument(
+ '--emulator-count',
+ type=int,
+ default=1,
+ help='Number of emulators to use.')
+ parser.add_argument(
+ '--emulator-window',
+ action='store_true',
+ default=False,
+ help='Enable graphical window display on the emulator.')
def AddGTestOptions(parser):
@@ -414,6 +422,18 @@ def AddInstrumentationTestOptions(parser):
'--apk-under-test',
help='Path or name of the apk under test.')
parser.add_argument(
+ '--module',
+ action='append',
+ dest='modules',
+ help='Specify Android App Bundle modules to install in addition to the '
+ 'base module.')
+ parser.add_argument(
+ '--fake-module',
+ action='append',
+ dest='fake_modules',
+ help='Specify Android App Bundle modules to fake install in addition to '
+ 'the real modules.')
+ parser.add_argument(
'--coverage-dir',
type=os.path.realpath,
help='Directory in which to place all generated '
diff --git a/chromium/build/android/test_runner.pydeps b/chromium/build/android/test_runner.pydeps
index 9b722c4027c..e4b118cc6bf 100644
--- a/chromium/build/android/test_runner.pydeps
+++ b/chromium/build/android/test_runner.pydeps
@@ -56,6 +56,7 @@
../../third_party/catapult/devil/devil/android/sdk/aapt.py
../../third_party/catapult/devil/devil/android/sdk/adb_wrapper.py
../../third_party/catapult/devil/devil/android/sdk/build_tools.py
+../../third_party/catapult/devil/devil/android/sdk/bundletool.py
../../third_party/catapult/devil/devil/android/sdk/intent.py
../../third_party/catapult/devil/devil/android/sdk/keyevent.py
../../third_party/catapult/devil/devil/android/sdk/shared_prefs.py
@@ -168,7 +169,10 @@ pylib/local/device/local_device_linker_test_run.py
pylib/local/device/local_device_monkey_test_run.py
pylib/local/device/local_device_test_run.py
pylib/local/emulator/__init__.py
+pylib/local/emulator/avd.py
pylib/local/emulator/local_emulator_environment.py
+pylib/local/emulator/proto/__init__.py
+pylib/local/emulator/proto/avd_pb2.py
pylib/local/local_test_server_spawner.py
pylib/local/machine/__init__.py
pylib/local/machine/local_machine_environment.py
diff --git a/chromium/build/android/update_verification.py b/chromium/build/android/update_verification.py
index 40cb64ac5da..c2f32432bb6 100755
--- a/chromium/build/android/update_verification.py
+++ b/chromium/build/android/update_verification.py
@@ -49,8 +49,7 @@ def TestUpdate(device, old_apk, new_apk, app_data, package_name):
# Restore command is not synchronous
raw_input('Select "Restore my data" on the device. Then press enter to '
'continue.')
- device_path = device.GetApplicationPaths(package_name)
- if not device_path:
+ if not device.IsApplicationInstalled(package_name):
raise Exception('Expected package %s to already be installed. '
'Package name might have changed!' % package_name)