summaryrefslogtreecommitdiffstats
path: root/chromium/build
diff options
context:
space:
mode:
authorAllan Sandfeld Jensen <allan.jensen@qt.io>2022-02-02 12:21:57 +0100
committerAllan Sandfeld Jensen <allan.jensen@qt.io>2022-02-12 08:13:00 +0000
commit606d85f2a5386472314d39923da28c70c60dc8e7 (patch)
treea8f4d7bf997f349f45605e6058259fba0630e4d7 /chromium/build
parent5786336dda477d04fb98483dca1a5426eebde2d7 (diff)
BASELINE: Update Chromium to 96.0.4664.181
Change-Id: I762cd1da89d73aa6313b4a753fe126c34833f046 Reviewed-by: Allan Sandfeld Jensen <allan.jensen@qt.io>
Diffstat (limited to 'chromium/build')
-rw-r--r--chromium/build/BUILD.gn1
-rw-r--r--chromium/build/android/BUILD.gn6
-rw-r--r--chromium/build/android/COMMON_METADATA1
-rw-r--r--chromium/build/android/DIR_METADATA2
-rw-r--r--chromium/build/android/PRESUBMIT.py13
-rwxr-xr-xchromium/build/android/adb_install_apk.py7
-rwxr-xr-xchromium/build/android/adb_logcat_monitor.py4
-rwxr-xr-xchromium/build/android/adb_logcat_printer.py8
-rwxr-xr-xchromium/build/android/apk_operations.py44
-rw-r--r--chromium/build/android/apk_operations.pydeps3
-rwxr-xr-xchromium/build/android/asan_symbolize.py6
-rw-r--r--chromium/build/android/bytecode/BUILD.gn34
-rw-r--r--chromium/build/android/bytecode/java/org/chromium/bytecode/ByteCodeRewriter.java12
-rw-r--r--chromium/build/android/bytecode/java/org/chromium/bytecode/EmptyOverrideGeneratorClassAdapter.java103
-rw-r--r--chromium/build/android/bytecode/java/org/chromium/bytecode/FragmentActivityReplacer.java111
-rw-r--r--chromium/build/android/bytecode/java/org/chromium/bytecode/MethodCheckerClassAdapter.java136
-rw-r--r--chromium/build/android/bytecode/java/org/chromium/bytecode/MethodDescription.java20
-rw-r--r--chromium/build/android/bytecode/java/org/chromium/bytecode/ParentMethodCheckerClassAdapter.java94
-rw-r--r--chromium/build/android/bytecode/java/org/chromium/bytecode/TraceEventAdder.java87
-rw-r--r--chromium/build/android/bytecode/java/org/chromium/bytecode/TraceEventAdderClassAdapter.java47
-rw-r--r--chromium/build/android/bytecode/java/org/chromium/bytecode/TraceEventAdderMethodAdapter.java83
-rw-r--r--chromium/build/android/devil_chromium.json2
-rwxr-xr-xchromium/build/android/diff_resource_sizes.py14
-rw-r--r--chromium/build/android/docs/README.md5
-rw-r--r--chromium/build/android/docs/android_app_bundles.md205
-rw-r--r--chromium/build/android/docs/java_toolchain.md2
-rwxr-xr-xchromium/build/android/download_doclava.py2
-rwxr-xr-xchromium/build/android/generate_jacoco_report.py29
-rwxr-xr-xchromium/build/android/gradle/generate_gradle.py1
-rwxr-xr-xchromium/build/android/gradle/gn_to_cmake.py2
-rw-r--r--chromium/build/android/gyp/OWNERS2
-rwxr-xr-xchromium/build/android/gyp/assert_static_initializers.py5
-rwxr-xr-xchromium/build/android/gyp/bundletool.py5
-rwxr-xr-xchromium/build/android/gyp/compile_java.py6
-rwxr-xr-xchromium/build/android/gyp/compile_resources.py23
-rw-r--r--chromium/build/android/gyp/compile_resources.pydeps24
-rwxr-xr-xchromium/build/android/gyp/create_apk_operations_script.py25
-rwxr-xr-xchromium/build/android/gyp/create_app_bundle.py21
-rw-r--r--chromium/build/android/gyp/create_app_bundle.pydeps1
-rw-r--r--chromium/build/android/gyp/create_app_bundle_apks.pydeps1
-rwxr-xr-xchromium/build/android/gyp/create_bundle_wrapper_script.py2
-rwxr-xr-xchromium/build/android/gyp/create_java_binary_script.py2
-rw-r--r--chromium/build/android/gyp/create_r_java.pydeps1
-rw-r--r--chromium/build/android/gyp/create_r_txt.pydeps1
-rw-r--r--chromium/build/android/gyp/create_ui_locale_resources.pydeps1
-rwxr-xr-xchromium/build/android/gyp/create_unwind_table.py108
-rwxr-xr-xchromium/build/android/gyp/create_unwind_table_tests.py91
-rwxr-xr-xchromium/build/android/gyp/desugar.py67
-rw-r--r--chromium/build/android/gyp/desugar.pydeps6
-rwxr-xr-xchromium/build/android/gyp/dex.py19
-rwxr-xr-xchromium/build/android/gyp/dexsplitter.py2
-rw-r--r--chromium/build/android/gyp/jinja_template.pydeps1
-rwxr-xr-xchromium/build/android/gyp/lint.py19
-rwxr-xr-xchromium/build/android/gyp/merge_manifest.py4
-rw-r--r--chromium/build/android/gyp/prepare_resources.pydeps1
-rwxr-xr-xchromium/build/android/gyp/proguard.py36
-rw-r--r--chromium/build/android/gyp/unused_resources.pydeps1
-rw-r--r--chromium/build/android/gyp/util/resource_utils.py60
-rw-r--r--chromium/build/android/gyp/util/resources_parser.py5
-rw-r--r--chromium/build/android/gyp/write_build_config.pydeps1
-rwxr-xr-xchromium/build/android/incremental_install/generate_android_manifest.py3
-rw-r--r--chromium/build/android/incremental_install/generate_android_manifest.pydeps3
-rwxr-xr-xchromium/build/android/incremental_install/installer.py6
-rwxr-xr-xchromium/build/android/incremental_install/write_installer_json.py2
-rwxr-xr-xchromium/build/android/lighttpd_server.py20
-rwxr-xr-xchromium/build/android/method_count.py2
-rwxr-xr-xchromium/build/android/native_flags/argcapture.py2
-rwxr-xr-xchromium/build/android/provision_devices.py2
-rw-r--r--chromium/build/android/pylib/base/base_test_result.py6
-rw-r--r--chromium/build/android/pylib/constants/__init__.py1
-rwxr-xr-xchromium/build/android/pylib/dex/dex_parser.py2
-rw-r--r--chromium/build/android/pylib/instrumentation/instrumentation_test_instance.py201
-rwxr-xr-xchromium/build/android/pylib/instrumentation/instrumentation_test_instance_test.py66
-rw-r--r--chromium/build/android/pylib/local/device/local_device_instrumentation_test_run.py36
-rw-r--r--chromium/build/android/pylib/local/machine/local_machine_junit_test_run.py3
-rwxr-xr-xchromium/build/android/pylib/results/presentation/standard_gtest_merge.py2
-rwxr-xr-xchromium/build/android/pylib/results/presentation/test_results_presentation.py2
-rwxr-xr-xchromium/build/android/pylib/symbols/apk_lib_dump.py2
-rw-r--r--chromium/build/android/pylib/symbols/deobfuscator.py9
-rw-r--r--chromium/build/android/pylib/symbols/elf_symbolizer.py4
-rw-r--r--chromium/build/android/pylib/utils/app_bundle_utils.py15
-rw-r--r--chromium/build/android/pylib/utils/logging_utils.py2
-rwxr-xr-xchromium/build/android/resource_sizes.py24
-rw-r--r--chromium/build/android/resource_sizes.pydeps4
-rwxr-xr-xchromium/build/android/test_runner.py21
-rw-r--r--chromium/build/android/test_runner.pydeps1
-rwxr-xr-xchromium/build/android/update_deps/update_third_party_deps.py2
-rwxr-xr-xchromium/build/android/update_verification.py16
-rw-r--r--chromium/build/chromeos/PRESUBMIT.py9
-rwxr-xr-xchromium/build/chromeos/gen_skylab_runner.py2
-rwxr-xr-xchromium/build/chromeos/test_runner.py43
-rwxr-xr-xchromium/build/chromeos/test_runner_test.py8
-rw-r--r--chromium/build/config/BUILDCONFIG.gn6
-rw-r--r--chromium/build/config/OWNERS1
-rw-r--r--chromium/build/config/aix/BUILD.gn12
-rw-r--r--chromium/build/config/android/DIR_METADATA1
-rw-r--r--chromium/build/config/android/config.gni13
-rw-r--r--chromium/build/config/android/internal_rules.gni82
-rw-r--r--chromium/build/config/android/rules.gni37
-rw-r--r--chromium/build/config/chromecast/BUILD.gn6
-rw-r--r--chromium/build/config/chromecast_build.gni25
-rw-r--r--chromium/build/config/chromeos/args.gni8
-rw-r--r--chromium/build/config/compiler/BUILD.gn273
-rw-r--r--chromium/build/config/compiler/compiler.gni24
-rw-r--r--chromium/build/config/devtools.gni23
-rw-r--r--chromium/build/config/fuchsia/DIR_METADATA6
-rw-r--r--chromium/build/config/fuchsia/generate_runner_scripts.gni19
-rw-r--r--chromium/build/config/gcc/BUILD.gn4
-rw-r--r--chromium/build/config/ios/BUILD.gn28
-rw-r--r--chromium/build/config/ios/Host-Info.plist2
-rw-r--r--chromium/build/config/ios/Module-Info.plist2
-rw-r--r--chromium/build/config/ios/config.gni5
-rw-r--r--chromium/build/config/ios/ios_sdk.gni13
-rw-r--r--chromium/build/config/ios/resources/XCTRunnerAddition+Info.plist2
-rw-r--r--chromium/build/config/ios/rules.gni104
-rw-r--r--chromium/build/config/ios/swift_source_set.gni22
-rw-r--r--chromium/build/config/linux/gtk/BUILD.gn1
-rw-r--r--chromium/build/config/logging.gni1
-rw-r--r--chromium/build/config/mac/BUILD.gn4
-rw-r--r--chromium/build/config/mac/mac_sdk.gni5
-rw-r--r--chromium/build/config/mac/rules.gni15
-rw-r--r--chromium/build/config/nacl/config.gni12
-rw-r--r--chromium/build/config/ozone.gni20
-rw-r--r--chromium/build/config/pch.gni3
-rw-r--r--chromium/build/config/profiling/OWNERS1
-rw-r--r--chromium/build/config/rust.gni138
-rw-r--r--chromium/build/config/sanitizers/sanitizers.gni8
-rw-r--r--chromium/build/config/ui.gni14
-rw-r--r--chromium/build/docs/rust_toolchain.md21
-rw-r--r--chromium/build/fuchsia/COMMON_METADATA5
-rw-r--r--chromium/build/fuchsia/DIR_METADATA5
-rw-r--r--chromium/build/fuchsia/PRESUBMIT.py3
-rw-r--r--chromium/build/fuchsia/aemu_target.py14
-rw-r--r--chromium/build/fuchsia/boot_data.py58
-rwxr-xr-xchromium/build/fuchsia/boot_data_test.py40
-rw-r--r--chromium/build/fuchsia/common.py6
-rw-r--r--chromium/build/fuchsia/common_args.py17
-rw-r--r--chromium/build/fuchsia/device_target.py74
-rwxr-xr-xchromium/build/fuchsia/device_target_test.py103
-rw-r--r--chromium/build/fuchsia/emu_target.py73
-rw-r--r--chromium/build/fuchsia/fvdl_target.py98
-rwxr-xr-xchromium/build/fuchsia/fvdl_target_test.py80
-rw-r--r--chromium/build/fuchsia/linux.sdk.sha12
-rw-r--r--chromium/build/fuchsia/log_manager.py53
-rw-r--r--chromium/build/fuchsia/mac.sdk.sha12
-rw-r--r--chromium/build/fuchsia/qemu_target.py20
-rw-r--r--chromium/build/fuchsia/run_test_package.py77
-rw-r--r--chromium/build/fuchsia/runner_logs.py96
-rwxr-xr-xchromium/build/fuchsia/start_emulator.py27
-rw-r--r--chromium/build/fuchsia/target.py55
-rwxr-xr-xchromium/build/fuchsia/test_runner.py64
-rwxr-xr-xchromium/build/fuchsia/update_images.py19
-rwxr-xr-xchromium/build/install-build-deps-android.sh3
-rwxr-xr-xchromium/build/install-build-deps.sh53
-rw-r--r--chromium/build/lacros/PRESUBMIT.py7
-rwxr-xr-xchromium/build/lacros/test_runner.py25
-rwxr-xr-xchromium/build/lacros/test_runner_test.py4
-rw-r--r--chromium/build/linux/extract_symbols.gni1
-rw-r--r--chromium/build/linux/sysroot_scripts/generated_package_lists/sid.amd641
-rw-r--r--chromium/build/linux/sysroot_scripts/generated_package_lists/sid.arm1
-rw-r--r--chromium/build/linux/sysroot_scripts/generated_package_lists/sid.arm641
-rw-r--r--chromium/build/linux/sysroot_scripts/generated_package_lists/sid.i3861
-rw-r--r--chromium/build/linux/sysroot_scripts/generated_package_lists/sid.mips64el1
-rwxr-xr-xchromium/build/linux/sysroot_scripts/sysroot-creator-sid.sh4
-rw-r--r--chromium/build/linux/sysroot_scripts/sysroots.json12
-rwxr-xr-xchromium/build/linux/unbundle/remove_bundled_libraries.py4
-rwxr-xr-xchromium/build/linux/unbundle/replace_gn_files.py4
-rw-r--r--chromium/build/rust/BUILD.gn11
-rw-r--r--chromium/build/rust/OWNERS8
-rw-r--r--chromium/build/rust/rust_source_set.gni256
-rw-r--r--chromium/build/rust/std/BUILD.gn153
-rwxr-xr-xchromium/build/rust/std/find_std_rlibs.py101
-rw-r--r--chromium/build/rust/std/immediate_crash.h170
-rw-r--r--chromium/build/rust/std/remap_alloc.cc74
-rw-r--r--chromium/build/rust/tests/BUILD.gn29
-rw-r--r--chromium/build/rust/tests/test_cpp_including_rust/BUILD.gn25
-rw-r--r--chromium/build/rust/tests/test_rust_exe/BUILD.gn12
-rw-r--r--chromium/build/rust/tests/test_rust_multiple_dep_versions_exe/BUILD.gn23
-rw-r--r--chromium/build/rust/tests/test_rust_multiple_dep_versions_exe/v1/BUILD.gn14
-rw-r--r--chromium/build/rust/tests/test_rust_multiple_dep_versions_exe/v2/BUILD.gn14
-rw-r--r--chromium/build/rust/tests/test_rust_source_set/BUILD.gn10
-rw-r--r--chromium/build/sanitizers/lsan_suppressions.cc1
-rw-r--r--chromium/build/skia_gold_common/output_managerless_skia_gold_session.py26
-rwxr-xr-xchromium/build/skia_gold_common/output_managerless_skia_gold_session_unittest.py35
-rw-r--r--chromium/build/skia_gold_common/skia_gold_properties.py10
-rw-r--r--chromium/build/skia_gold_common/skia_gold_session.py11
-rwxr-xr-xchromium/build/skia_gold_common/skia_gold_session_unittest.py12
-rw-r--r--chromium/build/skia_gold_common/unittest_utils.py7
-rw-r--r--chromium/build/toolchain/BUILD.gn2
-rw-r--r--chromium/build/toolchain/aix/BUILD.gn2
-rw-r--r--chromium/build/toolchain/android/BUILD.gn40
-rw-r--r--chromium/build/toolchain/android/DIR_METADATA1
-rw-r--r--chromium/build/toolchain/apple/.style.yapf2
-rwxr-xr-xchromium/build/toolchain/apple/linker_driver.py532
-rw-r--r--chromium/build/toolchain/apple/toolchain.gni104
-rw-r--r--chromium/build/toolchain/cc_wrapper.gni4
-rw-r--r--chromium/build/toolchain/concurrent_links.gni2
-rw-r--r--chromium/build/toolchain/cros/BUILD.gn2
-rw-r--r--chromium/build/toolchain/gcc_toolchain.gni135
-rw-r--r--chromium/build/toolchain/linux/BUILD.gn53
-rw-r--r--chromium/build/toolchain/mac/BUILD.gn3
-rw-r--r--chromium/build/toolchain/nacl/BUILD.gn13
-rw-r--r--chromium/build/toolchain/nacl_toolchain.gni11
-rw-r--r--chromium/build/toolchain/rbe.gni23
-rw-r--r--chromium/build/toolchain/win/BUILD.gn56
-rw-r--r--chromium/build/util/BUILD.gn2
-rw-r--r--chromium/build/util/LASTCHANGE4
-rw-r--r--chromium/build/util/LASTCHANGE.committime2
-rw-r--r--chromium/build/util/PRESUBMIT.py5
-rw-r--r--chromium/build/util/lib/common/chrome_test_server_spawner.py170
-rw-r--r--chromium/build/util/lib/results/result_sink.py23
-rwxr-xr-xchromium/build/util/lib/results/result_sink_test.py4
-rw-r--r--chromium/build/whitespace_file.txt5
213 files changed, 4398 insertions, 2024 deletions
diff --git a/chromium/build/BUILD.gn b/chromium/build/BUILD.gn
index b1724eb6a03..d18e914f3fc 100644
--- a/chromium/build/BUILD.gn
+++ b/chromium/build/BUILD.gn
@@ -42,6 +42,7 @@ buildflag_header("chromeos_buildflags") {
"IS_CHROMEOS_LACROS=$is_chromeos_lacros",
"IS_CHROMEOS_ASH=$is_chromeos_ash",
+ "IS_CHROMEOS_WITH_HW_DETAILS=$is_chromeos_with_hw_details",
]
}
diff --git a/chromium/build/android/BUILD.gn b/chromium/build/android/BUILD.gn
index c24fce529ed..baa7120e7b8 100644
--- a/chromium/build/android/BUILD.gn
+++ b/chromium/build/android/BUILD.gn
@@ -68,9 +68,7 @@ group("apk_installer_data") {
"//build/android/pylib/device/commands",
"//tools/android/md5sum",
]
- data = [
- "//third_party/android_build_tools/bundletool/bundletool-all-1.8.0.jar",
- ]
+ data = [ "//third_party/android_build_tools/bundletool/bundletool.jar" ]
}
}
@@ -125,9 +123,11 @@ python_library("resource_sizes_py") {
":devil_chromium_py",
"//third_party/catapult/tracing:convert_chart_json",
]
+
data = [
build_vars_file,
android_readelf,
+ rebase_path("$android_ndk_library_path/libc++.so.1", root_build_dir),
]
}
diff --git a/chromium/build/android/COMMON_METADATA b/chromium/build/android/COMMON_METADATA
new file mode 100644
index 00000000000..7a2580a646c
--- /dev/null
+++ b/chromium/build/android/COMMON_METADATA
@@ -0,0 +1 @@
+os: ANDROID
diff --git a/chromium/build/android/DIR_METADATA b/chromium/build/android/DIR_METADATA
index 7a2580a646c..cdc2d6fb6eb 100644
--- a/chromium/build/android/DIR_METADATA
+++ b/chromium/build/android/DIR_METADATA
@@ -1 +1 @@
-os: ANDROID
+mixins: "//build/android/COMMON_METADATA"
diff --git a/chromium/build/android/PRESUBMIT.py b/chromium/build/android/PRESUBMIT.py
index ef22547f258..05c739f2f7a 100644
--- a/chromium/build/android/PRESUBMIT.py
+++ b/chromium/build/android/PRESUBMIT.py
@@ -63,8 +63,21 @@ def CommonChecks(input_api, output_api):
files_to_skip=[
r'.*_pb2\.py',
r'.*_pb2\.py',
+ r'.*create_unwind_table\.py',
+ r'.*create_unwind_table_tests\.py',
],
extra_paths_list=[J('gyp'), J('gn')]))
+
+ tests.extend(
+ input_api.canned_checks.GetPylint(
+ input_api,
+ output_api,
+ files_to_check=[
+ r'.*create_unwind_table\.py',
+ r'.*create_unwind_table_tests\.py',
+ ],
+ extra_paths_list=[J('gyp'), J('gn')],
+ version='2.6'))
# yapf: enable
# Disabled due to http://crbug.com/410936
diff --git a/chromium/build/android/adb_install_apk.py b/chromium/build/android/adb_install_apk.py
index 534ffe609d0..ecbcc699591 100755
--- a/chromium/build/android/adb_install_apk.py
+++ b/chromium/build/android/adb_install_apk.py
@@ -59,8 +59,11 @@ def main():
parser.add_argument('--adb-path', type=os.path.abspath,
help='Absolute path to the adb binary to use.')
parser.add_argument('--denylist-file', help='Device denylist JSON file.')
- parser.add_argument('-v', '--verbose', action='count',
- help='Enable verbose logging.')
+ parser.add_argument('-v',
+ '--verbose',
+ action='count',
+ help='Enable verbose logging.',
+ default=0)
parser.add_argument('--downgrade', action='store_true',
help='If set, allows downgrading of apk.')
parser.add_argument('--timeout', type=int,
diff --git a/chromium/build/android/adb_logcat_monitor.py b/chromium/build/android/adb_logcat_monitor.py
index a919722cbab..6230db4d842 100755
--- a/chromium/build/android/adb_logcat_monitor.py
+++ b/chromium/build/android/adb_logcat_monitor.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
#
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
@@ -141,7 +141,7 @@ def main(base_dir, adb_cmd='adb'):
except: # pylint: disable=bare-except
logging.exception('Unexpected exception in main.')
finally:
- for process, _ in devices.itervalues():
+ for process, _ in devices.values():
if process:
try:
process.terminate()
diff --git a/chromium/build/android/adb_logcat_printer.py b/chromium/build/android/adb_logcat_printer.py
index a715170759d..284988f5329 100755
--- a/chromium/build/android/adb_logcat_printer.py
+++ b/chromium/build/android/adb_logcat_printer.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
#
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
@@ -19,7 +19,7 @@ monitoring for the deletion of the aforementioned file.
"""
# pylint: disable=W0702
-import cStringIO
+import io
import logging
import optparse
import os
@@ -108,7 +108,7 @@ def GetDeviceLogs(log_filenames, logger):
"""
device_logs = []
- for device, device_files in log_filenames.iteritems():
+ for device, device_files in log_filenames.items():
logger.debug('%s: %s', device, str(device_files))
device_file_lines = []
for cur_file in device_files:
@@ -160,7 +160,7 @@ def main(argv):
parser.error('Wrong number of unparsed args')
base_dir = args[0]
- log_stringio = cStringIO.StringIO()
+ log_stringio = io.StringIO()
logger = logging.getLogger('LogcatPrinter')
logger.setLevel(LOG_LEVEL)
sh = logging.StreamHandler(log_stringio)
diff --git a/chromium/build/android/apk_operations.py b/chromium/build/android/apk_operations.py
index b5c01ccb7a0..192d20bacf4 100755
--- a/chromium/build/android/apk_operations.py
+++ b/chromium/build/android/apk_operations.py
@@ -125,36 +125,9 @@ def _GenerateBundleApks(info,
optimize_for=optimize_for)
-def _InstallBundle(devices, apk_helper_instance, package_name,
- command_line_flags_file, modules, fake_modules):
- # Path Chrome creates after validating fake modules. This needs to be cleared
- # for pushed fake modules to be picked up.
- SPLITCOMPAT_PATH = '/data/data/' + package_name + '/files/splitcompat'
- # Chrome command line flag needed for fake modules to work.
- FAKE_FEATURE_MODULE_INSTALL = '--fake-feature-module-install'
-
- def ShouldWarnFakeFeatureModuleInstallFlag(device):
- if command_line_flags_file:
- changer = flag_changer.FlagChanger(device, command_line_flags_file)
- return FAKE_FEATURE_MODULE_INSTALL not in changer.GetCurrentFlags()
- return False
-
- def ClearFakeModules(device):
- if device.PathExists(SPLITCOMPAT_PATH, as_root=True):
- device.RemovePath(
- SPLITCOMPAT_PATH, force=True, recursive=True, as_root=True)
- logging.info('Removed %s', SPLITCOMPAT_PATH)
- else:
- logging.info('Skipped removing nonexistent %s', SPLITCOMPAT_PATH)
+def _InstallBundle(devices, apk_helper_instance, modules, fake_modules):
def Install(device):
- ClearFakeModules(device)
- if fake_modules and ShouldWarnFakeFeatureModuleInstallFlag(device):
- # Print warning if command line is not set up for fake modules.
- msg = ('Command line has no %s: Fake modules will be ignored.' %
- FAKE_FEATURE_MODULE_INSTALL)
- print(_Colorize(msg, colorama.Fore.YELLOW + colorama.Style.BRIGHT))
-
device.Install(
apk_helper_instance,
permissions=[],
@@ -584,7 +557,7 @@ class _LogcatProcessor(object):
"""Prints queued lines after sending them through stack.py."""
crash_lines = self._crash_lines_buffer
self._crash_lines_buffer = None
- with tempfile.NamedTemporaryFile() as f:
+ with tempfile.NamedTemporaryFile(mode='w') as f:
f.writelines(x[0].message + '\n' for x in crash_lines)
f.flush()
proc = self._stack_script_context.Popen(
@@ -1000,7 +973,7 @@ class _StackScriptContext(object):
if input_file:
cmd.append(input_file)
logging.info('Running stack.py')
- return subprocess.Popen(cmd, **kwargs)
+ return subprocess.Popen(cmd, universal_newlines=True, **kwargs)
def _GenerateAvailableDevicesMessage(devices):
@@ -1341,8 +1314,7 @@ class _InstallCommand(_Command):
modules = list(
set(self.args.module) - set(self.args.no_module) -
set(self.args.fake))
- _InstallBundle(self.devices, self.apk_helper, self.args.package_name,
- self.args.command_line_flags_file, modules, self.args.fake)
+ _InstallBundle(self.devices, self.apk_helper, modules, self.args.fake)
else:
_InstallApk(self.devices, self.apk_helper, self.install_dict)
@@ -1789,9 +1761,11 @@ class _ManifestCommand(_Command):
need_device_args = False
def Run(self):
- bundletool.RunBundleTool([
- 'dump', 'manifest', '--bundle', self.bundle_generation_info.bundle_path
- ])
+ sys.stdout.write(
+ bundletool.RunBundleTool([
+ 'dump', 'manifest', '--bundle',
+ self.bundle_generation_info.bundle_path
+ ]))
class _StackCommand(_Command):
diff --git a/chromium/build/android/apk_operations.pydeps b/chromium/build/android/apk_operations.pydeps
index 60b128942e7..0bd7b7f9dc9 100644
--- a/chromium/build/android/apk_operations.pydeps
+++ b/chromium/build/android/apk_operations.pydeps
@@ -65,6 +65,9 @@
../../third_party/catapult/third_party/six/six.py
../../third_party/jinja2/__init__.py
../../third_party/jinja2/_compat.py
+../../third_party/jinja2/_identifier.py
+../../third_party/jinja2/asyncfilters.py
+../../third_party/jinja2/asyncsupport.py
../../third_party/jinja2/bccache.py
../../third_party/jinja2/compiler.py
../../third_party/jinja2/defaults.py
diff --git a/chromium/build/android/asan_symbolize.py b/chromium/build/android/asan_symbolize.py
index 65850898739..60b00d00498 100755
--- a/chromium/build/android/asan_symbolize.py
+++ b/chromium/build/android/asan_symbolize.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
#
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
@@ -98,7 +98,7 @@ def _PrintSymbolized(asan_input, arch):
# Maps library -> { address -> [(symbol, location, obj_sym_with_offset)...] }
all_symbols = collections.defaultdict(dict)
- for library, items in libraries.iteritems():
+ for library, items in libraries.items():
libname = _TranslateLibPath(library, asan_libs)
lib_relative_addrs = set([i.rel_address for i in items])
# pylint: disable=no-member
@@ -140,7 +140,7 @@ def main():
constants.CheckOutputDirectory()
if options.logcat:
- asan_input = file(options.logcat, 'r')
+ asan_input = open(options.logcat, 'r')
else:
asan_input = sys.stdin
diff --git a/chromium/build/android/bytecode/BUILD.gn b/chromium/build/android/bytecode/BUILD.gn
index 36b54329f65..8d717eb0ada 100644
--- a/chromium/build/android/bytecode/BUILD.gn
+++ b/chromium/build/android/bytecode/BUILD.gn
@@ -5,17 +5,21 @@
import("//build/config/android/rules.gni")
java_binary("bytecode_processor") {
+ main_class = "org.chromium.bytecode.ByteCodeProcessor"
+ wrapper_script_name = "helper/bytecode_processor"
+ deps = [ ":bytecode_processor_java" ]
+}
+
+java_library("bytecode_processor_java") {
sources = [
"java/org/chromium/bytecode/ByteCodeProcessor.java",
"java/org/chromium/bytecode/ClassPathValidator.java",
"java/org/chromium/bytecode/TypeUtils.java",
]
- main_class = "org.chromium.bytecode.ByteCodeProcessor"
deps = [
"//third_party/android_deps:org_ow2_asm_asm_java",
"//third_party/android_deps:org_ow2_asm_asm_util_java",
]
- wrapper_script_name = "helper/bytecode_processor"
enable_bytecode_checks = false
}
@@ -54,3 +58,29 @@ java_library("fragment_activity_replacer_java") {
"//third_party/android_deps:org_ow2_asm_asm_util_java",
]
}
+
+java_binary("trace_event_adder") {
+ main_class = "org.chromium.bytecode.TraceEventAdder"
+ deps = [ ":trace_event_adder_java" ]
+ wrapper_script_name = "helper/trace_event_adder"
+}
+
+java_library("trace_event_adder_java") {
+ visibility = [ ":*" ]
+ sources = [
+ "java/org/chromium/bytecode/ByteCodeRewriter.java",
+ "java/org/chromium/bytecode/EmptyOverrideGeneratorClassAdapter.java",
+ "java/org/chromium/bytecode/MethodCheckerClassAdapter.java",
+ "java/org/chromium/bytecode/MethodDescription.java",
+ "java/org/chromium/bytecode/ParentMethodCheckerClassAdapter.java",
+ "java/org/chromium/bytecode/TraceEventAdder.java",
+ "java/org/chromium/bytecode/TraceEventAdderClassAdapter.java",
+ "java/org/chromium/bytecode/TraceEventAdderMethodAdapter.java",
+ ]
+ deps = [
+ ":bytecode_processor_java",
+ "//third_party/android_deps:org_ow2_asm_asm_commons_java",
+ "//third_party/android_deps:org_ow2_asm_asm_java",
+ "//third_party/android_deps:org_ow2_asm_asm_util_java",
+ ]
+}
diff --git a/chromium/build/android/bytecode/java/org/chromium/bytecode/ByteCodeRewriter.java b/chromium/build/android/bytecode/java/org/chromium/bytecode/ByteCodeRewriter.java
index 3d0d9cdd47d..37b0e863484 100644
--- a/chromium/build/android/bytecode/java/org/chromium/bytecode/ByteCodeRewriter.java
+++ b/chromium/build/android/bytecode/java/org/chromium/bytecode/ByteCodeRewriter.java
@@ -42,6 +42,13 @@ public abstract class ByteCodeRewriter {
protected abstract boolean shouldRewriteClass(String classPath);
/**
+ * Returns true if the class at the given {@link ClassReader} should be rewritten.
+ */
+ protected boolean shouldRewriteClass(ClassReader classReader) {
+ return true;
+ }
+
+ /**
* Returns the ClassVisitor that should be used to modify the bytecode of class at the given
* path in the archive.
*/
@@ -77,7 +84,10 @@ public abstract class ByteCodeRewriter {
try {
ClassReader reader = new ClassReader(inputStream);
ClassWriter writer = new ClassWriter(reader, ClassWriter.COMPUTE_FRAMES);
- ClassVisitor classVisitor = getClassVisitorForClass(entry.getName(), writer);
+ ClassVisitor classVisitor = writer;
+ if (shouldRewriteClass(reader)) {
+ classVisitor = getClassVisitorForClass(entry.getName(), writer);
+ }
reader.accept(classVisitor, ClassReader.EXPAND_FRAMES);
writer.visitEnd();
diff --git a/chromium/build/android/bytecode/java/org/chromium/bytecode/EmptyOverrideGeneratorClassAdapter.java b/chromium/build/android/bytecode/java/org/chromium/bytecode/EmptyOverrideGeneratorClassAdapter.java
new file mode 100644
index 00000000000..d0957625d76
--- /dev/null
+++ b/chromium/build/android/bytecode/java/org/chromium/bytecode/EmptyOverrideGeneratorClassAdapter.java
@@ -0,0 +1,103 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.bytecode;
+
+import static org.objectweb.asm.Opcodes.ACC_ABSTRACT;
+import static org.objectweb.asm.Opcodes.ACC_INTERFACE;
+import static org.objectweb.asm.Opcodes.ALOAD;
+import static org.objectweb.asm.Opcodes.ASM7;
+import static org.objectweb.asm.Opcodes.ILOAD;
+import static org.objectweb.asm.Opcodes.INVOKESPECIAL;
+import static org.objectweb.asm.Opcodes.IRETURN;
+
+import org.objectweb.asm.ClassVisitor;
+import org.objectweb.asm.MethodVisitor;
+import org.objectweb.asm.Type;
+
+import java.util.ArrayList;
+
+class EmptyOverrideGeneratorClassAdapter extends ClassVisitor {
+ private final ArrayList<MethodDescription> mMethodsToGenerate;
+ private String mSuperClassName;
+ private boolean mIsAbstract;
+ private boolean mIsInterface;
+
+ public EmptyOverrideGeneratorClassAdapter(
+ ClassVisitor cv, ArrayList<MethodDescription> methodsToGenerate) {
+ super(ASM7, cv);
+ mMethodsToGenerate = methodsToGenerate;
+ }
+
+ @Override
+ public void visit(int version, int access, String name, String signature, String superName,
+ String[] interfaces) {
+ super.visit(version, access, name, signature, superName, interfaces);
+
+ mSuperClassName = superName;
+ mIsAbstract = (access & ACC_ABSTRACT) == ACC_ABSTRACT;
+ mIsInterface = (access & ACC_INTERFACE) == ACC_INTERFACE;
+ }
+
+ @Override
+ public void visitEnd() {
+ if (mIsAbstract || mIsInterface || mMethodsToGenerate.isEmpty()) {
+ super.visitEnd();
+ return;
+ }
+
+ for (MethodDescription method : mMethodsToGenerate) {
+ if (!method.shouldCreateOverride) {
+ continue;
+ }
+
+ MethodVisitor mv = super.visitMethod(
+ method.access, method.methodName, method.description, null, null);
+ writeOverrideCode(mv, method.access, method.methodName, method.description);
+ }
+
+ super.visitEnd();
+ }
+
+ /**
+ * Writes code to a method to call that method's parent implementation.
+ * <pre>
+ * {@code
+ * // Calling writeOverrideCode(mv, ACC_PUBLIC, "doFoo", "(Ljava/lang/String;)I") writes the
+ * following method body: public int doFoo(String arg){ return super.doFoo(arg);
+ * }
+ * }
+ * </pre>
+ *
+ * This will be rewritten later by TraceEventAdderClassAdapter to wrap the body in a trace
+ * event.
+ */
+ private void writeOverrideCode(
+ MethodVisitor mv, final int access, final String name, final String descriptor) {
+ Type[] argTypes = Type.getArgumentTypes(descriptor);
+ Type returnType = Type.getReturnType(descriptor);
+
+ mv.visitCode();
+
+ // Variable 0 contains `this`, load it into the operand stack.
+ mv.visitVarInsn(ALOAD, 0);
+
+ // Variables 1..n contain all arguments, load them all into the operand stack.
+ int i = 1;
+ for (Type arg : argTypes) {
+ // getOpcode(ILOAD) returns the ILOAD equivalent to the current argument's type.
+ mv.visitVarInsn(arg.getOpcode(ILOAD), i);
+ i += arg.getSize();
+ }
+
+ // Call the parent class method with the same arguments.
+ mv.visitMethodInsn(INVOKESPECIAL, mSuperClassName, name, descriptor, false);
+
+ // Return the result.
+ mv.visitInsn(returnType.getOpcode(IRETURN));
+
+ mv.visitMaxs(0, 0);
+ mv.visitEnd();
+ }
+}
diff --git a/chromium/build/android/bytecode/java/org/chromium/bytecode/FragmentActivityReplacer.java b/chromium/build/android/bytecode/java/org/chromium/bytecode/FragmentActivityReplacer.java
index a40f39c4ce8..c985600593a 100644
--- a/chromium/build/android/bytecode/java/org/chromium/bytecode/FragmentActivityReplacer.java
+++ b/chromium/build/android/bytecode/java/org/chromium/bytecode/FragmentActivityReplacer.java
@@ -13,6 +13,7 @@ import org.objectweb.asm.commons.Remapper;
import java.io.File;
import java.io.IOException;
+import java.lang.reflect.Method;
/**
* Java application that modifies Fragment.getActivity() to return an Activity instead of a
@@ -75,11 +76,29 @@ public class FragmentActivityReplacer extends ByteCodeRewriter {
* the replaced method.
*/
private static class InvocationReplacer extends ClassVisitor {
+ /**
+ * A ClassLoader that will resolve R classes to Object.
+ *
+ * R won't be in our classpath, and we don't access any information about them, so resolving
+ * it to a dummy value is fine.
+ */
+ private static class ResourceStubbingClassLoader extends ClassLoader {
+ @Override
+ protected Class<?> findClass(String name) throws ClassNotFoundException {
+ if (name.matches(".*\\.R(\\$.+)?")) {
+ return Object.class;
+ }
+ return super.findClass(name);
+ }
+ }
+
private final boolean mSingleAndroidX;
+ private final ClassLoader mClassLoader;
private InvocationReplacer(ClassVisitor baseVisitor, boolean singleAndroidX) {
super(Opcodes.ASM7, baseVisitor);
mSingleAndroidX = singleAndroidX;
+ mClassLoader = new ResourceStubbingClassLoader();
}
@Override
@@ -90,6 +109,28 @@ public class FragmentActivityReplacer extends ByteCodeRewriter {
@Override
public void visitMethodInsn(int opcode, String owner, String name,
String descriptor, boolean isInterface) {
+ // Change the return type of getActivity and replaceActivity.
+ if (isActivityGetterInvocation(opcode, owner, name, descriptor)) {
+ super.visitMethodInsn(
+ opcode, owner, name, NEW_METHOD_DESCRIPTOR, isInterface);
+ if (mSingleAndroidX) {
+ super.visitTypeInsn(
+ Opcodes.CHECKCAST, "androidx/fragment/app/FragmentActivity");
+ }
+ } else if (isDowncastableFragmentActivityMethodInvocation(
+ opcode, owner, name, descriptor)) {
+ // Replace FragmentActivity.foo() with Activity.foo() to fix cases where the
+ // above code changed the getActivity return type. See the
+ // isDowncastableFragmentActivityMethodInvocation documentation for details.
+ super.visitMethodInsn(
+ opcode, "android/app/Activity", name, descriptor, isInterface);
+ } else {
+ super.visitMethodInsn(opcode, owner, name, descriptor, isInterface);
+ }
+ }
+
+ private boolean isActivityGetterInvocation(
+ int opcode, String owner, String name, String descriptor) {
boolean isFragmentGetActivity = name.equals(GET_ACTIVITY_METHOD_NAME)
&& descriptor.equals(OLD_METHOD_DESCRIPTOR)
&& isFragmentSubclass(owner);
@@ -100,39 +141,63 @@ public class FragmentActivityReplacer extends ByteCodeRewriter {
name.equals(GET_LIFECYCLE_ACTIVITY_METHOD_NAME)
&& descriptor.equals(OLD_METHOD_DESCRIPTOR)
&& owner.equals(SUPPORT_LIFECYCLE_FRAGMENT_IMPL_BINARY_NAME);
- if ((opcode == Opcodes.INVOKEVIRTUAL || opcode == Opcodes.INVOKESPECIAL)
+ return (opcode == Opcodes.INVOKEVIRTUAL || opcode == Opcodes.INVOKESPECIAL)
&& (isFragmentGetActivity || isFragmentRequireActivity
- || isSupportLifecycleFragmentImplGetLifecycleActivity)) {
- super.visitMethodInsn(
- opcode, owner, name, NEW_METHOD_DESCRIPTOR, isInterface);
- if (mSingleAndroidX) {
- super.visitTypeInsn(
- Opcodes.CHECKCAST, "androidx/fragment/app/FragmentActivity");
- }
- } else {
- super.visitMethodInsn(opcode, owner, name, descriptor, isInterface);
- }
+ || isSupportLifecycleFragmentImplGetLifecycleActivity);
}
- private boolean isFragmentSubclass(String internalType) {
- // Look up classes with a ClassLoader that will resolve any R classes to Object.
- // This is fine in this case as resource classes shouldn't be in the class
- // hierarchy of any Fragments.
- ClassLoader resourceStubbingClassLoader = new ClassLoader() {
- @Override
- protected Class<?> findClass(String name) throws ClassNotFoundException {
- if (name.matches(".*\\.R(\\$.+)?")) {
- return Object.class;
+ /**
+ * Returns true if the given method belongs to FragmentActivity, and also exists on
+ * Activity.
+ *
+ * The Java code `requireActivity().getClassLoader()` will compile to the following
+ * bytecode:
+ * aload_0
+ * // Method requireActivity:()Landroid/app/Activity;
+ * invokevirtual #n
+     * // Method androidx/fragment/app/FragmentActivity.getClassLoader:()Ljava/lang/ClassLoader;
+ * invokevirtual #m
+ *
+ * The second invokevirtual instruction doesn't typecheck because the
+ * requireActivity() return type was changed from FragmentActivity to Activity. Note
+ * that this is only an issue when validating the bytecode on the JVM, not in
+ * Dalvik, so while the above code works on device, it fails in robolectric tests.
+ *
+ * To fix the example above, we'd replace the second invokevirtual call with a call
+ * to android/app/Activity.getClassLoader:()Ljava/lang/ClassLoader. In general, any
+ * call to FragmentActivity.foo, where foo also exists on Activity, will be replaced
+ * with a call to Activity.foo. Activity.foo will still resolve to
+ * FragmentActivity.foo at runtime, while typechecking in robolectric tests.
+ */
+ private boolean isDowncastableFragmentActivityMethodInvocation(
+ int opcode, String owner, String name, String descriptor) {
+ // Return if this isn't an invoke instruction on a FragmentActivity.
+ if (!(opcode == Opcodes.INVOKEVIRTUAL || opcode == Opcodes.INVOKESPECIAL)
+ || !owner.equals("androidx/fragment/app/FragmentActivity")) {
+ return false;
+ }
+ try {
+ // Check if the method exists in Activity.
+ Class<?> activity = mClassLoader.loadClass("android.app.Activity");
+ for (Method activityMethod : activity.getMethods()) {
+ if (activityMethod.getName().equals(name)
+ && Type.getMethodDescriptor(activityMethod)
+ .equals(descriptor)) {
+ return true;
}
- return super.findClass(name);
}
- };
+ return false;
+ } catch (ClassNotFoundException e) {
+ throw new RuntimeException(e);
+ }
+ }
+ private boolean isFragmentSubclass(String internalType) {
// This doesn't use Class#isAssignableFrom to avoid us needing to load
// AndroidX's Fragment class, which may not be on the classpath.
try {
String binaryName = Type.getObjectType(internalType).getClassName();
- Class<?> clazz = resourceStubbingClassLoader.loadClass(binaryName);
+ Class<?> clazz = mClassLoader.loadClass(binaryName);
while (clazz != null) {
if (clazz.getName().equals("androidx.fragment.app.Fragment")) {
return true;
diff --git a/chromium/build/android/bytecode/java/org/chromium/bytecode/MethodCheckerClassAdapter.java b/chromium/build/android/bytecode/java/org/chromium/bytecode/MethodCheckerClassAdapter.java
new file mode 100644
index 00000000000..5aef2753195
--- /dev/null
+++ b/chromium/build/android/bytecode/java/org/chromium/bytecode/MethodCheckerClassAdapter.java
@@ -0,0 +1,136 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.bytecode;
+
+import static org.objectweb.asm.ClassReader.EXPAND_FRAMES;
+import static org.objectweb.asm.Opcodes.ACC_ABSTRACT;
+import static org.objectweb.asm.Opcodes.ACC_INTERFACE;
+import static org.objectweb.asm.Opcodes.ASM7;
+
+import org.objectweb.asm.ClassReader;
+import org.objectweb.asm.ClassVisitor;
+import org.objectweb.asm.MethodVisitor;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.ArrayList;
+
+/**
+ * This ClassVisitor verifies that a class and its methods are suitable for rewriting.
+ * Given a class and a list of methods it performs the following checks:
+ * 1. Class is subclass of {@link android.view.View}.
+ * 2. Class is not abstract or an interface.
+ *
+ * For each method provided in {@code methodsToCheck}:
+ * If the class overrides the method then we can rewrite it directly.
+ * If the class doesn't override the method then we can generate an override with {@link
+ * EmptyOverrideGeneratorClassAdapter}, but first we must check if the parent method is private or
+ * final using {@link ParentMethodCheckerClassAdapter}.
+ *
+ * This adapter modifies the provided method list to indicate which methods should be overridden or
+ * skipped.
+ */
+class MethodCheckerClassAdapter extends ClassVisitor {
+ private static final String VIEW_CLASS_DESCRIPTOR = "android/view/View";
+
+ private final ArrayList<MethodDescription> mMethodsToCheck;
+ private final ClassLoader mJarClassLoader;
+ private String mSuperName;
+
+ public MethodCheckerClassAdapter(
+ ArrayList<MethodDescription> methodsToCheck, ClassLoader jarClassLoader) {
+ super(ASM7);
+ mMethodsToCheck = methodsToCheck;
+ mJarClassLoader = jarClassLoader;
+ }
+
+ @Override
+ public void visit(int version, int access, String name, String signature, String superName,
+ String[] interfaces) {
+ super.visit(version, access, name, signature, superName, interfaces);
+
+ mSuperName = superName;
+
+ boolean isAbstract = (access & ACC_ABSTRACT) == ACC_ABSTRACT;
+ boolean isInterface = (access & ACC_INTERFACE) == ACC_INTERFACE;
+
+ if (isAbstract || isInterface || !isClassView(name)) {
+ mMethodsToCheck.clear();
+ return;
+ }
+ }
+
+ @Override
+ public MethodVisitor visitMethod(
+ int access, String name, String descriptor, String signature, String[] exceptions) {
+ if (mMethodsToCheck.isEmpty()) {
+ return super.visitMethod(access, name, descriptor, signature, exceptions);
+ }
+
+ for (MethodDescription method : mMethodsToCheck) {
+ if (method.methodName.equals(name) && method.description.equals(descriptor)) {
+ method.shouldCreateOverride = false;
+ }
+ }
+
+ return super.visitMethod(access, name, descriptor, signature, exceptions);
+ }
+
+ @Override
+ public void visitEnd() {
+ if (mMethodsToCheck.isEmpty()) {
+ super.visitEnd();
+ return;
+ }
+
+ boolean areAnyUncheckedMethods = false;
+
+ for (MethodDescription method : mMethodsToCheck) {
+ if (method.shouldCreateOverride == null) {
+ areAnyUncheckedMethods = true;
+ break;
+ }
+ }
+
+ if (areAnyUncheckedMethods) {
+ checkParentClass(mSuperName, mMethodsToCheck, mJarClassLoader);
+ }
+
+ super.visitEnd();
+ }
+
+ private boolean isClassView(String desc) {
+ Class currentClass = getClass(desc);
+ Class viewClass = getClass(VIEW_CLASS_DESCRIPTOR);
+ if (currentClass != null && viewClass != null) {
+ return viewClass.isAssignableFrom(currentClass);
+ }
+ return false;
+ }
+
+ private Class getClass(String desc) {
+ try {
+ return mJarClassLoader.loadClass(desc.replace('/', '.'));
+ } catch (ClassNotFoundException | NoClassDefFoundError | IllegalAccessError e) {
+ return null;
+ }
+ }
+
+ static void checkParentClass(String superClassName, ArrayList<MethodDescription> methodsToCheck,
+ ClassLoader jarClassLoader) {
+ try {
+ ClassReader cr = new ClassReader(getClassAsStream(jarClassLoader, superClassName));
+ ParentMethodCheckerClassAdapter parentChecker =
+ new ParentMethodCheckerClassAdapter(methodsToCheck, jarClassLoader);
+ cr.accept(parentChecker, EXPAND_FRAMES);
+ } catch (IOException ex) {
+ // Ignore errors in case class can't be loaded.
+ }
+ }
+
+ private static InputStream getClassAsStream(ClassLoader jarClassLoader, String desc) {
+ return jarClassLoader.getResourceAsStream(desc.replace('.', '/') + ".class");
+ }
+}
diff --git a/chromium/build/android/bytecode/java/org/chromium/bytecode/MethodDescription.java b/chromium/build/android/bytecode/java/org/chromium/bytecode/MethodDescription.java
new file mode 100644
index 00000000000..23b14536e17
--- /dev/null
+++ b/chromium/build/android/bytecode/java/org/chromium/bytecode/MethodDescription.java
@@ -0,0 +1,20 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.bytecode;
+
+class MethodDescription {
+ public final String methodName;
+ public final String description;
+ public final int access;
+ public Boolean shouldCreateOverride;
+
+ public MethodDescription(String methodName, String description, int access) {
+ this.methodName = methodName;
+ this.description = description;
+ this.access = access;
+ // A null value means we haven't checked the method.
+ this.shouldCreateOverride = null;
+ }
+}
diff --git a/chromium/build/android/bytecode/java/org/chromium/bytecode/ParentMethodCheckerClassAdapter.java b/chromium/build/android/bytecode/java/org/chromium/bytecode/ParentMethodCheckerClassAdapter.java
new file mode 100644
index 00000000000..d913f1a73e4
--- /dev/null
+++ b/chromium/build/android/bytecode/java/org/chromium/bytecode/ParentMethodCheckerClassAdapter.java
@@ -0,0 +1,94 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.bytecode;
+
+import static org.objectweb.asm.Opcodes.ACC_FINAL;
+import static org.objectweb.asm.Opcodes.ACC_PRIVATE;
+import static org.objectweb.asm.Opcodes.ASM7;
+
+import org.objectweb.asm.ClassVisitor;
+import org.objectweb.asm.MethodVisitor;
+
+import java.util.ArrayList;
+
+/**
+ * This ClassVisitor checks if the given class overrides methods on {@code methodsToCheck}, and if
+ * so it determines whether they can be overridden by a child class. If at the end any unchecked
+ * methods remain then we recurse on the class's superclass.
+ */
+class ParentMethodCheckerClassAdapter extends ClassVisitor {
+ private static final String OBJECT_CLASS_DESCRIPTOR = "java.lang.Object";
+
+ private final ArrayList<MethodDescription> mMethodsToCheck;
+ private final ClassLoader mJarClassLoader;
+ private String mSuperName;
+ private boolean mIsCheckingObjectClass;
+
+ public ParentMethodCheckerClassAdapter(
+ ArrayList<MethodDescription> methodsToCheck, ClassLoader jarClassLoader) {
+ super(ASM7);
+ mMethodsToCheck = methodsToCheck;
+ mJarClassLoader = jarClassLoader;
+ }
+
+ @Override
+ public void visit(int version, int access, String name, String signature, String superName,
+ String[] interfaces) {
+ super.visit(version, access, name, signature, superName, interfaces);
+
+ if (name.equals(OBJECT_CLASS_DESCRIPTOR)) {
+ mIsCheckingObjectClass = true;
+ return;
+ }
+
+ mSuperName = superName;
+ }
+
+ @Override
+ public MethodVisitor visitMethod(
+ int access, String name, String descriptor, String signature, String[] exceptions) {
+ if (mIsCheckingObjectClass) {
+ return super.visitMethod(access, name, descriptor, signature, exceptions);
+ }
+
+ for (MethodDescription methodToCheck : mMethodsToCheck) {
+ if (methodToCheck.shouldCreateOverride != null || !methodToCheck.methodName.equals(name)
+ || !methodToCheck.description.equals(descriptor)) {
+ continue;
+ }
+
+ // This class contains methodToCheck.
+ boolean isMethodPrivate = (access & ACC_PRIVATE) == ACC_PRIVATE;
+ boolean isMethodFinal = (access & ACC_FINAL) == ACC_FINAL;
+ // If the method is private or final then don't create an override.
+ methodToCheck.shouldCreateOverride = !isMethodPrivate && !isMethodFinal;
+ }
+
+ return super.visitMethod(access, name, descriptor, signature, exceptions);
+ }
+
+ @Override
+ public void visitEnd() {
+ if (mIsCheckingObjectClass) {
+ return;
+ }
+
+ boolean areAnyUncheckedMethods = false;
+
+ for (MethodDescription method : mMethodsToCheck) {
+ if (method.shouldCreateOverride == null) {
+ areAnyUncheckedMethods = true;
+ break;
+ }
+ }
+
+ if (areAnyUncheckedMethods) {
+ MethodCheckerClassAdapter.checkParentClass(
+ mSuperName, mMethodsToCheck, mJarClassLoader);
+ }
+
+ super.visitEnd();
+ }
+}
diff --git a/chromium/build/android/bytecode/java/org/chromium/bytecode/TraceEventAdder.java b/chromium/build/android/bytecode/java/org/chromium/bytecode/TraceEventAdder.java
new file mode 100644
index 00000000000..51f323f00a2
--- /dev/null
+++ b/chromium/build/android/bytecode/java/org/chromium/bytecode/TraceEventAdder.java
@@ -0,0 +1,87 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.bytecode;
+
+import org.objectweb.asm.ClassReader;
+import org.objectweb.asm.ClassVisitor;
+import org.objectweb.asm.Opcodes;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+
+/**
+ * Java application that modifies all implementations of "draw", "onMeasure" and "onLayout" on all
+ * {@link android.view.View} subclasses to wrap them in trace events.
+ */
+public class TraceEventAdder extends ByteCodeRewriter {
+ private final ClassLoader mClassPathJarsClassLoader;
+ private ArrayList<MethodDescription> mMethodsToTrace;
+
+ public static void main(String[] args) throws IOException {
+ // Invoke this script using //build/android/gyp/bytecode_rewriter.py
+ if (args.length < 2) {
+ System.err.println(
+ "Expected arguments: <input.jar> <output.jar> <input classpath jars>");
+ System.exit(1);
+ }
+
+ String input = args[0];
+ String output = args[1];
+
+ ArrayList<String> classPathJarsPaths = new ArrayList<>();
+ classPathJarsPaths.add(input);
+ classPathJarsPaths.addAll(Arrays.asList(Arrays.copyOfRange(args, 2, args.length)));
+ ClassLoader classPathJarsClassLoader = ByteCodeProcessor.loadJars(classPathJarsPaths);
+
+ TraceEventAdder adder = new TraceEventAdder(classPathJarsClassLoader);
+ adder.rewrite(new File(input), new File(output));
+ }
+
+ public TraceEventAdder(ClassLoader classPathJarsClassLoader) {
+ mClassPathJarsClassLoader = classPathJarsClassLoader;
+ }
+
+ @Override
+ protected boolean shouldRewriteClass(String classPath) {
+ try {
+ // If this jar's dependencies can't find Chromium's TraceEvent class then skip this
+ // class. Conceptually this could be fixed by adding a dependency on //base:base_java
+ // but that would cause circular dependencies and any changes to base_java would cause
+ // all android_library targets to require rebuilding.
+ mClassPathJarsClassLoader.loadClass("org.chromium.base.TraceEvent");
+ return true;
+ } catch (ClassNotFoundException ex) {
+ return false;
+ }
+ }
+
+ @Override
+ protected boolean shouldRewriteClass(ClassReader classReader) {
+ mMethodsToTrace = new ArrayList<>(Arrays.asList(
+ new MethodDescription("draw", "(Landroid/graphics/Canvas;)V", Opcodes.ACC_PUBLIC),
+ new MethodDescription("onMeasure", "(II)V", Opcodes.ACC_PROTECTED),
+ new MethodDescription("onLayout", "(ZIIII)V", Opcodes.ACC_PROTECTED)));
+
+ // This adapter will modify mMethodsToTrace to indicate which methods already exist in the
+ // class and which ones need to be overridden. In case the class is not an Android view
+ // we'll clear the list and skip rewriting.
+ MethodCheckerClassAdapter methodChecker =
+ new MethodCheckerClassAdapter(mMethodsToTrace, mClassPathJarsClassLoader);
+
+ classReader.accept(methodChecker, ClassReader.EXPAND_FRAMES);
+
+ return !mMethodsToTrace.isEmpty();
+ }
+
+ @Override
+ protected ClassVisitor getClassVisitorForClass(String classPath, ClassVisitor delegate) {
+ ClassVisitor chain = new TraceEventAdderClassAdapter(delegate, mMethodsToTrace);
+ chain = new EmptyOverrideGeneratorClassAdapter(chain, mMethodsToTrace);
+
+ return chain;
+ }
+}
diff --git a/chromium/build/android/bytecode/java/org/chromium/bytecode/TraceEventAdderClassAdapter.java b/chromium/build/android/bytecode/java/org/chromium/bytecode/TraceEventAdderClassAdapter.java
new file mode 100644
index 00000000000..c4a152d9950
--- /dev/null
+++ b/chromium/build/android/bytecode/java/org/chromium/bytecode/TraceEventAdderClassAdapter.java
@@ -0,0 +1,47 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.bytecode;
+
+import static org.objectweb.asm.Opcodes.ASM7;
+
+import org.objectweb.asm.ClassVisitor;
+import org.objectweb.asm.MethodVisitor;
+
+import java.util.ArrayList;
+
+/**
+ * A ClassVisitor for adding TraceEvent.begin and TraceEvent.end methods to any methods specified in
+ * a list.
+ */
+class TraceEventAdderClassAdapter extends ClassVisitor {
+ private final ArrayList<MethodDescription> mMethodsToTrace;
+ private String mShortClassName;
+
+ TraceEventAdderClassAdapter(ClassVisitor visitor, ArrayList<MethodDescription> methodsToTrace) {
+ super(ASM7, visitor);
+ mMethodsToTrace = methodsToTrace;
+ }
+
+ @Override
+ public void visit(int version, int access, String name, String signature, String superName,
+ String[] interfaces) {
+ super.visit(version, access, name, signature, superName, interfaces);
+ mShortClassName = name.substring(name.lastIndexOf('/') + 1);
+ }
+
+ @Override
+ public MethodVisitor visitMethod(final int access, final String name, String desc,
+ String signature, String[] exceptions) {
+ MethodVisitor mv = super.visitMethod(access, name, desc, signature, exceptions);
+
+ for (MethodDescription method : mMethodsToTrace) {
+ if (method.methodName.equals(name) && method.description.equals(desc)) {
+ return new TraceEventAdderMethodAdapter(mv, mShortClassName, name);
+ }
+ }
+
+ return mv;
+ }
+}
diff --git a/chromium/build/android/bytecode/java/org/chromium/bytecode/TraceEventAdderMethodAdapter.java b/chromium/build/android/bytecode/java/org/chromium/bytecode/TraceEventAdderMethodAdapter.java
new file mode 100644
index 00000000000..042b3d3c5f8
--- /dev/null
+++ b/chromium/build/android/bytecode/java/org/chromium/bytecode/TraceEventAdderMethodAdapter.java
@@ -0,0 +1,83 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.bytecode;
+
+import static org.objectweb.asm.Opcodes.ASM7;
+import static org.objectweb.asm.Opcodes.ATHROW;
+import static org.objectweb.asm.Opcodes.INVOKESTATIC;
+import static org.objectweb.asm.Opcodes.IRETURN;
+import static org.objectweb.asm.Opcodes.RETURN;
+
+import static org.chromium.bytecode.TypeUtils.STRING;
+import static org.chromium.bytecode.TypeUtils.VOID;
+
+import org.objectweb.asm.MethodVisitor;
+
+/**
+ * MethodVisitor that wraps all code in TraceEvent.begin and TraceEvent.end calls. TraceEvent.end
+ * calls are added on all returns and thrown exceptions.
+ *
+ * Example:
+ * <pre>
+ * {@code
+ * int methodToTrace(String foo){
+ *
+ * //Line added by rewriter:
+ * TraceEvent.begin("ClassName.methodToTrace");
+ *
+ * if(foo == null){
+ * //Line added by rewriter:
+ * TraceEvent.end("ClassName.methodToTrace");
+ *
+ * throw new Exception();
+ * }
+ * else if(foo.equals("Two")){
+ * //Line added by rewriter:
+ * TraceEvent.end("ClassName.methodToTrace");
+ *
+ * return 2;
+ * }
+ *
+ * //Line added by rewriter:
+ * TraceEvent.end("ClassName.methodToTrace");
+ *
+ * return 0;
+ * }
+ * }
+ * </pre>
+ *
+ */
+class TraceEventAdderMethodAdapter extends MethodVisitor {
+ private static final String TRACE_EVENT_DESCRIPTOR = "org/chromium/base/TraceEvent";
+ private static final String TRACE_EVENT_SIGNATURE = TypeUtils.getMethodDescriptor(VOID, STRING);
+ private final String mEventName;
+
+ public TraceEventAdderMethodAdapter(
+ MethodVisitor methodVisitor, String shortClassName, String methodName) {
+ super(ASM7, methodVisitor);
+
+ mEventName = shortClassName + "." + methodName;
+ }
+
+ @Override
+ public void visitCode() {
+ super.visitCode();
+
+ mv.visitLdcInsn(mEventName);
+ mv.visitMethodInsn(
+ INVOKESTATIC, TRACE_EVENT_DESCRIPTOR, "begin", TRACE_EVENT_SIGNATURE, false);
+ }
+
+ @Override
+ public void visitInsn(int opcode) {
+ if ((opcode >= IRETURN && opcode <= RETURN) || opcode == ATHROW) {
+ mv.visitLdcInsn(mEventName);
+ mv.visitMethodInsn(
+ INVOKESTATIC, TRACE_EVENT_DESCRIPTOR, "end", TRACE_EVENT_SIGNATURE, false);
+ }
+
+ mv.visitInsn(opcode);
+ }
+}
diff --git a/chromium/build/android/devil_chromium.json b/chromium/build/android/devil_chromium.json
index d19e6b65896..97c6b7ed301 100644
--- a/chromium/build/android/devil_chromium.json
+++ b/chromium/build/android/devil_chromium.json
@@ -111,7 +111,7 @@
"file_info": {
"default": {
"local_paths": [
- "../../third_party/android_build_tools/bundletool/bundletool-all-1.8.0.jar"
+ "../../third_party/android_build_tools/bundletool/bundletool.jar"
]
}
}
diff --git a/chromium/build/android/diff_resource_sizes.py b/chromium/build/android/diff_resource_sizes.py
index eefb6cdb209..0bd2c47b403 100755
--- a/chromium/build/android/diff_resource_sizes.py
+++ b/chromium/build/android/diff_resource_sizes.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -49,8 +49,8 @@ def DiffResults(chartjson, base_results, diff_results):
base_results: The chartjson-formatted size results of the base APK.
diff_results: The chartjson-formatted size results of the diff APK.
"""
- for graph_title, graph in base_results['charts'].iteritems():
- for trace_title, trace in graph.iteritems():
+ for graph_title, graph in base_results['charts'].items():
+ for trace_title, trace in graph.items():
perf_tests_results_helper.ReportPerfResult(
chartjson, graph_title, trace_title,
diff_results['charts'][graph_title][trace_title]['value']
@@ -67,8 +67,8 @@ def AddIntermediateResults(chartjson, base_results, diff_results):
base_results: The chartjson-formatted size results of the base APK.
diff_results: The chartjson-formatted size results of the diff APK.
"""
- for graph_title, graph in base_results['charts'].iteritems():
- for trace_title, trace in graph.iteritems():
+ for graph_title, graph in base_results['charts'].items():
+ for trace_title, trace in graph.items():
perf_tests_results_helper.ReportPerfResult(
chartjson, graph_title + '_base_apk', trace_title,
trace['value'], trace['units'], trace['improvement_direction'],
@@ -76,8 +76,8 @@ def AddIntermediateResults(chartjson, base_results, diff_results):
# Both base_results and diff_results should have the same charts/traces, but
# loop over them separately in case they don't
- for graph_title, graph in diff_results['charts'].iteritems():
- for trace_title, trace in graph.iteritems():
+ for graph_title, graph in diff_results['charts'].items():
+ for trace_title, trace in graph.items():
perf_tests_results_helper.ReportPerfResult(
chartjson, graph_title + '_diff_apk', trace_title,
trace['value'], trace['units'], trace['improvement_direction'],
diff --git a/chromium/build/android/docs/README.md b/chromium/build/android/docs/README.md
index 6392f7dd733..5ee0ca638f1 100644
--- a/chromium/build/android/docs/README.md
+++ b/chromium/build/android/docs/README.md
@@ -1,6 +1,7 @@
# Android Build Docs
-* [android_app_bundles.md](android_app_bundles.md)
+* [//docs/android_build_instructions.md](/docs/android_build_instructions.md)
+* [//docs/android_dynamic_feature_modules.md](/docs/android_dynamic_feature_modules.md)
* [build_config.md](build_config.md)
* [coverage.md](coverage.md)
* [java_toolchain.md](java_toolchain.md)
@@ -8,6 +9,8 @@
* [lint.md](lint.md)
* [life_of_a_resource.md](life_of_a_resource.md)
* [../incremental_install/README.md](../incremental_install/README.md)
+* [//docs/ui/android/bytecode_rewriting.md](/docs/ui/android/bytecode_rewriting.md)
+* [go/doubledown](https://goto.google.com/doubledown) (Googlers only)
See also:
* [//build/README.md](../../README.md)
diff --git a/chromium/build/android/docs/android_app_bundles.md b/chromium/build/android/docs/android_app_bundles.md
deleted file mode 100644
index e71fe27f318..00000000000
--- a/chromium/build/android/docs/android_app_bundles.md
+++ /dev/null
@@ -1,205 +0,0 @@
-# Introduction
-
-This document describes how the Chromium build system supports Android app
-bundles.
-
-[TOC]
-
-# Overview of app bundles
-
-An Android app bundle is an alternative application distribution format for
-Android applications on the Google Play Store, that allows reducing the size
-of binaries sent for installation to individual devices that run on Android L
-and beyond. For more information about them, see the official Android
-[documentation](https://developer.android.com/guide/app-bundle/).
-
-For the context of this document, the most important points are:
-
- - Unlike a regular APK (e.g. `foo.apk`), the bundle (e.g. `foo.aab`) cannot
- be installed directly on a device.
-
- - Instead, it must be processed into a set of installable split APKs, which
- are stored inside a special zip archive (e.g. `foo.apks`).
-
- - The splitting can be based on various criteria: e.g. language or screen
- density for resources, or cpu ABI for native code.
-
- - The bundle also uses the notion of dynamic features modules (DFMs) to
- separate several application features. Each module has its own code, assets
- and resources, and can be installed separately from the rest of the
- application if needed.
-
- - The main application itself is stored in the '`base`' module (this name
- cannot be changed).
-
-
-# Declaring app bundles with GN templates
-
-Here's an example that shows how to declare a simple bundle that contains a
-single base module, which enables language-based splits:
-
-```gn
-
- # First declare the first bundle module. The base module is the one
- # that contains the main application's code, resources and assets.
- android_app_bundle_module("foo_base_module") {
- # Declaration are similar to android_apk here.
- ...
- }
-
- # Second, declare the bundle itself.
- android_app_bundle("foo_bundle") {
- # Indicate the base module to use for this bundle
- base_module_target = ":foo_base_module"
-
- # The name of our bundle file (without any suffix). Default would
- # be 'foo_bundle' otherwise.
- bundle_name = "FooBundle"
-
- # Enable language-based splits for this bundle. Which means that
- # resources and assets specific to a given language will be placed
- # into their own split APK in the final .apks archive.
- enable_language_splits = true
-
- # Proguard settings must be passed at the bundle, not module, target.
- proguard_enabled = !is_java_debug
- }
-```
-
-When generating the `foo_bundle` target with Ninja, you will end up with
-the following:
-
- - The bundle file under `out/Release/apks/FooBundle.aab`
-
- - A helper script called `out/Release/bin/foo_bundle`, which can be used
- to install / launch / uninstall the bundle on local devices.
-
- This works like an APK wrapper script (e.g. `foo_apk`). Use `--help`
- to see all possible commands supported by the script.
-
-
-# Declaring dynamic feature modules with GN templates
-
-Please see
-[Dynamic Feature Modules](../../../docs/android_dynamic_feature_modules.md) for
-more details. In short, if you need more modules besides the base one, you
-will need to list all the extra ones using the extra_modules variable which
-takes a list of GN scopes, as in:
-
-```gn
-
- android_app_bundle_module("foo_base_module") {
- ...
- }
-
- android_app_bundle_module("foo_extra_module") {
- ...
- }
-
- android_app_bundle("foo_bundle") {
- base_module_target = ":foo_base_module"
-
- extra_modules = [
- { # NOTE: Scopes require one field per line, and no comma separators.
- name = "my_module"
- module_target = ":foo_extra_module"
- }
- ]
-
- ...
- }
-```
-
-Note that each extra module is identified by a unique name, which cannot
-be '`base`'.
-
-
-# Bundle signature issues
-
-Signing an app bundle is not necessary, unless you want to upload it to the
-Play Store. Since this process is very slow (it uses `jarsigner` instead of
-the much faster `apkbuilder`), you can control it with the `sign_bundle`
-variable, as described in the example above.
-
-The `.apks` archive however always contains signed split APKs. The keystore
-path/password/alias being used are the default ones, unless you use custom
-values when declaring the bundle itself, as in:
-
-```gn
- android_app_bundle("foo_bundle") {
- ...
- keystore_path = "//path/to/keystore"
- keystore_password = "K3y$t0Re-Pa$$w0rd"
- keystore_name = "my-signing-key-name"
- }
-```
-
-These values are not stored in the bundle itself, but in the wrapper script,
-which will use them to generate the `.apks` archive for you. This allows you
-to properly install updates on top of existing applications on any device.
-
-
-# Proguard and bundles
-
-When using an app bundle that is made of several modules, it is crucial to
-ensure that proguard, if enabled:
-
-- Keeps the obfuscated class names used by each module consistent.
-- Does not remove classes that are not used in one module, but referenced
- by others.
-
-To achieve this, a special scheme called *synchronized proguarding* is
-performed, which consists of the following steps:
-
-- The list of unoptimized .jar files from all modules are sent to a single
- proguard command. This generates a new temporary optimized *group* .jar file.
-
-- Each module extracts the optimized class files from the optimized *group*
- .jar file, to generate its own, module-specific, optimized .jar.
-
-- Each module-specific optimized .jar is then sent to dex generation.
-
-This synchronized proguarding step is added by the `android_app_bundle()` GN
-template. In practice this means the following:
-
- - `proguard_enabled` must be passed to `android_app_bundle` targets, but not
- to `android_app_bundle_module` ones.
-
- - `proguard_configs` can be still passed to individual modules, just
- like regular APKs. All proguard configs will be merged during the
- synchronized proguard step.
-
-
-# Manual generation and installation of .apks archives
-
-Note that the `foo_bundle` script knows how to generate the .apks archive
-from the bundle file, and install it to local devices for you. For example,
-to install and launch a bundle, use:
-
-```sh
- out/Release/bin/foo_bundle run
-```
-
-If you want to manually look or use the `.apks` archive, use the following
-command to generate it:
-
-```sh
- out/Release/bin/foo_bundle build-bundle-apks \
- --output-apks=/tmp/BundleFoo.apks
-```
-
-All split APKs within the archive will be properly signed. And you will be
-able to look at its content (with `unzip -l`), or install it manually with:
-
-```sh
- build/android/gyp/bundletool.py install-apks \
- --apks=/tmp/BundleFoo.apks \
- --adb=$(which adb)
-```
-
-The task of examining the manifest is simplified by running the following,
-which dumps the application manifest as XML to stdout:
-
-```sh
- build/android/gyp/bundletool.py dump-manifest
-```
diff --git a/chromium/build/android/docs/java_toolchain.md b/chromium/build/android/docs/java_toolchain.md
index ef11548eb49..4a391754726 100644
--- a/chromium/build/android/docs/java_toolchain.md
+++ b/chromium/build/android/docs/java_toolchain.md
@@ -223,7 +223,7 @@ We use several tools for static analysis.
* Runs as part of normal compilation. Controlled by GN arg: `use_errorprone_java_compiler`.
* Most useful check:
* Enforcement of `@GuardedBy` annotations.
-* List of enabled / disabled checks exists [within javac.py](https://cs.chromium.org/chromium/src/build/android/gyp/javac.py?l=30)
+* List of enabled / disabled checks exists [within compile_java.py](https://cs.chromium.org/chromium/src/build/android/gyp/compile_java.py?l=30)
* Many checks are currently disabled because there is work involved in fixing
violations they introduce. Please help!
* Custom checks for Chrome:
diff --git a/chromium/build/android/download_doclava.py b/chromium/build/android/download_doclava.py
index 1982fdb8469..059d1cbafe5 100755
--- a/chromium/build/android/download_doclava.py
+++ b/chromium/build/android/download_doclava.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
diff --git a/chromium/build/android/generate_jacoco_report.py b/chromium/build/android/generate_jacoco_report.py
index 73e497c04c2..da4a38e5143 100755
--- a/chromium/build/android/generate_jacoco_report.py
+++ b/chromium/build/android/generate_jacoco_report.py
@@ -35,29 +35,28 @@ _PARTIAL_PACKAGE_NAMES = ['com/google', 'org/chromium']
_SOURCES_JSON_FILES_SUFFIX = '__jacoco_sources.json'
-# These should match the jar class files generated in internal_rules.gni
-_DEVICE_CLASS_EXCLUDE_SUFFIX = 'host_filter.jar'
-_HOST_CLASS_EXCLUDE_SUFFIX = 'device_filter.jar'
-
-def _CreateClassfileArgs(class_files, exclude_suffix=None, include_substr=None):
+def _CreateClassfileArgs(class_files, report_type, include_substr=None):
"""Returns a filtered list of files with classfile option.
Args:
class_files: A list of class files.
- exclude_suffix: Suffix to look for to exclude.
+ report_type: A string indicating if device or host files are desired.
include_substr: A substring that must be present to include the file.
- exclude_suffix takes precedence over this.
Returns:
A list of files that don't use the suffix.
"""
+ # These should match the jar class files generated in internal_rules.gni
+ search_jar_suffix = '%s.filter.jar' % report_type
result_class_files = []
for f in class_files:
- include_file = True
- if exclude_suffix and f.endswith(exclude_suffix):
- include_file = False
- # Exclude overrides include.
+ include_file = False
+ if f.endswith(search_jar_suffix):
+ include_file = True
+
+ # If include_substr is specified, remove files that don't have the
+ # required substring.
if include_file and include_substr and include_substr not in f:
include_file = False
if include_file:
@@ -67,13 +66,7 @@ def _CreateClassfileArgs(class_files, exclude_suffix=None, include_substr=None):
def _GenerateReportOutputArgs(args, class_files, report_type):
- class_jar_exclude = None
- if report_type == 'device':
- class_jar_exclude = _DEVICE_CLASS_EXCLUDE_SUFFIX
- elif report_type == 'host':
- class_jar_exclude = _HOST_CLASS_EXCLUDE_SUFFIX
-
- cmd = _CreateClassfileArgs(class_files, class_jar_exclude,
+ cmd = _CreateClassfileArgs(class_files, report_type,
args.include_substr_filter)
if args.format == 'html':
report_dir = os.path.join(args.output_dir, report_type)
diff --git a/chromium/build/android/gradle/generate_gradle.py b/chromium/build/android/gradle/generate_gradle.py
index 4a8d6ca8f91..8a5c0abb8e7 100755
--- a/chromium/build/android/gradle/generate_gradle.py
+++ b/chromium/build/android/gradle/generate_gradle.py
@@ -57,6 +57,7 @@ _DEFAULT_TARGETS = [
'//chrome/android:chrome_junit_tests',
'//chrome/android:chrome_public_apk',
'//chrome/android:chrome_public_test_apk',
+ '//chrome/android:chrome_public_unit_test_apk',
'//content/public/android:content_junit_tests',
'//content/shell/android:content_shell_apk',
# Below must be included even with --all since they are libraries.
diff --git a/chromium/build/android/gradle/gn_to_cmake.py b/chromium/build/android/gradle/gn_to_cmake.py
index d3e80ae7684..72898254e77 100755
--- a/chromium/build/android/gradle/gn_to_cmake.py
+++ b/chromium/build/android/gradle/gn_to_cmake.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
diff --git a/chromium/build/android/gyp/OWNERS b/chromium/build/android/gyp/OWNERS
index 25557e1fc55..df0fa641f83 100644
--- a/chromium/build/android/gyp/OWNERS
+++ b/chromium/build/android/gyp/OWNERS
@@ -2,3 +2,5 @@ agrieve@chromium.org
digit@chromium.org
smaier@chromium.org
wnwen@chromium.org
+
+per-file create_unwind_table*.py=file://base/profiler/OWNERS \ No newline at end of file
diff --git a/chromium/build/android/gyp/assert_static_initializers.py b/chromium/build/android/gyp/assert_static_initializers.py
index 9af5e2b825b..5f746e97110 100755
--- a/chromium/build/android/gyp/assert_static_initializers.py
+++ b/chromium/build/android/gyp/assert_static_initializers.py
@@ -23,8 +23,9 @@ _DUMP_STATIC_INITIALIZERS_PATH = os.path.join(build_utils.DIR_SOURCE_ROOT,
def _RunReadelf(so_path, options, tool_prefix=''):
- return subprocess.check_output([tool_prefix + 'readelf'] + options +
- [so_path]).decode('utf8')
+ return subprocess.check_output(
+ [tool_prefix + 'readobj', '--elf-output-style=GNU'] + options +
+ [so_path]).decode('utf8')
def _ParseLibBuildId(so_path, tool_prefix):
diff --git a/chromium/build/android/gyp/bundletool.py b/chromium/build/android/gyp/bundletool.py
index 372e55226d7..cfcc1c44220 100755
--- a/chromium/build/android/gyp/bundletool.py
+++ b/chromium/build/android/gyp/bundletool.py
@@ -19,10 +19,7 @@ BUNDLETOOL_DIR = os.path.abspath(os.path.join(
__file__, '..', '..', '..', '..', 'third_party', 'android_build_tools',
'bundletool'))
-BUNDLETOOL_VERSION = '1.8.0'
-
-BUNDLETOOL_JAR_PATH = os.path.join(
- BUNDLETOOL_DIR, 'bundletool-all-%s.jar' % BUNDLETOOL_VERSION)
+BUNDLETOOL_JAR_PATH = os.path.join(BUNDLETOOL_DIR, 'bundletool.jar')
def RunBundleTool(args, warnings_as_errors=(), print_stdout=False):
diff --git a/chromium/build/android/gyp/compile_java.py b/chromium/build/android/gyp/compile_java.py
index ce57ea60bdf..078af568e1c 100755
--- a/chromium/build/android/gyp/compile_java.py
+++ b/chromium/build/android/gyp/compile_java.py
@@ -71,6 +71,8 @@ ERRORPRONE_WARNINGS_TO_DISABLE = [
# Android platform default is always UTF-8.
# https://developer.android.com/reference/java/nio/charset/Charset.html#defaultCharset()
'DefaultCharset',
+ # Low priority since there are lots of tags that don't fit this check.
+ 'UnrecognisedJavadocTag',
# Low priority since the alternatives still work.
'JdkObsolete',
# We don't use that many lambdas.
@@ -240,6 +242,10 @@ def _ParsePackageAndClassNames(java_file):
# Considers a leading * as a continuation of a multi-line comment (our
# linter doesn't enforce a space before it like there should be).
l = re.sub(r'^(?://.*|/?\*.*?(?:\*/\s*|$))', '', l)
+ # Stripping things between double quotes (strings), so if the word "class"
+ # shows up in a string this doesn't trigger. This isn't strictly correct
+ # (with escaped quotes) but covers a very large percentage of cases.
+ l = re.sub('(?:".*?")', '', l)
m = re.match(r'package\s+(.*?);', l)
if m and not package_name:
diff --git a/chromium/build/android/gyp/compile_resources.py b/chromium/build/android/gyp/compile_resources.py
index 54ca0b3f140..9add95aed85 100755
--- a/chromium/build/android/gyp/compile_resources.py
+++ b/chromium/build/android/gyp/compile_resources.py
@@ -381,6 +381,7 @@ def _FixManifest(options, temp_dir, extra_manifest=None):
Tuple of:
* Manifest path within |temp_dir|.
* Original package_name.
+ * Manifest package name.
"""
def maybe_extract_version(j):
try:
@@ -432,8 +433,10 @@ def _FixManifest(options, temp_dir, extra_manifest=None):
manifest_node.set('platformBuildVersionName', version_name)
orig_package = manifest_node.get('package')
+ fixed_package = orig_package
if options.arsc_package_name:
manifest_node.set('package', options.arsc_package_name)
+ fixed_package = options.arsc_package_name
if options.debuggable:
app_node.set('{%s}%s' % (manifest_utils.ANDROID_NAMESPACE, 'debuggable'),
@@ -452,7 +455,7 @@ def _FixManifest(options, temp_dir, extra_manifest=None):
min_sdk_node.set(dist_value, options.min_sdk_version)
manifest_utils.SaveManifest(doc, debug_manifest_path)
- return debug_manifest_path, orig_package
+ return debug_manifest_path, orig_package, fixed_package
def _CreateKeepPredicate(resource_exclusion_regex,
@@ -767,6 +770,8 @@ def _PackageApk(options, build):
options.min_sdk_version,
'--target-sdk-version',
options.target_sdk_version,
+ '--output-text-symbols',
+ build.r_txt_path,
]
for j in options.include_resources:
@@ -782,10 +787,6 @@ def _PackageApk(options, build):
link_command += ['--proguard-main-dex', build.proguard_main_dex_path]
if options.emit_ids_out:
link_command += ['--emit-ids', build.emit_ids_path]
- if options.r_text_in:
- shutil.copyfile(options.r_text_in, build.r_txt_path)
- else:
- link_command += ['--output-text-symbols', build.r_txt_path]
# Note: only one of --proto-format, --shared-lib or --app-as-shared-lib
# can be used with recent versions of aapt2.
@@ -802,8 +803,8 @@ def _PackageApk(options, build):
'--allow-reserved-package-id',
]
- fixed_manifest, desired_manifest_package_name = _FixManifest(
- options, build.temp_dir)
+ fixed_manifest, desired_manifest_package_name, fixed_manifest_package = (
+ _FixManifest(options, build.temp_dir))
if options.rename_manifest_package:
desired_manifest_package_name = options.rename_manifest_package
@@ -816,7 +817,7 @@ def _PackageApk(options, build):
# Also creates R.txt
if options.use_resource_ids_path:
_CreateStableIdsFile(options.use_resource_ids_path, build.stable_ids_path,
- desired_manifest_package_name)
+ fixed_manifest_package)
link_command += ['--stable-ids', build.stable_ids_path]
link_command += partials
@@ -915,7 +916,7 @@ def _WriteOutputs(options, build):
def _CreateNormalizedManifestForVerification(options):
with build_utils.TempDir() as tempdir:
- fixed_manifest, _ = _FixManifest(
+ fixed_manifest, _, _ = _FixManifest(
options, tempdir, extra_manifest=options.extra_verification_manifest)
with open(fixed_manifest) as f:
return manifest_utils.NormalizeManifest(f.read())
@@ -1011,7 +1012,9 @@ def main(args):
_, package_id = resource_utils.ExtractArscPackage(
options.aapt2_path,
build.arsc_path if options.arsc_path else build.proto_path)
- if package_id != expected_id:
+ # When there are no resources, ExtractArscPackage returns (None, None), in
+ # this case there is no need to check for matching package ID.
+ if package_id is not None and package_id != expected_id:
raise Exception(
'Invalid package ID 0x%x (expected 0x%x)' % (package_id, expected_id))
diff --git a/chromium/build/android/gyp/compile_resources.pydeps b/chromium/build/android/gyp/compile_resources.pydeps
index 174b52697c6..907601422df 100644
--- a/chromium/build/android/gyp/compile_resources.pydeps
+++ b/chromium/build/android/gyp/compile_resources.pydeps
@@ -2,6 +2,7 @@
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/compile_resources.pydeps build/android/gyp/compile_resources.py
../../../third_party/jinja2/__init__.py
../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/_identifier.py
../../../third_party/jinja2/asyncfilters.py
../../../third_party/jinja2/asyncsupport.py
../../../third_party/jinja2/bccache.py
@@ -23,29 +24,6 @@
../../../third_party/markupsafe/__init__.py
../../../third_party/markupsafe/_compat.py
../../../third_party/markupsafe/_native.py
-../../../third_party/protobuf/python/google/__init__.py
-../../../third_party/protobuf/python/google/protobuf/__init__.py
-../../../third_party/protobuf/python/google/protobuf/descriptor.py
-../../../third_party/protobuf/python/google/protobuf/descriptor_database.py
-../../../third_party/protobuf/python/google/protobuf/descriptor_pool.py
-../../../third_party/protobuf/python/google/protobuf/internal/__init__.py
-../../../third_party/protobuf/python/google/protobuf/internal/api_implementation.py
-../../../third_party/protobuf/python/google/protobuf/internal/containers.py
-../../../third_party/protobuf/python/google/protobuf/internal/decoder.py
-../../../third_party/protobuf/python/google/protobuf/internal/encoder.py
-../../../third_party/protobuf/python/google/protobuf/internal/enum_type_wrapper.py
-../../../third_party/protobuf/python/google/protobuf/internal/extension_dict.py
-../../../third_party/protobuf/python/google/protobuf/internal/message_listener.py
-../../../third_party/protobuf/python/google/protobuf/internal/python_message.py
-../../../third_party/protobuf/python/google/protobuf/internal/type_checkers.py
-../../../third_party/protobuf/python/google/protobuf/internal/well_known_types.py
-../../../third_party/protobuf/python/google/protobuf/internal/wire_format.py
-../../../third_party/protobuf/python/google/protobuf/message.py
-../../../third_party/protobuf/python/google/protobuf/message_factory.py
-../../../third_party/protobuf/python/google/protobuf/reflection.py
-../../../third_party/protobuf/python/google/protobuf/symbol_database.py
-../../../third_party/protobuf/python/google/protobuf/text_encoding.py
-../../../third_party/protobuf/python/google/protobuf/text_format.py
../../../third_party/six/src/six.py
../../gn_helpers.py
compile_resources.py
diff --git a/chromium/build/android/gyp/create_apk_operations_script.py b/chromium/build/android/gyp/create_apk_operations_script.py
index 660567f0deb..a5a5b6658a9 100755
--- a/chromium/build/android/gyp/create_apk_operations_script.py
+++ b/chromium/build/android/gyp/create_apk_operations_script.py
@@ -12,7 +12,7 @@ import sys
from util import build_utils
SCRIPT_TEMPLATE = string.Template("""\
-#!/usr/bin/env python
+#!/usr/bin/env python3
#
# This file was generated by build/android/gyp/create_apk_operations_script.py
@@ -26,21 +26,14 @@ def main():
sys.path.append(resolve(${APK_OPERATIONS_DIR}))
import apk_operations
output_dir = resolve(${OUTPUT_DIR})
- try:
- apk_operations.Run(
- output_dir,
- resolve(${APK_PATH}),
- [resolve(p) for p in ${ADDITIONAL_APK_PATHS}],
- resolve(${INC_JSON_PATH}),
- ${FLAGS_FILE},
- ${TARGET_CPU},
- resolve(${MAPPING_PATH}))
- except TypeError:
- rel_output_dir = os.path.relpath(output_dir)
- rel_script_path = os.path.relpath(sys.argv[0], output_dir)
- sys.stderr.write('Script out-of-date. Rebuild via:\\n')
- sys.stderr.write(' ninja -C %s %s\\n' % (rel_output_dir, rel_script_path))
- return 1
+ apk_operations.Run(
+ output_dir,
+ resolve(${APK_PATH}),
+ [resolve(p) for p in ${ADDITIONAL_APK_PATHS}],
+ resolve(${INC_JSON_PATH}),
+ ${FLAGS_FILE},
+ ${TARGET_CPU},
+ resolve(${MAPPING_PATH}))
if __name__ == '__main__':
diff --git a/chromium/build/android/gyp/create_app_bundle.py b/chromium/build/android/gyp/create_app_bundle.py
index 0b44c163ed1..8d03f08c34e 100755
--- a/chromium/build/android/gyp/create_app_bundle.py
+++ b/chromium/build/android/gyp/create_app_bundle.py
@@ -88,6 +88,9 @@ def _ParseArgs(args):
'--compress-shared-libraries',
action='store_true',
help='Whether to store native libraries compressed.')
+ parser.add_argument('--compress-dex',
+ action='store_true',
+ help='Compress .dex files')
parser.add_argument('--split-dimensions',
help="GN-list of split dimensions to support.")
parser.add_argument(
@@ -162,13 +165,15 @@ def _MakeSplitDimension(value, enabled):
return {'value': value, 'negate': not enabled}
-def _GenerateBundleConfigJson(uncompressed_assets, compress_shared_libraries,
- split_dimensions, base_master_resource_ids):
+def _GenerateBundleConfigJson(uncompressed_assets, compress_dex,
+ compress_shared_libraries, split_dimensions,
+ base_master_resource_ids):
"""Generate a dictionary that can be written to a JSON BuildConfig.
Args:
uncompressed_assets: A list or set of file paths under assets/ that always
be stored uncompressed.
+    compress_dex: Boolean, whether to compress .dex.
compress_shared_libraries: Boolean, whether to compress native libs.
split_dimensions: list of split dimensions.
base_master_resource_ids: Optional list of 32-bit resource IDs to keep
@@ -195,6 +200,10 @@ def _GenerateBundleConfigJson(uncompressed_assets, compress_shared_libraries,
uncompressed_globs.extend('assets/' + x for x in uncompressed_assets)
# NOTE: Use '**' instead of '*' to work through directories!
uncompressed_globs.extend('**.' + ext for ext in _UNCOMPRESSED_FILE_EXTS)
+ if not compress_dex:
+ # Explicit glob required only when using bundletool. Play Store looks for
+ # "uncompressDexFiles" set below.
+ uncompressed_globs.extend('classes*.dex')
data = {
'optimizations': {
@@ -482,9 +491,11 @@ def main(args):
base_master_resource_ids = _GenerateBaseResourcesAllowList(
options.base_module_rtxt_path, options.base_allowlist_rtxt_path)
- bundle_config = _GenerateBundleConfigJson(
- options.uncompressed_assets, options.compress_shared_libraries,
- split_dimensions, base_master_resource_ids)
+ bundle_config = _GenerateBundleConfigJson(options.uncompressed_assets,
+ options.compress_dex,
+ options.compress_shared_libraries,
+ split_dimensions,
+ base_master_resource_ids)
tmp_bundle = os.path.join(tmp_dir, 'tmp_bundle')
diff --git a/chromium/build/android/gyp/create_app_bundle.pydeps b/chromium/build/android/gyp/create_app_bundle.pydeps
index cbb471abca7..503dfb0dc57 100644
--- a/chromium/build/android/gyp/create_app_bundle.pydeps
+++ b/chromium/build/android/gyp/create_app_bundle.pydeps
@@ -14,6 +14,7 @@
../../../third_party/catapult/devil/devil/utils/cmd_helper.py
../../../third_party/jinja2/__init__.py
../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/_identifier.py
../../../third_party/jinja2/asyncfilters.py
../../../third_party/jinja2/asyncsupport.py
../../../third_party/jinja2/bccache.py
diff --git a/chromium/build/android/gyp/create_app_bundle_apks.pydeps b/chromium/build/android/gyp/create_app_bundle_apks.pydeps
index 20d8ffe8f9f..5e04dae1d9e 100644
--- a/chromium/build/android/gyp/create_app_bundle_apks.pydeps
+++ b/chromium/build/android/gyp/create_app_bundle_apks.pydeps
@@ -2,6 +2,7 @@
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_app_bundle_apks.pydeps build/android/gyp/create_app_bundle_apks.py
../../../third_party/jinja2/__init__.py
../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/_identifier.py
../../../third_party/jinja2/asyncfilters.py
../../../third_party/jinja2/asyncsupport.py
../../../third_party/jinja2/bccache.py
diff --git a/chromium/build/android/gyp/create_bundle_wrapper_script.py b/chromium/build/android/gyp/create_bundle_wrapper_script.py
index 282e2069a2d..1bdb7670d3a 100755
--- a/chromium/build/android/gyp/create_bundle_wrapper_script.py
+++ b/chromium/build/android/gyp/create_bundle_wrapper_script.py
@@ -13,7 +13,7 @@ import sys
from util import build_utils
SCRIPT_TEMPLATE = string.Template("""\
-#!/usr/bin/env python
+#!/usr/bin/env python3
#
# This file was generated by build/android/gyp/create_bundle_wrapper_script.py
diff --git a/chromium/build/android/gyp/create_java_binary_script.py b/chromium/build/android/gyp/create_java_binary_script.py
index 5bc9d08ab1f..91fe600ea82 100755
--- a/chromium/build/android/gyp/create_java_binary_script.py
+++ b/chromium/build/android/gyp/create_java_binary_script.py
@@ -21,7 +21,7 @@ from util import build_utils
# to the directory that the script is written in and then, when run, must
# recalculate the paths relative to the current directory.
script_template = """\
-#!/usr/bin/env python
+#!/usr/bin/env python3
#
# This file was generated by build/android/gyp/create_java_binary_script.py
diff --git a/chromium/build/android/gyp/create_r_java.pydeps b/chromium/build/android/gyp/create_r_java.pydeps
index 45121e3f7cc..b259751ced0 100644
--- a/chromium/build/android/gyp/create_r_java.pydeps
+++ b/chromium/build/android/gyp/create_r_java.pydeps
@@ -2,6 +2,7 @@
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_r_java.pydeps build/android/gyp/create_r_java.py
../../../third_party/jinja2/__init__.py
../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/_identifier.py
../../../third_party/jinja2/asyncfilters.py
../../../third_party/jinja2/asyncsupport.py
../../../third_party/jinja2/bccache.py
diff --git a/chromium/build/android/gyp/create_r_txt.pydeps b/chromium/build/android/gyp/create_r_txt.pydeps
index c7698eefaa6..54e5670eb0a 100644
--- a/chromium/build/android/gyp/create_r_txt.pydeps
+++ b/chromium/build/android/gyp/create_r_txt.pydeps
@@ -2,6 +2,7 @@
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_r_txt.pydeps build/android/gyp/create_r_txt.py
../../../third_party/jinja2/__init__.py
../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/_identifier.py
../../../third_party/jinja2/asyncfilters.py
../../../third_party/jinja2/asyncsupport.py
../../../third_party/jinja2/bccache.py
diff --git a/chromium/build/android/gyp/create_ui_locale_resources.pydeps b/chromium/build/android/gyp/create_ui_locale_resources.pydeps
index 6bb98dd2f20..a1472376777 100644
--- a/chromium/build/android/gyp/create_ui_locale_resources.pydeps
+++ b/chromium/build/android/gyp/create_ui_locale_resources.pydeps
@@ -2,6 +2,7 @@
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_ui_locale_resources.pydeps build/android/gyp/create_ui_locale_resources.py
../../../third_party/jinja2/__init__.py
../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/_identifier.py
../../../third_party/jinja2/asyncfilters.py
../../../third_party/jinja2/asyncsupport.py
../../../third_party/jinja2/bccache.py
diff --git a/chromium/build/android/gyp/create_unwind_table.py b/chromium/build/android/gyp/create_unwind_table.py
new file mode 100755
index 00000000000..ef6be776a85
--- /dev/null
+++ b/chromium/build/android/gyp/create_unwind_table.py
@@ -0,0 +1,108 @@
+#!/usr/bin/env python3
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Creates a table of unwind information in Android Chrome's bespoke format."""
+
+import re
+from typing import Iterable, NamedTuple, TextIO, Tuple
+
+_STACK_CFI_INIT_REGEX = re.compile(
+ r'^STACK CFI INIT ([0-9a-f]+) ([0-9a-f]+) (.+)$')
+_STACK_CFI_REGEX = re.compile(r'^STACK CFI ([0-9a-f]+) (.+)$')
+
+
+class AddressCfi(NamedTuple):
+ """Record representing CFI for an address within a function.
+
+ Represents the Call Frame Information required to unwind from an address in a
+ function.
+
+ Attributes:
+ address: The address.
+ unwind_instructions: The unwind instructions for the address.
+
+ """
+ address: int
+ unwind_instructions: str
+
+
+class FunctionCfi(NamedTuple):
+ """Record representing CFI for a function.
+
+ Note: address_cfi[0].address is the start address of the function.
+
+ Attributes:
+ size: The function size in bytes.
+ address_cfi: The CFI at each address in the function.
+
+ """
+ size: int
+ address_cfi: Tuple[AddressCfi, ...]
+
+
+def FilterToNonTombstoneCfi(stream: TextIO) -> Iterable[str]:
+ """Generates non-tombstone STACK CFI lines from the stream.
+
+  STACK CFI functions with address 0 are a 'tombstone' record
+ associated with dead code and can be ignored. See
+ https://bugs.llvm.org/show_bug.cgi?id=47148#c2.
+
+ Args:
+ stream: A file object.
+
+ Returns:
+ An iterable over the non-tombstone STACK CFI lines in the stream.
+ """
+ in_tombstone_function = False
+ for line in stream:
+ if not line.startswith('STACK CFI '):
+ continue
+
+ if line.startswith('STACK CFI INIT 0 '):
+ in_tombstone_function = True
+ elif line.startswith('STACK CFI INIT '):
+ in_tombstone_function = False
+
+ if not in_tombstone_function:
+ yield line
+
+
+def ReadFunctionCfi(stream: TextIO) -> Iterable[FunctionCfi]:
+ """Generates FunctionCfi records from the stream.
+
+ Args:
+ stream: A file object.
+
+ Returns:
+ An iterable over FunctionCfi corresponding to the non-tombstone STACK CFI
+ lines in the stream.
+ """
+ current_function_address = None
+ current_function_size = None
+ current_function_address_cfi = []
+ for line in FilterToNonTombstoneCfi(stream):
+ cfi_init_match = _STACK_CFI_INIT_REGEX.search(line)
+ if cfi_init_match:
+ # Function CFI with address 0 are tombstone entries per
+ # https://bugs.llvm.org/show_bug.cgi?id=47148#c2 and should have been
+ # filtered in `FilterToNonTombstoneCfi`.
+ assert current_function_address != 0
+ if (current_function_address is not None
+ and current_function_size is not None):
+ yield FunctionCfi(current_function_size,
+ tuple(current_function_address_cfi))
+ current_function_address = int(cfi_init_match.group(1), 16)
+ current_function_size = int(cfi_init_match.group(2), 16)
+ current_function_address_cfi = [
+ AddressCfi(int(cfi_init_match.group(1), 16), cfi_init_match.group(3))
+ ]
+ else:
+ cfi_match = _STACK_CFI_REGEX.search(line)
+ assert cfi_match
+ current_function_address_cfi.append(
+ AddressCfi(int(cfi_match.group(1), 16), cfi_match.group(2)))
+
+ assert current_function_address is not None
+ assert current_function_size is not None
+ yield FunctionCfi(current_function_size, tuple(current_function_address_cfi))
diff --git a/chromium/build/android/gyp/create_unwind_table_tests.py b/chromium/build/android/gyp/create_unwind_table_tests.py
new file mode 100755
index 00000000000..014df948a7d
--- /dev/null
+++ b/chromium/build/android/gyp/create_unwind_table_tests.py
@@ -0,0 +1,91 @@
+#!/usr/bin/env python3
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Tests for create_unwind_table.py.
+
+This test suite contains tests for the custom unwind table creation for 32-bit
+arm builds.
+"""
+
+import ctypes
+import io
+import unittest
+
+from create_unwind_table import AddressCfi, FilterToNonTombstoneCfi, \
+ FunctionCfi, ReadFunctionCfi
+
+
+class _TestReadFunctionCfi(unittest.TestCase):
+ def testFilterTombstone(self):
+ input_lines = [
+ 'file name',
+ 'STACK CFI INIT 0 ',
+ 'STACK CFI 100 ',
+ 'STACK CFI INIT 1 ',
+ 'STACK CFI 200 ',
+ ]
+
+ f = io.StringIO(''.join(line + '\n' for line in input_lines))
+
+ self.assertListEqual([
+ 'STACK CFI INIT 1 \n',
+ 'STACK CFI 200 \n',
+ ], list(FilterToNonTombstoneCfi(f)))
+
+ def testReadFunctionCfiTombstoneFiltered(self):
+ input_lines = [
+ 'STACK CFI INIT 0 50 .cfa: sp 0 + .ra: lr', # Tombstone function.
+ 'STACK CFI 2 .cfa: sp 24 + .ra: .cfa - 4 + ^ r4: .cfa - 16 + ^ '
+ 'r5: .cfa - 12 + ^ r7: .cfa - 8 + ^',
+ 'STACK CFI INIT 15b6490 4 .cfa: sp 0 + .ra: lr',
+ ]
+
+ f = io.StringIO(''.join(line + '\n' for line in input_lines))
+
+ self.assertListEqual(
+ [FunctionCfi(4, (AddressCfi(0x15b6490, '.cfa: sp 0 + .ra: lr'), ))],
+ list(ReadFunctionCfi(f)))
+
+ def testReadFunctionCfiSingleFunction(self):
+ input_lines = [
+ 'STACK CFI INIT 15b6490 4 .cfa: sp 0 + .ra: lr',
+ 'STACK CFI 2 .cfa: sp 24 + .ra: .cfa - 4 + ^ r4: .cfa - 16 + ^ '
+ 'r5: .cfa - 12 + ^ r7: .cfa - 8 + ^',
+ ]
+
+ f = io.StringIO(''.join(line + '\n' for line in input_lines))
+
+ self.assertListEqual([
+ FunctionCfi(4, (
+ AddressCfi(0x15b6490, '.cfa: sp 0 + .ra: lr'),
+ AddressCfi(
+ 0x2, '.cfa: sp 24 + .ra: .cfa - 4 + ^ r4: .cfa - 16 + ^ '
+ 'r5: .cfa - 12 + ^ r7: .cfa - 8 + ^'),
+ ))
+ ], list(ReadFunctionCfi(f)))
+
+ def testReadFunctionCfiMultipleFunctions(self):
+ input_lines = [
+ 'STACK CFI INIT 15b6490 4 .cfa: sp 0 + .ra: lr',
+ 'STACK CFI 2 .cfa: sp 24 + .ra: .cfa - 4 + ^ r4: .cfa - 16 + ^ '
+ 'r5: .cfa - 12 + ^ r7: .cfa - 8 + ^',
+ 'STACK CFI INIT 15b655a 26 .cfa: sp 0 + .ra: lr',
+ 'STACK CFI 15b655c .cfa: sp 8 + .ra: .cfa - 4 + ^ r4: .cfa - 8 + ^',
+ ]
+
+ f = io.StringIO(''.join(line + '\n' for line in input_lines))
+
+ self.assertListEqual([
+ FunctionCfi(0x4, (
+ AddressCfi(0x15b6490, '.cfa: sp 0 + .ra: lr'),
+ AddressCfi(
+ 0x2, '.cfa: sp 24 + .ra: .cfa - 4 + ^ r4: .cfa - 16 + ^ '
+ 'r5: .cfa - 12 + ^ r7: .cfa - 8 + ^'),
+ )),
+ FunctionCfi(0x26, (
+ AddressCfi(0x15b655a, '.cfa: sp 0 + .ra: lr'),
+ AddressCfi(0x15b655c,
+ '.cfa: sp 8 + .ra: .cfa - 4 + ^ r4: .cfa - 8 + ^'),
+ )),
+ ], list(ReadFunctionCfi(f)))
diff --git a/chromium/build/android/gyp/desugar.py b/chromium/build/android/gyp/desugar.py
deleted file mode 100755
index 87eb1590a5f..00000000000
--- a/chromium/build/android/gyp/desugar.py
+++ /dev/null
@@ -1,67 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import argparse
-import os
-import sys
-
-from util import build_utils
-
-
-def main():
- args = build_utils.ExpandFileArgs(sys.argv[1:])
- parser = argparse.ArgumentParser()
- build_utils.AddDepfileOption(parser)
- parser.add_argument('--desugar-jar', required=True,
- help='Path to Desugar.jar.')
- parser.add_argument('--input-jar', required=True,
- help='Jar input path to include .class files from.')
- parser.add_argument('--output-jar', required=True,
- help='Jar output path.')
- parser.add_argument('--classpath',
- action='append',
- required=True,
- help='Classpath.')
- parser.add_argument('--bootclasspath', required=True,
- help='Path to javac bootclasspath interface jar.')
- parser.add_argument('--warnings-as-errors',
- action='store_true',
- help='Treat all warnings as errors.')
- options = parser.parse_args(args)
-
- options.bootclasspath = build_utils.ParseGnList(options.bootclasspath)
- options.classpath = build_utils.ParseGnList(options.classpath)
-
- cmd = build_utils.JavaCmd(options.warnings_as_errors) + [
- '-jar',
- options.desugar_jar,
- '--input',
- options.input_jar,
- '--output',
- options.output_jar,
- '--generate_base_classes_for_default_methods',
- # Don't include try-with-resources files in every .jar. Instead, they
- # are included via //third_party/bazel/desugar:desugar_runtime_java.
- '--desugar_try_with_resources_omit_runtime_classes',
- ]
- for path in options.bootclasspath:
- cmd += ['--bootclasspath_entry', path]
- for path in options.classpath:
- cmd += ['--classpath_entry', path]
- build_utils.CheckOutput(
- cmd,
- print_stdout=False,
- stderr_filter=build_utils.FilterReflectiveAccessJavaWarnings,
- fail_on_output=options.warnings_as_errors)
-
- if options.depfile:
- build_utils.WriteDepfile(options.depfile,
- options.output_jar,
- inputs=options.bootclasspath + options.classpath)
-
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/chromium/build/android/gyp/desugar.pydeps b/chromium/build/android/gyp/desugar.pydeps
deleted file mode 100644
index 3e5c9ea2312..00000000000
--- a/chromium/build/android/gyp/desugar.pydeps
+++ /dev/null
@@ -1,6 +0,0 @@
-# Generated by running:
-# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/desugar.pydeps build/android/gyp/desugar.py
-../../gn_helpers.py
-desugar.py
-util/__init__.py
-util/build_utils.py
diff --git a/chromium/build/android/gyp/dex.py b/chromium/build/android/gyp/dex.py
index e55ab4eccf0..f64ddceb359 100755
--- a/chromium/build/android/gyp/dex.py
+++ b/chromium/build/android/gyp/dex.py
@@ -30,6 +30,7 @@ _IGNORE_WARNINGS = (
r'Type `libcore.io.Memory` was not found',
# Caused by flogger supporting these as fallbacks. Not needed at runtime.
r'Type `dalvik.system.VMStack` was not found',
+ r'Type `sun.misc.JavaLangAccess` was not found',
r'Type `sun.misc.SharedSecrets` was not found',
# Caused by jacoco code coverage:
r'Type `java.lang.management.ManagementFactory` was not found',
@@ -49,6 +50,10 @@ _IGNORE_WARNINGS = (
r'Ignoring -shrinkunusedprotofields since the protobuf-lite runtime is',
)
+_SKIPPED_CLASS_FILE_NAMES = (
+ 'module-info.class', # Explicitly skipped by r8/utils/FileUtils#isClassFile
+)
+
def _ParseArgs(args):
args = build_utils.ExpandFileArgs(args)
@@ -324,7 +329,7 @@ def _ZipMultidex(file_dir, dex_files):
if not ordered_files:
raise Exception('Could not find classes.dex multidex file in %s',
dex_files)
- for dex_idx in xrange(2, len(dex_files) + 1):
+ for dex_idx in range(2, len(dex_files) + 1):
archive_name = 'classes%d.dex' % dex_idx
for f in dex_files:
if f.endswith(archive_name):
@@ -424,7 +429,7 @@ def _IntermediateDexFilePathsFromInputJars(class_inputs, incremental_dir):
for jar in class_inputs:
with zipfile.ZipFile(jar, 'r') as z:
for subpath in z.namelist():
- if subpath.endswith('.class'):
+ if _IsClassFile(subpath):
subpath = subpath[:-5] + 'dex'
dex_files.append(os.path.join(incremental_dir, subpath))
return dex_files
@@ -466,15 +471,21 @@ def _ComputeRequiredDesugarClasses(changes, desugar_dependencies_file,
return required_classes
+def _IsClassFile(path):
+ if os.path.basename(path) in _SKIPPED_CLASS_FILE_NAMES:
+ return False
+ return path.endswith('.class')
+
+
def _ExtractClassFiles(changes, tmp_dir, class_inputs, required_classes_set):
classes_list = []
for jar in class_inputs:
if changes:
changed_class_list = (set(changes.IterChangedSubpaths(jar))
| required_classes_set)
- predicate = lambda x: x in changed_class_list and x.endswith('.class')
+ predicate = lambda x: x in changed_class_list and _IsClassFile(x)
else:
- predicate = lambda x: x.endswith('.class')
+ predicate = _IsClassFile
classes_list.extend(
build_utils.ExtractAll(jar, path=tmp_dir, predicate=predicate))
diff --git a/chromium/build/android/gyp/dexsplitter.py b/chromium/build/android/gyp/dexsplitter.py
index 149e994f514..80b49c7f8e0 100755
--- a/chromium/build/android/gyp/dexsplitter.py
+++ b/chromium/build/android/gyp/dexsplitter.py
@@ -81,7 +81,7 @@ def main(args):
options = _ParseOptions(args)
input_paths = [options.input_dex_zip]
- for feature_jars in options.features.itervalues():
+ for feature_jars in options.features.values():
for feature_jar in feature_jars:
input_paths.append(feature_jar)
diff --git a/chromium/build/android/gyp/jinja_template.pydeps b/chromium/build/android/gyp/jinja_template.pydeps
index af22c400243..98de9329b35 100644
--- a/chromium/build/android/gyp/jinja_template.pydeps
+++ b/chromium/build/android/gyp/jinja_template.pydeps
@@ -11,6 +11,7 @@
../../../third_party/catapult/devil/devil/constants/exit_codes.py
../../../third_party/jinja2/__init__.py
../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/_identifier.py
../../../third_party/jinja2/asyncfilters.py
../../../third_party/jinja2/asyncsupport.py
../../../third_party/jinja2/bccache.py
diff --git a/chromium/build/android/gyp/lint.py b/chromium/build/android/gyp/lint.py
index 61763c16241..e277b37c362 100755
--- a/chromium/build/android/gyp/lint.py
+++ b/chromium/build/android/gyp/lint.py
@@ -80,7 +80,8 @@ def _GenerateProjectFile(android_manifest,
resource_sources=None,
custom_lint_jars=None,
custom_annotation_zips=None,
- android_sdk_version=None):
+ android_sdk_version=None,
+ baseline_path=None):
project = ElementTree.Element('project')
root = ElementTree.SubElement(project, 'root')
# Run lint from output directory: crbug.com/1115594
@@ -88,6 +89,9 @@ def _GenerateProjectFile(android_manifest,
sdk = ElementTree.SubElement(project, 'sdk')
# Lint requires that the sdk path be an absolute path.
sdk.set('dir', os.path.abspath(android_sdk_root))
+ if baseline_path is not None:
+ baseline = ElementTree.SubElement(project, 'baseline')
+ baseline.set('file', baseline_path)
cache = ElementTree.SubElement(project, 'cache')
cache.set('dir', cache_dir)
main_module = ElementTree.SubElement(project, 'module')
@@ -214,13 +218,16 @@ def _RunLint(lint_binary_path,
cmd = [
lint_binary_path,
+ # Uncomment to update baseline files during lint upgrades.
+ #'--update-baseline',
+ # Uncomment to easily remove fixed lint errors. This is not turned on by
+ # default due to: https://crbug.com/1256477#c5
+ #'--remove-fixed',
'--quiet', # Silences lint's "." progress updates.
'--disable',
','.join(_DISABLED_ALWAYS),
]
- if baseline:
- cmd.extend(['--baseline', baseline])
if testonly_target:
cmd.extend(['--disable', ','.join(_DISABLED_FOR_TESTS)])
@@ -263,10 +270,6 @@ def _RunLint(lint_binary_path,
custom_annotation_zips = []
if aars:
for aar in aars:
- # androidx custom lint checks require a newer version of lint. Disable
- # until we update see https://crbug.com/1225326
- if 'androidx' in aar:
- continue
# Use relative source for aar files since they are not generated.
aar_dir = os.path.join(aar_root_dir,
os.path.splitext(_SrcRelative(aar))[0])
@@ -300,7 +303,7 @@ def _RunLint(lint_binary_path,
classpath, srcjar_sources,
resource_sources, custom_lint_jars,
custom_annotation_zips,
- android_sdk_version)
+ android_sdk_version, baseline)
project_xml_path = os.path.join(lint_gen_dir, 'project.xml')
_WriteXmlFile(project_file_root, project_xml_path)
diff --git a/chromium/build/android/gyp/merge_manifest.py b/chromium/build/android/gyp/merge_manifest.py
index d0a93a8c78f..d4ea203e372 100755
--- a/chromium/build/android/gyp/merge_manifest.py
+++ b/chromium/build/android/gyp/merge_manifest.py
@@ -22,8 +22,8 @@ _MANIFEST_MERGER_JARS = [
os.path.join('common', 'common.jar'),
os.path.join('sdk-common', 'sdk-common.jar'),
os.path.join('sdklib', 'sdklib.jar'),
- os.path.join('external', 'com', 'google', 'guava', 'guava', '28.1-jre',
- 'guava-28.1-jre.jar'),
+ os.path.join('external', 'com', 'google', 'guava', 'guava', '30.1-jre',
+ 'guava-30.1-jre.jar'),
os.path.join('external', 'kotlin-plugin-ij', 'Kotlin', 'kotlinc', 'lib',
'kotlin-stdlib.jar'),
os.path.join('external', 'com', 'google', 'code', 'gson', 'gson', '2.8.6',
diff --git a/chromium/build/android/gyp/prepare_resources.pydeps b/chromium/build/android/gyp/prepare_resources.pydeps
index b225918c4dc..8136e733efc 100644
--- a/chromium/build/android/gyp/prepare_resources.pydeps
+++ b/chromium/build/android/gyp/prepare_resources.pydeps
@@ -2,6 +2,7 @@
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/prepare_resources.pydeps build/android/gyp/prepare_resources.py
../../../third_party/jinja2/__init__.py
../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/_identifier.py
../../../third_party/jinja2/asyncfilters.py
../../../third_party/jinja2/asyncsupport.py
../../../third_party/jinja2/bccache.py
diff --git a/chromium/build/android/gyp/proguard.py b/chromium/build/android/gyp/proguard.py
index 51632f813a4..9da100e42d5 100755
--- a/chromium/build/android/gyp/proguard.py
+++ b/chromium/build/android/gyp/proguard.py
@@ -367,28 +367,20 @@ def _OptimizeWithR8(options,
cmd += sorted(base_context.input_jars)
- # https://crbug.com/1231986
- for i in range(4):
- if i == 3:
- cmd += ['--thread-count', '1']
- try:
- stderr_filter = dex.CreateStderrFilter(
- options.show_desugar_default_interface_warnings)
- logging.debug('Running R8')
- build_utils.CheckOutput(cmd,
- print_stdout=print_stdout,
- stderr_filter=stderr_filter,
- fail_on_output=options.warnings_as_errors)
- break
- except build_utils.CalledProcessError as err:
- # https://crbug.com/1231986
- if 'ArrayIndexOutOfBoundsException' not in err.output or i == 3:
- # Python will print the original exception as well.
- raise Exception(
- 'R8 failed. Please see '
- 'https://chromium.googlesource.com/chromium/src/+/HEAD/build/'
- 'android/docs/java_optimization.md#Debugging-common-failures')
- logging.warning('Retrying R8 due to crbug/1231986')
+ try:
+ stderr_filter = dex.CreateStderrFilter(
+ options.show_desugar_default_interface_warnings)
+ logging.debug('Running R8')
+ build_utils.CheckOutput(cmd,
+ print_stdout=print_stdout,
+ stderr_filter=stderr_filter,
+ fail_on_output=options.warnings_as_errors)
+ except build_utils.CalledProcessError:
+ # Python will print the original exception as well.
+ raise Exception(
+ 'R8 failed. Please see '
+ 'https://chromium.googlesource.com/chromium/src/+/HEAD/build/'
+ 'android/docs/java_optimization.md#Debugging-common-failures')
base_has_imported_lib = False
if options.desugar_jdk_libs_json:
diff --git a/chromium/build/android/gyp/unused_resources.pydeps b/chromium/build/android/gyp/unused_resources.pydeps
index 4753ec358b8..b821d706146 100644
--- a/chromium/build/android/gyp/unused_resources.pydeps
+++ b/chromium/build/android/gyp/unused_resources.pydeps
@@ -2,6 +2,7 @@
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/unused_resources.pydeps build/android/gyp/unused_resources.py
../../../third_party/jinja2/__init__.py
../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/_identifier.py
../../../third_party/jinja2/asyncfilters.py
../../../third_party/jinja2/asyncsupport.py
../../../third_party/jinja2/bccache.py
diff --git a/chromium/build/android/gyp/util/resource_utils.py b/chromium/build/android/gyp/util/resource_utils.py
index 263b7c23484..4f64174193b 100644
--- a/chromium/build/android/gyp/util/resource_utils.py
+++ b/chromium/build/android/gyp/util/resource_utils.py
@@ -45,9 +45,9 @@ _ANDROID_TO_CHROMIUM_LANGUAGE_MAP = {
_ALL_RESOURCE_TYPES = {
'anim', 'animator', 'array', 'attr', 'bool', 'color', 'dimen', 'drawable',
- 'font', 'fraction', 'id', 'integer', 'interpolator', 'layout', 'menu',
- 'mipmap', 'plurals', 'raw', 'string', 'style', 'styleable', 'transition',
- 'xml'
+ 'font', 'fraction', 'id', 'integer', 'interpolator', 'layout', 'macro',
+ 'menu', 'mipmap', 'plurals', 'raw', 'string', 'style', 'styleable',
+ 'transition', 'xml'
}
AAPT_IGNORE_PATTERN = ':'.join([
@@ -662,14 +662,6 @@ def _RenderRootRJavaSource(package, all_resources_by_type, rjava_build_options,
else:
non_final_resources_by_type[res_type].append(entry)
- # Keep these assignments all on one line to make diffing against regular
- # aapt-generated files easier.
- create_id = ('{{ e.resource_type }}.{{ e.name }} ^= packageIdTransform;')
- create_id_arr = ('{{ e.resource_type }}.{{ e.name }}[i] ^='
- ' packageIdTransform;')
- for_loop_condition = ('int i = {{ startIndex(e) }}; i < '
- '{{ e.resource_type }}.{{ e.name }}.length; ++i')
-
# Here we diverge from what aapt does. Because we have so many
# resources, the onResourcesLoaded method was exceeding the 64KB limit that
# Java imposes. For this reason we split onResourcesLoaded into different
@@ -705,29 +697,44 @@ public final class R {
}
{% else %}
private static boolean sResourcesDidLoad;
+
+ private static void patchArray(
+ int[] arr, int startIndex, int packageIdTransform) {
+ for (int i = startIndex; i < arr.length; ++i) {
+ arr[i] ^= packageIdTransform;
+ }
+ }
+
public static void onResourcesLoaded(int packageId) {
if (sResourcesDidLoad) {
return;
}
sResourcesDidLoad = true;
int packageIdTransform = (packageId ^ 0x7f) << 24;
+ {# aapt2 makes int[] resources refer to other resources by reference
+ rather than by value. Thus, need to transform the int[] resources
+ first, before the referenced resources are transformed in order to
+ ensure the transform applies exactly once.
+ See https://crbug.com/1237059 for context.
+ #}
{% for resource_type in resource_types %}
- onResourcesLoaded{{ resource_type|title }}(packageIdTransform);
{% for e in non_final_resources[resource_type] %}
{% if e.java_type == 'int[]' %}
- for(""" + for_loop_condition + """) {
- """ + create_id_arr + """
- }
+ patchArray({{ e.resource_type }}.{{ e.name }}, {{ startIndex(e) }}, \
+packageIdTransform);
{% endif %}
{% endfor %}
{% endfor %}
+ {% for resource_type in resource_types %}
+ onResourcesLoaded{{ resource_type|title }}(packageIdTransform);
+ {% endfor %}
}
{% for res_type in resource_types %}
private static void onResourcesLoaded{{ res_type|title }} (
int packageIdTransform) {
{% for e in non_final_resources[res_type] %}
{% if res_type != 'styleable' and e.java_type != 'int[]' %}
- """ + create_id + """
+ {{ e.resource_type }}.{{ e.name }} ^= packageIdTransform;
{% endif %}
{% endfor %}
}
@@ -761,7 +768,14 @@ def ExtractBinaryManifestValues(aapt2_path, apk_path):
def ExtractArscPackage(aapt2_path, apk_path):
- """Returns (package_name, package_id) of resources.arsc from apk_path."""
+ """Returns (package_name, package_id) of resources.arsc from apk_path.
+
+ When the apk does not have any entries in its resources file, in recent aapt2
+ versions it will not contain a "Package" line. The package is not even in the
+ actual resources.arsc/resources.pb file (which itself is mostly empty). Thus
+ return (None, None) when dump succeeds and there are no errors to indicate
+ that the package name does not exist in the resources file.
+ """
proc = subprocess.Popen([aapt2_path, 'dump', 'resources', apk_path],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
@@ -777,8 +791,11 @@ def ExtractArscPackage(aapt2_path, apk_path):
# aapt2 currently crashes when dumping webview resources, but not until after
# it prints the "Package" line (b/130553900).
- sys.stderr.write(proc.stderr.read())
- raise Exception('Failed to find arsc package name')
+ stderr_output = proc.stderr.read().decode('utf-8')
+ if stderr_output:
+ sys.stderr.write(stderr_output)
+ raise Exception('Failed to find arsc package name')
+ return None, None
def _RenameSubdirsWithPrefix(dir_path, prefix):
@@ -922,11 +939,6 @@ def ResourceArgsParser():
'libraries.')
input_opts.add_argument(
- '--r-text-in',
- help='Path to pre-existing R.txt. Its resource IDs override those found '
- 'in the aapt-generated R.txt when generating R.java.')
-
- input_opts.add_argument(
'--extra-res-packages',
help='Additional package names to generate R.java files for.')
diff --git a/chromium/build/android/gyp/util/resources_parser.py b/chromium/build/android/gyp/util/resources_parser.py
index 8d8d69cce87..ba0665aa9db 100644
--- a/chromium/build/android/gyp/util/resources_parser.py
+++ b/chromium/build/android/gyp/util/resources_parser.py
@@ -74,7 +74,10 @@ class RTxtGenerator(object):
return ret
def _ExtractNewIdsFromXml(self, xml_path):
- root = ElementTree.parse(xml_path).getroot()
+ try:
+ root = ElementTree.parse(xml_path).getroot()
+ except Exception as e:
+ raise RuntimeError('Failure parsing {}:\n {}'.format(xml_path, e))
return self._ExtractNewIdsFromNode(root)
def _ParseValuesXml(self, xml_path):
diff --git a/chromium/build/android/gyp/write_build_config.pydeps b/chromium/build/android/gyp/write_build_config.pydeps
index b1276bca7b2..e9c7d9fcaaa 100644
--- a/chromium/build/android/gyp/write_build_config.pydeps
+++ b/chromium/build/android/gyp/write_build_config.pydeps
@@ -2,6 +2,7 @@
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/write_build_config.pydeps build/android/gyp/write_build_config.py
../../../third_party/jinja2/__init__.py
../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/_identifier.py
../../../third_party/jinja2/asyncfilters.py
../../../third_party/jinja2/asyncsupport.py
../../../third_party/jinja2/bccache.py
diff --git a/chromium/build/android/incremental_install/generate_android_manifest.py b/chromium/build/android/incremental_install/generate_android_manifest.py
index e069dab80ef..67feaa5a6ff 100755
--- a/chromium/build/android/incremental_install/generate_android_manifest.py
+++ b/chromium/build/android/incremental_install/generate_android_manifest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
#
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
@@ -108,6 +108,7 @@ def main(raw_args):
arsc_package, _ = resource_utils.ExtractArscPackage(options.aapt2_path,
options.in_apk)
+ assert arsc_package is not None, 'The apk does not have a valid package.'
# Extract version from the compiled manifest since it might have been set
# via aapt, and not exist in the manifest's text form.
version_code, version_name, manifest_package = (
diff --git a/chromium/build/android/incremental_install/generate_android_manifest.pydeps b/chromium/build/android/incremental_install/generate_android_manifest.pydeps
index 568ea1e2c02..b28c3070d82 100644
--- a/chromium/build/android/incremental_install/generate_android_manifest.pydeps
+++ b/chromium/build/android/incremental_install/generate_android_manifest.pydeps
@@ -2,6 +2,9 @@
# build/print_python_deps.py --root build/android/incremental_install --output build/android/incremental_install/generate_android_manifest.pydeps build/android/incremental_install/generate_android_manifest.py
../../../third_party/jinja2/__init__.py
../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/_identifier.py
+../../../third_party/jinja2/asyncfilters.py
+../../../third_party/jinja2/asyncsupport.py
../../../third_party/jinja2/bccache.py
../../../third_party/jinja2/compiler.py
../../../third_party/jinja2/defaults.py
diff --git a/chromium/build/android/incremental_install/installer.py b/chromium/build/android/incremental_install/installer.py
index 55e578884e3..b9ab7f0cbbf 100755
--- a/chromium/build/android/incremental_install/installer.py
+++ b/chromium/build/android/incremental_install/installer.py
@@ -228,7 +228,11 @@ def Install(device, install_json, apk=None, enable_device_cache=False,
do_push_dex()
def check_device_configured():
- target_sdk_version = int(apk.GetTargetSdkVersion())
+ if apk.GetTargetSdkVersion().isalpha():
+ # Assume pre-release SDK is always really new.
+ target_sdk_version = 99
+ else:
+ target_sdk_version = int(apk.GetTargetSdkVersion())
# Beta Q builds apply allowlist to targetSdk=28 as well.
if target_sdk_version >= 28 and device.build_version_sdk >= 28:
# In P, there are two settings:
diff --git a/chromium/build/android/incremental_install/write_installer_json.py b/chromium/build/android/incremental_install/write_installer_json.py
index cf1d2d4c57a..ce88e8a0364 100755
--- a/chromium/build/android/incremental_install/write_installer_json.py
+++ b/chromium/build/android/incremental_install/write_installer_json.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
diff --git a/chromium/build/android/lighttpd_server.py b/chromium/build/android/lighttpd_server.py
index 42fbcdbe692..561174de747 100755
--- a/chromium/build/android/lighttpd_server.py
+++ b/chromium/build/android/lighttpd_server.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
#
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
@@ -14,7 +14,6 @@ from __future__ import print_function
import codecs
import contextlib
-import httplib
import os
import random
import shutil
@@ -24,9 +23,13 @@ import sys
import tempfile
import time
+from six.moves import http_client
+from six.moves import input # pylint: disable=redefined-builtin
+
from pylib import constants
from pylib import pexpect
+
class LighttpdServer(object):
"""Wraps lighttpd server, providing robust startup.
@@ -122,11 +125,12 @@ class LighttpdServer(object):
def _TestServerConnection(self):
# Wait for server to start
server_msg = ''
- for timeout in xrange(1, 5):
+ for timeout in range(1, 5):
client_error = None
try:
- with contextlib.closing(httplib.HTTPConnection(
- '127.0.0.1', self.port, timeout=timeout)) as http:
+ with contextlib.closing(
+ http_client.HTTPConnection('127.0.0.1', self.port,
+ timeout=timeout)) as http:
http.set_debuglevel(timeout > 3)
http.request('HEAD', '/')
r = http.getresponse()
@@ -137,7 +141,7 @@ class LighttpdServer(object):
client_error = ('Bad response: %s %s version %s\n ' %
(r.status, r.reason, r.version) +
'\n '.join([': '.join(h) for h in r.getheaders()]))
- except (httplib.HTTPException, socket.error) as client_error:
+ except (http_client.HTTPException, socket.error) as client_error:
pass # Probably too quick connecting: try again
# Check for server startup error messages
# pylint: disable=no-member
@@ -248,8 +252,8 @@ def main(argv):
server = LighttpdServer(*argv[1:])
try:
if server.StartupHttpServer():
- raw_input('Server running at http://127.0.0.1:%s -'
- ' press Enter to exit it.' % server.port)
+ input('Server running at http://127.0.0.1:%s -'
+ ' press Enter to exit it.' % server.port)
else:
print('Server exit code:', server.process.exitstatus)
finally:
diff --git a/chromium/build/android/method_count.py b/chromium/build/android/method_count.py
index a39a390cf87..80d00735d47 100755
--- a/chromium/build/android/method_count.py
+++ b/chromium/build/android/method_count.py
@@ -1,4 +1,4 @@
-#! /usr/bin/env python
+#! /usr/bin/env python3
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
diff --git a/chromium/build/android/native_flags/argcapture.py b/chromium/build/android/native_flags/argcapture.py
index 159b03ab887..b0e2acd92a9 100755
--- a/chromium/build/android/native_flags/argcapture.py
+++ b/chromium/build/android/native_flags/argcapture.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2021 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
diff --git a/chromium/build/android/provision_devices.py b/chromium/build/android/provision_devices.py
index 37b9f77d9c4..ecce837a0d4 100755
--- a/chromium/build/android/provision_devices.py
+++ b/chromium/build/android/provision_devices.py
@@ -67,7 +67,7 @@ def ProvisionDevices(args):
if args.denylist_file else None)
devices = [
d for d in device_utils.DeviceUtils.HealthyDevices(denylist)
- if not args.emulators or d.adb.is_emulator
+ if not args.emulators or d.is_emulator
]
if args.device:
devices = [d for d in devices if d == args.device]
diff --git a/chromium/build/android/pylib/base/base_test_result.py b/chromium/build/android/pylib/base/base_test_result.py
index 0eaf7867fcb..1741f132d5a 100644
--- a/chromium/build/android/pylib/base/base_test_result.py
+++ b/chromium/build/android/pylib/base/base_test_result.py
@@ -103,7 +103,11 @@ class BaseTestResult(object):
return self._log
def SetFailureReason(self, failure_reason):
- """Set the reason the test failed."""
+ """Set the reason the test failed.
+
+ This should be the first failure the test encounters and exclude any stack
+ trace.
+ """
self._failure_reason = failure_reason
def GetFailureReason(self):
diff --git a/chromium/build/android/pylib/constants/__init__.py b/chromium/build/android/pylib/constants/__init__.py
index e87b8fe67d8..44b190f2ac9 100644
--- a/chromium/build/android/pylib/constants/__init__.py
+++ b/chromium/build/android/pylib/constants/__init__.py
@@ -158,6 +158,7 @@ PYTHON_UNIT_TEST_SUITES = {
'path':
os.path.join(DIR_SOURCE_ROOT, 'build', 'android', 'gyp'),
'test_modules': [
+ 'create_unwind_table_tests',
'java_cpp_enum_tests',
'java_cpp_strings_tests',
'java_google_api_keys_tests',
diff --git a/chromium/build/android/pylib/dex/dex_parser.py b/chromium/build/android/pylib/dex/dex_parser.py
index be5f1af9d1c..1ff8d25276c 100755
--- a/chromium/build/android/pylib/dex/dex_parser.py
+++ b/chromium/build/android/pylib/dex/dex_parser.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
diff --git a/chromium/build/android/pylib/instrumentation/instrumentation_test_instance.py b/chromium/build/android/pylib/instrumentation/instrumentation_test_instance.py
index e7239c93224..b4a13c9031a 100644
--- a/chromium/build/android/pylib/instrumentation/instrumentation_test_instance.py
+++ b/chromium/build/android/pylib/instrumentation/instrumentation_test_instance.py
@@ -205,12 +205,31 @@ def GenerateTestResults(result_code, result_bundle, statuses, duration_ms,
def _MaybeSetLog(bundle, current_result, symbolizer, device_abi):
if _BUNDLE_STACK_ID in bundle:
+ stack = bundle[_BUNDLE_STACK_ID]
if symbolizer and device_abi:
- current_result.SetLog('%s\n%s' % (bundle[_BUNDLE_STACK_ID], '\n'.join(
- symbolizer.ExtractAndResolveNativeStackTraces(
- bundle[_BUNDLE_STACK_ID], device_abi))))
+ current_result.SetLog('%s\n%s' % (stack, '\n'.join(
+ symbolizer.ExtractAndResolveNativeStackTraces(stack, device_abi))))
else:
- current_result.SetLog(bundle[_BUNDLE_STACK_ID])
+ current_result.SetLog(stack)
+
+ current_result.SetFailureReason(_ParseExceptionMessage(stack))
+
+
+def _ParseExceptionMessage(stack):
+ """Extracts the exception message from the given stack trace.
+ """
+ # This interprets stack traces reported via InstrumentationResultPrinter:
+ # https://source.chromium.org/chromium/chromium/src/+/main:third_party/android_support_test_runner/runner/src/main/java/android/support/test/internal/runner/listener/InstrumentationResultPrinter.java;l=181?q=InstrumentationResultPrinter&type=cs
+ # This is a standard Java stack trace, of the form:
+ # <Result of Exception.toString()>
+ # at SomeClass.SomeMethod(...)
+ # at ...
+ lines = stack.split('\n')
+ for i, line in enumerate(lines):
+ if line.startswith('\tat'):
+ return '\n'.join(lines[0:i])
+ # No call stack found, so assume everything is the exception message.
+ return stack
def FilterTests(tests, filter_str=None, annotations=None,
@@ -223,39 +242,147 @@ def FilterTests(tests, filter_str=None, annotations=None,
{'annotations": {}, 'class': 'com.example.TestB', 'method':'test2'}]
filter_str: googletest-style filter string.
annotations: a dict of wanted annotations for test methods.
- exclude_annotations: a dict of annotations to exclude.
+ excluded_annotations: a dict of annotations to exclude.
Return:
A list of filtered tests
"""
- def gtest_filter(t):
- if not filter_str:
- return True
+
+ def test_names_from_pattern(combined_pattern, test_names):
+ patterns = combined_pattern.split(':')
+
+ hashable_patterns = set()
+ filename_patterns = []
+ for pattern in patterns:
+ if ('*' in pattern or '?' in pattern or '[' in pattern):
+ filename_patterns.append(pattern)
+ else:
+ hashable_patterns.add(pattern)
+
+ filter_test_names = set(
+ unittest_util.FilterTestNames(test_names, ':'.join(
+ filename_patterns))) if len(filename_patterns) > 0 else set()
+
+ for test_name in test_names:
+ if test_name in hashable_patterns:
+ filter_test_names.add(test_name)
+
+ return filter_test_names
+
+ def get_test_names(test):
+ test_names = set()
# Allow fully-qualified name as well as an omitted package.
unqualified_class_test = {
- 'class': t['class'].split('.')[-1],
- 'method': t['method']
+ 'class': test['class'].split('.')[-1],
+ 'method': test['method']
}
- names = [
- GetTestName(t, sep='.'),
- GetTestName(unqualified_class_test, sep='.'),
- GetUniqueTestName(t, sep='.')
- ]
-
- if t['is_junit4']:
- names += [
- GetTestNameWithoutParameterPostfix(t, sep='.'),
- GetTestNameWithoutParameterPostfix(unqualified_class_test, sep='.')
- ]
+
+ test_name = GetTestName(test, sep='.')
+ test_names.add(test_name)
+
+ unqualified_class_test_name = GetTestName(unqualified_class_test, sep='.')
+ test_names.add(unqualified_class_test_name)
+
+ unique_test_name = GetUniqueTestName(test, sep='.')
+ test_names.add(unique_test_name)
+
+ if test['is_junit4']:
+ junit4_test_name = GetTestNameWithoutParameterPostfix(test, sep='.')
+ test_names.add(junit4_test_name)
+
+ unqualified_junit4_test_name = \
+ GetTestNameWithoutParameterPostfix(unqualified_class_test, sep='.')
+ test_names.add(unqualified_junit4_test_name)
+ return test_names
+
+ def get_tests_from_names(tests, test_names, tests_to_names):
+ ''' Returns the tests for which the given names apply
+
+ Args:
+ tests: a list of tests. e.g. [
+ {'annotations": {}, 'class': 'com.example.TestA', 'method':'test1'},
+ {'annotations": {}, 'class': 'com.example.TestB', 'method':'test2'}]
+ test_names: a collection of names determining tests to return.
+
+ Return:
+ A list of tests that match the given test names
+ '''
+ filtered_tests = []
+ for t in tests:
+ current_test_names = tests_to_names[id(t)]
+
+ for current_test_name in current_test_names:
+ if current_test_name in test_names:
+ filtered_tests.append(t)
+ break
+
+ return filtered_tests
+
+ def remove_tests_from_names(tests, remove_test_names, tests_to_names):
+ ''' Returns the tests from the given list with given names removed
+
+ Args:
+ tests: a list of tests. e.g. [
+ {'annotations": {}, 'class': 'com.example.TestA', 'method':'test1'},
+ {'annotations": {}, 'class': 'com.example.TestB', 'method':'test2'}]
+ remove_test_names: a collection of names determining tests to remove.
+ tests_to_names: a dcitionary of test ids to a collection of applicable
+ names for that test
+
+ Return:
+ A list of tests that don't match the given test names
+ '''
+ filtered_tests = []
+
+ for t in tests:
+ for name in tests_to_names[id(t)]:
+ if name in remove_test_names:
+ break
+ else:
+ filtered_tests.append(t)
+ return filtered_tests
+
+ def gtests_filter(tests, combined_filter):
+ ''' Returns the tests after the filter_str has been applied
+
+ Args:
+ tests: a list of tests. e.g. [
+ {'annotations": {}, 'class': 'com.example.TestA', 'method':'test1'},
+ {'annotations": {}, 'class': 'com.example.TestB', 'method':'test2'}]
+ combined_filter: the filter string representing tests to exclude
+
+ Return:
+ A list of tests that should still be included after the filter_str is
+ applied to their names
+ '''
+
+ if not combined_filter:
+ return tests
+
+ # Collect all test names
+ all_test_names = set()
+ tests_to_names = {}
+ for t in tests:
+ tests_to_names[id(t)] = get_test_names(t)
+ for name in tests_to_names[id(t)]:
+ all_test_names.add(name)
pattern_groups = filter_str.split('-')
- if len(pattern_groups) > 1:
- negative_filter = pattern_groups[1]
- if unittest_util.FilterTestNames(names, negative_filter):
- return []
+ negative_pattern = pattern_groups[1] if len(pattern_groups) > 1 else None
+ positive_pattern = pattern_groups[0]
+
+ if positive_pattern:
+ # Only use the test names that match the positive pattern
+ positive_test_names = test_names_from_pattern(positive_pattern,
+ all_test_names)
+ tests = get_tests_from_names(tests, positive_test_names, tests_to_names)
+
+ if negative_pattern:
+ # Remove any test the negative filter matches
+ remove_names = test_names_from_pattern(negative_pattern, all_test_names)
+ tests = remove_tests_from_names(tests, remove_names, tests_to_names)
- positive_filter = pattern_groups[0]
- return unittest_util.FilterTestNames(names, positive_filter)
+ return tests
def annotation_filter(all_annotations):
if not annotations:
@@ -289,12 +416,8 @@ def FilterTests(tests, filter_str=None, annotations=None,
return filter_av in av
return filter_av == av
- filtered_tests = []
- for t in tests:
- # Gtest filtering
- if not gtest_filter(t):
- continue
-
+ return_tests = []
+ for t in gtests_filter(tests, filter_str):
# Enforce that all tests declare their size.
if not any(a in _VALID_ANNOTATIONS for a in t['annotations']):
raise MissingSizeAnnotationError(GetTestName(t))
@@ -302,11 +425,9 @@ def FilterTests(tests, filter_str=None, annotations=None,
if (not annotation_filter(t['annotations'])
or not excluded_annotation_filter(t['annotations'])):
continue
+ return_tests.append(t)
- filtered_tests.append(t)
-
- return filtered_tests
-
+ return return_tests
# TODO(yolandyan): remove this once the tests are converted to junit4
def GetAllTestsFromJar(test_jar):
@@ -532,6 +653,7 @@ class InstrumentationTestInstance(test_instance.TestInstance):
self._store_tombstones = False
self._symbolizer = None
+ self._enable_breakpad_dump = False
self._enable_java_deobfuscation = False
self._deobfuscator = None
self._initializeLogAttributes(args)
@@ -728,6 +850,7 @@ class InstrumentationTestInstance(test_instance.TestInstance):
self._coverage_directory = args.coverage_dir
def _initializeLogAttributes(self, args):
+ self._enable_breakpad_dump = args.enable_breakpad_dump
self._enable_java_deobfuscation = args.enable_java_deobfuscation
self._store_tombstones = args.store_tombstones
self._symbolizer = stack_symbolizer.Symbolizer(
@@ -800,6 +923,10 @@ class InstrumentationTestInstance(test_instance.TestInstance):
return self._edit_shared_prefs
@property
+ def enable_breakpad_dump(self):
+ return self._enable_breakpad_dump
+
+ @property
def external_shard_index(self):
return self._external_shard_index
diff --git a/chromium/build/android/pylib/instrumentation/instrumentation_test_instance_test.py b/chromium/build/android/pylib/instrumentation/instrumentation_test_instance_test.py
index 575d318b446..f3b0d4f6e66 100755
--- a/chromium/build/android/pylib/instrumentation/instrumentation_test_instance_test.py
+++ b/chromium/build/android/pylib/instrumentation/instrumentation_test_instance_test.py
@@ -198,6 +198,72 @@ class InstrumentationTestInstanceTest(unittest.TestCase):
self.assertEqual(actual_tests, expected_tests)
+ def testGetTests_simpleGtestPositiveAndNegativeFilter(self):
+ o = self.createTestInstance()
+ raw_tests = [{
+ 'annotations': {
+ 'Feature': {
+ 'value': ['Foo']
+ }
+ },
+ 'class':
+ 'org.chromium.test.SampleTest',
+ 'superclass':
+ 'java.lang.Object',
+ 'methods': [
+ {
+ 'annotations': {
+ 'SmallTest': None
+ },
+ 'method': 'testMethod1',
+ },
+ {
+ 'annotations': {
+ 'MediumTest': None
+ },
+ 'method': 'testMethod2',
+ },
+ ],
+ }, {
+ 'annotations': {
+ 'Feature': {
+ 'value': ['Foo']
+ }
+ },
+ 'class':
+ 'org.chromium.test.SampleTest2',
+ 'superclass':
+ 'java.lang.Object',
+ 'methods': [{
+ 'annotations': {
+ 'SmallTest': None
+ },
+ 'method': 'testMethod1',
+ }],
+ }]
+
+ expected_tests = [
+ {
+ 'annotations': {
+ 'Feature': {
+ 'value': ['Foo']
+ },
+ 'SmallTest': None,
+ },
+ 'class': 'org.chromium.test.SampleTest',
+ 'is_junit4': True,
+ 'method': 'testMethod1',
+ },
+ ]
+
+ o._test_filter = \
+ 'org.chromium.test.SampleTest.*-org.chromium.test.SampleTest.testMethod2'
+ o._test_jar = 'path/to/test.jar'
+ o._junit4_runner_class = 'J4Runner'
+ actual_tests = o.ProcessRawTests(raw_tests)
+
+ self.assertEqual(actual_tests, expected_tests)
+
def testGetTests_simpleGtestUnqualifiedNameFilter(self):
o = self.createTestInstance()
raw_tests = [
diff --git a/chromium/build/android/pylib/local/device/local_device_instrumentation_test_run.py b/chromium/build/android/pylib/local/device/local_device_instrumentation_test_run.py
index d739119cadb..8638ce942c8 100644
--- a/chromium/build/android/pylib/local/device/local_device_instrumentation_test_run.py
+++ b/chromium/build/android/pylib/local/device/local_device_instrumentation_test_run.py
@@ -513,13 +513,21 @@ class LocalDeviceInstrumentationTestRun(
# Feature flags won't work in instrumentation tests unless the activity
# is restarted.
# Tests with identical features are grouped to minimize restarts.
- if 'Batch$SplitByFeature' in annotations:
+ # UnitTests that specify flags always use Features.JUnitProcessor, so
+ # they don't need to be split.
+ if batch_name != 'UnitTests':
if 'Features$EnableFeatures' in annotations:
batch_name += '|enabled:' + ','.join(
sorted(annotations['Features$EnableFeatures']['value']))
if 'Features$DisableFeatures' in annotations:
batch_name += '|disabled:' + ','.join(
sorted(annotations['Features$DisableFeatures']['value']))
+ if 'CommandLineFlags$Add' in annotations:
+ batch_name += '|cmd_line_add:' + ','.join(
+ sorted(annotations['CommandLineFlags$Add']['value']))
+ if 'CommandLineFlags$Remove' in annotations:
+ batch_name += '|cmd_line_remove:' + ','.join(
+ sorted(annotations['CommandLineFlags$Remove']['value']))
if not batch_name in batched_tests:
batched_tests[batch_name] = []
@@ -566,6 +574,16 @@ class LocalDeviceInstrumentationTestRun(
coverage_device_file = os.path.join(coverage_directory, coverage_basename)
coverage_device_file += '.exec'
extras['coverageFile'] = coverage_device_file
+
+ if self._test_instance.enable_breakpad_dump:
+ # Use external storage directory so that the breakpad dump can be accessed
+ # by the test APK in addition to the apk_under_test.
+ breakpad_dump_directory = os.path.join(device.GetExternalStoragePath(),
+ 'chromium_dumps')
+ if device.PathExists(breakpad_dump_directory):
+ device.RemovePath(breakpad_dump_directory, recursive=True)
+ flags_to_add.append('--breakpad-dump-location=' + breakpad_dump_directory)
+
# Save screenshot if screenshot dir is specified (save locally) or if
# a GS bucket is passed (save in cloud).
screenshot_device_file = device_temp_file.DeviceTempFile(
@@ -843,14 +861,14 @@ class LocalDeviceInstrumentationTestRun(
logging.info('detected failure in %s. raw output:', test_display_name)
for l in output:
logging.info(' %s', l)
- if (not self._env.skip_clear_data
- and self._test_instance.package_info):
- permissions = (
- self._test_instance.apk_under_test.GetPermissions()
- if self._test_instance.apk_under_test
- else None)
- device.ClearApplicationState(self._test_instance.package_info.package,
- permissions=permissions)
+ if not self._env.skip_clear_data:
+ if self._test_instance.package_info:
+ permissions = (self._test_instance.apk_under_test.GetPermissions()
+ if self._test_instance.apk_under_test else None)
+ device.ClearApplicationState(self._test_instance.package_info.package,
+ permissions=permissions)
+ if self._test_instance.enable_breakpad_dump:
+ device.RemovePath(breakpad_dump_directory, recursive=True)
else:
logging.debug('raw output from %s:', test_display_name)
for l in output:
diff --git a/chromium/build/android/pylib/local/machine/local_machine_junit_test_run.py b/chromium/build/android/pylib/local/machine/local_machine_junit_test_run.py
index 6cdbf475702..1e02c7e375c 100644
--- a/chromium/build/android/pylib/local/machine/local_machine_junit_test_run.py
+++ b/chromium/build/android/pylib/local/machine/local_machine_junit_test_run.py
@@ -155,7 +155,8 @@ class LocalMachineJunitTestRun(test_run.TestRun):
procs = [
subprocess.Popen(cmd,
stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT) for cmd in cmd_list
+ stderr=subprocess.STDOUT,
+ universal_newlines=True) for cmd in cmd_list
]
PrintProcessesStdout(procs)
diff --git a/chromium/build/android/pylib/results/presentation/standard_gtest_merge.py b/chromium/build/android/pylib/results/presentation/standard_gtest_merge.py
index 64f40e62e57..d458223abbb 100755
--- a/chromium/build/android/pylib/results/presentation/standard_gtest_merge.py
+++ b/chromium/build/android/pylib/results/presentation/standard_gtest_merge.py
@@ -1,4 +1,4 @@
-#! /usr/bin/env python
+#! /usr/bin/env python3
#
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
diff --git a/chromium/build/android/pylib/results/presentation/test_results_presentation.py b/chromium/build/android/pylib/results/presentation/test_results_presentation.py
index 2fd98b1161c..fc14b8bf037 100755
--- a/chromium/build/android/pylib/results/presentation/test_results_presentation.py
+++ b/chromium/build/android/pylib/results/presentation/test_results_presentation.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
#
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
diff --git a/chromium/build/android/pylib/symbols/apk_lib_dump.py b/chromium/build/android/pylib/symbols/apk_lib_dump.py
index 933a0aba179..f40c7581fcc 100755
--- a/chromium/build/android/pylib/symbols/apk_lib_dump.py
+++ b/chromium/build/android/pylib/symbols/apk_lib_dump.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2018 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
diff --git a/chromium/build/android/pylib/symbols/deobfuscator.py b/chromium/build/android/pylib/symbols/deobfuscator.py
index aca4c3a9e89..1fd188a4259 100644
--- a/chromium/build/android/pylib/symbols/deobfuscator.py
+++ b/chromium/build/android/pylib/symbols/deobfuscator.py
@@ -33,9 +33,12 @@ class Deobfuscator(object):
self._proc = None
# Start process eagerly to hide start-up latency.
self._proc_start_time = time.time()
- self._proc = subprocess.Popen(
- cmd, bufsize=1, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
- close_fds=True)
+ self._proc = subprocess.Popen(cmd,
+ bufsize=1,
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ universal_newlines=True,
+ close_fds=True)
def IsClosed(self):
return self._closed_called or self._proc.returncode is not None
diff --git a/chromium/build/android/pylib/symbols/elf_symbolizer.py b/chromium/build/android/pylib/symbols/elf_symbolizer.py
index 4198511bf32..270263debfe 100644
--- a/chromium/build/android/pylib/symbols/elf_symbolizer.py
+++ b/chromium/build/android/pylib/symbols/elf_symbolizer.py
@@ -26,14 +26,14 @@ import time
ADDR2LINE_RECYCLE_LIMIT = 4000
-ELF_MAGIC = '\x7f\x45\x4c\x46'
+ELF_MAGIC = b'\x7f\x45\x4c\x46'
def ContainsElfMagic(file_path):
if os.path.getsize(file_path) < 4:
return False
try:
- with open(file_path, 'r') as f:
+ with open(file_path, 'rb') as f:
b = f.read(4)
return b == ELF_MAGIC
except IOError:
diff --git a/chromium/build/android/pylib/utils/app_bundle_utils.py b/chromium/build/android/pylib/utils/app_bundle_utils.py
index 986e12688ee..f14a2cb0490 100644
--- a/chromium/build/android/pylib/utils/app_bundle_utils.py
+++ b/chromium/build/android/pylib/utils/app_bundle_utils.py
@@ -140,18 +140,15 @@ def GenerateBundleApks(bundle_path,
build_utils.DoZip(files, f, base_dir=temp_dir)
if check_for_noop:
- # NOTE: BUNDLETOOL_JAR_PATH is added to input_strings, rather than
- # input_paths, to speed up MD5 computations by about 400ms (the .jar file
- # contains thousands of class files which are checked independently,
- # resulting in an .md5.stamp of more than 60000 lines!).
- input_paths = [bundle_path, aapt2_path, keystore_path]
+ input_paths = [
+ bundle_path,
+ bundletool.BUNDLETOOL_JAR_PATH,
+ aapt2_path,
+ keystore_path,
+ ]
input_strings = [
keystore_password,
keystore_alias,
- bundletool.BUNDLETOOL_JAR_PATH,
- # NOTE: BUNDLETOOL_VERSION is already part of BUNDLETOOL_JAR_PATH, but
- # it's simpler to assume that this may not be the case in the future.
- bundletool.BUNDLETOOL_VERSION,
device_spec,
]
if mode is not None:
diff --git a/chromium/build/android/pylib/utils/logging_utils.py b/chromium/build/android/pylib/utils/logging_utils.py
index 846d336c2c8..4874f25c5df 100644
--- a/chromium/build/android/pylib/utils/logging_utils.py
+++ b/chromium/build/android/pylib/utils/logging_utils.py
@@ -31,7 +31,7 @@ class _ColorFormatter(logging.Formatter):
def __init__(self, wrapped_formatter=None):
"""Wraps a |logging.Formatter| and adds color."""
- super(_ColorFormatter, self).__init__(self)
+ super(_ColorFormatter, self).__init__()
self._wrapped_formatter = wrapped_formatter or logging.Formatter()
#override
diff --git a/chromium/build/android/resource_sizes.py b/chromium/build/android/resource_sizes.py
index 825edadedfd..28664371920 100755
--- a/chromium/build/android/resource_sizes.py
+++ b/chromium/build/android/resource_sizes.py
@@ -33,8 +33,9 @@ from pylib import constants
from pylib.constants import host_paths
_AAPT_PATH = lazy.WeakConstant(lambda: build_tools.GetPath('aapt'))
-_BUILD_UTILS_PATH = os.path.join(
- host_paths.DIR_SOURCE_ROOT, 'build', 'android', 'gyp')
+_ANDROID_UTILS_PATH = os.path.join(host_paths.DIR_SOURCE_ROOT, 'build',
+ 'android', 'gyp')
+_BUILD_UTILS_PATH = os.path.join(host_paths.BUILD_PATH, 'util')
with host_paths.SysPath(os.path.join(host_paths.DIR_SOURCE_ROOT, 'build')):
import gn_helpers # pylint: disable=import-error
@@ -45,10 +46,13 @@ with host_paths.SysPath(host_paths.BUILD_COMMON_PATH):
with host_paths.SysPath(host_paths.TRACING_PATH):
from tracing.value import convert_chart_json # pylint: disable=import-error
-with host_paths.SysPath(_BUILD_UTILS_PATH, 0):
+with host_paths.SysPath(_ANDROID_UTILS_PATH, 0):
from util import build_utils # pylint: disable=import-error
from util import zipalign # pylint: disable=import-error
+with host_paths.SysPath(_BUILD_UTILS_PATH, 0):
+ from lib.results import result_sink
+ from lib.results import result_types
zipalign.ApplyZipFileZipAlignFix()
@@ -168,8 +172,10 @@ def _MeasureApkSignatureBlock(zip_file):
def _RunReadelf(so_path, options, tool_prefix=''):
- return cmd_helper.GetCmdOutput(
- [tool_prefix + 'readelf'] + options + [so_path])
+ return cmd_helper.GetCmdOutput([
+ tool_prefix + 'readobj',
+ '--elf-output-style=GNU',
+ ] + options + [so_path])
def _ExtractLibSectionSizesFromApk(apk_path, lib_path, tool_prefix):
@@ -881,6 +887,7 @@ def main():
if args.chartjson:
args.output_format = 'chartjson'
+ result_sink_client = result_sink.TryInitClient()
isolated_script_output = {'valid': False, 'failures': []}
test_name = 'resource_sizes (%s)' % os.path.basename(args.input)
@@ -904,6 +911,13 @@ def main():
json.dump(isolated_script_output, output_file)
with open(args.isolated_script_test_output, 'w') as output_file:
json.dump(isolated_script_output, output_file)
+ if result_sink_client:
+ status = result_types.PASS
+ if not isolated_script_output['valid']:
+ status = result_types.UNKNOWN
+ elif isolated_script_output['failures']:
+ status = result_types.FAIL
+ result_sink_client.Post(test_name, status, None, None, None)
if __name__ == '__main__':
diff --git a/chromium/build/android/resource_sizes.pydeps b/chromium/build/android/resource_sizes.pydeps
index d956f5bae72..8298d160ddd 100644
--- a/chromium/build/android/resource_sizes.pydeps
+++ b/chromium/build/android/resource_sizes.pydeps
@@ -43,8 +43,12 @@
../../third_party/catapult/tracing/tracing/value/convert_chart_json.py
../../third_party/catapult/tracing/tracing_project.py
../gn_helpers.py
+../util/lib/__init__.py
../util/lib/common/perf_result_data_type.py
../util/lib/common/perf_tests_results_helper.py
+../util/lib/results/__init__.py
+../util/lib/results/result_sink.py
+../util/lib/results/result_types.py
devil_chromium.py
gyp/util/__init__.py
gyp/util/build_utils.py
diff --git a/chromium/build/android/test_runner.py b/chromium/build/android/test_runner.py
index f06e80745aa..1e312ba0b8b 100755
--- a/chromium/build/android/test_runner.py
+++ b/chromium/build/android/test_runner.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env vpython
+#!/usr/bin/env vpython3
#
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
@@ -353,10 +353,6 @@ def AddGTestOptions(parser):
help='Host directory to which app data files will be'
' saved. Used with --app-data-file.')
parser.add_argument(
- '--delete-stale-data',
- dest='delete_stale_data', action='store_true',
- help='Delete stale test data on the device.')
- parser.add_argument(
'--enable-xml-result-parsing',
action='store_true', help=argparse.SUPPRESS)
parser.add_argument(
@@ -468,10 +464,6 @@ def AddInstrumentationTestOptions(parser):
help='Directory in which to place all generated '
'Jacoco coverage files.')
parser.add_argument(
- '--delete-stale-data',
- action='store_true', dest='delete_stale_data',
- help='Delete stale test data on the device.')
- parser.add_argument(
'--disable-dalvik-asserts',
dest='set_asserts', action='store_false', default=True,
help='Removes the dalvik.vm.enableassertions property')
@@ -484,6 +476,11 @@ def AddInstrumentationTestOptions(parser):
dest='exclude_annotation_str',
help='Comma-separated list of annotations. Exclude tests with these '
'annotations.')
+ parser.add_argument(
+ '--enable-breakpad-dump',
+ action='store_true',
+ help='Stores any breakpad dumps till the end of the test.')
+
def package_replacement(arg):
split_arg = arg.split(',')
if len(split_arg) != 2:
@@ -941,11 +938,13 @@ def RunTestsInPlatformMode(args, result_sink_client=None):
match.group(1)) if match else None
# Some tests put in non utf-8 char as part of the test
# which breaks uploads, so need to decode and re-encode.
+ log_decoded = r.GetLog()
+ if isinstance(log_decoded, bytes):
+ log_decoded = log_decoded.decode('utf-8', 'replace')
result_sink_client.Post(r.GetName(),
r.GetType(),
r.GetDuration(),
- r.GetLog().decode(
- 'utf-8', 'replace').encode('utf-8'),
+ log_decoded.encode('utf-8'),
test_file_name,
failure_reason=r.GetFailureReason())
diff --git a/chromium/build/android/test_runner.pydeps b/chromium/build/android/test_runner.pydeps
index 3ee560cfb7e..87b39ef5633 100644
--- a/chromium/build/android/test_runner.pydeps
+++ b/chromium/build/android/test_runner.pydeps
@@ -95,6 +95,7 @@
../../third_party/catapult/devil/devil/utils/timeout_retry.py
../../third_party/catapult/devil/devil/utils/watchdog_timer.py
../../third_party/catapult/devil/devil/utils/zip_utils.py
+../../third_party/catapult/third_party/six/six.py
../../third_party/colorama/src/colorama/__init__.py
../../third_party/colorama/src/colorama/ansi.py
../../third_party/colorama/src/colorama/ansitowin32.py
diff --git a/chromium/build/android/update_deps/update_third_party_deps.py b/chromium/build/android/update_deps/update_third_party_deps.py
index 3a869c43ec4..c03fec5d882 100755
--- a/chromium/build/android/update_deps/update_third_party_deps.py
+++ b/chromium/build/android/update_deps/update_third_party_deps.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
diff --git a/chromium/build/android/update_verification.py b/chromium/build/android/update_verification.py
index 63ba69a135b..123c575079e 100755
--- a/chromium/build/android/update_verification.py
+++ b/chromium/build/android/update_verification.py
@@ -28,6 +28,8 @@ import argparse
import logging
import sys
+# import raw_input when converted to python3
+from six.moves import input # pylint: disable=redefined-builtin
import devil_chromium
from devil.android import apk_helper
@@ -36,17 +38,11 @@ from devil.android import device_errors
from devil.android import device_utils
from devil.utils import run_tests_helper
-try:
- from builtins import input as raw_input # pylint: disable=redefined-builtin
-except ImportError:
- # Python-2 already has raw_input.
- pass
-
def CreateAppData(device, old_apk, app_data, package_name):
device.Install(old_apk)
- raw_input('Set the application state. Once ready, press enter and '
- 'select "Backup my data" on the device.')
+ input('Set the application state. Once ready, press enter and '
+ 'select "Backup my data" on the device.')
device.adb.Backup(app_data, packages=[package_name])
logging.critical('Application data saved to %s', app_data)
@@ -54,8 +50,8 @@ def TestUpdate(device, old_apk, new_apk, app_data, package_name):
device.Install(old_apk)
device.adb.Restore(app_data)
# Restore command is not synchronous
- raw_input('Select "Restore my data" on the device. Then press enter to '
- 'continue.')
+ input('Select "Restore my data" on the device. Then press enter to '
+ 'continue.')
if not device.IsApplicationInstalled(package_name):
raise Exception('Expected package %s to already be installed. '
'Package name might have changed!' % package_name)
diff --git a/chromium/build/chromeos/PRESUBMIT.py b/chromium/build/chromeos/PRESUBMIT.py
index d8b1bf898d3..0e829278fb7 100644
--- a/chromium/build/chromeos/PRESUBMIT.py
+++ b/chromium/build/chromeos/PRESUBMIT.py
@@ -14,9 +14,14 @@ USE_PYTHON3 = True
def CommonChecks(input_api, output_api):
results = []
results += input_api.canned_checks.RunPylint(
- input_api, output_api, pylintrc='pylintrc')
+ input_api, output_api, pylintrc='pylintrc', version='2.6')
tests = input_api.canned_checks.GetUnitTestsInDirectory(
- input_api, output_api, '.', [r'^.+_test\.py$'], run_on_python3=True)
+ input_api,
+ output_api,
+ '.', [r'^.+_test\.py$'],
+ run_on_python2=False,
+ run_on_python3=True,
+ skip_shebang_check=True)
results += input_api.RunTests(tests)
return results
diff --git a/chromium/build/chromeos/gen_skylab_runner.py b/chromium/build/chromeos/gen_skylab_runner.py
index 9299ceb8f28..3a5e0c04451 100755
--- a/chromium/build/chromeos/gen_skylab_runner.py
+++ b/chromium/build/chromeos/gen_skylab_runner.py
@@ -9,7 +9,7 @@ import os
import sys
-class SkylabClientTestTest(object):
+class SkylabClientTestTest:
# The basic shell script for client test run in Skylab. The arguments listed
# here will be fed by autotest at the run time.
diff --git a/chromium/build/chromeos/test_runner.py b/chromium/build/chromeos/test_runner.py
index d19a5fd151d..319f64c0ed0 100755
--- a/chromium/build/chromeos/test_runner.py
+++ b/chromium/build/chromeos/test_runner.py
@@ -16,13 +16,13 @@ import signal
import socket
import sys
import tempfile
+import six
# The following non-std imports are fetched via vpython. See the list at
# //.vpython
import dateutil.parser # pylint: disable=import-error
import jsonlines # pylint: disable=import-error
import psutil # pylint: disable=import-error
-import six
CHROMIUM_SRC_PATH = os.path.abspath(
os.path.join(os.path.dirname(__file__), '..', '..'))
@@ -70,7 +70,7 @@ class TestFormatError(Exception):
pass
-class RemoteTest(object):
+class RemoteTest:
# This is a basic shell script that can be appended to in order to invoke the
# test on the device.
@@ -166,7 +166,7 @@ class RemoteTest(object):
os.path.relpath(self._path_to_outdir, CHROMIUM_SRC_PATH),
]
logging.info('Running the following command on the device:')
- logging.info('\n' + '\n'.join(script_contents))
+ logging.info('\n%s', '\n'.join(script_contents))
fd, tmp_path = tempfile.mkstemp(suffix='.sh', dir=self._path_to_outdir)
os.fchmod(fd, 0o755)
with os.fdopen(fd, 'w') as f:
@@ -219,11 +219,9 @@ class RemoteTest(object):
if test_proc.returncode == 0:
break
- ret = self.post_run(test_proc.returncode)
+ self.post_run(test_proc.returncode)
# Allow post_run to override test proc return code. (Useful when the host
# side Tast bin returns 0 even for failed tests.)
- if ret is not None:
- return ret
return test_proc.returncode
def post_run(self, return_code):
@@ -266,7 +264,7 @@ class RemoteTest(object):
class TastTest(RemoteTest):
def __init__(self, args, unknown_args):
- super(TastTest, self).__init__(args, unknown_args)
+ super().__init__(args, unknown_args)
self._suite_name = args.suite_name
self._tast_vars = args.tast_vars
@@ -404,14 +402,14 @@ class TastTest(RemoteTest):
# If we don't need to parse the host-side Tast tool's results, fall back to
# the parent method's default behavior.
if self._llvm_profile_var:
- return super(TastTest, self).post_run(return_code)
+ return super().post_run(return_code)
tast_results_path = os.path.join(self._logs_dir, 'streamed_results.jsonl')
if not os.path.exists(tast_results_path):
logging.error(
'Tast results not found at %s. Falling back to generic result '
'reporting.', tast_results_path)
- return super(TastTest, self).post_run(return_code)
+ return super().post_run(return_code)
# See the link below for the format of the results:
# https://godoc.org/chromium.googlesource.com/chromiumos/platform/tast.git/src/chromiumos/cmd/tast/run#TestResult
@@ -443,9 +441,8 @@ class TastTest(RemoteTest):
primary_error_message = errors[0]['reason']
for err in errors:
error_log += err['stack'] + '\n'
- error_log += (
- "\nIf you're unsure why this test failed, consult the steps "
- 'outlined in\n%s\n' % TAST_DEBUG_DOC)
+ debug_link = ("If you're unsure why this test failed, consult the steps "
+ 'outlined <a href="%s">here</a>.' % TAST_DEBUG_DOC)
base_result = base_test_result.BaseTestResult(
test['name'], result, duration=duration_ms, log=error_log)
suite_results.AddResult(base_result)
@@ -463,7 +460,8 @@ class TastTest(RemoteTest):
error_log,
None,
artifacts=artifacts,
- failure_reason=primary_error_message)
+ failure_reason=primary_error_message,
+ html_artifact=debug_link)
if self._rdb_client and self._logs_dir:
# Attach artifacts from the device that don't apply to a single test.
@@ -479,7 +477,7 @@ class TastTest(RemoteTest):
if not suite_results.DidRunPass():
return 1
- elif return_code:
+ if return_code:
logging.warning(
'No failed tests found, but exit code of %d was returned from '
'cros_run_test.', return_code)
@@ -535,7 +533,7 @@ class GTestTest(RemoteTest):
]
def __init__(self, args, unknown_args):
- super(GTestTest, self).__init__(args, unknown_args)
+ super().__init__(args, unknown_args)
self._test_exe = args.test_exe
self._runtime_deps_path = args.runtime_deps_path
@@ -605,21 +603,21 @@ class GTestTest(RemoteTest):
if self._vpython_dir:
vpython_path = os.path.join(self._path_to_outdir, self._vpython_dir,
- 'vpython')
+ 'vpython3')
cpython_path = os.path.join(self._path_to_outdir, self._vpython_dir,
- 'bin', 'python')
+ 'bin', 'python3')
if not os.path.exists(vpython_path) or not os.path.exists(cpython_path):
raise TestFormatError(
- '--vpython-dir must point to a dir with both infra/python/cpython '
- 'and infra/tools/luci/vpython installed.')
+ '--vpython-dir must point to a dir with both '
+ 'infra/3pp/tools/cpython3 and infra/tools/luci/vpython installed.')
vpython_spec_path = os.path.relpath(
- os.path.join(CHROMIUM_SRC_PATH, '.vpython'), self._path_to_outdir)
+ os.path.join(CHROMIUM_SRC_PATH, '.vpython3'), self._path_to_outdir)
# Initialize the vpython cache. This can take 10-20s, and some tests
# can't afford to wait that long on the first invocation.
device_test_script_contents.extend([
'export PATH=$PWD/%s:$PWD/%s/bin/:$PATH' %
(self._vpython_dir, self._vpython_dir),
- 'vpython -vpython-spec %s -vpython-tool install' %
+ 'vpython3 -vpython-spec %s -vpython-tool install' %
(vpython_spec_path),
])
@@ -729,7 +727,6 @@ def device_test(args, unknown_args):
# so cd to src/, which should be the root of all data deps.
os.chdir(CHROMIUM_SRC_PATH)
- # pylint: disable=redefined-variable-type
# TODO: Remove the above when depot_tool's pylint is updated to include the
# fix to https://github.com/PyCQA/pylint/issues/710.
if args.test_type == 'tast':
@@ -747,7 +744,7 @@ def device_test(args, unknown_args):
def host_cmd(args, cmd_args):
if not cmd_args:
raise TestFormatError('Must specify command to run on the host.')
- elif args.deploy_chrome and not args.path_to_outdir:
+ if args.deploy_chrome and not args.path_to_outdir:
raise TestFormatError(
'--path-to-outdir must be specified if --deploy-chrome is passed.')
diff --git a/chromium/build/chromeos/test_runner_test.py b/chromium/build/chromeos/test_runner_test.py
index fc3817856a5..0fa3a511e78 100755
--- a/chromium/build/chromeos/test_runner_test.py
+++ b/chromium/build/chromeos/test_runner_test.py
@@ -9,12 +9,12 @@ import shutil
import sys
import tempfile
import unittest
+import six
# The following non-std imports are fetched via vpython. See the list at
# //.vpython
import mock # pylint: disable=import-error
from parameterized import parameterized # pylint: disable=import-error
-import six
import test_runner
@@ -47,7 +47,7 @@ class TestRunnerTest(unittest.TestCase):
if six.PY3:
self.assertSetEqual(set(list1), set(list2))
else:
- self.assertItemsEqual(list1, list2)
+ self.assertCountEqual(list1, list2)
class TastTests(TestRunnerTest):
@@ -281,10 +281,10 @@ class GTestTest(TestRunnerTest):
gtest.build_test_command()
# Create the two expected tools, and the test should be ready to run.
- with open(os.path.join(args.vpython_dir, 'vpython'), 'w'):
+ with open(os.path.join(args.vpython_dir, 'vpython3'), 'w'):
pass # Just touch the file.
os.mkdir(os.path.join(args.vpython_dir, 'bin'))
- with open(os.path.join(args.vpython_dir, 'bin', 'python'), 'w'):
+ with open(os.path.join(args.vpython_dir, 'bin', 'python3'), 'w'):
pass
gtest = test_runner.GTestTest(args, None)
gtest.build_test_command()
diff --git a/chromium/build/config/BUILDCONFIG.gn b/chromium/build/config/BUILDCONFIG.gn
index 725894d0527..62e7e4bcd3e 100644
--- a/chromium/build/config/BUILDCONFIG.gn
+++ b/chromium/build/config/BUILDCONFIG.gn
@@ -387,6 +387,12 @@ set_defaults("static_library") {
set_defaults("source_set") {
configs = default_compiler_configs
}
+set_defaults("rust_library") {
+ configs = default_compiler_configs
+}
+set_defaults("rust_proc_macro") {
+ configs = default_compiler_configs
+}
# Compute the set of configs common to all linked targets (shared libraries,
# loadable modules, executables) to avoid duplication below.
diff --git a/chromium/build/config/OWNERS b/chromium/build/config/OWNERS
index eeb67065c2f..f4eab04c0a8 100644
--- a/chromium/build/config/OWNERS
+++ b/chromium/build/config/OWNERS
@@ -3,3 +3,4 @@ scottmg@chromium.org
per-file ozone.gni=file://ui/ozone/OWNERS
per-file ozone_extra.gni=file://ui/ozone/OWNERS
+per-file rust.gni=file://build/rust/OWNERS
diff --git a/chromium/build/config/aix/BUILD.gn b/chromium/build/config/aix/BUILD.gn
index 6c8749ab5c3..9000f6dfa26 100644
--- a/chromium/build/config/aix/BUILD.gn
+++ b/chromium/build/config/aix/BUILD.gn
@@ -28,6 +28,7 @@ config("compiler") {
"-maix64",
"-fdata-sections",
"-ffunction-sections",
+ "-fno-extern-tls-init",
"-O3",
# "-Werror"
@@ -46,4 +47,15 @@ config("compiler") {
"-maix64",
"-Wl,-bbigtoc",
]
+
+ if (is_component_build) {
+ cflags += [ "-fpic" ]
+ ldflags += [
+ "-Wl,-brtl",
+
+ # -bnoipath so that only names of .so objects are stored in loader
+ # section, excluding leading "./"
+ "-Wl,-bnoipath",
+ ]
+ }
}
diff --git a/chromium/build/config/android/DIR_METADATA b/chromium/build/config/android/DIR_METADATA
new file mode 100644
index 00000000000..cdc2d6fb6eb
--- /dev/null
+++ b/chromium/build/config/android/DIR_METADATA
@@ -0,0 +1 @@
+mixins: "//build/android/COMMON_METADATA"
diff --git a/chromium/build/config/android/config.gni b/chromium/build/config/android/config.gni
index e7540e8be1e..a3c5da17057 100644
--- a/chromium/build/config/android/config.gni
+++ b/chromium/build/config/android/config.gni
@@ -48,7 +48,7 @@ if (is_android || is_chromeos) {
# The default to use for android:minSdkVersion for targets that do
# not explicitly set it.
- default_min_sdk_version = 21
+ default_min_sdk_version = 23
# [WIP] Allows devs to achieve much faster edit-build-install cycles.
# Currently only works for ChromeModern apks due to incremental install.
@@ -322,29 +322,24 @@ if (is_android || is_chromeos) {
# like the toolchain roots.
if (current_cpu == "x86") {
android_prebuilt_arch = "android-x86"
- _binary_prefix = "i686-linux-android"
} else if (current_cpu == "arm") {
android_prebuilt_arch = "android-arm"
- _binary_prefix = "arm-linux-androideabi"
} else if (current_cpu == "mipsel") {
android_prebuilt_arch = "android-mips"
- _binary_prefix = "mipsel-linux-android"
} else if (current_cpu == "x64") {
android_prebuilt_arch = "android-x86_64"
- _binary_prefix = "x86_64-linux-android"
} else if (current_cpu == "arm64") {
android_prebuilt_arch = "android-arm64"
- _binary_prefix = "aarch64-linux-android"
} else if (current_cpu == "mips64el") {
android_prebuilt_arch = "android-mips64"
- _binary_prefix = "mips64el-linux-android"
} else {
assert(false, "Need android libgcc support for your target arch.")
}
android_toolchain_root = "$android_ndk_root/toolchains/llvm/prebuilt/${android_host_os}-${android_host_arch}"
- android_tool_prefix = "$android_toolchain_root/bin/$_binary_prefix-"
- android_readelf = "${android_tool_prefix}readelf"
+ android_ndk_library_path = "$android_toolchain_root/lib64"
+ android_tool_prefix = "$android_toolchain_root/bin/llvm-"
+ android_readelf = "${android_tool_prefix}readobj"
android_objcopy = "${android_tool_prefix}objcopy"
android_gdbserver =
"$android_ndk_root/prebuilt/$android_prebuilt_arch/gdbserver/gdbserver"
diff --git a/chromium/build/config/android/internal_rules.gni b/chromium/build/config/android/internal_rules.gni
index 3ed306a9e70..52d6c5f2120 100644
--- a/chromium/build/config/android/internal_rules.gni
+++ b/chromium/build/config/android/internal_rules.gni
@@ -1432,9 +1432,7 @@ if (enable_java_templates) {
!defined(invoker.enable_multidex) || invoker.enable_multidex
_enable_main_dex_list = _enable_multidex && _min_sdk_version < 21
_enable_desugar = !defined(invoker.enable_desugar) || invoker.enable_desugar
- _use_classic_desugar =
- defined(invoker.use_classic_desugar) && invoker.use_classic_desugar
- _desugar_needs_classpath = _enable_desugar && !_use_classic_desugar
+ _desugar_needs_classpath = _enable_desugar
# It's not safe to dex merge with libraries dex'ed at higher api versions.
assert(!_is_dex_merging || _min_sdk_version >= default_min_sdk_version)
@@ -1508,14 +1506,6 @@ if (enable_java_templates) {
} else {
args += [ "--input-paths=@FileArg($_rebased_build_config:deps_info:device_classpath)" ]
}
- if (_use_classic_desugar) {
- deps += [ "//third_party/bazel/desugar:desugar_runtime_java" ]
- inputs += [ _desugar_runtime_jar ]
- args += [
- "--input-paths",
- rebase_path(_desugar_runtime_jar, root_build_dir),
- ]
- }
if (defined(invoker.proguard_args)) {
args += invoker.proguard_args
}
@@ -1556,12 +1546,6 @@ if (enable_java_templates) {
}
_deps = invoker.deps
- if (!_is_library && _use_classic_desugar) {
- # It would be more efficient to use the pre-dex'ed copy of the runtime,
- # but it's easier to add it in this way.
- _deps += [ "//third_party/bazel/desugar:desugar_runtime_java" ]
- _input_class_jars += [ _desugar_runtime_jar ]
- }
if (_input_class_jars != []) {
_rebased_input_class_jars =
rebase_path(_input_class_jars, root_build_dir)
@@ -1694,7 +1678,7 @@ if (enable_java_templates) {
_ignore_desugar_missing_deps =
defined(invoker.ignore_desugar_missing_deps) &&
invoker.ignore_desugar_missing_deps
- if (!_ignore_desugar_missing_deps && !_use_classic_desugar) {
+ if (!_ignore_desugar_missing_deps) {
args += [ "--show-desugar-default-interface-warnings" ]
}
}
@@ -1847,54 +1831,6 @@ if (enable_java_templates) {
_deps = invoker.jar_deps
_previous_output_jar = invoker.input_jar_path
- if (invoker.is_device_jar && invoker.enable_desugar) {
- _desugar_target = "${target_name}_device__desugar"
- _desugar_output_jar = "$target_out_dir/$target_name.desugar.jar"
-
- action_with_pydeps(_desugar_target) {
- script = "//build/android/gyp/desugar.py"
- deps = _deps + invoker.classpath_deps
- depfile = "$target_gen_dir/$target_name.d"
- _rebased_build_config =
- rebase_path(invoker.build_config, root_build_dir)
- _desugar_jar = "//third_party/bazel/desugar/Desugar.jar"
-
- inputs = [
- invoker.build_config,
- _previous_output_jar,
- _desugar_jar,
- ]
- outputs = [ _desugar_output_jar ]
- args = [
- "--desugar-jar",
- rebase_path(_desugar_jar, root_build_dir),
- "--input-jar",
- rebase_path(_previous_output_jar, root_build_dir),
- "--output-jar",
- rebase_path(_desugar_output_jar, root_build_dir),
-
- # Temporarily using java_full_interface_classpath until classpath validation of targets
- # is implemented, see http://crbug.com/885273
- "--classpath=@FileArg($_rebased_build_config:deps_info:javac_full_interface_classpath)",
- "--bootclasspath=@FileArg($_rebased_build_config:android:sdk_interface_jars)",
- "--depfile",
- rebase_path(depfile, root_build_dir),
- ]
- if (defined(invoker.desugar_jars_paths)) {
- _rebased_desugar_jars_paths =
- rebase_path(invoker.desugar_jars_paths, root_build_dir)
- args += [ "--classpath=${_rebased_desugar_jars_paths}" ]
- }
- if (treat_warnings_as_errors) {
- args += [ "--warnings-as-errors" ]
- }
- }
-
- _deps = []
- _deps = [ ":$_desugar_target" ]
- _previous_output_jar = _desugar_output_jar
- }
-
if (invoker.jacoco_instrument) {
_filter_jar_target_name = "${target_name}__filter_jar"
_filter_jar_output_jar = "$target_out_dir/$target_name.filter.jar"
@@ -3463,14 +3399,11 @@ if (enable_java_templates) {
_dex_path = "$target_out_dir/$_main_target_name.dex.jar"
_enable_desugar =
!defined(invoker.enable_desugar) || invoker.enable_desugar
- _use_classic_desugar =
- defined(invoker.use_classic_desugar) && invoker.use_classic_desugar
# Build speed optimization: Skip "process device" step if the step
# would be just a copy and avoid the copy.
_process_device_jar =
defined(invoker.bytecode_rewriter_target) || _jacoco_instrument ||
- (_enable_desugar && _use_classic_desugar) ||
defined(invoker.jar_excluded_patterns) ||
defined(invoker.jar_included_patterns)
if (!_process_device_jar && _is_prebuilt) {
@@ -3946,7 +3879,6 @@ if (enable_java_templates) {
])
input_jar_path = _unprocessed_jar_path
jar_deps = _unprocessed_jar_deps + _full_classpath_deps
- is_device_jar = false
output_jar_path = _host_processed_jar_path
jacoco_instrument = _jacoco_instrument
if (_jacoco_instrument) {
@@ -3968,19 +3900,12 @@ if (enable_java_templates) {
])
input_jar_path = _unprocessed_jar_path
jar_deps = _unprocessed_jar_deps + _full_classpath_deps
- is_device_jar = true
output_jar_path = _device_processed_jar_path
jacoco_instrument = _jacoco_instrument
if (_jacoco_instrument) {
java_files = _java_files
java_sources_file = _java_sources_file
}
- enable_desugar = _enable_desugar && _use_classic_desugar
- if (enable_desugar) {
- build_config = _build_config
- classpath_deps = _classpath_deps
- forward_variables_from(invoker, [ "desugar_jars_paths" ])
- }
}
_process_device_jar_deps = [ ":${_process_device_jar_target_name}" ]
_public_deps += _process_device_jar_deps
@@ -3995,7 +3920,6 @@ if (enable_java_templates) {
[
"desugar_jars_paths",
"proguard_enable_obfuscation",
- "use_classic_desugar",
])
input_class_jars = [ _device_processed_jar_path ]
enable_desugar = _enable_desugar
@@ -4007,7 +3931,7 @@ if (enable_java_templates) {
output = _dex_path
deps = _process_device_jar_deps
- if (enable_desugar && !_use_classic_desugar) {
+ if (enable_desugar) {
# Desugaring with D8 requires full classpath.
build_config = _build_config
final_ijar_path = _final_ijar_path
diff --git a/chromium/build/config/android/rules.gni b/chromium/build/config/android/rules.gni
index 1099c49fbff..02e814c9fb2 100644
--- a/chromium/build/config/android/rules.gni
+++ b/chromium/build/config/android/rules.gni
@@ -35,6 +35,9 @@ if (use_cfi_diag || is_ubsan || is_ubsan_security || is_ubsan_vptr) {
_sanitizer_runtimes = [ "$clang_base_path/lib/clang/$clang_version/lib/linux/libclang_rt.ubsan_standalone-$_sanitizer_arch-android.so" ]
}
+_BUNDLETOOL_JAR_PATH =
+ "//third_party/android_build_tools/bundletool/bundletool.jar"
+
# Creates a dist directory for a native executable.
#
# Running a native executable on a device requires all the shared library
@@ -2758,7 +2761,6 @@ if (enable_java_templates) {
"secondary_native_lib_placeholders",
"sources",
"static_library_dependent_targets",
- "use_classic_desugar",
"library_always_compress",
"library_renames",
])
@@ -2884,8 +2886,7 @@ if (enable_java_templates) {
}
} else {
# Dex generation for app bundle modules with proguarding enabled takes
- # place later due to synchronized proguarding. For more details,
- # read build/android/docs/android_app_bundles.md
+ # place later due to synchronized proguarding.
_final_dex_target_name = "${_template_name}__final_dex"
dex(_final_dex_target_name) {
forward_variables_from(invoker,
@@ -3500,7 +3501,6 @@ if (enable_java_templates) {
"testonly",
"uncompress_dex",
"uncompress_shared_libraries",
- "use_classic_desugar",
"library_always_compress",
"library_renames",
"use_chromium_linker",
@@ -4343,7 +4343,6 @@ if (enable_java_templates) {
"missing_classes_allowlist",
"requires_android",
"testonly",
- "use_classic_desugar",
]
# Create android_java_prebuilt target for extra jars within jars/.
@@ -4568,10 +4567,6 @@ if (enable_java_templates) {
_enable_multidex =
!defined(invoker.enable_multidex) || invoker.enable_multidex
- if (!_proguard_enabled && defined(invoker.min_sdk_version)) {
- not_needed(invoker, [ "min_sdk_version" ])
- }
-
# Prevent "unused variable".
not_needed([ "_enable_multidex" ])
@@ -4711,11 +4706,7 @@ if (enable_java_templates) {
if (_proguard_enabled) {
# If this Bundle uses a static library, the static library APK will
# create the synchronized dex file path.
- if (_uses_static_library_synchronized_proguard) {
- if (defined(invoker.min_sdk_version)) {
- not_needed(invoker, [ "min_sdk_version" ])
- }
- } else {
+ if (!_uses_static_library_synchronized_proguard) {
dex(_dex_target) {
forward_variables_from(invoker,
[
@@ -4861,7 +4852,8 @@ if (enable_java_templates) {
_bundle_target_name = "${_target_name}__bundle"
action_with_pydeps(_bundle_target_name) {
script = "//build/android/gyp/create_app_bundle.py"
- inputs = _all_module_zip_paths + _all_module_build_configs
+ inputs = _all_module_zip_paths + _all_module_build_configs +
+ [ _BUNDLETOOL_JAR_PATH ]
outputs = [ _bundle_path ]
deps = _all_create_module_targets + [ ":$_build_config_target" ]
args = [
@@ -4877,6 +4869,16 @@ if (enable_java_templates) {
invoker.compress_shared_libraries) {
args += [ "--compress-shared-libraries" ]
}
+ _min_sdk_version = default_min_sdk_version
+ if (defined(invoker.min_sdk_version)) {
+ _min_sdk_version = invoker.min_sdk_version
+ }
+
+ # Android P+ support loading from stored dex.
+ if (_min_sdk_version < 27) {
+ args += [ "--compress-dex" ]
+ }
+
if (treat_warnings_as_errors) {
args += [ "--warnings-as-errors" ]
}
@@ -5072,7 +5074,10 @@ if (enable_java_templates) {
_apks_path = "$root_build_dir/apks/$_bundle_name.apks"
action_with_pydeps("${_target_name}_apks") {
script = "//build/android/gyp/create_app_bundle_apks.py"
- inputs = [ _bundle_path ]
+ inputs = [
+ _bundle_path,
+ _BUNDLETOOL_JAR_PATH,
+ ]
outputs = [ _apks_path ]
data = [ _apks_path ]
args = [
diff --git a/chromium/build/config/chromecast/BUILD.gn b/chromium/build/config/chromecast/BUILD.gn
index 0c3b2cbeb20..a4ebc9821fc 100644
--- a/chromium/build/config/chromecast/BUILD.gn
+++ b/chromium/build/config/chromecast/BUILD.gn
@@ -28,6 +28,7 @@ config("static_config") {
config("ldconfig") {
visibility = [ ":*" ]
+ configs = []
# Chromecast executables depend on several shared libraries in
# /oem_cast_shlib, $ORIGIN, and $ORIGIN/lib. Add these rpaths to each binary.
@@ -44,6 +45,11 @@ config("ldconfig") {
ldflags = [ "-Wl,-rpath=${target_rpath}" ]
}
+ if (chromecast_branding != "public") {
+ # Some internal x64 builds need additional rpath and dynamic-linker config.
+ configs += [ "//chromecast/internal/build/config/ldconfig" ]
+ }
+
# Binaries which don't live in the same directory as Chrome component
# libraries may still depend on them. Explicitly add the component library
# directory to the rpath for the component build.
diff --git a/chromium/build/config/chromecast_build.gni b/chromium/build/config/chromecast_build.gni
index deecdb53b39..65855a178c7 100644
--- a/chromium/build/config/chromecast_build.gni
+++ b/chromium/build/config/chromecast_build.gni
@@ -50,11 +50,33 @@ declare_args() {
(is_cast_audio_only && is_android))
}
+declare_args() {
+ # True to enable the cast audio renderer.
+ enable_cast_audio_renderer = false
+}
+
# Configures media options for cast. See media/media_options.gni
cast_mojo_media_services = []
cast_mojo_media_host = ""
-if (enable_cast_renderer) {
+if (enable_cast_audio_renderer) {
+ if (is_android) {
+ cast_mojo_media_services = [
+ "cdm",
+ "audio_decoder",
+ ]
+ }
+
+ if (!is_cast_audio_only) {
+ cast_mojo_media_services += [ "video_decoder" ]
+ }
+
+ if (is_android && is_cast_audio_only) {
+ cast_mojo_media_host = "browser"
+ } else {
+ cast_mojo_media_host = "gpu"
+ }
+} else if (enable_cast_renderer) {
# In this path, mojo media services are hosted in two processes:
# 1. "renderer" and "cdm" run in browser process. This is hard coded in the
# code.
@@ -63,7 +85,6 @@ if (enable_cast_renderer) {
"cdm",
"renderer",
]
-
if (!is_cast_audio_only) {
cast_mojo_media_services += [ "video_decoder" ]
}
diff --git a/chromium/build/config/chromeos/args.gni b/chromium/build/config/chromeos/args.gni
index 99c7d601a33..a1bfc5c026b 100644
--- a/chromium/build/config/chromeos/args.gni
+++ b/chromium/build/config/chromeos/args.gni
@@ -27,4 +27,12 @@ declare_args() {
# Determines if we run the test in skylab, aka the CrOS labs.
is_skylab = false
+
+ # Determines if we collect hardware information in chrome://system and
+ # feedback logs. A similar build flag "hw_details" is defined in Chrome OS
+ # (see https://crrev.com/c/3123455).
+ is_chromeos_with_hw_details = false
+
+ # Determines if we're willing to link against libinput
+ use_libinput = false
}
diff --git a/chromium/build/config/compiler/BUILD.gn b/chromium/build/config/compiler/BUILD.gn
index d7073ad38f0..f4421666b3d 100644
--- a/chromium/build/config/compiler/BUILD.gn
+++ b/chromium/build/config/compiler/BUILD.gn
@@ -15,6 +15,7 @@ import("//build/config/coverage/coverage.gni")
import("//build/config/dcheck_always_on.gni")
import("//build/config/gclient_args.gni")
import("//build/config/host_byteorder.gni")
+import("//build/config/rust.gni")
import("//build/config/sanitizers/sanitizers.gni")
import("//build/config/ui.gni")
import("//build/toolchain/cc_wrapper.gni")
@@ -221,7 +222,8 @@ if (is_android || (is_chromeos_ash && is_chromeos_device)) {
assert(!(llvm_force_head_revision && use_goma),
"can't use goma with trunk clang")
-assert(!(llvm_force_head_revision && use_rbe), "can't use rbe with trunk clang")
+assert(!(llvm_force_head_revision && use_remoteexec),
+ "can't use rbe with trunk clang")
# default_include_dirs ---------------------------------------------------------
#
@@ -486,7 +488,8 @@ config("compiler") {
#}
}
- if (use_icf && !is_apple) {
+ # TODO(crbug.com/1253924): Enable on apple/lld for arm64 too once it works.
+ if (use_icf && (!is_apple || (use_lld && current_cpu != "arm64"))) {
ldflags += [ "-Wl,--icf=all" ]
}
@@ -546,13 +549,6 @@ config("compiler") {
ldflags += [ "-Wl,-mllvm,-instcombine-lower-dbg-declare=0" ]
}
}
-
- # TODO(crbug.com/1235145): Investigate why/if this should be needed.
- if (is_win) {
- cflags += [ "/clang:-ffp-contract=off" ]
- } else {
- cflags += [ "-ffp-contract=off" ]
- }
}
# C11/C++11 compiler flags setup.
@@ -795,6 +791,35 @@ config("compiler") {
asmflags += cflags
asmflags += cflags_c
}
+
+ # Rust compiler flags setup.
+ # ---------------------------
+ rustflags = [
+ # Overflow checks are optional in Rust, but even if switched
+ # off they do not cause undefined behavior (the overflowing
+ # behavior is defined). Because containers are bounds-checked
+ # in safe Rust, they also can't provoke buffer overflows.
+ # As such these checks may be less important in Rust than C++.
+ # But in (simplistic) testing they have negligible performance
+ # overhead, and this helps to provide consistent behavior
+ # between different configurations, so we'll keep them on until
+ # we discover a reason to turn them off.
+ "-Coverflow-checks=on",
+
+ # To make Rust .d files compatible with ninja
+ "-Z",
+ "dep-info-omit-d-target",
+ ]
+ if (rust_abi_target != "") {
+ rustflags += [ "--target=$rust_abi_target" ]
+ }
+ if (use_lto_in_rustc_linking) {
+ rustflags += [ "-Clinker-plugin-lto" ]
+ }
+ if (!use_thin_lto) {
+ # Optimization - don't include bitcode if it won't be used.
+ rustflags += [ "-Cembed-bitcode=no" ]
+ }
}
# The BUILDCONFIG file sets this config on targets by default, which means when
@@ -1260,6 +1285,16 @@ config("compiler_deterministic") {
# really need it and it can mess up the goma cache entries.
if (is_clang && !is_nacl) {
cflags += [ "-no-canonical-prefixes" ]
+
+ # Same for links: Let the compiler driver invoke the linker
+ # with a relative path and pass relative paths to built-in
+ # libraries. Not needed on Windows because we call the linker
+ # directly there, not through the compiler driver.
+ # We don't link on goma, so this change is just for cleaner
+ # internal linker invocations, for people who work on the build.
+ if (!is_win) {
+ ldflags += [ "-no-canonical-prefixes" ]
+ }
}
}
@@ -1374,158 +1409,18 @@ config("default_warnings") {
if (fatal_linker_warnings) {
ldflags = [ "/WX" ]
}
-
cflags += [
- # Warnings permanently disabled:
-
- # C4091: 'typedef ': ignored on left of 'X' when no variable is
- # declared.
- # This happens in a number of Windows headers. Dumb.
- "/wd4091",
-
- # C4127: conditional expression is constant
- # This warning can in theory catch dead code and other problems, but
- # triggers in far too many desirable cases where the conditional
- # expression is either set by macros or corresponds some legitimate
- # compile-time constant expression (due to constant template args,
- # conditionals comparing the sizes of different types, etc.). Some of
- # these can be worked around, but it's not worth it.
- "/wd4127",
-
- # C4251: 'identifier' : class 'type' needs to have dll-interface to be
- # used by clients of class 'type2'
- # This is necessary for the shared library build.
- "/wd4251",
-
- # C4275: non dll-interface class used as base for dll-interface class
- # This points out a potential (but rare) problem with referencing static
- # fields of a non-exported base, through the base's non-exported inline
- # functions, or directly. The warning is subtle enough that people just
- # suppressed it when they saw it, so it's not worth it.
- "/wd4275",
-
- # C4312 is a VS 2015 64-bit warning for integer to larger pointer.
- # TODO(brucedawson): fix warnings, crbug.com/554200
- "/wd4312",
-
- # C4324 warns when padding is added to fulfill alignas requirements,
- # but can trigger in benign cases that are difficult to individually
- # suppress.
- "/wd4324",
-
- # C4351: new behavior: elements of array 'array' will be default
- # initialized
- # This is a silly "warning" that basically just alerts you that the
- # compiler is going to actually follow the language spec like it's
- # supposed to, instead of not following it like old buggy versions did.
- # There's absolutely no reason to turn this on.
- "/wd4351",
-
- # C4355: 'this': used in base member initializer list
- # It's commonly useful to pass |this| to objects in a class' initializer
- # list. While this warning can catch real bugs, most of the time the
- # constructors in question don't attempt to call methods on the passed-in
- # pointer (until later), and annotating every legit usage of this is
- # simply more hassle than the warning is worth.
- "/wd4355",
-
- # C4503: 'identifier': decorated name length exceeded, name was
- # truncated
- # This only means that some long error messages might have truncated
- # identifiers in the presence of lots of templates. It has no effect on
- # program correctness and there's no real reason to waste time trying to
- # prevent it.
- "/wd4503",
-
- # Warning C4589 says: "Constructor of abstract class ignores
- # initializer for virtual base class." Disable this warning because it
- # is flaky in VS 2015 RTM. It triggers on compiler generated
- # copy-constructors in some cases.
- "/wd4589",
-
- # C4611: interaction between 'function' and C++ object destruction is
- # non-portable
- # This warning is unavoidable when using e.g. setjmp/longjmp. MSDN
- # suggests using exceptions instead of setjmp/longjmp for C++, but
- # Chromium code compiles without exception support. We therefore have to
- # use setjmp/longjmp for e.g. JPEG decode error handling, which means we
- # have to turn off this warning (and be careful about how object
- # destruction happens in such cases).
- "/wd4611",
-
# Warnings to evaluate and possibly fix/reenable later:
- "/wd4100", # Unreferenced formal function parameter.
- "/wd4121", # Alignment of a member was sensitive to packing.
- "/wd4244", # Conversion: possible loss of data.
- "/wd4505", # Unreferenced local function has been removed.
- "/wd4510", # Default constructor could not be generated.
- "/wd4512", # Assignment operator could not be generated.
- "/wd4610", # Class can never be instantiated, constructor required.
- "/wd4838", # Narrowing conversion. Doesn't seem to be very useful.
- "/wd4995", # 'X': name was marked as #pragma deprecated
- "/wd4996", # Deprecated function warning.
-
- # These are variable shadowing warnings that are new in VS2015. We
- # should work through these at some point -- they may be removed from
- # the RTM release in the /W4 set.
- "/wd4456",
- "/wd4457",
- "/wd4458",
- "/wd4459",
-
- # All of our compilers support the extensions below.
- "/wd4200", # nonstandard extension used: zero-sized array in struct/union
- "/wd4201", # nonstandard extension used: nameless struct/union
- "/wd4204", # nonstandard extension used : non-constant aggregate
- # initializer
-
- "/wd4221", # nonstandard extension used : 'identifier' : cannot be
- # initialized using address of automatic variable
-
- # http://crbug.com/588506 - Conversion suppressions waiting on Clang
- # -Wconversion.
- "/wd4245", # 'conversion' : conversion from 'type1' to 'type2',
- # signed/unsigned mismatch
-
- "/wd4267", # 'var' : conversion from 'size_t' to 'type', possible loss of
- # data
-
- "/wd4305", # 'identifier' : truncation from 'type1' to 'type2'
- "/wd4389", # 'operator' : signed/unsigned mismatch
-
- "/wd4702", # unreachable code
-
- # http://crbug.com/848979 - MSVC is more conservative than Clang with
- # regards to variables initialized and consumed in different branches.
- "/wd4701", # Potentially uninitialized local variable 'name' used
- "/wd4703", # Potentially uninitialized local pointer variable 'name' used
-
- # http://crbug.com/848979 - Remaining Clang permitted warnings.
- "/wd4661", # 'identifier' : no suitable definition provided for explicit
- # template instantiation request
-
- "/wd4706", # assignment within conditional expression
- # MSVC is stricter and requires a boolean expression.
-
- "/wd4715", # 'function' : not all control paths return a value'
- # MSVC does not analyze switch (enum) for completeness.
- ]
-
- cflags_cc += [
- # Allow "noexcept" annotations even though we compile with exceptions
- # disabled.
- "/wd4577",
+ # Deprecated function warning;
+ # maps to -Wno-deprecated-declarations in clang-cl.
+ "/wd4996",
]
-
- if (current_cpu == "x86") {
- cflags += [
- # VC++ 2015 changes 32-bit size_t truncation warnings from 4244 to
- # 4267. Example: short TruncTest(size_t x) { return x; }
- # Since we disable 4244 we need to disable 4267 during migration.
- # TODO(jschuh): crbug.com/167187 fix size_t to int truncations.
- "/wd4267",
- ]
+ if (!is_clang) {
+ # TODO(thakis): Remove this once
+ # https://swiftshader-review.googlesource.com/c/SwiftShader/+/57968 has
+ # rolled into angle.
+ cflags += [ "/wd4244" ]
}
} else {
if (is_apple && !is_nacl) {
@@ -1600,7 +1495,7 @@ config("default_warnings") {
# use_xcode_clang only refers to the iOS toolchain, host binaries use
# chromium's clang always.
- if (!is_nacl) {
+ if (!is_nacl || is_nacl_saigo) {
if (is_win) {
# TODO(thakis): https://crbug.com/617318
# Currently goma can not handle case sensitiveness for windows well.
@@ -1611,8 +1506,7 @@ config("default_warnings") {
}
if (current_toolchain == host_toolchain || !use_xcode_clang) {
- # Flags NaCl (Clang 3.7) and Xcode 9.2 (Clang clang-900.0.39.2) do not
- # recognize.
+ # Flags Xcode 9.2 (Clang clang-900.0.39.2) does not recognize.
cflags += [
"-Wenum-compare-conditional",
@@ -1627,17 +1521,24 @@ config("default_warnings") {
# TODO(https://crbug.com/1016945) Clean up, enable.
"-Wno-builtin-assume-aligned-alignment",
+ ]
- # TODO(https://crbug.com/1028110): Evaluate and possible enable.
- "-Wno-deprecated-copy",
+ # NaCl does not support flags from ToT.
+ if (!is_nacl) {
+ cflags += [
+ # TODO(https://crbug.com/1203071): Clean up and enable.
+ "-Wno-unused-but-set-parameter",
+ "-Wno-unused-but-set-variable",
- # TODO(https://crbug.com/1203071): Clean up and enable.
- "-Wno-unused-but-set-parameter",
- "-Wno-unused-but-set-variable",
+ # TODO(https://crbug.com/1255745): Clean up, enable.
+ "-Wno-bitwise-instead-of-logical",
+ ]
+ }
- # TODO(https://crbug.com/1239077): Fix and re-enable.
- "-Wno-inline-asm",
- ]
+ if (is_fuchsia) {
+ # TODO(https://bugs.chromium.org/p/fuchsia/issues/detail?id=77383)
+ cflags += [ "-Wno-deprecated-copy" ]
+ }
if (enable_wmax_tokens) {
cflags += [ "-Wmax-tokens" ]
@@ -1654,9 +1555,9 @@ config("default_warnings") {
config("chromium_code") {
if (is_win) {
- cflags = [ "/W4" ] # Warning level 4.
-
if (is_clang) {
+ cflags = [ "/W4" ] # Warning level 4.
+
# Opt in to additional [[nodiscard]] on standard library methods.
defines = [ "_HAS_NODISCARD" ]
}
@@ -1713,7 +1614,7 @@ config("chromium_code") {
# TODO(thakis): Enable this more often, https://crbug.com/346399
# use_libfuzzer: https://crbug.com/1063180
if (!is_nacl && !use_libfuzzer) {
- cflags += [ "-Wunreachable-code" ]
+ cflags += [ "-Wunreachable-code-aggressive" ]
}
# Thread safety analysis is broken under nacl: https://crbug.com/982423.
@@ -1747,8 +1648,10 @@ config("no_chromium_code") {
defines = []
if (is_win) {
+ if (is_clang) {
+ cflags += [ "/W3" ] # Warning level 3.
+ }
cflags += [
- "/W3", # Warning level 3.
"/wd4800", # Disable warning when forcing value to bool.
"/wd4267", # TODO(jschuh): size_t to int.
"/wd4996", # Deprecated function warning.
@@ -2089,6 +1992,11 @@ config("optimize") {
} else {
cflags = [ "-O2" ] + common_optimize_on_cflags
}
+ if (optimize_for_size) {
+ rustflags = [ "-Copt-level=s" ]
+ } else {
+ rustflags = [ "-Copt-level=3" ]
+ }
ldflags = common_optimize_on_ldflags
}
@@ -2155,6 +2063,7 @@ config("optimize_max") {
} else {
cflags = [ "-O2" ] + common_optimize_on_cflags
}
+ rustflags = [ "-Copt-level=3" ]
}
}
@@ -2187,11 +2096,13 @@ config("optimize_speed") {
} else {
cflags = [ "-O3" ] + common_optimize_on_cflags
}
+ rustflags = [ "-Copt-level=3" ]
}
}
config("optimize_fuzzing") {
cflags = [ "-O1" ] + common_optimize_on_cflags
+ rustflags = [ "-Copt-level=1" ]
ldflags = common_optimize_on_ldflags
visibility = [ ":default_optimization" ]
}
@@ -2356,6 +2267,16 @@ config("symbols") {
cflags += [ "-g2" ]
}
+ if (!is_nacl && is_clang && !is_tsan && !is_asan) {
+ # gcc generates dwarf-aranges by default on -g1 and -g2. On clang it has
+ # to be manually enabled.
+ #
+ # It is skipped in tsan and asan because enabling it causes some
+ # formatting changes in the output which would require fixing bunches
+ # of expectation regexps.
+ cflags += [ "-gdwarf-aranges" ]
+ }
+
if (is_apple) {
swiftflags = [ "-g" ]
}
@@ -2407,6 +2328,7 @@ config("symbols") {
]
}
}
+ rustflags = [ "-g" ]
}
# Minimal symbols.
@@ -2455,6 +2377,12 @@ config("minimal_symbols") {
if (!is_nacl || is_clang) {
cflags += [ "-g1" ]
}
+
+ if (!is_nacl && is_clang && !is_tsan && !is_asan) {
+ # See comment for -gdwarf-aranges in config("symbols").
+ cflags += [ "-gdwarf-aranges" ]
+ }
+
ldflags = []
if (is_android && is_clang) {
# Android defaults to symbol_level=1 builds in production builds
@@ -2472,6 +2400,7 @@ config("minimal_symbols") {
asmflags = cflags
}
+ rustflags = [ "-Cdebuginfo=1" ]
}
# This configuration contains function names only. That is, the compiler is
diff --git a/chromium/build/config/compiler/compiler.gni b/chromium/build/config/compiler/compiler.gni
index 6f1faaf10fa..008a386efa7 100644
--- a/chromium/build/config/compiler/compiler.gni
+++ b/chromium/build/config/compiler/compiler.gni
@@ -24,6 +24,10 @@ if (is_apple) {
import("//build/config/apple/symbols.gni")
}
+if (is_ios) {
+ import("//build/config/ios/config.gni")
+}
+
declare_args() {
# Default to warnings as errors for default workflow, where we catch
# warnings with known toolchains. Allow overriding this e.g. for Chromium
@@ -197,10 +201,19 @@ can_unwind_with_cfi_table = is_android && !is_component_build &&
enable_arm_cfi_table = is_android && !is_component_build && current_cpu == "arm"
declare_args() {
+ # If this running on a GPU FYI bot.
+ # TODO(https://crbug.com/1233871): Remove this again.
+ is_gpu_fyi_bot = false
+}
+
+declare_args() {
# Set to true to use lld, the LLVM linker.
- # In late bring-up on macOS (see docs/mac_lld.md), and not functional at all for
- # iOS. The default linker everywhere else.
- use_lld = is_clang && (!is_apple || host_os == "linux")
+ # In late bring-up on macOS (see docs/mac_lld.md).
+ # Tentatively used on iOS, except in cronet builds (cronet still supports 32-bit builds, which
+ # lld doesn't support).
+ # The default linker everywhere else.
+ use_lld = is_clang && !(is_ios && is_cronet_build) &&
+ !(is_mac && is_gpu_fyi_bot) && !use_xcode_clang
}
declare_args() {
@@ -243,9 +256,8 @@ assert(
use_debug_fission == "default" || use_debug_fission || !use_debug_fission,
"Invalid use_debug_fission.")
if (use_debug_fission == "default") {
- use_debug_fission = (is_android && is_official_build) ||
- (is_debug && !is_android && !is_fuchsia && !is_apple &&
- !is_win && (use_gold || use_lld) && cc_wrapper == "")
+ use_debug_fission = is_debug && !is_android && !is_fuchsia && !is_apple &&
+ !is_win && (use_gold || use_lld) && cc_wrapper == ""
}
# If it wasn't manually set, set to an appropriate default.
diff --git a/chromium/build/config/devtools.gni b/chromium/build/config/devtools.gni
index d54aa1e9927..7a6d2b2f06f 100644
--- a/chromium/build/config/devtools.gni
+++ b/chromium/build/config/devtools.gni
@@ -2,17 +2,36 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+import("//build/config/chrome_build.gni")
import("//build_overrides/build.gni")
declare_args() {
if (build_with_chromium) {
- # This argument is used in DevTools to resolve to the correct location
+ # devtools_location is used in DevTools to resolve to the correct location
# for any script/file referenced in the DevTools build scripts. Since
# DevTools supports both a standalone build and build integration with
# Chromium, we need to differentiate between the two versions.
- devtools_location = "third_party/devtools-frontend/src/"
+ # devtools_location points to the Chromium version in both Chrome-branded
+ # and not Chrome-branded builds. devtools_root_location points to the root
+ # of the Chrome-branded version when is_chrome_branded is true and to the root
+ # of the Chromium version when is_chrome_branded is false.
+ # devtools_grd_location is the location of the GRD file listing all DevTools
+ # resources.
+ if (is_chrome_branded) {
+ devtools_root_location = "third_party/devtools-frontend-internal"
+ devtools_location = "$devtools_root_location/devtools-frontend/"
+ devtools_grd_location =
+ "$devtools_root_location/chrome_devtools_resources.grd"
+ } else {
+ devtools_root_location = "third_party/devtools-frontend/src"
+ devtools_location = "third_party/devtools-frontend/src/"
+ devtools_grd_location =
+ "$devtools_root_location/front_end/devtools_resources.grd"
+ }
} else {
# DevTools is building a standalone version
devtools_location = ""
+ devtools_root_location = ""
+ devtools_grd_location = ""
}
}
diff --git a/chromium/build/config/fuchsia/DIR_METADATA b/chromium/build/config/fuchsia/DIR_METADATA
index 6d8f079aa58..7dbde800dc1 100644
--- a/chromium/build/config/fuchsia/DIR_METADATA
+++ b/chromium/build/config/fuchsia/DIR_METADATA
@@ -1,7 +1,3 @@
-monorail {
- component: "Fuchsia"
-}
-
-team_email: "cr-fuchsia@chromium.org"
+mixins: "//build/fuchsia/COMMON_METADATA"
os: FUCHSIA
diff --git a/chromium/build/config/fuchsia/generate_runner_scripts.gni b/chromium/build/config/fuchsia/generate_runner_scripts.gni
index e5e0bf8892f..afab76f4c49 100644
--- a/chromium/build/config/fuchsia/generate_runner_scripts.gni
+++ b/chromium/build/config/fuchsia/generate_runner_scripts.gni
@@ -17,6 +17,11 @@ declare_args() {
# will be used.
default_fuchsia_build_dir_for_installation = ""
+ # Sets the Fuchsia device node name which will be used by default by the
+ # generated runner scripts. If not specified, then no default node name will
+ # be used.
+ default_fuchsia_device_node_name = ""
+
# CPU architecture of the host used to run the tests.
test_host_cpu = host_cpu
@@ -139,14 +144,10 @@ template("fuchsia_package_runner") {
data += [
"//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/device-finder",
+ "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/ffx",
"//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/fvm",
"//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/merkleroot",
"//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/pm",
-
- # TODO(crbug.com/1162314) Remove "symbolize" when transition to
- # "symbolizer" is complete.
- "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/symbolize",
-
"//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/symbolizer",
"//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/zbi",
]
@@ -156,6 +157,7 @@ template("fuchsia_package_runner") {
"${boot_image_root}/qemu/qemu-kernel.kernel",
"${boot_image_root}/qemu/storage-full.blk",
"${boot_image_root}/qemu/zircon-a.zbi",
+ "//third_party/fuchsia-sdk/sdk/bin/device_launcher.version",
"//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/fvdl",
"//third_party/qemu-${host_os}-${test_host_cpu}/",
"${aemu_root}/",
@@ -216,6 +218,13 @@ template("fuchsia_package_runner") {
default_fuchsia_build_dir_for_installation,
]
}
+
+ if (default_fuchsia_device_node_name != "") {
+ executable_args += [
+ "--node-name",
+ default_fuchsia_device_node_name,
+ ]
+ }
}
# Produces a script which installs a package and its dependencies into the
diff --git a/chromium/build/config/gcc/BUILD.gn b/chromium/build/config/gcc/BUILD.gn
index 154b259b5fa..ff47e2542c6 100644
--- a/chromium/build/config/gcc/BUILD.gn
+++ b/chromium/build/config/gcc/BUILD.gn
@@ -63,8 +63,8 @@ config("symbol_visibility_default") {
# configs += [ "//build/config/gcc:rpath_for_built_shared_libraries" ]
# }
config("rpath_for_built_shared_libraries") {
- if (!is_android) {
- # Note: Android doesn't support rpath.
+ if (!is_android && current_os != "aix") {
+ # Note: Android, Aix don't support rpath.
if (current_toolchain != default_toolchain || gcc_target_rpath == "") {
ldflags = [
# Want to pass "\$". GN will re-escape as required for ninja.
diff --git a/chromium/build/config/ios/BUILD.gn b/chromium/build/config/ios/BUILD.gn
index 5309e825b01..83fb0a53527 100644
--- a/chromium/build/config/ios/BUILD.gn
+++ b/chromium/build/config/ios/BUILD.gn
@@ -4,6 +4,7 @@
import("//build/config/ios/ios_sdk.gni")
import("//build/toolchain/goma.gni")
+import("//build/toolchain/rbe.gni")
import("//build/toolchain/toolchain.gni")
import("//build_overrides/build.gni")
@@ -16,11 +17,6 @@ declare_args() {
# Mimicking how Xcode handles it, the production builds (is_debug = false)
# get real bitcode sections added, while the debug builds (is_debug = true)
# only get bitcode-section "markers" added in them.
- # NOTE: This option is ignored when building versions for the iOS simulator,
- # where a part of libvpx is compiled from the assembly code written using
- # Intel assembly syntax; Yasm / Nasm do not support emitting bitcode parts.
- # That is not a limitation for now as Xcode mandates the presence of bitcode
- # only when building bitcode-enabled projects for real devices (ARM CPUs).
enable_ios_bitcode = false
}
@@ -94,7 +90,7 @@ config("runtime_library") {
# Rebase the value in that case since gn does not convert paths in compiler
# flags (since it is not aware they are paths).
_sdk_root = ios_sdk_path
- if (use_system_xcode && use_goma) {
+ if (use_system_xcode && (use_goma || use_remoteexec)) {
_sdk_root = rebase_path(ios_sdk_path, root_build_dir)
}
@@ -116,7 +112,7 @@ config("runtime_library") {
]
}
- if (use_xcode_clang && enable_ios_bitcode && target_environment == "device") {
+ if (use_xcode_clang && enable_ios_bitcode) {
if (is_debug) {
common_flags += [ "-fembed-bitcode-marker" ]
} else {
@@ -127,6 +123,20 @@ config("runtime_library") {
asmflags = common_flags
cflags = common_flags
ldflags = common_flags
+
+ # TODO(crbug.com/1223481): Temporarily use a different
+ # libclang_rt.iossim.a for arm64 simulator builds. This can be
+ # removed when an arm64 slice is added to upstream Clang.
+ if (target_environment == "simulator" && current_cpu == "arm64") {
+ assert(xcode_version_int == 1300)
+ ldflags += [
+ "-lSystem",
+ rebase_path("$ios_toolchains_path/usr/lib/clang/13.0.0/" +
+ "lib/darwin/libclang_rt.iossim.a",
+ root_build_dir),
+ "-nodefaultlibs",
+ ]
+ }
}
config("ios_executable_flags") {
@@ -234,7 +244,7 @@ _xctrunner_path =
#
# To workaround this, add a target that pretends to create those files
# (but does nothing). See https://crbug.com/1061487 for why this is needed.
-if (use_system_xcode && use_goma) {
+if (use_system_xcode && (use_goma || use_remoteexec)) {
action("copy_xctrunner_app") {
testonly = true
script = "//build/noop.py"
@@ -263,7 +273,7 @@ action("xctest_runner_without_arm64e") {
xcode_version,
]
- if (use_system_xcode && use_goma) {
+ if (use_system_xcode && (use_goma || use_remoteexec)) {
deps = [ ":copy_xctrunner_app" ]
}
}
diff --git a/chromium/build/config/ios/Host-Info.plist b/chromium/build/config/ios/Host-Info.plist
index 9f6f5deef9e..6898c15fa1c 100644
--- a/chromium/build/config/ios/Host-Info.plist
+++ b/chromium/build/config/ios/Host-Info.plist
@@ -9,7 +9,7 @@
<key>CFBundleExecutable</key>
<string>${EXECUTABLE_NAME}</string>
<key>CFBundleIdentifier</key>
- <string>${IOS_BUNDLE_ID_PREFIX}.test.${EXECUTABLE_NAME:rfc1034identifier}</string>
+ <string>${BUNDLE_IDENTIFIER}</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
diff --git a/chromium/build/config/ios/Module-Info.plist b/chromium/build/config/ios/Module-Info.plist
index d1bf77faf08..e1b09841541 100644
--- a/chromium/build/config/ios/Module-Info.plist
+++ b/chromium/build/config/ios/Module-Info.plist
@@ -7,7 +7,7 @@
<key>CFBundleExecutable</key>
<string>${EXECUTABLE_NAME}</string>
<key>CFBundleIdentifier</key>
- <string>${IOS_BUNDLE_ID_PREFIX}.${MODULE_BUNDLE_ID:rfc1034identifier}</string>
+ <string>${BUNDLE_IDENTIFIER}</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
diff --git a/chromium/build/config/ios/config.gni b/chromium/build/config/ios/config.gni
index b25ecd942d2..75486776902 100644
--- a/chromium/build/config/ios/config.gni
+++ b/chromium/build/config/ios/config.gni
@@ -9,6 +9,11 @@ declare_args() {
# default is only there for compatibility reasons and will be removed (see
# crbug.com/1138425 for more details).
target_environment = ""
+
+ # Control whether cronet is built (this is usually set by the script
+ # components/cronet/tools/cr_cronet.py as cronet requires specific
+ # gn args to build correctly).
+ is_cronet_build = false
}
if (target_environment == "") {
diff --git a/chromium/build/config/ios/ios_sdk.gni b/chromium/build/config/ios/ios_sdk.gni
index ffff5921d80..eb80502ba86 100644
--- a/chromium/build/config/ios/ios_sdk.gni
+++ b/chromium/build/config/ios/ios_sdk.gni
@@ -5,7 +5,7 @@
import("//build/config/ios/config.gni")
import("//build/config/ios/ios_sdk_overrides.gni")
import("//build/toolchain/goma.gni")
-import("//build/toolchain/goma.gni")
+import("//build/toolchain/rbe.gni")
import("//build/toolchain/toolchain.gni")
import("//build_overrides/build.gni")
@@ -62,6 +62,9 @@ declare_args() {
# You can also pass the value via "--args" parameter for "gn gen" command by
# using the syntax --args='additional_target_cpus=["arm"] target_cpu="arm64"'.
additional_target_cpus = []
+
+ # Set to true if all test apps should use the same bundle id.
+ ios_use_shared_bundle_id_for_test_apps = false
}
declare_args() {
@@ -75,9 +78,9 @@ declare_args() {
}
# Official builds may not use goma.
-assert(!(use_goma && is_chrome_branded && is_official_build &&
- target_cpu == "arm64"),
- "goma use is forbidden for official iOS builds.")
+assert(!((use_goma || use_remoteexec) && is_chrome_branded &&
+ is_official_build && target_cpu == "arm64"),
+ "goma/re-client use is forbidden for official iOS builds.")
assert(custom_toolchain == "" || additional_target_cpus == [],
"cannot define both custom_toolchain and additional_target_cpus")
@@ -139,7 +142,7 @@ if (ios_sdk_path == "") {
ios_sdk_developer_dir,
]
}
- if (use_system_xcode && use_goma) {
+ if (use_system_xcode && (use_goma || use_remoteexec)) {
ios_sdk_info_args += [
"--create_symlink_at",
"sdk/xcode_links",
diff --git a/chromium/build/config/ios/resources/XCTRunnerAddition+Info.plist b/chromium/build/config/ios/resources/XCTRunnerAddition+Info.plist
index cf9463f6942..ed26f55d163 100644
--- a/chromium/build/config/ios/resources/XCTRunnerAddition+Info.plist
+++ b/chromium/build/config/ios/resources/XCTRunnerAddition+Info.plist
@@ -3,7 +3,7 @@
<plist version="1.0">
<dict>
<key>CFBundleIdentifier</key>
- <string>com.apple.test.${EXECUTABLE_NAME}</string>
+ <string>${BUNDLE_IDENTIFIER}</string>
<key>CFBundleName</key>
<string>${PRODUCT_NAME}</string>
<key>CFBundleExecutable</key>
diff --git a/chromium/build/config/ios/rules.gni b/chromium/build/config/ios/rules.gni
index 1c74f158eb2..2ab3cda0b9d 100644
--- a/chromium/build/config/ios/rules.gni
+++ b/chromium/build/config/ios/rules.gni
@@ -6,6 +6,7 @@ import("//build/apple/apple_info_plist.gni")
import("//build/config/apple/symbols.gni")
import("//build/config/ios/ios_sdk.gni")
import("//build/toolchain/goma.gni")
+import("//build/toolchain/rbe.gni")
import("//build/toolchain/toolchain.gni")
import("//build_overrides/build.gni")
@@ -348,27 +349,31 @@ template("create_signed_bundle") {
}
if (ios_set_attributes_for_xcode_project_generation) {
- _xcode_product_bundle_id = ""
+ _bundle_identifier = ""
if (defined(invoker.xcode_product_bundle_id)) {
- _xcode_product_bundle_id = invoker.xcode_product_bundle_id
+ _bundle_identifier = invoker.xcode_product_bundle_id
+ assert(
+ _bundle_identifier == string_replace(_bundle_identifier, "_", "-"),
+ "$target_name: bundle_identifier does not respect rfc1034: " +
+ _bundle_identifier)
}
- if (_xcode_product_bundle_id != "") {
+ if (_bundle_identifier != "") {
_ios_provisioning_profile_info =
exec_script("//build/config/ios/codesign.py",
[
"find-provisioning-profile",
- "-b=" + _xcode_product_bundle_id,
+ "-b=" + _bundle_identifier,
],
"json")
}
xcode_extra_attributes = {
IPHONEOS_DEPLOYMENT_TARGET = ios_deployment_target
- if (_xcode_product_bundle_id != "") {
+ if (_bundle_identifier != "") {
CODE_SIGN_IDENTITY = "iPhone Developer"
DEVELOPMENT_TEAM = _ios_provisioning_profile_info.team_identifier
- PRODUCT_BUNDLE_IDENTIFIER = _xcode_product_bundle_id
+ PRODUCT_BUNDLE_IDENTIFIER = _bundle_identifier
PROVISIONING_PROFILE_SPECIFIER = _ios_provisioning_profile_info.name
}
@@ -459,7 +464,7 @@ template("create_signed_bundle") {
# rebase_path here unless using Goma RBE and system Xcode (as in that
# case the system framework are found via a symlink in root_build_dir).
foreach(_framework, invoker.extra_system_frameworks) {
- if (use_system_xcode && use_goma) {
+ if (use_system_xcode && (use_goma || use_remoteexec)) {
_framework_path = rebase_path(_framework, root_build_dir)
} else {
_framework_path = _framework
@@ -528,11 +533,7 @@ template("ios_info_plist") {
apple_info_plist(target_name) {
format = "binary1"
- extra_substitutions = []
- if (defined(invoker.extra_substitutions)) {
- extra_substitutions = invoker.extra_substitutions
- }
- extra_substitutions += [
+ extra_substitutions = [
"IOS_BUNDLE_ID_PREFIX=$ios_app_bundle_id_prefix",
"IOS_PLATFORM_BUILD=$ios_platform_build",
"IOS_PLATFORM_NAME=$ios_sdk_name",
@@ -545,6 +546,9 @@ template("ios_info_plist") {
"XCODE_BUILD=$xcode_build",
"XCODE_VERSION=$xcode_version",
]
+ if (defined(invoker.extra_substitutions)) {
+ extra_substitutions += invoker.extra_substitutions
+ }
plist_templates = [
"//build/config/ios/BuildInfo.plist",
_info_plist,
@@ -615,10 +619,11 @@ template("ios_info_plist") {
# variant with the same binary but the correct bundle_deps, the bundle
# at $target_out_dir/$output_name will be a copy of the first variant.
#
-# xcode_product_bundle_id:
-# (optional) string, the bundle ID that will be added in the XCode
-# attributes to enable some features when debugging (e.g. MetricKit).
-# defaults to "$ios_app_bundle_id_prefix.$output_name".
+# bundle_identifier:
+# (optional) string, value of CFBundleIdentifier in the application
+# Info.plist, defaults to "$ios_app_bundle_id_prefix.$output_name"
+# if omitted. Will be used to set BUNDLE_IDENTIFIER when generating
+# the application Info.plist
#
# For more information, see "gn help executable".
template("ios_app_bundle") {
@@ -637,19 +642,17 @@ template("ios_app_bundle") {
!defined(invoker.bundle_extension),
"bundle_extension must not be set for ios_app_bundle template for $target_name")
- _xcode_product_bundle_id = "$ios_app_bundle_id_prefix.$_output_name"
- if (defined(invoker.xcode_product_bundle_id)) {
- _xcode_product_bundle_id = invoker.xcode_product_bundle_id
- _xcode_product_bundle_id =
- "$ios_app_bundle_id_prefix.$_xcode_product_bundle_id"
- } else if (defined(invoker.bundle_id)) {
- _xcode_product_bundle_id = invoker.bundle_id
+ if (defined(invoker.bundle_identifier)) {
+ _bundle_identifier = invoker.bundle_identifier
+ assert(_bundle_identifier == string_replace(_bundle_identifier, "_", "-"),
+ "$target_name: bundle_identifier does not respect rfc1034: " +
+ _bundle_identifier)
+ } else {
+ # Bundle identifier should respect rfc1034, so replace "_" with "-".
+ _bundle_identifier =
+ "$ios_app_bundle_id_prefix." + string_replace(_output_name, "_", "-")
}
- # Bundle ID should respect rfc1034 and replace _ with -.
- _xcode_product_bundle_id =
- string_replace("$_xcode_product_bundle_id", "_", "-")
-
_arch_executable_source = _target_name + "_arch_executable_sources"
_arch_executable_target = _target_name + "_arch_executable"
_lipo_executable_target = _target_name + "_executable"
@@ -826,12 +829,16 @@ template("ios_app_bundle") {
ios_info_plist(_generate_info_plist) {
forward_variables_from(invoker,
[
- "extra_substitutions",
"info_plist",
"info_plist_target",
])
executable_name = _output_name
+
+ extra_substitutions = [ "BUNDLE_IDENTIFIER=$_bundle_identifier" ]
+ if (defined(invoker.extra_substitutions)) {
+ extra_substitutions += invoker.extra_substitutions
+ }
}
if (!is_fat_secondary_toolchain) {
@@ -926,7 +933,7 @@ template("ios_app_bundle") {
bundle_binary_output = _output_name
bundle_extension = _bundle_extension
product_type = _product_type
- xcode_product_bundle_id = _xcode_product_bundle_id
+ xcode_product_bundle_id = _bundle_identifier
_generate_info_plist_outputs =
get_target_outputs(":$_generate_info_plist")
@@ -1536,6 +1543,10 @@ template("ios_framework_bundle") {
arch_binary_output = _output_name
}
+ # Bundle identifier should respect rfc1034, so replace "_" with "-".
+ _bundle_identifier =
+ "$ios_app_bundle_id_prefix." + string_replace(_output_name, "_", "-")
+
_info_plist_target = _target_name + "_info_plist"
_info_plist_bundle = _target_name + "_info_plist_bundle"
ios_info_plist(_info_plist_target) {
@@ -1543,10 +1554,14 @@ template("ios_framework_bundle") {
executable_name = _output_name
forward_variables_from(invoker,
[
- "extra_substitutions",
"info_plist",
"info_plist_target",
])
+
+ extra_substitutions = [ "BUNDLE_IDENTIFIER=$_bundle_identifier" ]
+ if (defined(invoker.extra_substitutions)) {
+ extra_substitutions += invoker.extra_substitutions
+ }
}
bundle_data(_info_plist_bundle) {
@@ -1756,6 +1771,10 @@ template("ios_xctest_bundle") {
_info_plist_target = _target_name + "_info_plist"
_info_plist_bundle = _target_name + "_info_plist_bundle"
+ # Bundle identifier should respect rfc1034, so replace "_" with "-".
+ _bundle_identifier = "$ios_app_bundle_id_prefix.gtest." +
+ string_replace(_output_name, "_", "-")
+
ios_info_plist(_info_plist_target) {
testonly = true
visibility = [ ":$_info_plist_bundle" ]
@@ -1771,7 +1790,7 @@ template("ios_xctest_bundle") {
}
extra_substitutions = [
"XCTEST_BUNDLE_PRINCIPAL_CLASS=${_principal_class}",
- "MODULE_BUNDLE_ID=gtest.$_output_name",
+ "BUNDLE_IDENTIFIER=$_bundle_identifier",
]
}
@@ -1817,14 +1836,11 @@ template("ios_xctest_bundle") {
bundle_binary_output = _output_name
if (ios_set_attributes_for_xcode_project_generation) {
- _xcode_product_bundle_id =
- "$ios_app_bundle_id_prefix.gtest.$_output_name"
-
_ios_provisioning_profile_info =
exec_script("//build/config/ios/codesign.py",
[
"find-provisioning-profile",
- "-b=" + _xcode_product_bundle_id,
+ "-b=" + _bundle_identifier,
],
"json")
@@ -1832,7 +1848,7 @@ template("ios_xctest_bundle") {
IPHONEOS_DEPLOYMENT_TARGET = ios_deployment_target
CODE_SIGN_IDENTITY = "iPhone Developer"
DEVELOPMENT_TEAM = _ios_provisioning_profile_info.team_identifier
- PRODUCT_BUNDLE_IDENTIFIER = _xcode_product_bundle_id
+ PRODUCT_BUNDLE_IDENTIFIER = _bundle_identifier
PROVISIONING_PROFILE_SPECIFIER = _ios_provisioning_profile_info.name
# For XCUITest, Xcode requires specifying the host application name
@@ -1930,7 +1946,12 @@ template("ios_xctest_test") {
}
ios_app_bundle(_host_target) {
- forward_variables_from(invoker, "*", [ "testonly" ])
+ forward_variables_from(invoker,
+ "*",
+ [
+ "testonly",
+ "bundle_identifier",
+ ])
testonly = true
output_name = _host_output
@@ -2001,6 +2022,10 @@ template("ios_xcuitest_test_runner_bundle") {
_output_name = invoker.output_name
}
+ # Bundle identifier should respect rfc1034, so replace "_" with "-".
+ _bundle_identifier = "$ios_app_bundle_id_prefix.gtest." +
+ string_replace(_output_name, "_", "-")
+
_xctrunner_path =
"$ios_sdk_platform_path/Developer/Library/Xcode/Agents/XCTRunner.app"
@@ -2030,7 +2055,7 @@ template("ios_xcuitest_test_runner_bundle") {
"-o=" + rebase_path(_output_name, root_build_dir),
] + rebase_path(sources, root_build_dir)
- if (use_system_xcode && use_goma) {
+ if (use_system_xcode && (use_goma || use_remoteexec)) {
deps = [ "//build/config/ios:copy_xctrunner_app" ]
}
}
@@ -2041,6 +2066,7 @@ template("ios_xcuitest_test_runner_bundle") {
executable_name = _output_name
info_plist_target = ":$_info_plist_merge_plist"
+ extra_substitutions = [ "BUNDLE_IDENTIFIER=$_bundle_identifier" ]
}
bundle_data(_info_plist_bundle) {
@@ -2062,7 +2088,7 @@ template("ios_xcuitest_test_runner_bundle") {
outputs = [ "{{bundle_contents_dir}}/PkgInfo" ]
- if (use_system_xcode && use_goma) {
+ if (use_system_xcode && (use_goma || use_remoteexec)) {
public_deps = [ "//build/config/ios:copy_xctrunner_app" ]
}
}
diff --git a/chromium/build/config/ios/swift_source_set.gni b/chromium/build/config/ios/swift_source_set.gni
new file mode 100644
index 00000000000..0fa8bce47ca
--- /dev/null
+++ b/chromium/build/config/ios/swift_source_set.gni
@@ -0,0 +1,22 @@
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Defines a template for Swift source files. The default module_name
+# of the target is the entire target label (without the leading //)
+# with all "/" and ":" replaced with "_".
+template("swift_source_set") {
+ _target_name = target_name
+ source_set(target_name) {
+ forward_variables_from(invoker, "*", TESTONLY_AND_VISIBILITY)
+ forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+ if (!defined(module_name)) {
+ _target_label = get_label_info(":$_target_name", "label_no_toolchain")
+
+ # Strip the // from the beginning of the label.
+ _target_label = string_replace(_target_label, "//", "", 1)
+ module_name =
+ string_replace(string_replace(_target_label, "/", "_"), ":", "_")
+ }
+ }
+}
diff --git a/chromium/build/config/linux/gtk/BUILD.gn b/chromium/build/config/linux/gtk/BUILD.gn
index ecf95dda763..65900239718 100644
--- a/chromium/build/config/linux/gtk/BUILD.gn
+++ b/chromium/build/config/linux/gtk/BUILD.gn
@@ -45,6 +45,7 @@ group("gtk") {
"//remoting/host/it2me:common",
"//remoting/host/it2me:remote_assistance_host",
"//remoting/host/linux",
+ "//remoting/host/remote_open_url:common",
"//remoting/test:it2me_standalone_host_main",
"//webrtc/examples:peerconnection_client",
]
diff --git a/chromium/build/config/logging.gni b/chromium/build/config/logging.gni
index 25fe9913437..c2f94e929ed 100644
--- a/chromium/build/config/logging.gni
+++ b/chromium/build/config/logging.gni
@@ -9,4 +9,5 @@ declare_args() {
# Use LogErrorNotReached() for NOTREACHED().
enable_log_error_not_reached =
is_chromeos_ash && !(is_debug || dcheck_always_on)
+ enable_stack_trace_line_numbers = false
}
diff --git a/chromium/build/config/mac/BUILD.gn b/chromium/build/config/mac/BUILD.gn
index 8005ed54044..0fad7261eec 100644
--- a/chromium/build/config/mac/BUILD.gn
+++ b/chromium/build/config/mac/BUILD.gn
@@ -6,6 +6,8 @@ import("//build/config/apple/symbols.gni")
import("//build/config/c++/c++.gni")
import("//build/config/mac/mac_sdk.gni")
import("//build/config/sysroot.gni")
+import("//build/toolchain/goma.gni")
+import("//build/toolchain/rbe.gni")
# This is included by reference in the //build/config/compiler config that
# is applied to all targets. It is here to separate out the logic.
@@ -117,7 +119,7 @@ config("strip_all") {
#
# The symbolic link for $mac_sdk_path is set up by
# //build/config/apple/sdk_info.py in //build/config/mac/mac_sdk.gni.
-if (use_system_xcode && use_goma && target_os == "mac" &&
+if (use_system_xcode && (use_goma || use_remoteexec) && target_os == "mac" &&
current_toolchain == default_toolchain) {
action("sdk_inputs") {
script = "//build/noop.py"
diff --git a/chromium/build/config/mac/mac_sdk.gni b/chromium/build/config/mac/mac_sdk.gni
index 19aa066743e..d9e9bfc46b9 100644
--- a/chromium/build/config/mac/mac_sdk.gni
+++ b/chromium/build/config/mac/mac_sdk.gni
@@ -6,6 +6,7 @@ import("//build/config/chrome_build.gni")
import("//build/config/gclient_args.gni")
import("//build/config/mac/mac_sdk_overrides.gni")
import("//build/toolchain/goma.gni")
+import("//build/toolchain/rbe.gni")
import("//build/toolchain/toolchain.gni")
assert(current_os == "mac" || current_toolchain == default_toolchain)
@@ -80,7 +81,7 @@ if (!use_system_xcode) {
# Goma RBE requires paths relative to source directory. When using system
# Xcode, this is done by creating symbolic links in root_build_dir.
-if (use_system_xcode && use_goma) {
+if (use_system_xcode && (use_goma || use_remoteexec)) {
sdk_info_args += [
"--get_sdk_info",
"--create_symlink_at",
@@ -92,7 +93,7 @@ sdk_info_args += [ mac_sdk_name ]
_mac_sdk_result = exec_script(script_name, sdk_info_args, "scope")
xcode_version = _mac_sdk_result.xcode_version
xcode_build = _mac_sdk_result.xcode_build
-if (mac_sdk_path == "" && use_system_xcode && use_goma) {
+if (mac_sdk_path == "" && use_system_xcode && (use_goma || use_remoteexec)) {
mac_sdk_path = _mac_sdk_result.sdk_path
}
diff --git a/chromium/build/config/mac/rules.gni b/chromium/build/config/mac/rules.gni
index fa9eebb192d..03073f83040 100644
--- a/chromium/build/config/mac/rules.gni
+++ b/chromium/build/config/mac/rules.gni
@@ -40,11 +40,7 @@ template("mac_info_plist") {
apple_info_plist(target_name) {
format = "xml1"
- extra_substitutions = []
- if (defined(invoker.extra_substitutions)) {
- extra_substitutions = invoker.extra_substitutions
- }
- extra_substitutions += [
+ extra_substitutions = [
"MAC_SDK_BUILD=$mac_sdk_version",
"MAC_SDK_NAME=$mac_sdk_name$mac_sdk_version",
"MACOSX_DEPLOYMENT_TARGET=$mac_deployment_target",
@@ -52,6 +48,9 @@ template("mac_info_plist") {
"XCODE_BUILD=$xcode_build",
"XCODE_VERSION=$xcode_version",
]
+ if (defined(invoker.extra_substitutions)) {
+ extra_substitutions += invoker.extra_substitutions
+ }
plist_templates = [
"//build/config/mac/BuildInfo.plist",
_info_plist,
@@ -303,7 +302,7 @@ template("mac_framework_bundle") {
_framework_public_config = _target_name + "_public_config"
config(_framework_public_config) {
- visibility = [ ":$_framework_target" ]
+ visibility = [ ":$_framework_target+link" ]
framework_dirs = [ root_out_dir ]
frameworks = [ _framework_name ]
}
@@ -438,6 +437,10 @@ template("mac_app_bundle") {
_output_extension = "xpc"
_product_type = "com.apple.product-type.xpc-service"
_write_pkg_info = false
+ } else if (_package_type == "bundle") {
+ _output_extension = "bundle"
+ _product_type = "com.apple.product-type.bundle"
+ _write_pkg_info = false
} else {
assert(false, "Unsupported packge_type: " + packge_type)
}
diff --git a/chromium/build/config/nacl/config.gni b/chromium/build/config/nacl/config.gni
index 77e15fc51cb..6873055c61d 100644
--- a/chromium/build/config/nacl/config.gni
+++ b/chromium/build/config/nacl/config.gni
@@ -3,10 +3,16 @@
# found in the LICENSE file.
declare_args() {
- # Native Client supports both Newlib and Glibc C libraries where Newlib
- # is assumed to be the default one; use this to determine whether Glibc
- # is being used instead.
+ # Native Client supports multiple toolchains:
+ # - nacl_glibc, based on gcc and glibc.
+ # - pnacl_newlib, based on llvm 3.7 and newlib (default).
+ # - saigo_newlib, based on llvm 12+ and newlib.
+
+ # True if nacl_glibc is used.
is_nacl_glibc = false
+
+ # True if saigo_newlib is used.
+ is_nacl_saigo = false
}
is_nacl_irt = false
diff --git a/chromium/build/config/ozone.gni b/chromium/build/config/ozone.gni
index b9107f4bc90..9f94d11b308 100644
--- a/chromium/build/config/ozone.gni
+++ b/chromium/build/config/ozone.gni
@@ -4,10 +4,16 @@
import("//build/config/chromecast_build.gni")
import("//build/config/chromeos/ui_mode.gni")
-import("//build/config/ui.gni")
import("//build/toolchain/toolchain.gni")
declare_args() {
+ # Indicates if Ozone is enabled. Ozone is a low-level library layer for Linux
+ # that does not require X11.
+ use_ozone =
+ is_chromeos || (is_chromecast && !is_android) || is_fuchsia || is_linux
+}
+
+declare_args() {
# Ozone extra platforms file path. Can be overridden to build out of
# tree ozone platforms.
ozone_extra_path = "//build/config/ozone_extra.gni"
@@ -106,6 +112,16 @@ declare_args() {
}
}
+declare_args() {
+ # Deprecated. Ozone/X11 is default path on Linux now. However, there are still
+ # some components like remoting, angle, webrtc (desktop capture) that relies
+ # on this gn arg and on the USE_X11 define. This will be gradually removed
+ # once all the other places are fixed.
+ # TODO(1096425): remove use_x11.
+ use_x11 =
+ ozone_platform_x11 && is_linux && !is_chromecast && !is_chromeos_lacros
+}
+
import(ozone_extra_path)
_ozone_extra_directory = get_path_info(ozone_extra_path, "dir")
@@ -132,3 +148,5 @@ ozone_platform_gbm = ozone_platform_drm
if (ozone_platform == "gbm") {
ozone_platform = "drm"
}
+
+assert(use_x11 == ozone_platform_x11 || !is_linux || is_chromecast)
diff --git a/chromium/build/config/pch.gni b/chromium/build/config/pch.gni
index 3afd6393ca0..efc8a3be36d 100644
--- a/chromium/build/config/pch.gni
+++ b/chromium/build/config/pch.gni
@@ -10,5 +10,6 @@ declare_args() {
# but for distributed build system uses (like goma or rbe) or when
# doing official builds.
# On Linux it slows down the build, so don't enable it by default.
- enable_precompiled_headers = !is_official_build && !(use_goma || use_rbe) && !is_linux
+ enable_precompiled_headers =
+ !is_official_build && !(use_goma || use_remoteexec) && !is_linux
}
diff --git a/chromium/build/config/profiling/OWNERS b/chromium/build/config/profiling/OWNERS
index 225ce184ca7..3d69566fd59 100644
--- a/chromium/build/config/profiling/OWNERS
+++ b/chromium/build/config/profiling/OWNERS
@@ -1,3 +1,2 @@
liaoyuke@chromium.org
sajjadm@chromium.org
-sebmarchand@chromium.org
diff --git a/chromium/build/config/rust.gni b/chromium/build/config/rust.gni
new file mode 100644
index 00000000000..95b6a14b791
--- /dev/null
+++ b/chromium/build/config/rust.gni
@@ -0,0 +1,138 @@
+# Copyright 2021 The Chromium Project. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/chrome_build.gni")
+import("//build/config/compiler/compiler.gni")
+import("//build/toolchain/toolchain.gni")
+
+if (is_android) {
+ import("//build/config/android/config.gni")
+}
+
+declare_args() {
+ # Whether to allow Rust code to be part of the Chromium *build process*.
+ # This can be used to create Rust test binaries, even if the flag below
+ # is false.
+ enable_rust = false
+
+ # Whether to allow Rust code to contribute to the main Chromium binaries.
+ enable_rust_in_chromium = false
+
+ # Use unverified, untrusted, Rust toolchains from the internet
+ # (which support more platforms and options than those we trust for real
+ # builds).
+ use_unverified_rust_toolchain = false
+
+ # If using an unverified Rust toolchain, use this prefix for where to find
+ # the binaries.
+ rust_bin_dir = ""
+
+ # Use LTO when using rustc to link binaries. Experimental. Currently incompatible
+ # with the options we use in our C++ toolchain to split LTO units.
+ # This has no effect on the production of normal Chrome binaries, which are
+ # linked by clang/lld rather than rustc.
+ # https://crbug.com/1229419
+ use_lto_in_rustc_linking = false
+
+ # Use goma for Rust builds. Experimental. The only known problem is
+ # b/193072381, but then again, we don't expect a build speedup before much
+ # more work is done.
+ use_goma_rust = false
+}
+
+# Rust code may end up being linked into a final executable by:
+# * rustc (which calls lld)
+# * our pre-existing C++ linker invocations
+# At the moment, this first pipeline is incompatible with the ldflags we use
+# for thin LTO, due to some problem in escaping gn rules. There's a further
+# problem with -lunwind on Android.
+# However, Rust code is still useful if it's contributing to our existing
+# C++ linker invocations, so this doesn't disable Rust entirely. It does
+# disable Rust unit test executables, so we do need to fix this.
+# https://crbug.com/1229423
+rustc_can_link = !use_thin_lto && !is_android
+
+# Has a Rust toolchain available in the build by default.
+toolchain_has_official_rust =
+ (!is_nacl &&
+ (is_android && (current_cpu == "arm" || current_cpu == "arm64" ||
+ current_cpu == "x64" || current_cpu == "x86"))) ||
+ (is_linux && current_cpu == "x64")
+
+toolchain_has_rust = enable_rust && (toolchain_has_official_rust ||
+ use_unverified_rust_toolchain)
+
+# We use the Rust linker for building test executables, so we only build them
+# if we're able to use the Rust linker. We could use the C++ linker for this
+# too, we've just not set up GN to do so at the moment.
+build_rust_unit_tests = rustc_can_link
+
+if (use_unverified_rust_toolchain) {
+ assert(rust_bin_dir != "")
+ rust_prefix = "$rust_bin_dir/"
+} else if (toolchain_has_official_rust) {
+ if (host_os != "linux") {
+ assert(false,
+ "Attempt to use standard Rust toolchain on an unsupported platform")
+ }
+ rust_prefix =
+ rebase_path("//third_party/android_rust_toolchain/toolchain/bin/")
+}
+
+assert(!toolchain_has_rust || defined(rust_prefix))
+
+# Figure out the Rust target triple (aka 'rust_abi_target')
+#
+# This is here rather than in the toolchain files because it's used
+# also by //build/rust/std to find the Rust standard library.
+#
+# The list of architectures supported by Rust is here:
+# https://doc.rust-lang.org/nightly/rustc/platform-support.html
+# Although most of these are not yet supported by our limited
+# official Rust toolchain (see 'toolchain_has_official_rust' above)
+# it's useful to be able to experiment with our other platforms,
+# so we try to be comprehensive here.
+#
+# It's OK if rust_abi_target is blank. That means we're building for the host
+# and the host stdlib will be used.
+rust_abi_target = ""
+if (is_android) {
+ import("//build/config/android/abi.gni")
+ rust_abi_target = android_abi_target
+ if (rust_abi_target == "arm-linux-androideabi") {
+ # Android clang target specifications mostly match Rust, but this
+ # is an exception
+ rust_abi_target = "armv7-linux-androideabi"
+ }
+} else if (is_fuchsia) {
+ if (current_cpu == "arm64") {
+ rust_abi_target = "aarch64-fuchsia"
+ } else if (current_cpu == "x64") {
+ rust_abi_target = "x86_64-fuchsia"
+ } else {
+ assert(false, "Architecture not supported")
+ }
+} else if (is_ios) {
+ if (current_cpu == "arm64") {
+ rust_abi_target = "aarch64-apple-ios"
+ } else if (current_cpu == "arm") {
+ # There's also an armv7s-apple-ios, which targets a more recent ARMv7
+ # generation CPU found in later iPhones. We'll go with the older one for
+ # maximal compatibility. As we come to support all the different platforms
+ # with Rust, we might want to be more precise here.
+ rust_abi_target = "armv7-apple-ios"
+ } else if (current_cpu == "x64") {
+ rust_abi_target = "x86_64-apple-ios"
+ } else if (current_cpu == "x86") {
+ rust_abi_target = "i386-apple-ios"
+ } else {
+ assert(false, "Architecture not supported")
+ }
+}
+
+# Arguments for Rust invocation.
+# This is common between gcc/clang, Mac and Windows toolchains so specify once,
+# here. This is not the complete command-line: toolchains should add -o
+# and probably --emit arguments too.
+rustc_common_args = "--crate-name {{crate_name}} {{source}} --crate-type {{crate_type}} {{rustflags}} {{rustdeps}} {{externs}}"
diff --git a/chromium/build/config/sanitizers/sanitizers.gni b/chromium/build/config/sanitizers/sanitizers.gni
index 576bf83c630..8d26d0054cc 100644
--- a/chromium/build/config/sanitizers/sanitizers.gni
+++ b/chromium/build/config/sanitizers/sanitizers.gni
@@ -126,6 +126,9 @@ declare_args() {
is_ubsan_vptr = is_ubsan_security
}
+assert(!is_hwasan || (target_os == "android" && target_cpu == "arm64"),
+ "HWASan only supported on Android ARM64 builds.")
+
# Disable sanitizers for non-target toolchains.
if (!is_a_target_toolchain) {
is_asan = false
@@ -147,6 +150,8 @@ if (!is_a_target_toolchain) {
use_libfuzzer = false
use_locally_built_instrumented_libraries = false
use_sanitizer_coverage = false
+} else if (current_cpu != "arm64") {
+ is_hwasan = false
}
# Use dynamic libraries instrumented by one of the sanitizers instead of the
@@ -229,9 +234,6 @@ if (use_libfuzzer && (is_linux || is_chromeos)) {
assert(!is_debug || !(is_msan || is_ubsan || is_ubsan_null || is_ubsan_vptr),
"Sanitizers should generally be used in release (set is_debug=false).")
-assert(!is_hwasan || (is_android && current_cpu == "arm64"),
- "HWASan only supported on Android ARM64 builds.")
-
assert(!is_msan || ((is_linux || is_chromeos) && current_cpu == "x64"),
"MSan currently only works on 64-bit Linux and ChromeOS builds.")
diff --git a/chromium/build/config/ui.gni b/chromium/build/config/ui.gni
index f26759808f5..55478b02cd0 100644
--- a/chromium/build/config/ui.gni
+++ b/chromium/build/config/ui.gni
@@ -20,18 +20,9 @@
import("//build/config/chromecast_build.gni")
import("//build/config/chromeos/args.gni")
import("//build/config/chromeos/ui_mode.gni")
+import("//build/config/ozone.gni")
declare_args() {
- # Indicates if Ozone is enabled. Ozone is a low-level library layer for Linux
- # that does not require X11.
- use_ozone =
- is_chromeos || (is_chromecast && !is_android) || is_fuchsia || is_linux
-
- # Indicates if the UI toolkit depends on X11.
- # Enabled by default. Can be disabled if Ozone only build is required and
- # vice-versa.
- use_x11 = is_linux && !is_chromecast && !is_chromeos_lacros
-
# Indicates if Aura is enabled. Aura is a low-level windowing library, sort
# of a replacement for GDI or GTK.
use_aura = is_win || is_linux || is_chromeos || is_fuchsia
@@ -46,7 +37,8 @@ declare_args() {
assert(!use_glib || (is_linux && !is_chromecast))
-use_atk = is_linux && !is_chromecast && use_glib && current_toolchain == default_toolchain
+use_atk = is_linux && !is_chromecast && use_glib &&
+ current_toolchain == default_toolchain
# Whether using Xvfb to provide a display server for a test might be
# necessary.
diff --git a/chromium/build/docs/rust_toolchain.md b/chromium/build/docs/rust_toolchain.md
new file mode 100644
index 00000000000..6217fd5af66
--- /dev/null
+++ b/chromium/build/docs/rust_toolchain.md
@@ -0,0 +1,21 @@
+# Rust toolchain
+
+Chrome currently uses an experimental Rust toolchain built by the Android
+team, which supports only Linux and Android builds.
+
+To build Rust code on other platforms for development/experimentation, add the
+following to your `gn args`:
+
+```
+use_unverified_rust_toolchain=true
+rust_bin_dir="<path-to>/.cargo/bin"
+```
+
+## Using VSCode, rust-analyzer etc.
+
+Any IDE which supports rust-analyzer should be able to ingest metadata from gn
+about the structure of our Rust project. Do this:
+
+* `gn gen out/Debug/ --export-rust-project`
+* `ln -s out/Debug/rust-project.json rust-project.json`, i.e. symlink the
+ `rust-project.json` file to the root of the Chromium src directory.
diff --git a/chromium/build/fuchsia/COMMON_METADATA b/chromium/build/fuchsia/COMMON_METADATA
new file mode 100644
index 00000000000..fe8198aeaa3
--- /dev/null
+++ b/chromium/build/fuchsia/COMMON_METADATA
@@ -0,0 +1,5 @@
+monorail {
+ component: "Fuchsia"
+}
+
+team_email: "cr-fuchsia@chromium.org"
diff --git a/chromium/build/fuchsia/DIR_METADATA b/chromium/build/fuchsia/DIR_METADATA
index fe8198aeaa3..05b12cfcad5 100644
--- a/chromium/build/fuchsia/DIR_METADATA
+++ b/chromium/build/fuchsia/DIR_METADATA
@@ -1,5 +1,2 @@
-monorail {
- component: "Fuchsia"
-}
+mixins: "//build/fuchsia/COMMON_METADATA"
-team_email: "cr-fuchsia@chromium.org"
diff --git a/chromium/build/fuchsia/PRESUBMIT.py b/chromium/build/fuchsia/PRESUBMIT.py
index 4cdd4d021a7..d21d7b0e7be 100644
--- a/chromium/build/fuchsia/PRESUBMIT.py
+++ b/chromium/build/fuchsia/PRESUBMIT.py
@@ -20,8 +20,7 @@ def CommonChecks(input_api, output_api):
input_api.canned_checks.GetUnitTests(
input_api,
output_api,
- unit_tests=[J('boot_data_test.py'),
- J('fvdl_target_test.py')],
+ unit_tests=[J('fvdl_target_test.py')],
run_on_python2=False,
run_on_python3=True))
return input_api.RunTests(tests)
diff --git a/chromium/build/fuchsia/aemu_target.py b/chromium/build/fuchsia/aemu_target.py
index 67170058150..9f86c77ef87 100644
--- a/chromium/build/fuchsia/aemu_target.py
+++ b/chromium/build/fuchsia/aemu_target.py
@@ -20,19 +20,19 @@ def GetTargetType():
class AemuTarget(qemu_target.QemuTarget):
EMULATOR_NAME = 'aemu'
- def __init__(self, out_dir, target_cpu, system_log_file, cpu_cores,
- require_kvm, ram_size_mb, enable_graphics, hardware_gpu):
- super(AemuTarget, self).__init__(out_dir, target_cpu, system_log_file,
- cpu_cores, require_kvm, ram_size_mb)
+ def __init__(self, out_dir, target_cpu, cpu_cores, require_kvm, ram_size_mb,
+ enable_graphics, hardware_gpu, logs_dir):
+ super(AemuTarget, self).__init__(out_dir, target_cpu, cpu_cores,
+ require_kvm, ram_size_mb, logs_dir)
self._enable_graphics = enable_graphics
self._hardware_gpu = hardware_gpu
@staticmethod
def CreateFromArgs(args):
- return AemuTarget(args.out_dir, args.target_cpu, args.system_log_file,
- args.cpu_cores, args.require_kvm, args.ram_size_mb,
- args.enable_graphics, args.hardware_gpu)
+ return AemuTarget(args.out_dir, args.target_cpu, args.cpu_cores,
+ args.require_kvm, args.ram_size_mb, args.enable_graphics,
+ args.hardware_gpu, args.logs_dir)
@staticmethod
def RegisterArgs(arg_parser):
diff --git a/chromium/build/fuchsia/boot_data.py b/chromium/build/fuchsia/boot_data.py
index 1e29a080349..f9b4c464ca2 100644
--- a/chromium/build/fuchsia/boot_data.py
+++ b/chromium/build/fuchsia/boot_data.py
@@ -35,64 +35,54 @@ TARGET_TYPE_QEMU = 'qemu'
# Specifies boot files intended for use by anything (incl. physical devices).
TARGET_TYPE_GENERIC = 'generic'
-def _GetPubKeyPath(output_dir):
- """Returns a path to the generated SSH public key."""
+# Defaults used by Fuchsia SDK
+_SSH_DIR = os.path.expanduser('~/.ssh')
+_SSH_CONFIG_DIR = os.path.expanduser('~/.fuchsia')
- return os.path.join(output_dir, 'fuchsia_ed25519.pub')
+def _GetAuthorizedKeysPath():
+ """Returns a path to the authorized keys which get copied to your Fuchsia
+ device during paving"""
-def ProvisionSSH(output_dir):
- """Generates a keypair and config file for SSH."""
+ return os.path.join(_SSH_DIR, 'fuchsia_authorized_keys')
- fuchsia_authorized_keys_path = os.path.join(output_dir,
- 'fuchsia_authorized_keys')
- id_key_path = os.path.join(output_dir, 'fuchsia_ed25519')
- _GetPubKeyPath(output_dir)
- ssh_config_path = os.path.join(output_dir, 'ssh_config')
- logging.debug('Generating SSH credentials.')
+def ProvisionSSH():
+ """Generates a key pair and config file for SSH using the GN SDK."""
- if not os.path.isfile(id_key_path):
- subprocess.check_output([
- 'ssh-keygen', '-P', '', '-t', 'ed25519', '-f', id_key_path, '-C',
- 'generated by FEMU Start testing step'
- ])
-
- if not os.path.isfile(fuchsia_authorized_keys_path):
- result = subprocess.check_output(['ssh-keygen', '-y', '-f', id_key_path])
- with open(fuchsia_authorized_keys_path, 'w') as out:
- out.write(result.decode('utf-8'))
-
- with open(ssh_config_path, "w") as ssh_config:
- ssh_config.write(
- _SSH_CONFIG_TEMPLATE.format(identity=id_key_path))
+ returncode, out, err = common.RunGnSdkFunction('fuchsia-common.sh',
+ 'check-fuchsia-ssh-config')
+ if returncode != 0:
+ logging.error('Command exited with error code %d' % (returncode))
+ logging.error('Stdout: %s' % out)
+ logging.error('Stderr: %s' % err)
+ raise Exception('Failed to provision ssh keys')
def GetTargetFile(filename, target_arch, target_type):
"""Computes a path to |filename| in the Fuchsia boot image directory specific
to |target_type| and |target_arch|."""
- assert target_type == TARGET_TYPE_QEMU or target_type == TARGET_TYPE_GENERIC
-
return os.path.join(common.IMAGES_ROOT, target_arch, target_type, filename)
-def GetSSHConfigPath(output_dir):
- return output_dir + '/ssh_config'
+def GetSSHConfigPath():
+ return os.path.join(_SSH_CONFIG_DIR, 'sshconfig')
def GetBootImage(output_dir, target_arch, target_type):
""""Gets a path to the Zircon boot image, with the SSH client public key
added."""
-
- ProvisionSSH(output_dir)
- pubkey_path = _GetPubKeyPath(output_dir)
+ ProvisionSSH()
+ authkeys_path = _GetAuthorizedKeysPath()
zbi_tool = common.GetHostToolPathFromPlatform('zbi')
image_source_path = GetTargetFile('zircon-a.zbi', target_arch, target_type)
image_dest_path = os.path.join(output_dir, 'gen', 'fuchsia-with-keys.zbi')
- cmd = [ zbi_tool, '-o', image_dest_path, image_source_path,
- '-e', 'data/ssh/authorized_keys=' + pubkey_path ]
+ cmd = [
+ zbi_tool, '-o', image_dest_path, image_source_path, '-e',
+ 'data/ssh/authorized_keys=' + authkeys_path
+ ]
subprocess.check_call(cmd)
return image_dest_path
diff --git a/chromium/build/fuchsia/boot_data_test.py b/chromium/build/fuchsia/boot_data_test.py
index 8343f2ff380..ffa88cc120b 100755
--- a/chromium/build/fuchsia/boot_data_test.py
+++ b/chromium/build/fuchsia/boot_data_test.py
@@ -5,21 +5,41 @@
import boot_data
import os
-import tempfile
import unittest
+from boot_data import _SSH_CONFIG_DIR, _SSH_DIR, _GetAuthorizedKeysPath, \
+ GetSSHConfigPath
class TestBootData(unittest.TestCase):
def testProvisionSSHGeneratesFiles(self):
- with tempfile.TemporaryDirectory() as temp_dir:
- boot_data.ProvisionSSH(temp_dir)
- fuchsia_authorized_keys_path = os.path.join(temp_dir,
- 'fuchsia_authorized_keys')
- self.assertTrue(os.path.exists(fuchsia_authorized_keys_path))
- fuchsia_id_key_path = os.path.join(temp_dir, 'fuchsia_ed25519')
- self.assertTrue(os.path.exists(fuchsia_id_key_path))
- ssh_config_path = os.path.join(temp_dir, 'ssh_config')
- self.assertTrue(os.path.exists(ssh_config_path))
+ fuchsia_authorized_keys_path = _GetAuthorizedKeysPath()
+ fuchsia_id_key_path = os.path.join(_SSH_DIR, 'fuchsia_ed25519')
+ fuchsia_pub_key_path = os.path.join(_SSH_DIR, 'fuchsia_ed25519.pub')
+ ssh_config_path = GetSSHConfigPath()
+ # Check if the keys exists before generating. If they do, delete them
+ # afterwards before asserting if ProvisionSSH works.
+ authorized_key_before = os.path.exists(fuchsia_authorized_keys_path)
+ id_keys_before = os.path.exists(fuchsia_id_key_path)
+ pub_keys_before = os.path.exists(fuchsia_pub_key_path)
+ ssh_config_before = os.path.exists(ssh_config_path)
+ ssh_dir_before = os.path.exists(_SSH_CONFIG_DIR)
+ boot_data.ProvisionSSH()
+ authorized_key_after = os.path.exists(fuchsia_authorized_keys_path)
+ id_keys_after = os.path.exists(fuchsia_id_key_path)
+ ssh_config_after = os.path.exists(ssh_config_path)
+ if not authorized_key_before:
+ os.remove(fuchsia_authorized_keys_path)
+ if not id_keys_before:
+ os.remove(fuchsia_id_key_path)
+ if not pub_keys_before:
+ os.remove(fuchsia_pub_key_path)
+ if not ssh_config_before:
+ os.remove(ssh_config_path)
+ if not ssh_dir_before:
+ os.rmdir(_SSH_CONFIG_DIR)
+ self.assertTrue(os.path.exists(authorized_key_after))
+ self.assertTrue(os.path.exists(id_keys_after))
+ self.assertTrue(os.path.exists(ssh_config_after))
if __name__ == '__main__':
diff --git a/chromium/build/fuchsia/common.py b/chromium/build/fuchsia/common.py
index 99ced81ee98..9f780915d79 100644
--- a/chromium/build/fuchsia/common.py
+++ b/chromium/build/fuchsia/common.py
@@ -97,6 +97,12 @@ def GetAvailableTcpPort():
return port
+def RunGnSdkFunction(script, function):
+ script_path = os.path.join(SDK_ROOT, 'bin', script)
+ function_cmd = ['bash', '-c', '. %s; %s' % (script_path, function)]
+ return SubprocessCallWithTimeout(function_cmd)
+
+
def SubprocessCallWithTimeout(command, silent=False, timeout_secs=None):
"""Helper function for running a command.
diff --git a/chromium/build/fuchsia/common_args.py b/chromium/build/fuchsia/common_args.py
index 70cc70d70c2..3b709c6aab2 100644
--- a/chromium/build/fuchsia/common_args.py
+++ b/chromium/build/fuchsia/common_args.py
@@ -28,7 +28,7 @@ def _AddTargetSpecificationArgs(arg_parser):
device_args.add_argument('--device',
default=None,
choices=BUILTIN_TARGET_NAMES + ['custom'],
- help='Choose to run on aemu|qemu|device. '
+ help='Choose to run on fvdl|aemu|qemu|device. '
'By default, Fuchsia will run on AEMU on x64 '
'hosts and QEMU on arm64 hosts. Alternatively, '
'setting to custom will require specifying the '
@@ -71,12 +71,7 @@ def AddCommonArgs(arg_parser):
arg_parser: an ArgumentParser object."""
common_args = arg_parser.add_argument_group('common', 'Common arguments')
- common_args.add_argument('--runner-logs-dir',
- help='Directory to write test runner logs to.')
- common_args.add_argument('--exclude-system-logs',
- action='store_false',
- dest='include_system_logs',
- help='Do not show system log data.')
+ common_args.add_argument('--logs-dir', help='Directory to write logs to.')
common_args.add_argument('--verbose',
'-v',
default=False,
@@ -87,10 +82,8 @@ def AddCommonArgs(arg_parser):
type=os.path.realpath,
help=('Path to the directory in which build files are located. '
'Defaults to current directory.'))
- common_args.add_argument('--system-log-file',
- help='File to write system logs to. Specify '
- '\'-\' to log to stdout.')
common_args.add_argument('--fuchsia-out-dir',
+ default=None,
help='Path to a Fuchsia build output directory. '
'Setting the GN arg '
'"default_fuchsia_build_dir_for_installation" '
@@ -113,7 +106,7 @@ def AddCommonArgs(arg_parser):
help='Sets the number of CPU cores to provide.')
emu_args.add_argument('--ram-size-mb',
type=int,
- default=2048,
+ default=8192,
help='Sets the emulated RAM size (MB).'),
emu_args.add_argument('--allow-no-kvm',
action='store_false',
@@ -168,6 +161,6 @@ def GetDeploymentTargetForArgs(args):
if args.device:
device = args.device
else:
- device = 'aemu' if args.target_cpu == 'x64' else 'qemu'
+ device = 'fvdl' if args.target_cpu == 'x64' else 'qemu'
return _LoadTargetClass(_GetPathToBuiltinTarget(device)).CreateFromArgs(args)
diff --git a/chromium/build/fuchsia/device_target.py b/chromium/build/fuchsia/device_target.py
index 8d134e59541..c9d5ab2f2f9 100644
--- a/chromium/build/fuchsia/device_target.py
+++ b/chromium/build/fuchsia/device_target.py
@@ -5,19 +5,15 @@
"""Implements commands for running and interacting with Fuchsia on devices."""
import boot_data
-import filecmp
import logging
import os
import pkg_repo
import re
import subprocess
-import sys
import target
-import tempfile
import time
-import uuid
-from common import SDK_ROOT, EnsurePathExists, GetHostToolPathFromPlatform
+from common import EnsurePathExists, GetHostToolPathFromPlatform
# The maximum times to attempt mDNS resolution when connecting to a freshly
# booted Fuchsia instance before aborting.
@@ -26,12 +22,8 @@ BOOT_DISCOVERY_ATTEMPTS = 30
# Number of failed connection attempts before redirecting system logs to stdout.
CONNECT_RETRY_COUNT_BEFORE_LOGGING = 10
-# Number of seconds to wait for device discovery.
-BOOT_DISCOVERY_TIMEOUT_SECS = 2 * 60
-
-# The timeout limit for one call to the device-finder tool.
-_DEVICE_FINDER_TIMEOUT_LIMIT_SECS = \
- BOOT_DISCOVERY_TIMEOUT_SECS / BOOT_DISCOVERY_ATTEMPTS
+# Number of seconds between each device discovery.
+BOOT_DISCOVERY_DELAY_SECS = 4
# Time between a reboot command is issued and when connection attempts from the
# host begin.
@@ -60,16 +52,8 @@ class DeviceTarget(target.Target):
If |_host| is set:
Deploy to a device at the host IP address as-is."""
- def __init__(self,
- out_dir,
- target_cpu,
- host=None,
- node_name=None,
- port=None,
- ssh_config=None,
- fuchsia_out_dir=None,
- os_check='update',
- system_log_file=None):
+ def __init__(self, out_dir, target_cpu, host, node_name, port, ssh_config,
+ fuchsia_out_dir, os_check, logs_dir):
"""out_dir: The directory which will contain the files that are
generated to support the deployment.
target_cpu: The CPU architecture of the deployment target. Can be
@@ -85,9 +69,8 @@ class DeviceTarget(target.Target):
mismatch.
If 'ignore', the target's SDK version is ignored."""
- super(DeviceTarget, self).__init__(out_dir, target_cpu)
+ super(DeviceTarget, self).__init__(out_dir, target_cpu, logs_dir)
- self._system_log_file = system_log_file
self._host = host
self._port = port
self._fuchsia_out_dir = None
@@ -115,15 +98,14 @@ class DeviceTarget(target.Target):
else:
# Default to using an automatically generated SSH config and keys.
- boot_data.ProvisionSSH(out_dir)
- self._ssh_config_path = boot_data.GetSSHConfigPath(out_dir)
+ boot_data.ProvisionSSH()
+ self._ssh_config_path = boot_data.GetSSHConfigPath()
@staticmethod
def CreateFromArgs(args):
return DeviceTarget(args.out_dir, args.target_cpu, args.host,
args.node_name, args.port, args.ssh_config,
- args.fuchsia_out_dir, args.os_check,
- args.system_log_file)
+ args.fuchsia_out_dir, args.os_check, args.logs_dir)
@staticmethod
def RegisterArgs(arg_parser):
@@ -178,21 +160,17 @@ class DeviceTarget(target.Target):
command = [
dev_finder_path,
'resolve',
- '-timeout',
- "%ds" % _DEVICE_FINDER_TIMEOUT_LIMIT_SECS,
'-device-limit',
'1', # Exit early as soon as a host is found.
self._node_name
]
+ proc = subprocess.Popen(command,
+ stdout=subprocess.PIPE,
+ stderr=open(os.devnull, 'w'))
else:
- command = [
- dev_finder_path, 'list', '-full', '-timeout',
- "%ds" % _DEVICE_FINDER_TIMEOUT_LIMIT_SECS
- ]
-
- proc = subprocess.Popen(command,
- stdout=subprocess.PIPE,
- stderr=open(os.devnull, 'w'))
+ proc = self.RunFFXCommand(['target', 'list', '-f', 'simple'],
+ stdout=subprocess.PIPE,
+ stderr=open(os.devnull, 'w'))
output = set(proc.communicate()[0].strip().split('\n'))
if proc.returncode != 0:
@@ -201,21 +179,19 @@ class DeviceTarget(target.Target):
if self._node_name:
# Handle the result of "device-finder resolve".
self._host = output.pop().strip()
-
else:
name_host_pairs = [x.strip().split(' ') for x in output]
- # Handle the output of "device-finder list".
if len(name_host_pairs) > 1:
- print('More than one device was discovered on the network.')
- print('Use --node-name <name> to specify the device to use.')
- print('\nList of devices:')
- for pair in name_host_pairs:
- print(' ' + pair[1])
- print()
+ logging.info('More than one device was discovered on the network. '
+ 'Use --node-name <name> to specify the device to use.')
+ logging.info('List of devices:')
+ logging.info(output)
raise Exception('Ambiguous target device specification.')
-
assert len(name_host_pairs) == 1
+ # Check if device has both address and name.
+ if len(name_host_pairs[0]) < 2:
+ return False
self._host, self._node_name = name_host_pairs[0]
logging.info('Found device "%s" at address %s.' % (self._node_name,
@@ -251,12 +227,14 @@ class DeviceTarget(target.Target):
self._node_name = m.groupdict()['nodename']
logging.info('Booted device "%s".' % self._node_name)
- # Repeatdly query mDNS until we find the device, or we hit the timeout of
- # DISCOVERY_TIMEOUT_SECS.
+ # Repeatedly search for a device for |BOOT_DISCOVERY_ATTEMPTS|
+ # number of attempts. If a device isn't found, wait
+ # |BOOT_DISCOVERY_DELAY_SECS| before searching again.
logging.info('Waiting for device to join network.')
for _ in xrange(BOOT_DISCOVERY_ATTEMPTS):
if self._Discover():
break
+ time.sleep(BOOT_DISCOVERY_DELAY_SECS)
if not self._host:
raise Exception('Device %s couldn\'t be discovered via mDNS.' %
diff --git a/chromium/build/fuchsia/device_target_test.py b/chromium/build/fuchsia/device_target_test.py
new file mode 100755
index 00000000000..52ead22495d
--- /dev/null
+++ b/chromium/build/fuchsia/device_target_test.py
@@ -0,0 +1,103 @@
+#!/usr/bin/env vpython3
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Tests scenarios with number of devices and invalid devices"""
+import subprocess
+import unittest
+import unittest.mock as mock
+from argparse import Namespace
+from device_target import DeviceTarget
+from target import Target
+
+
+class TestDiscoverDeviceTarget(unittest.TestCase):
+ def setUp(self):
+ self.args = Namespace(out_dir='out/fuchsia',
+ target_cpu='x64',
+ host=None,
+ node_name=None,
+ port=None,
+ ssh_config=None,
+ fuchsia_out_dir=None,
+ os_check='update',
+ system_log_file=None)
+
+ def testNoNodeNameOneDeviceReturnNoneCheckNameAndAddress(self):
+ with (DeviceTarget.CreateFromArgs(self.args)) as device_target_instance:
+ with mock.patch.object(DeviceTarget, 'RunFFXCommand') as mock_ffx:
+ mock_spec_popen = mock.create_autospec(subprocess.Popen, instance=True)
+ mock_spec_popen.communicate.return_value = ('address device_name', '')
+ mock_spec_popen.returncode = 0
+ mock_ffx.return_value = mock_spec_popen
+ with mock.patch.object(Target,
+ '_WaitUntilReady') as mock_waituntilready:
+ mock_waituntilready.return_value = True
+ self.assertIsNone(device_target_instance.Start())
+ self.assertEqual(device_target_instance._node_name, 'device_name')
+ self.assertEqual(device_target_instance._host, 'address')
+
+ def testNoNodeNameTwoDevicesRaiseExceptionAmbiguousTarget(self):
+ with (DeviceTarget.CreateFromArgs(self.args)) as device_target_instance:
+ with mock.patch.object(DeviceTarget, 'RunFFXCommand') as mock_ffx:
+ mock_spec_popen = mock.create_autospec(subprocess.Popen, instance=True)
+ mock_spec_popen.communicate.return_value = ('address1 device_name1\n'
+ 'address2 device_name2', '')
+ mock_spec_popen.returncode = 0
+ mock_spec_popen.stdout = ''
+ mock_ffx.return_value = mock_spec_popen
+ with self.assertRaisesRegex(Exception,
+ 'Ambiguous target device specification.'):
+ device_target_instance.Start()
+ self.assertIsNone(device_target_instance._node_name)
+ self.assertIsNone(device_target_instance._host)
+
+ def testNoNodeNameDeviceDoesntHaveNameRaiseExceptionCouldNotFind(self):
+ with (DeviceTarget.CreateFromArgs(self.args)) as device_target_instance:
+ with mock.patch.object(DeviceTarget, 'RunFFXCommand') as mock_ffx:
+ mock_spec_popen = mock.create_autospec(subprocess.Popen, instance=True)
+ mock_spec_popen.communicate.return_value = ('address', '')
+ mock_spec_popen.returncode = 0
+ mock_ffx.return_value = mock_spec_popen
+ with self.assertRaisesRegex(Exception, 'Could not find device'):
+ device_target_instance.Start()
+ self.assertIsNone(device_target_instance._node_name)
+ self.assertIsNone(device_target_instance._host)
+
+ def testNodeNameDefinedDeviceFoundReturnNoneCheckNameAndHost(self):
+ self.args.node_name = 'device_name'
+ with (DeviceTarget.CreateFromArgs(self.args)) as device_target_instance:
+ with mock.patch('subprocess.Popen') as mock_popen:
+ mock_popen.returncode = ('address', 'device_name')
+ with mock.patch.object(Target,
+ '_WaitUntilReady') as mock_waituntilready:
+ mock_waituntilready.return_value = True
+ self.assertIsNone(device_target_instance.Start())
+ self.assertEqual(device_target_instance._node_name, 'device_name')
+ self.assertEqual(device_target_instance._host, 'address')
+
+ def testNodeNameDefinedDeviceNotFoundRaiseExceptionCouldNotFind(self):
+ self.args.node_name = 'wrong_device_name'
+ with (DeviceTarget.CreateFromArgs(self.args)) as device_target_instance:
+ with mock.patch('subprocess.Popen') as mock_popen:
+ mock_popen.returncode = ('', '')
+ with self.assertRaisesRegex(Exception, 'Could not find device'):
+ device_target_instance.Start()
+ self.assertIsNone(device_target_instance._node_name)
+ self.assertIsNone(device_target_instance._host)
+
+ def testNoDevicesFoundRaiseExceptionCouldNotFind(self):
+ with (DeviceTarget.CreateFromArgs(self.args)) as device_target_instance:
+ with mock.patch.object(DeviceTarget, 'RunFFXCommand') as mock_ffx:
+ mock_spec_popen = mock.create_autospec(subprocess.Popen, instance=True)
+ mock_spec_popen.communicate.return_value = ('', '')
+ mock_spec_popen.returncode = 0
+ mock_ffx.return_value = mock_spec_popen
+ with self.assertRaisesRegex(Exception, 'Could not find device'):
+ device_target_instance.Start()
+ self.assertIsNone(device_target_instance._node_name)
+ self.assertIsNone(device_target_instance._host)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/chromium/build/fuchsia/emu_target.py b/chromium/build/fuchsia/emu_target.py
index 4a86bdce670..3f0711300b3 100644
--- a/chromium/build/fuchsia/emu_target.py
+++ b/chromium/build/fuchsia/emu_target.py
@@ -8,7 +8,6 @@ import pkg_repo
import boot_data
import logging
import os
-import runner_logs
import subprocess
import sys
import target
@@ -16,15 +15,14 @@ import tempfile
class EmuTarget(target.Target):
- def __init__(self, out_dir, target_cpu, system_log_file):
+ def __init__(self, out_dir, target_cpu, logs_dir):
"""out_dir: The directory which will contain the files that are
generated to support the emulator deployment.
target_cpu: The emulated target CPU architecture.
Can be 'x64' or 'arm64'."""
- super(EmuTarget, self).__init__(out_dir, target_cpu)
+ super(EmuTarget, self).__init__(out_dir, target_cpu, logs_dir)
self._emu_process = None
- self._system_log_file = system_log_file
self._pkg_repo = None
def __enter__(self):
@@ -37,11 +35,6 @@ class EmuTarget(target.Target):
def _SetEnv(self):
return os.environ.copy()
- # Used by the context manager to ensure that the emulator is killed when
- # the Python process exits.
- def __exit__(self, exc_type, exc_val, exc_tb):
- self.Shutdown();
-
def Start(self):
emu_command = self._BuildCommand()
@@ -59,14 +52,14 @@ class EmuTarget(target.Target):
# to a temporary file, and print that out if we are unable to connect to
# the emulator guest, to make it easier to diagnose connectivity issues.
temporary_log_file = None
- if runner_logs.IsEnabled():
- stdout = runner_logs.FileStreamFor('serial_log')
+ if self._log_manager.IsLoggingEnabled():
+ stdout = self._log_manager.Open('serial_log')
else:
temporary_log_file = tempfile.NamedTemporaryFile('w')
stdout = temporary_log_file
- LogProcessStatistics('proc_stat_start_log')
- LogSystemStatistics('system_statistics_start_log')
+ self.LogProcessStatistics('proc_stat_start_log')
+ self.LogSystemStatistics('system_statistics_start_log')
self._emu_process = subprocess.Popen(emu_command,
stdin=open(os.devnull),
@@ -76,13 +69,19 @@ class EmuTarget(target.Target):
try:
self._WaitUntilReady()
- LogProcessStatistics('proc_stat_ready_log')
+ self.LogProcessStatistics('proc_stat_ready_log')
except target.FuchsiaTargetException:
if temporary_log_file:
logging.info('Kernel logs:\n' +
open(temporary_log_file.name, 'r').read())
raise
+ def Stop(self):
+ try:
+ super(EmuTarget, self).Stop()
+ finally:
+ self.Shutdown()
+
def GetPkgRepo(self):
if not self._pkg_repo:
self._pkg_repo = pkg_repo.ManagedPkgRepo(self)
@@ -106,8 +105,8 @@ class EmuTarget(target.Target):
logging.error('%s quit unexpectedly with exit code %d' %
(self.EMULATOR_NAME, returncode))
- LogProcessStatistics('proc_stat_end_log')
- LogSystemStatistics('system_statistics_end_log')
+ self.LogProcessStatistics('proc_stat_end_log')
+ self.LogSystemStatistics('system_statistics_end_log')
def _IsEmuStillRunning(self):
@@ -121,25 +120,23 @@ class EmuTarget(target.Target):
return ('localhost', self._host_ssh_port)
def _GetSshConfigPath(self):
- return boot_data.GetSSHConfigPath(self._out_dir)
-
-
-def LogSystemStatistics(log_file_name):
- statistics_log = runner_logs.FileStreamFor(log_file_name)
- # Log the cpu load and process information.
- subprocess.call(['top', '-b', '-n', '1'],
- stdin=open(os.devnull),
- stdout=statistics_log,
- stderr=subprocess.STDOUT)
- subprocess.call(['ps', '-ax'],
- stdin=open(os.devnull),
- stdout=statistics_log,
- stderr=subprocess.STDOUT)
-
-
-def LogProcessStatistics(log_file_name):
- statistics_log = runner_logs.FileStreamFor(log_file_name)
- subprocess.call(['cat', '/proc/stat'],
- stdin=open(os.devnull),
- stdout=statistics_log,
- stderr=subprocess.STDOUT)
+ return boot_data.GetSSHConfigPath()
+
+ def LogSystemStatistics(self, log_file_name):
+ self._LaunchSubprocessWithLogs(['top', '-b', '-n', '1'], log_file_name)
+ self._LaunchSubprocessWithLogs(['ps', '-ax'], log_file_name)
+
+ def LogProcessStatistics(self, log_file_name):
+ self._LaunchSubprocessWithLogs(['cat', '/proc/stat'], log_file_name)
+
+ def _LaunchSubprocessWithLogs(self, command, log_file_name):
+ """Launch a subprocess and redirect stdout and stderr to log_file_name.
+ Command will not be run if logging directory is not set."""
+
+ if not self._log_manager.IsLoggingEnabled():
+ return
+ log = self._log_manager.Open(log_file_name)
+ subprocess.call(command,
+ stdin=open(os.devnull),
+ stdout=log,
+ stderr=subprocess.STDOUT)
diff --git a/chromium/build/fuchsia/fvdl_target.py b/chromium/build/fuchsia/fvdl_target.py
index c0304847b77..237c3f7fe85 100644
--- a/chromium/build/fuchsia/fvdl_target.py
+++ b/chromium/build/fuchsia/fvdl_target.py
@@ -12,9 +12,18 @@ import re
import subprocess
import tempfile
-_FVDL_PATH = os.path.join(common.SDK_ROOT, 'tools', 'x64', 'fvdl')
_SSH_KEY_DIR = os.path.expanduser('~/.ssh')
_DEFAULT_SSH_PORT = 22
+_DEVICE_PROTO_TEMPLATE = """
+device_spec: {{
+ horizontal_resolution: 1024
+ vertical_resolution: 600
+ vm_heap: 192
+ ram: {ramsize}
+ cache: 32
+ screen_density: 240
+}}
+"""
def GetTargetType():
@@ -28,26 +37,44 @@ class EmulatorNetworkNotFoundError(Exception):
class FvdlTarget(emu_target.EmuTarget):
EMULATOR_NAME = 'aemu'
+ _FVDL_PATH = os.path.join(common.SDK_ROOT, 'tools', 'x64', 'fvdl')
- def __init__(self, out_dir, target_cpu, system_log_file, require_kvm,
- enable_graphics, hardware_gpu, with_network):
- super(FvdlTarget, self).__init__(out_dir, target_cpu, system_log_file)
+ def __init__(self, out_dir, target_cpu, require_kvm, enable_graphics,
+ hardware_gpu, with_network, ram_size_mb, logs_dir, custom_image):
+ super(FvdlTarget, self).__init__(out_dir, target_cpu, logs_dir)
self._require_kvm = require_kvm
self._enable_graphics = enable_graphics
self._hardware_gpu = hardware_gpu
self._with_network = with_network
+ self._ram_size_mb = ram_size_mb
+ self._custom_image = custom_image
self._host = None
self._pid = None
+ if custom_image:
+ components = custom_image.split('.')
+ if len(components) != 2:
+ raise ValueError("Invalid custom_image name:", custom_image)
+ self._image_type, self._image_arch = components
+ else:
+ self._image_arch = self._GetTargetSdkArch()
+ self._image_type = boot_data.TARGET_TYPE_QEMU
+
# Use a temp file for vdl output.
self._vdl_output_file = tempfile.NamedTemporaryFile()
+ # Use a temp file for the device proto and write the ram size.
+ self._device_proto_file = tempfile.NamedTemporaryFile()
+ with open(self._device_proto_file.name, 'w') as file:
+ file.write(_DEVICE_PROTO_TEMPLATE.format(ramsize=self._ram_size_mb))
+
@staticmethod
def CreateFromArgs(args):
- return FvdlTarget(args.out_dir, args.target_cpu, args.system_log_file,
- args.require_kvm, args.enable_graphics, args.hardware_gpu,
- args.with_network)
+ return FvdlTarget(args.out_dir, args.target_cpu, args.require_kvm,
+ args.enable_graphics, args.hardware_gpu,
+ args.with_network, args.ram_size_mb, args.logs_dir,
+ args.custom_image)
@staticmethod
def RegisterArgs(arg_parser):
@@ -56,28 +83,30 @@ class FvdlTarget(emu_target.EmuTarget):
action='store_true',
default=False,
help='Run emulator with emulated nic via tun/tap.')
+ fvdl_args.add_argument('--custom-image',
+ help='Specify an image used for booting up the '
+ 'emulator.')
def _BuildCommand(self):
- boot_data.ProvisionSSH(_SSH_KEY_DIR)
+ boot_data.ProvisionSSH()
self._host_ssh_port = common.GetAvailableTcpPort()
kernel_image = common.EnsurePathExists(
- boot_data.GetTargetFile('qemu-kernel.kernel', self._GetTargetSdkArch(),
- boot_data.TARGET_TYPE_QEMU))
+ boot_data.GetTargetFile('qemu-kernel.kernel', self._image_arch,
+ self._image_type))
zbi_image = common.EnsurePathExists(
- boot_data.GetTargetFile('zircon-a.zbi', self._GetTargetSdkArch(),
- boot_data.TARGET_TYPE_QEMU))
+ boot_data.GetTargetFile('zircon-a.zbi', self._image_arch,
+ self._image_type))
fvm_image = common.EnsurePathExists(
- boot_data.GetTargetFile('storage-full.blk', self._GetTargetSdkArch(),
- boot_data.TARGET_TYPE_QEMU))
+ boot_data.GetTargetFile('storage-full.blk', self._image_arch,
+ self._image_type))
aemu_path = common.EnsurePathExists(
os.path.join(common.GetEmuRootForPlatform(self.EMULATOR_NAME),
'emulator'))
emu_command = [
- _FVDL_PATH,
+ self._FVDL_PATH,
'--sdk',
'start',
- '--nopackageserver',
'--nointeractive',
# Host port mapping for user-networking mode.
@@ -100,8 +129,13 @@ class FvdlTarget(emu_target.EmuTarget):
# Use an existing emulator checked out by Chromium.
'--aemu-path',
- aemu_path
+ aemu_path,
+
+ # Use this flag and temp file to define ram size.
+ '--device-proto',
+ self._device_proto_file.name
]
+ self._ConfigureEmulatorLog(emu_command)
if not self._require_kvm:
emu_command.append('--noacceleration')
@@ -111,10 +145,31 @@ class FvdlTarget(emu_target.EmuTarget):
emu_command.append('--host-gpu')
if self._with_network:
emu_command.append('-N')
+
logging.info('FVDL command: ' + ' '.join(emu_command))
return emu_command
+ def _ConfigureEmulatorLog(self, emu_command):
+ if self._log_manager.IsLoggingEnabled():
+ emu_command.extend([
+ '--emulator-log',
+ os.path.join(self._log_manager.GetLogDirectory(), 'emulator_log')
+ ])
+
+ env_flags = [
+ 'ANDROID_EMUGL_LOG_PRINT=1',
+ 'ANDROID_EMUGL_VERBOSE=1',
+ 'VK_LOADER_DEBUG=info,error',
+ ]
+ if self._hardware_gpu:
+ vulkan_icd_file = os.path.join(
+ common.GetEmuRootForPlatform(self.EMULATOR_NAME), 'lib64', 'vulkan',
+ 'vk_swiftshader_icd.json')
+ env_flags.append('VK_ICD_FILENAMES=%s' % vulkan_icd_file)
+ for flag in env_flags:
+ emu_command.extend(['--envs', flag])
+
def _WaitUntilReady(self):
# Indicates the FVDL command finished running.
self._emu_process.communicate()
@@ -165,7 +220,7 @@ class FvdlTarget(emu_target.EmuTarget):
logging.error('%s did not start' % (self.EMULATOR_NAME))
return
femu_command = [
- _FVDL_PATH, '--sdk', 'kill', '--launched-proto',
+ self._FVDL_PATH, '--sdk', 'kill', '--launched-proto',
self._vdl_output_file.name
]
femu_process = subprocess.Popen(femu_command)
@@ -174,9 +229,10 @@ class FvdlTarget(emu_target.EmuTarget):
logging.info('FVDL shutdown successfully')
else:
logging.info('FVDL kill returned error status {}'.format(returncode))
- emu_target.LogProcessStatistics('proc_stat_end_log')
- emu_target.LogSystemStatistics('system_statistics_end_log')
+ self.LogProcessStatistics('proc_stat_end_log')
+ self.LogSystemStatistics('system_statistics_end_log')
self._vdl_output_file.close()
+ self._device_proto_file.close()
def _GetSshConfigPath(self):
- return boot_data.GetSSHConfigPath(_SSH_KEY_DIR)
+ return boot_data.GetSSHConfigPath()
diff --git a/chromium/build/fuchsia/fvdl_target_test.py b/chromium/build/fuchsia/fvdl_target_test.py
index f75aa587ac4..9884465b661 100755
--- a/chromium/build/fuchsia/fvdl_target_test.py
+++ b/chromium/build/fuchsia/fvdl_target_test.py
@@ -7,11 +7,13 @@
import boot_data
import common
+import os
+import tempfile
import unittest
import unittest.mock as mock
from argparse import Namespace
-from fvdl_target import _FVDL_PATH, FvdlTarget, _SSH_KEY_DIR
+from fvdl_target import FvdlTarget, _SSH_KEY_DIR
class TestBuildCommandFvdlTarget(unittest.TestCase):
@@ -22,53 +24,73 @@ class TestBuildCommandFvdlTarget(unittest.TestCase):
require_kvm=True,
enable_graphics=False,
hardware_gpu=False,
- with_network=False)
+ with_network=False,
+ ram_size_mb=8192,
+ logs_dir=None,
+ custom_image=None)
+ common.EnsurePathExists = mock.MagicMock(return_value='image')
+ boot_data.ProvisionSSH = mock.MagicMock()
+ FvdlTarget.Shutdown = mock.MagicMock()
def testBasicEmuCommand(self):
with FvdlTarget.CreateFromArgs(self.args) as target:
- target.Shutdown = mock.MagicMock()
- common.EnsurePathExists = mock.MagicMock(return_value='image')
- with mock.patch.object(boot_data, 'ProvisionSSH') as provision_mock:
- build_command = target._BuildCommand()
- self.assertIn(_FVDL_PATH, build_command)
- self.assertIn('--sdk', build_command)
- self.assertIn('start', build_command)
- self.assertNotIn('--noacceleration', target._BuildCommand())
- self.assertIn('--headless', target._BuildCommand())
- self.assertNotIn('--host-gpu', target._BuildCommand())
- self.assertNotIn('-N', target._BuildCommand())
+ build_command = target._BuildCommand()
+ self.assertIn(target._FVDL_PATH, build_command)
+ self.assertIn('--sdk', build_command)
+ self.assertIn('start', build_command)
+ self.assertNotIn('--noacceleration', build_command)
+ self.assertIn('--headless', build_command)
+ self.assertNotIn('--host-gpu', build_command)
+ self.assertNotIn('-N', build_command)
+ self.assertIn('--device-proto', build_command)
+ self.assertNotIn('--emulator-log', build_command)
+ self.assertNotIn('--envs', build_command)
+ self.assertTrue(os.path.exists(target._device_proto_file.name))
+ correct_ram_amount = False
+ with open(target._device_proto_file.name) as file:
+ for line in file:
+ if line.strip() == 'ram: 8192':
+ correct_ram_amount = True
+ break
+ self.assertTrue(correct_ram_amount)
def testBuildCommandCheckIfNotRequireKVMSetNoAcceleration(self):
self.args.require_kvm = False
with FvdlTarget.CreateFromArgs(self.args) as target:
- target.Shutdown = mock.MagicMock()
- common.EnsurePathExists = mock.MagicMock(return_value='image')
- with mock.patch.object(boot_data, 'ProvisionSSH') as provision_mock:
- self.assertIn('--noacceleration', target._BuildCommand())
+ self.assertIn('--noacceleration', target._BuildCommand())
def testBuildCommandCheckIfNotEnableGraphicsSetHeadless(self):
self.args.enable_graphics = True
with FvdlTarget.CreateFromArgs(self.args) as target:
- target.Shutdown = mock.MagicMock()
- common.EnsurePathExists = mock.MagicMock(return_value='image')
- with mock.patch.object(boot_data, 'ProvisionSSH') as provision_mock:
- self.assertNotIn('--headless', target._BuildCommand())
+ self.assertNotIn('--headless', target._BuildCommand())
def testBuildCommandCheckIfHardwareGpuSetHostGPU(self):
self.args.hardware_gpu = True
with FvdlTarget.CreateFromArgs(self.args) as target:
- target.Shutdown = mock.MagicMock()
- common.EnsurePathExists = mock.MagicMock(return_value='image')
- with mock.patch.object(boot_data, 'ProvisionSSH') as provision_mock:
- self.assertIn('--host-gpu', target._BuildCommand())
+ self.assertIn('--host-gpu', target._BuildCommand())
def testBuildCommandCheckIfWithNetworkSetTunTap(self):
self.args.with_network = True
with FvdlTarget.CreateFromArgs(self.args) as target:
- target.Shutdown = mock.MagicMock()
- common.EnsurePathExists = mock.MagicMock(return_value='image')
- with mock.patch.object(boot_data, 'ProvisionSSH') as provision_mock:
- self.assertIn('-N', target._BuildCommand())
+ self.assertIn('-N', target._BuildCommand())
+
+ def testBuildCommandCheckRamSizeNot8192SetRamSize(self):
+ custom_ram_size = 4096
+ self.args.ram_size_mb = custom_ram_size
+ with FvdlTarget.CreateFromArgs(self.args) as target:
+ self.assertIn('--device-proto', target._BuildCommand())
+ self.assertTrue(os.path.exists(target._device_proto_file.name))
+ correct_ram_amount = False
+ with open(target._device_proto_file.name, 'r') as f:
+ self.assertTrue(' ram: {}\n'.format(custom_ram_size) in f.readlines())
+
+ def testBuildCommandCheckEmulatorLogSetup(self):
+ with tempfile.TemporaryDirectory() as logs_dir:
+ self.args.logs_dir = logs_dir
+ with FvdlTarget.CreateFromArgs(self.args) as target:
+ build_command = target._BuildCommand()
+ self.assertIn('--emulator-log', build_command)
+ self.assertIn('--envs', build_command)
if __name__ == '__main__':
diff --git a/chromium/build/fuchsia/linux.sdk.sha1 b/chromium/build/fuchsia/linux.sdk.sha1
index 0f6761d2165..930a242961e 100644
--- a/chromium/build/fuchsia/linux.sdk.sha1
+++ b/chromium/build/fuchsia/linux.sdk.sha1
@@ -1 +1 @@
-6.20210812.1.1
+6.20211007.1.1
diff --git a/chromium/build/fuchsia/log_manager.py b/chromium/build/fuchsia/log_manager.py
new file mode 100644
index 00000000000..91f202ea1c4
--- /dev/null
+++ b/chromium/build/fuchsia/log_manager.py
@@ -0,0 +1,53 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Creates and manages log file objects.
+
+Provides an object that handles opening and closing file streams for
+logging purposes.
+"""
+
+import os
+
+
+class LogManager(object):
+ def __init__(self, logs_dir):
+
+ # A dictionary with the log file path as the key and a file stream as value.
+ self._logs = {}
+
+ self._logs_dir = logs_dir
+ if self._logs_dir:
+ if not os.path.isdir(self._logs_dir):
+ os.makedirs(self._logs_dir)
+
+ def IsLoggingEnabled(self):
+ return self._logs_dir is not None
+
+ def GetLogDirectory(self):
+ """Get the directory logs are placed into."""
+
+ return self._logs_dir
+
+ def Open(self, log_file_name):
+ """Open a file stream with log_file_name in the logs directory."""
+
+ parent_dir = self.GetLogDirectory()
+ if not parent_dir:
+ return open(os.devnull, 'w')
+ log_file_path = os.path.join(parent_dir, log_file_name)
+ if log_file_path in self._logs:
+ return self._logs[log_file_path]
+ log_file = open(log_file_path, 'w', buffering=1)
+ self._logs[log_file_path] = log_file
+ return log_file
+
+ def Stop(self):
+ for log in self._logs.values():
+ log.close()
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ self.Stop()
diff --git a/chromium/build/fuchsia/mac.sdk.sha1 b/chromium/build/fuchsia/mac.sdk.sha1
index 26622324e45..1aafaccd1a3 100644
--- a/chromium/build/fuchsia/mac.sdk.sha1
+++ b/chromium/build/fuchsia/mac.sdk.sha1
@@ -1 +1 @@
-6.20210812.2.1
+6.20211007.2.1
diff --git a/chromium/build/fuchsia/qemu_target.py b/chromium/build/fuchsia/qemu_target.py
index 2a49cbf3cb2..2a31bb43bf2 100644
--- a/chromium/build/fuchsia/qemu_target.py
+++ b/chromium/build/fuchsia/qemu_target.py
@@ -24,9 +24,7 @@ from target import FuchsiaTargetException
# Virtual networking configuration data for QEMU.
-GUEST_NET = '192.168.3.0/24'
-GUEST_IP_ADDRESS = '192.168.3.9'
-HOST_IP_ADDRESS = '192.168.3.2'
+HOST_IP_ADDRESS = '10.0.2.2'
GUEST_MAC_ADDRESS = '52:54:00:63:5e:7b'
# Capacity of the system's blobstore volume.
@@ -40,17 +38,17 @@ def GetTargetType():
class QemuTarget(emu_target.EmuTarget):
EMULATOR_NAME = 'qemu'
- def __init__(self, out_dir, target_cpu, system_log_file, cpu_cores,
- require_kvm, ram_size_mb):
- super(QemuTarget, self).__init__(out_dir, target_cpu, system_log_file)
+ def __init__(self, out_dir, target_cpu, cpu_cores, require_kvm, ram_size_mb,
+ logs_dir):
+ super(QemuTarget, self).__init__(out_dir, target_cpu, logs_dir)
self._cpu_cores=cpu_cores
self._require_kvm=require_kvm
self._ram_size_mb=ram_size_mb
@staticmethod
def CreateFromArgs(args):
- return QemuTarget(args.out_dir, args.target_cpu, args.system_log_file,
- args.cpu_cores, args.require_kvm, args.ram_size_mb)
+ return QemuTarget(args.out_dir, args.target_cpu, args.cpu_cores,
+ args.require_kvm, args.ram_size_mb, args.logs_dir)
def _IsKvmEnabled(self):
kvm_supported = sys.platform.startswith('linux') and \
@@ -122,11 +120,9 @@ class QemuTarget(emu_target.EmuTarget):
'-machine', 'q35',
])
- # Configure virtual network. It is used in the tests to connect to
- # testserver running on the host.
+ # Configure virtual network.
netdev_type = 'virtio-net-pci'
- netdev_config = 'user,id=net0,net=%s,dhcpstart=%s,host=%s' % \
- (GUEST_NET, GUEST_IP_ADDRESS, HOST_IP_ADDRESS)
+ netdev_config = 'type=user,id=net0,restrict=off'
self._host_ssh_port = common.GetAvailableTcpPort()
netdev_config += ",hostfwd=tcp::%s-:22" % self._host_ssh_port
diff --git a/chromium/build/fuchsia/run_test_package.py b/chromium/build/fuchsia/run_test_package.py
index 7e934610279..c292d1fc9c8 100644
--- a/chromium/build/fuchsia/run_test_package.py
+++ b/chromium/build/fuchsia/run_test_package.py
@@ -36,40 +36,6 @@ def _AttachKernelLogReader(target):
stderr=subprocess.STDOUT)
-class SystemLogReader(object):
- """Collects and symbolizes Fuchsia system log to a file."""
-
- def __init__(self):
- self._listener_proc = None
- self._symbolizer_proc = None
- self._system_log = None
-
- def __enter__(self):
- return self
-
- def __exit__(self, exc_type, exc_val, exc_tb):
- """Stops the system logging processes and closes the output file."""
- if self._symbolizer_proc:
- self._symbolizer_proc.kill()
- if self._listener_proc:
- self._listener_proc.kill()
- if self._system_log:
- self._system_log.close()
-
- def Start(self, target, package_paths, system_log_file):
- """Start a system log reader as a long-running SSH task."""
- logging.debug('Writing fuchsia system log to %s' % system_log_file)
-
- self._listener_proc = target.RunCommandPiped(['log_listener'],
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT)
-
- self._system_log = open(system_log_file, 'w', buffering=1)
- self._symbolizer_proc = RunSymbolizer(self._listener_proc.stdout,
- self._system_log,
- BuildIdsPaths(package_paths))
-
-
class MergedInputStream(object):
"""Merges a number of input streams into a UNIX pipe on a dedicated thread.
Terminates when the file descriptor of the primary stream (the first in
@@ -152,7 +118,6 @@ class RunTestPackageArgs:
code_coverage: If set, the test package will be run via 'runtests', and the
output will be saved to /tmp folder on the device.
- system_logging: If set, connects a system log reader to the target.
test_realm_label: Specifies the realm name that run-test-component should use.
This must be specified if a filter file is to be set, or a results summary
file fetched after the test suite has run.
@@ -162,7 +127,6 @@ class RunTestPackageArgs:
def __init__(self):
self.code_coverage = False
- self.system_logging = False
self.test_realm_label = None
self.use_run_test_component = False
@@ -170,7 +134,6 @@ class RunTestPackageArgs:
def FromCommonArgs(args):
run_test_package_args = RunTestPackageArgs()
run_test_package_args.code_coverage = args.code_coverage
- run_test_package_args.system_logging = args.include_system_logs
return run_test_package_args
@@ -200,24 +163,21 @@ def RunTestPackage(output_dir, target, package_paths, package_name,
Returns the exit code of the remote package process."""
- system_logger = (_AttachKernelLogReader(target)
- if args.system_logging else None)
+ kernel_logger = _AttachKernelLogReader(target)
try:
- if system_logger:
- # Spin up a thread to asynchronously dump the system log to stdout
- # for easier diagnoses of early, pre-execution failures.
- log_output_quit_event = multiprocessing.Event()
- log_output_thread = threading.Thread(target=lambda: _DrainStreamToStdout(
- system_logger.stdout, log_output_quit_event))
- log_output_thread.daemon = True
- log_output_thread.start()
+ # Spin up a thread to asynchronously dump the system log to stdout
+ # for easier diagnoses of early, pre-execution failures.
+ log_output_quit_event = multiprocessing.Event()
+ log_output_thread = threading.Thread(target=lambda: _DrainStreamToStdout(
+ kernel_logger.stdout, log_output_quit_event))
+ log_output_thread.daemon = True
+ log_output_thread.start()
with target.GetPkgRepo():
target.InstallPackage(package_paths)
- if system_logger:
- log_output_quit_event.set()
- log_output_thread.join(timeout=_JOIN_TIMEOUT_SECS)
+ log_output_quit_event.set()
+ log_output_thread.join(timeout=_JOIN_TIMEOUT_SECS)
logging.info('Running application.')
@@ -234,6 +194,7 @@ def RunTestPackage(output_dir, target, package_paths, package_name,
if args.test_realm_label:
command += ['--realm-label=%s' % args.test_realm_label]
command.append(_GetComponentUri(package_name))
+ command.append('--')
else:
command = ['run', _GetComponentUri(package_name)]
@@ -244,11 +205,8 @@ def RunTestPackage(output_dir, target, package_paths, package_name,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
- if system_logger:
- output_stream = MergedInputStream(
- [process.stdout, system_logger.stdout]).Start()
- else:
- output_stream = process.stdout
+ output_stream = MergedInputStream([process.stdout,
+ kernel_logger.stdout]).Start()
# Run the log data through the symbolizer process.
output_stream = SymbolizerFilter(output_stream,
@@ -269,10 +227,9 @@ def RunTestPackage(output_dir, target, package_paths, package_name,
process.returncode)
finally:
- if system_logger:
- logging.info('Terminating kernel log reader.')
- log_output_quit_event.set()
- log_output_thread.join()
- system_logger.kill()
+ logging.info('Terminating kernel log reader.')
+ log_output_quit_event.set()
+ log_output_thread.join()
+ kernel_logger.kill()
return process.returncode
diff --git a/chromium/build/fuchsia/runner_logs.py b/chromium/build/fuchsia/runner_logs.py
deleted file mode 100644
index 20ab6b227db..00000000000
--- a/chromium/build/fuchsia/runner_logs.py
+++ /dev/null
@@ -1,96 +0,0 @@
-# Copyright 2020 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Creates and manages test runner log file objects.
-
-Provides a context manager object for use in a with statement
-and a module level FileStreamFor function for use by clients.
-"""
-
-import collections
-import multiprocessing
-import os
-
-from symbolizer import RunSymbolizer
-
-SYMBOLIZED_SUFFIX = '.symbolized'
-
-_RunnerLogEntry = collections.namedtuple(
- '_RunnerLogEntry', ['name', 'log_file', 'path', 'symbolize'])
-
-# Module singleton variable.
-_instance = None
-
-
-class RunnerLogManager(object):
- """ Runner logs object for use in a with statement."""
-
- def __init__(self, log_dir, build_ids_files):
- global _instance
- if _instance:
- raise Exception('Only one RunnerLogManager can be instantiated')
-
- self._log_dir = log_dir
- self._build_ids_files = build_ids_files
- self._runner_logs = []
-
- if self._log_dir and not os.path.isdir(self._log_dir):
- os.makedirs(self._log_dir)
-
- _instance = self
-
- def __enter__(self):
- return self
-
- def __exit__(self, exc_type, exc_value, traceback):
- pool = multiprocessing.Pool(4)
- for log_entry in self._runner_logs:
- pool.apply_async(_FinalizeLog, (log_entry, self._build_ids_files))
- pool.close()
- pool.join()
- _instance = None
-
-
- def _FileStreamFor(self, name, symbolize):
- if any(elem.name == name for elem in self._runner_logs):
- raise Exception('RunnerLogManager can only open "%s" once' % name)
-
- path = os.path.join(self._log_dir, name) if self._log_dir else os.devnull
- log_file = open(path, 'w')
-
- self._runner_logs.append(_RunnerLogEntry(name, log_file, path, symbolize))
-
- return log_file
-
-
-def _FinalizeLog(log_entry, build_ids_files):
- log_entry.log_file.close()
-
- if log_entry.symbolize:
- input_file = open(log_entry.path, 'r')
- output_file = open(log_entry.path + SYMBOLIZED_SUFFIX, 'w')
- proc = RunSymbolizer(input_file, output_file, build_ids_files)
- proc.wait()
- output_file.close()
- input_file.close()
-
-
-def IsEnabled():
- """Returns True if the RunnerLogManager has been created, or False if not."""
-
- return _instance is not None and _instance._log_dir is not None
-
-
-def FileStreamFor(name, symbolize=False):
- """Opens a test runner file stream in the test runner log directory.
-
- If no test runner log directory is specified, output is discarded.
-
- name: log file name
- symbolize: if True, make a symbolized copy of the log after closing it.
-
- Returns an opened log file object."""
-
- return _instance._FileStreamFor(name, symbolize) if IsEnabled() else open(
- os.devnull, 'w')
diff --git a/chromium/build/fuchsia/start_emulator.py b/chromium/build/fuchsia/start_emulator.py
index 737235b4067..2c7567590c2 100755
--- a/chromium/build/fuchsia/start_emulator.py
+++ b/chromium/build/fuchsia/start_emulator.py
@@ -12,6 +12,7 @@ import os
import time
import subprocess
+from aemu_target import AemuTarget
from fvdl_target import FvdlTarget
@@ -20,6 +21,9 @@ def main():
description='Launches a long-running emulator that can '
'be re-used for multiple test runs.')
AddLongRunningArgs(parser)
+ FvdlTarget.RegisterArgs(parser)
+ AemuTarget.RegisterArgs(parser)
+ common_args.AddCommonArgs(parser)
args = parser.parse_args()
args.out_dir = None
args.device = 'fvdl'
@@ -41,34 +45,11 @@ def main():
def AddLongRunningArgs(arg_parser):
- arg_parser.add_argument('-v',
- '--verbose',
- default=False,
- action='store_true',
- help='Enable debug-level logging.')
fvdl_args = arg_parser.add_argument_group('FVDL arguments')
fvdl_args.add_argument('--target-cpu',
default=common_args.GetHostArchFromPlatform(),
help='Set target_cpu for the emulator. Defaults '
'to the same architecture as host cpu.')
- fvdl_args.add_argument('--system-log-file',
- help='File to write system logs to. Specify '
- '\'-\' to log to stdout.')
- fvdl_args.add_argument('--allow-no-kvm',
- action='store_false',
- dest='require_kvm',
- default=True,
- help='Disables KVM acceleration for the emulator.')
- fvdl_args.add_argument('--enable-graphics',
- action='store_true',
- default=False,
- help='Start FVDL with graphics instead of '\
- 'headless.')
- fvdl_args.add_argument('--hardware-gpu',
- action='store_true',
- default=False,
- help='Use local GPU hardware instead of '\
- 'Swiftshader.')
fvdl_args.add_argument('--without-network',
action='store_false',
dest='with_network',
diff --git a/chromium/build/fuchsia/target.py b/chromium/build/fuchsia/target.py
index 55164b31928..2b21678cd1d 100644
--- a/chromium/build/fuchsia/target.py
+++ b/chromium/build/fuchsia/target.py
@@ -10,7 +10,10 @@ import time
import common
import remote_cmd
-import runner_logs
+
+from log_manager import LogManager
+from symbolizer import BuildIdsPaths, RunSymbolizer
+
_SHUTDOWN_CMD = ['dm', 'poweroff']
_ATTACH_RETRY_INTERVAL = 1
@@ -58,15 +61,21 @@ class FuchsiaTargetException(Exception):
super(FuchsiaTargetException, self).__init__(message)
+# TODO(crbug.com/1250803): Factor high level commands out of target.
class Target(object):
"""Base class representing a Fuchsia deployment target."""
- def __init__(self, out_dir, target_cpu):
+ def __init__(self, out_dir, target_cpu, logs_dir):
self._out_dir = out_dir
- self._started = False
- self._dry_run = False
self._target_cpu = target_cpu
self._command_runner = None
+ self._symbolizer_proc = None
+ self._log_listener_proc = None
+ self._dry_run = False
+ self._started = False
+ self._ffx_path = os.path.join(common.SDK_ROOT, 'tools',
+ common.GetHostArchFromPlatform(), 'ffx')
+ self._log_manager = LogManager(logs_dir)
@staticmethod
def CreateFromArgs(args):
@@ -80,7 +89,7 @@ class Target(object):
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
- return
+ self.Stop()
def Start(self):
"""Handles the instantiation and connection process for the Fuchsia
@@ -91,6 +100,14 @@ class Target(object):
commands."""
return self._started
+ def Stop(self):
+ """Stop all subprocesses and close log streams."""
+ if self._symbolizer_proc:
+ self._symbolizer_proc.kill()
+ if self._log_listener_proc:
+ self._log_listener_proc.kill()
+ self._log_manager.Stop()
+
def IsNewInstance(self):
"""Returns True if the connected target instance is newly provisioned."""
return True
@@ -107,6 +124,21 @@ class Target(object):
return self._command_runner
+ def StartSystemLog(self, package_paths):
+ """Start a system log reader as a long-running SSH task."""
+ system_log = self._log_manager.Open('system_log')
+ if package_paths:
+ self._log_listener_proc = self.RunCommandPiped(['log_listener'],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT)
+ self._symbolizer_proc = RunSymbolizer(self._log_listener_proc.stdout,
+ system_log,
+ BuildIdsPaths(package_paths))
+ else:
+ self._log_listener_proc = self.RunCommandPiped(['log_listener'],
+ stdout=system_log,
+ stderr=subprocess.STDOUT)
+
def RunCommandPiped(self, command, **kwargs):
"""Starts a remote command and immediately returns a Popen object for the
command. The caller may interact with the streams, inspect the status code,
@@ -249,7 +281,7 @@ class Target(object):
host, port = self._GetEndpoint()
end_time = time.time() + _ATTACH_RETRY_SECONDS
- ssh_diagnostic_log = runner_logs.FileStreamFor('ssh_diagnostic_log')
+ ssh_diagnostic_log = self._log_manager.Open('ssh_diagnostic_log')
while time.time() < end_time:
runner = remote_cmd.CommandRunner(self._GetSshConfigPath(), host, port)
ssh_proc = runner.RunCommandPiped(['true'],
@@ -320,3 +352,14 @@ class Target(object):
if pkgctl_out != meta_far_merkel:
raise Exception('Hash mismatch for %s after resolve (%s vs %s).' %
(package_name, pkgctl_out, meta_far_merkel))
+
+ def RunFFXCommand(self, ffx_args, **kwargs):
+ """Automatically gets the FFX path and runs FFX based on the
+ arguments provided. Extra args can be added to be used with Popen.
+
+ ffx_args: The arguments for a ffx command.
+ kwargs: A dictionary of parameters to be passed to subprocess.Popen().
+
+ Returns a Popen object for the command."""
+ command = [self._ffx_path] + ffx_args
+ return subprocess.Popen(command, **kwargs)
diff --git a/chromium/build/fuchsia/test_runner.py b/chromium/build/fuchsia/test_runner.py
index afecdddea1d..75b83d2cda9 100755
--- a/chromium/build/fuchsia/test_runner.py
+++ b/chromium/build/fuchsia/test_runner.py
@@ -8,16 +8,15 @@
import argparse
import os
-import runner_logs
import sys
+import tempfile
from common_args import AddCommonArgs, AddTargetSpecificArgs, \
ConfigureLogging, GetDeploymentTargetForArgs
+from deploy_to_pkg_repo import InstallSymbols
from net_test_server import SetupTestServer
-from run_test_package import RunTestPackage, RunTestPackageArgs, SystemLogReader
+from run_test_package import RunTestPackage, RunTestPackageArgs
from runner_exceptions import HandleExceptionAndReturnExitCode
-from runner_logs import RunnerLogManager
-from symbolizer import BuildIdsPaths
DEFAULT_TEST_SERVER_CONCURRENCY = 4
@@ -70,8 +69,8 @@ def AddTestExecutionArgs(arg_parser):
test_args.add_argument(
'--test-launcher-filter-file',
default=None,
- help='Override default filter file passed to target test '
- 'process. Set an empty path to disable filtering.')
+ help='Filter file(s) passed to target test process. Use ";" to separate '
+ 'multiple filter files ')
test_args.add_argument('--test-launcher-jobs',
type=int,
help='Sets the number of parallel test jobs.')
@@ -92,11 +91,12 @@ def AddTestExecutionArgs(arg_parser):
test_args.add_argument(
'--isolated-script-test-perf-output',
help='If present, store chartjson results on this path.')
- test_args.add_argument('--use-run-test-component',
- default=False,
- action='store_true',
- help='Run the test package hermetically using '
- 'run-test-component, rather than run.')
+ test_args.add_argument('--use-run',
+ dest='use_run_test_component',
+ default=True,
+ action='store_false',
+ help='Run the test package using run rather than '
+ 'hermetically using run-test-component.')
test_args.add_argument(
'--code-coverage',
default=False,
@@ -111,6 +111,10 @@ def AddTestExecutionArgs(arg_parser):
test_args.add_argument('--child-arg',
action='append',
help='Arguments for the test process.')
+ test_args.add_argument('--gtest_also_run_disabled_tests',
+ default=False,
+ action='store_true',
+ help='Run tests prefixed with DISABLED_')
test_args.add_argument('child_args',
nargs='*',
help='Arguments for the test process.')
@@ -127,9 +131,9 @@ def main():
if not args.out_dir:
raise ValueError("out-dir must be specified.")
- # Code coverage uses runtests, which calls run_test_component.
- if args.code_coverage:
- args.use_run_test_component = True
+ if args.code_coverage and not args.use_run_test_component:
+ raise ValueError('Collecting code coverage info requires using '
+ 'run-test-component.')
ConfigureLogging(args)
@@ -181,6 +185,8 @@ def main():
if args.isolated_script_test_perf_output:
child_args.append('--isolated-script-test-perf-output=' +
TEST_PERF_RESULT_PATH)
+ if args.gtest_also_run_disabled_tests:
+ child_args.append('--gtest_also_run_disabled_tests')
if args.child_arg:
child_args.extend(args.child_arg)
@@ -192,20 +198,22 @@ def main():
test_realms = [TEST_REALM_NAME]
try:
- with GetDeploymentTargetForArgs(args) as target, \
- SystemLogReader() as system_logger, \
- RunnerLogManager(args.runner_logs_dir, BuildIdsPaths(args.package)):
+ with GetDeploymentTargetForArgs(args) as target:
target.Start()
-
- if args.system_log_file and args.system_log_file != '-':
- system_logger.Start(target, args.package, args.system_log_file)
+ target.StartSystemLog(args.package)
if args.test_launcher_filter_file:
- target.PutFile(args.test_launcher_filter_file,
- TEST_FILTER_PATH,
- for_package=args.package_name,
- for_realms=test_realms)
- child_args.append('--test-launcher-filter-file=' + TEST_FILTER_PATH)
+ test_launcher_filter_files = args.test_launcher_filter_file.split(';')
+ with tempfile.NamedTemporaryFile('a+b') as combined_filter_file:
+ for filter_file in test_launcher_filter_files:
+ with open(filter_file, 'r') as f:
+ combined_filter_file.write(f.read())
+ combined_filter_file.seek(0)
+ target.PutFile(combined_filter_file.name,
+ TEST_FILTER_PATH,
+ for_package=args.package_name,
+ for_realms=test_realms)
+ child_args.append('--test-launcher-filter-file=' + TEST_FILTER_PATH)
test_server = None
if args.enable_test_server:
@@ -213,6 +221,12 @@ def main():
test_server = SetupTestServer(target, test_concurrency,
args.package_name, test_realms)
+ if args.device is not None and args.fuchsia_out_dir is not None:
+ build_ids_path = os.path.join(args.fuchsia_out_dir, '.build-id')
+ for package in args.package:
+ InstallSymbols(os.path.join(os.path.dirname(package), 'ids.txt'),
+ build_ids_path)
+
run_package_args = RunTestPackageArgs.FromCommonArgs(args)
if args.use_run_test_component:
run_package_args.test_realm_label = TEST_REALM_NAME
diff --git a/chromium/build/fuchsia/update_images.py b/chromium/build/fuchsia/update_images.py
index 79b8e49d864..c18726dbfe4 100755
--- a/chromium/build/fuchsia/update_images.py
+++ b/chromium/build/fuchsia/update_images.py
@@ -59,16 +59,17 @@ def DownloadSdkBootImages(bucket, sdk_hash, boot_image_names, image_root_dir):
logging.info('Downloading Fuchsia boot images for %s.%s...' %
(device_type, arch))
- if bucket == 'fuchsia-sdk':
- images_tarball_url = 'gs://{bucket}/development/{sdk_hash}/images/'\
- '{device_type}.{arch}.tgz'.format(
- bucket=bucket, sdk_hash=sdk_hash,
- device_type=device_type, arch=arch)
+
+ # Images use different formats. See fxbug.dev/85552.
+ if bucket == 'fuchsia-sdk' or device_type == "workstation":
+ type_arch_connector = '.'
else:
- images_tarball_url = 'gs://{bucket}/development/{sdk_hash}/images/'\
- '{device_type}-{arch}.tgz'.format(
- bucket=bucket, sdk_hash=sdk_hash,
- device_type=device_type, arch=arch)
+ type_arch_connector = '-'
+
+ images_tarball_url = 'gs://{bucket}/development/{sdk_hash}/images/'\
+ '{device_type}{type_arch_connector}{arch}.tgz'.format(
+ bucket=bucket, sdk_hash=sdk_hash, device_type=device_type,
+ type_arch_connector=type_arch_connector, arch=arch)
DownloadAndUnpackFromCloudStorage(images_tarball_url, image_output_dir)
diff --git a/chromium/build/install-build-deps-android.sh b/chromium/build/install-build-deps-android.sh
index 882e7be903c..686b5f6a3c2 100755
--- a/chromium/build/install-build-deps-android.sh
+++ b/chromium/build/install-build-deps-android.sh
@@ -34,7 +34,4 @@ sudo apt-get -y install lib32z1 lighttpd xvfb x11-utils
# See https://developer.android.com/sdk/installing/index.html?pkg=tools
sudo apt-get -y install libncurses5:i386 libstdc++6:i386 zlib1g:i386
-# Required for apk-patch-size-estimator
-sudo apt-get -y install bsdiff
-
echo "install-build-deps-android.sh complete."
diff --git a/chromium/build/install-build-deps.sh b/chromium/build/install-build-deps.sh
index 41729ec487d..5e89b73e235 100755
--- a/chromium/build/install-build-deps.sh
+++ b/chromium/build/install-build-deps.sh
@@ -57,12 +57,8 @@ package_exists() {
[ ! -z "$(grep "^${escaped}$" <<< "${apt_package_list}")" ]
}
-# These default to on because (some) bots need them and it keeps things
-# simple for the bot setup if all bots just run the script in its default
-# mode. Developers who don't want stuff they don't need installed on their
-# own workstations can pass --no-arm --no-nacl when running the script.
-do_inst_arm=1
-do_inst_nacl=1
+do_inst_arm=0
+do_inst_nacl=0
while [ "$1" != "" ]
do
@@ -370,48 +366,9 @@ case $distro_codename in
esac
# arm cross toolchain packages needed to build chrome on armhf
-EM_REPO="deb http://emdebian.org/tools/debian/ jessie main"
-EM_SOURCE=$(cat <<EOF
-# Repo added by Chromium $0
-${EM_REPO}
-# deb-src http://emdebian.org/tools/debian/ jessie main
-EOF
-)
-EM_ARCHIVE_KEY_FINGER="084C6C6F39159EDB67969AA87DE089671804772E"
-GPP_ARM_PACKAGE="g++-arm-linux-gnueabihf"
-case $distro_codename in
- jessie)
- eval $(apt-config shell APT_SOURCESDIR 'Dir::Etc::sourceparts/d')
- CROSSTOOLS_LIST="${APT_SOURCESDIR}/crosstools.list"
- arm_list="libc6-dev:armhf
- linux-libc-dev:armhf"
- if [ "$do_inst_arm" = "1" ]; then
- if $(dpkg-query -W ${GPP_ARM_PACKAGE} &>/dev/null); then
- arm_list+=" ${GPP_ARM_PACKAGE}"
- else
- if [ "${add_cross_tool_repo}" = "1" ]; then
- gpg --keyserver pgp.mit.edu --recv-keys ${EM_ARCHIVE_KEY_FINGER}
- gpg -a --export ${EM_ARCHIVE_KEY_FINGER} | sudo apt-key add -
- if ! grep "^${EM_REPO}" "${CROSSTOOLS_LIST}" &>/dev/null; then
- echo "${EM_SOURCE}" | sudo tee -a "${CROSSTOOLS_LIST}" >/dev/null
- fi
- arm_list+=" ${GPP_ARM_PACKAGE}"
- else
- echo "The Debian Cross-toolchains repository is necessary to"
- echo "cross-compile Chromium for arm."
- echo "Rerun with --add-deb-cross-tool-repo to have it added for you."
- fi
- fi
- fi
- ;;
- # All necessary ARM packages are available on the default repos on
- # Debian 9 and later.
- *)
- arm_list="libc6-dev-armhf-cross
- linux-libc-dev-armhf-cross
- ${GPP_ARM_PACKAGE}"
- ;;
-esac
+arm_list="libc6-dev-armhf-cross
+ linux-libc-dev-armhf-cross
+ g++-arm-linux-gnueabihf"
# Work around for dependency issue Ubuntu/Trusty: http://crbug.com/435056
case $distro_codename in
diff --git a/chromium/build/lacros/PRESUBMIT.py b/chromium/build/lacros/PRESUBMIT.py
index 6eae32a100e..1667f2ae394 100644
--- a/chromium/build/lacros/PRESUBMIT.py
+++ b/chromium/build/lacros/PRESUBMIT.py
@@ -8,7 +8,12 @@ USE_PYTHON3 = True
def _CommonChecks(input_api, output_api):
tests = input_api.canned_checks.GetUnitTestsInDirectory(
- input_api, output_api, '.', [r'^.+_test\.py$'])
+ input_api,
+ output_api,
+ '.', [r'^.+_test\.py$'],
+ run_on_python2=False,
+ run_on_python3=True,
+ skip_shebang_check=True)
return input_api.RunTests(tests)
diff --git a/chromium/build/lacros/test_runner.py b/chromium/build/lacros/test_runner.py
index 397076a2872..57418ae2eb0 100755
--- a/chromium/build/lacros/test_runner.py
+++ b/chromium/build/lacros/test_runner.py
@@ -235,7 +235,7 @@ def _GetLatestVersionOfAshChrome():
def _WaitForAshChromeToStart(tmp_xdg_dir, lacros_mojo_socket_file,
- enable_mojo_crosapi):
+ enable_mojo_crosapi, ash_ready_file):
"""Waits for Ash-Chrome to be up and running and returns a boolean indicator.
Determine whether ash-chrome is up and running by checking whether two files
@@ -249,27 +249,32 @@ def _WaitForAshChromeToStart(tmp_xdg_dir, lacros_mojo_socket_file,
lacros_mojo_socket_file (str): Path to the lacros mojo socket file.
enable_mojo_crosapi (bool): Whether to bootstrap the crosapi mojo interface
between ash and the lacros test binary.
+ ash_ready_file (str): Path to a non-existing file. After ash is ready for
+ testing, the file will be created.
Returns:
A boolean indicating whether Ash-chrome is up and running.
"""
def IsAshChromeReady(tmp_xdg_dir, lacros_mojo_socket_file,
- enable_mojo_crosapi):
- return (len(os.listdir(tmp_xdg_dir)) >= 2
- and (not enable_mojo_crosapi
- or os.path.exists(lacros_mojo_socket_file)))
+ enable_mojo_crosapi, ash_ready_file):
+ # There should be 2 wayland files.
+ if len(os.listdir(tmp_xdg_dir)) < 2:
+ return False
+ if enable_mojo_crosapi and not os.path.exists(lacros_mojo_socket_file):
+ return False
+ return os.path.exists(ash_ready_file)
time_counter = 0
while not IsAshChromeReady(tmp_xdg_dir, lacros_mojo_socket_file,
- enable_mojo_crosapi):
+ enable_mojo_crosapi, ash_ready_file):
time.sleep(0.5)
time_counter += 0.5
if time_counter > ASH_CHROME_TIMEOUT_SECONDS:
break
return IsAshChromeReady(tmp_xdg_dir, lacros_mojo_socket_file,
- enable_mojo_crosapi)
+ enable_mojo_crosapi, ash_ready_file)
def _ExtractAshMajorVersion(file_path):
@@ -403,6 +408,7 @@ lacros_version_skew_tests_v92.0.4515.130/test_ash_chrome
lacros_mojo_socket_file = '%s/lacros.sock' % tmp_ash_data_dir_name
lacros_mojo_socket_arg = ('--lacros-mojo-socket-for-testing=%s' %
lacros_mojo_socket_file)
+ ash_ready_file = '%s/ash_ready.txt' % tmp_ash_data_dir_name
enable_mojo_crosapi = any(t == os.path.basename(args.command)
for t in _TARGETS_REQUIRE_MOJO_CROSAPI)
@@ -414,6 +420,7 @@ lacros_version_skew_tests_v92.0.4515.130/test_ash_chrome
'--user-data-dir=%s' % tmp_ash_data_dir_name,
'--enable-wayland-server',
'--no-startup-window',
+ '--ash-ready-file-path=%s' % ash_ready_file,
]
if enable_mojo_crosapi:
ash_cmd.append(lacros_mojo_socket_arg)
@@ -434,12 +441,14 @@ lacros_version_skew_tests_v92.0.4515.130/test_ash_chrome
num_tries += 1
ash_process = subprocess.Popen(ash_cmd, env=ash_env)
ash_process_has_started = _WaitForAshChromeToStart(
- tmp_xdg_dir_name, lacros_mojo_socket_file, enable_mojo_crosapi)
+ tmp_xdg_dir_name, lacros_mojo_socket_file, enable_mojo_crosapi,
+ ash_ready_file)
if ash_process_has_started:
break
logging.warning('Starting ash-chrome timed out after %ds',
ASH_CHROME_TIMEOUT_SECONDS)
+ logging.warning('Are you using test_ash_chrome?')
logging.warning('Printing the output of "ps aux" for debugging:')
subprocess.call(['ps', 'aux'])
if ash_process and ash_process.poll() is None:
diff --git a/chromium/build/lacros/test_runner_test.py b/chromium/build/lacros/test_runner_test.py
index 9713dfc387f..8fa3220bcc0 100755
--- a/chromium/build/lacros/test_runner_test.py
+++ b/chromium/build/lacros/test_runner_test.py
@@ -78,9 +78,9 @@ class TestRunnerTest(unittest.TestCase):
self.assertTrue(ash_chrome_args[0].endswith(
'build/lacros/prebuilt_ash_chrome/793554/test_ash_chrome'))
expected_ash_chrome_args = [
- '--user-data-dir=/tmp/ash-data',
- '--enable-wayland-server',
+ '--user-data-dir=/tmp/ash-data', '--enable-wayland-server',
'--no-startup-window',
+ '--ash-ready-file-path=/tmp/ash-data/ash_ready.txt'
]
if command == 'lacros_chrome_browsertests':
expected_ash_chrome_args.append(
diff --git a/chromium/build/linux/extract_symbols.gni b/chromium/build/linux/extract_symbols.gni
index 722f60d23b7..52cb1b50dfc 100644
--- a/chromium/build/linux/extract_symbols.gni
+++ b/chromium/build/linux/extract_symbols.gni
@@ -22,6 +22,7 @@ template("extract_symbols") {
dump_syms_binary =
get_label_info(dump_syms_label, "root_out_dir") + "/" + "dump_syms"
+ pool = "//build/toolchain:link_pool($default_toolchain)"
script = "//build/linux/dump_app_syms.py"
inputs = [
invoker.binary,
diff --git a/chromium/build/linux/sysroot_scripts/generated_package_lists/sid.amd64 b/chromium/build/linux/sysroot_scripts/generated_package_lists/sid.amd64
index 29ea13c4dcb..037d3920d76 100644
--- a/chromium/build/linux/sysroot_scripts/generated_package_lists/sid.amd64
+++ b/chromium/build/linux/sysroot_scripts/generated_package_lists/sid.amd64
@@ -345,6 +345,7 @@ https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-lin
https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libmount-dev_2.36.1-7_amd64.deb
https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libuuid1_2.36.1-7_amd64.deb
https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/uuid-dev_2.36.1-7_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/v/valgrind/valgrind_3.16.1-1_amd64.deb
https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/v/vulkan-loader/libvulkan1_1.2.162.0-1_amd64.deb
https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/v/vulkan-loader/libvulkan-dev_1.2.162.0-1_amd64.deb
https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-client0_1.19.0-2_amd64.deb
diff --git a/chromium/build/linux/sysroot_scripts/generated_package_lists/sid.arm b/chromium/build/linux/sysroot_scripts/generated_package_lists/sid.arm
index b91ebc48a8a..dbb31332e03 100644
--- a/chromium/build/linux/sysroot_scripts/generated_package_lists/sid.arm
+++ b/chromium/build/linux/sysroot_scripts/generated_package_lists/sid.arm
@@ -341,6 +341,7 @@ https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-lin
https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libmount-dev_2.36.1-7_armhf.deb
https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libuuid1_2.36.1-7_armhf.deb
https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/uuid-dev_2.36.1-7_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/v/valgrind/valgrind_3.16.1-1_armhf.deb
https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/v/vulkan-loader/libvulkan1_1.2.162.0-1_armhf.deb
https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/v/vulkan-loader/libvulkan-dev_1.2.162.0-1_armhf.deb
https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-client0_1.19.0-2_armhf.deb
diff --git a/chromium/build/linux/sysroot_scripts/generated_package_lists/sid.arm64 b/chromium/build/linux/sysroot_scripts/generated_package_lists/sid.arm64
index 4db2b8a9944..45d05e8f4a9 100644
--- a/chromium/build/linux/sysroot_scripts/generated_package_lists/sid.arm64
+++ b/chromium/build/linux/sysroot_scripts/generated_package_lists/sid.arm64
@@ -344,6 +344,7 @@ https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-lin
https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libmount-dev_2.36.1-7_arm64.deb
https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libuuid1_2.36.1-7_arm64.deb
https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/uuid-dev_2.36.1-7_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/v/valgrind/valgrind_3.16.1-1_arm64.deb
https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/v/vulkan-loader/libvulkan1_1.2.162.0-1_arm64.deb
https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/v/vulkan-loader/libvulkan-dev_1.2.162.0-1_arm64.deb
https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-client0_1.19.0-2_arm64.deb
diff --git a/chromium/build/linux/sysroot_scripts/generated_package_lists/sid.i386 b/chromium/build/linux/sysroot_scripts/generated_package_lists/sid.i386
index cdeaf9a2b78..f177c88adb2 100644
--- a/chromium/build/linux/sysroot_scripts/generated_package_lists/sid.i386
+++ b/chromium/build/linux/sysroot_scripts/generated_package_lists/sid.i386
@@ -341,6 +341,7 @@ https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-lin
https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libmount-dev_2.36.1-7_i386.deb
https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libuuid1_2.36.1-7_i386.deb
https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/uuid-dev_2.36.1-7_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/v/valgrind/valgrind_3.16.1-1_i386.deb
https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/v/vulkan-loader/libvulkan1_1.2.162.0-1_i386.deb
https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/v/vulkan-loader/libvulkan-dev_1.2.162.0-1_i386.deb
https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-client0_1.19.0-2_i386.deb
diff --git a/chromium/build/linux/sysroot_scripts/generated_package_lists/sid.mips64el b/chromium/build/linux/sysroot_scripts/generated_package_lists/sid.mips64el
index a3b6559aa28..13e978b0d8c 100644
--- a/chromium/build/linux/sysroot_scripts/generated_package_lists/sid.mips64el
+++ b/chromium/build/linux/sysroot_scripts/generated_package_lists/sid.mips64el
@@ -332,6 +332,7 @@ https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-lin
https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libmount-dev_2.36.1-7_mips64el.deb
https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libuuid1_2.36.1-7_mips64el.deb
https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/uuid-dev_2.36.1-7_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/v/valgrind/valgrind_3.16.1-1_mips64el.deb
https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/v/vulkan-loader/libvulkan1_1.2.162.0-1_mips64el.deb
https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/v/vulkan-loader/libvulkan-dev_1.2.162.0-1_mips64el.deb
https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-client0_1.19.0-2_mips64el.deb
diff --git a/chromium/build/linux/sysroot_scripts/sysroot-creator-sid.sh b/chromium/build/linux/sysroot_scripts/sysroot-creator-sid.sh
index 86c311cb621..4695580b265 100755
--- a/chromium/build/linux/sysroot_scripts/sysroot-creator-sid.sh
+++ b/chromium/build/linux/sysroot_scripts/sysroot-creator-sid.sh
@@ -417,6 +417,7 @@ DEBIAN_PACKAGES_X86="
libmpx2
libquadmath0
libubsan1
+ valgrind
"
DEBIAN_PACKAGES_ARM="
@@ -429,6 +430,7 @@ DEBIAN_PACKAGES_ARM="
libgtk-4-1
libgtk-4-dev
libubsan1
+ valgrind
"
DEBIAN_PACKAGES_ARM64="
@@ -444,6 +446,7 @@ DEBIAN_PACKAGES_ARM64="
libthai0
libtsan0
libubsan1
+ valgrind
"
DEBIAN_PACKAGES_ARMEL="
@@ -458,6 +461,7 @@ DEBIAN_PACKAGES_ARMEL="
"
DEBIAN_PACKAGES_MIPS64EL="
+ valgrind
"
. "${SCRIPT_DIR}/sysroot-creator.sh"
diff --git a/chromium/build/linux/sysroot_scripts/sysroots.json b/chromium/build/linux/sysroot_scripts/sysroots.json
index 6248db7d9a3..63b6d981ea3 100644
--- a/chromium/build/linux/sysroot_scripts/sysroots.json
+++ b/chromium/build/linux/sysroot_scripts/sysroots.json
@@ -1,16 +1,16 @@
{
"sid_amd64": {
- "Sha1Sum": "43a87bbebccad99325fdcf34166295b121ee15c7",
+ "Sha1Sum": "95051d95804a77144986255f534acb920cee375b",
"SysrootDir": "debian_sid_amd64-sysroot",
"Tarball": "debian_sid_amd64_sysroot.tar.xz"
},
"sid_arm": {
- "Sha1Sum": "11d6f690ca49e8ba01a1d8c5346cedad2cf308fd",
+ "Sha1Sum": "8cf8dfa68861f84ce79d038f08da6e685487def5",
"SysrootDir": "debian_sid_arm-sysroot",
"Tarball": "debian_sid_arm_sysroot.tar.xz"
},
"sid_arm64": {
- "Sha1Sum": "2befe8ce3e88be6080e4fb7e6d412278ea6a7625",
+ "Sha1Sum": "9677df0c176ac33116618f109c4988b719ad78c6",
"SysrootDir": "debian_sid_arm64-sysroot",
"Tarball": "debian_sid_arm64_sysroot.tar.xz"
},
@@ -20,17 +20,17 @@
"Tarball": "debian_sid_armel_sysroot.tar.xz"
},
"sid_i386": {
- "Sha1Sum": "d53a049af5961f2f121ee4e149918097c193f8ed",
+ "Sha1Sum": "4840dc83ec7c3816ae0262e9e93231736bf0a5b7",
"SysrootDir": "debian_sid_i386-sysroot",
"Tarball": "debian_sid_i386_sysroot.tar.xz"
},
"sid_mips": {
- "Sha1Sum": "eb577cef43088b7e0540950c74f994267631d4cd",
+ "Sha1Sum": "779ca2ab213f78b0c7d94ff157ee444ae2d40043",
"SysrootDir": "debian_sid_mips-sysroot",
"Tarball": "debian_sid_mips_sysroot.tar.xz"
},
"sid_mips64el": {
- "Sha1Sum": "6cb76f27035d1460fe164f7e6c5318c047aac153",
+ "Sha1Sum": "27d205329440aa96002da71968a0dbef09aa7d6a",
"SysrootDir": "debian_sid_mips64el-sysroot",
"Tarball": "debian_sid_mips64el_sysroot.tar.xz"
}
diff --git a/chromium/build/linux/unbundle/remove_bundled_libraries.py b/chromium/build/linux/unbundle/remove_bundled_libraries.py
index 899877a1654..91fa62ef412 100755
--- a/chromium/build/linux/unbundle/remove_bundled_libraries.py
+++ b/chromium/build/linux/unbundle/remove_bundled_libraries.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -9,8 +9,6 @@ Removes bundled libraries to make sure they are not used.
See README for more details.
"""
-from __future__ import print_function
-
import optparse
import os.path
import sys
diff --git a/chromium/build/linux/unbundle/replace_gn_files.py b/chromium/build/linux/unbundle/replace_gn_files.py
index eba4bd1fb3c..3a65f8395b5 100755
--- a/chromium/build/linux/unbundle/replace_gn_files.py
+++ b/chromium/build/linux/unbundle/replace_gn_files.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -8,8 +8,6 @@ Replaces GN files in tree with files from here that
make the build use system libraries.
"""
-from __future__ import print_function
-
import argparse
import os
import shutil
diff --git a/chromium/build/rust/BUILD.gn b/chromium/build/rust/BUILD.gn
new file mode 100644
index 00000000000..ebb5409b645
--- /dev/null
+++ b/chromium/build/rust/BUILD.gn
@@ -0,0 +1,11 @@
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+config("edition_2018") {
+ rustflags = [ "--edition=2018" ]
+}
+
+config("edition_2015") {
+ rustflags = [ "--edition=2015" ]
+}
diff --git a/chromium/build/rust/OWNERS b/chromium/build/rust/OWNERS
new file mode 100644
index 00000000000..e15cf8c77fc
--- /dev/null
+++ b/chromium/build/rust/OWNERS
@@ -0,0 +1,8 @@
+adetaylor@chromium.org
+ajgo@chromium.org
+danakj@chromium.org
+jclinton@chromium.org
+lukasza@chromium.org
+palmer@chromium.org
+rsesek@chromium.org
+thakis@chromium.org
diff --git a/chromium/build/rust/rust_source_set.gni b/chromium/build/rust/rust_source_set.gni
new file mode 100644
index 00000000000..5887a62fb4d
--- /dev/null
+++ b/chromium/build/rust/rust_source_set.gni
@@ -0,0 +1,256 @@
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/rust.gni")
+
+# Defines a Rust static library which can be used by downstream Rust or C++
+# targets.
+#
+# Important: downstream C++ targets should NOT depend upon this
+# type of target. Instead they should append the suffix "_cpp_bindings"
+# when adding this to their deps:
+#
+# deps += [ "//example:foo_bar_cpp_bindings" ]
+#
+# This arranges to ensure that any downstream C++ linker invocations
+# are supplied with the required extra bits to include Rust code; for
+# instance, the Rust standard library.
+#
+# Downstream Rust targets should depend on this rust_source_set directly.
+#
+# Parameters
+#
+# sources
+# List of source files which this crate is allowed to compile, which is
+# used to determine the impact of source code changes on other GN targets.
+# This is not used by the Rust compiler, as it discovers source files by
+# following `mod` declarations starting at the `crate_root`. The
+# discovered source files must match this list. (This is not yet enforced,
+# but will be.)
+#
+# epoch (optional)
+# The major version of the library, which is used to differentiate between
+# multiple versions of the same library name. This includes all leading 0s
+# and the first non-zero value in the crate's version. This should be left
+# as the default, which is "0", for first-party code unless there are
+# multiple versions of a crate present. For third-party code, the version
+# epoch (matching the directory it is found in) should be specified.
+#
+# Examples:
+# 1.0.2 => epoch = "1"
+# 4.2.0 => epoch = "4"
+# 0.2.7 => epoch = "0.2"
+# 0.0.3 => epoch = "0.0.3"
+#
+# edition (optional)
+# Edition of the Rust language to be used.
+# Options are "2015" and "2018". Defaults to "2018".
+# # TODO add 2021 when it arrives
+#
+# configs (optional)
+# A list of config labels (in the GN meaning) applying to this target.
+#
+# deps (optional)
+# List of GN targets on which this crate depends. These may be Rust
+# or non-Rust targets.
+#
+# test_deps (optional)
+# List of GN targets on which this crate's tests depend, in addition
+# to deps.
+#
+# mutually_dependent_target (optional)
+# If this Rust code is intrinsically paired with some C/C++ code,
+# with bidirectional calls between the two, then this would
+# be a circular dependency. GN does not allow circular dependencies,
+# (other than for header files per allow_circular_includes_from).
+# But this is common for a 'component' which has both Rust and C++
+# code. You should structure things such that the C++ code depends
+# on the Rust code in the normal way:
+# source_set("cpp_stuff") {
+# deps = [ "rust_stuff_cpp_bindings" ]
+# # ..
+# }
+# but that the Rust target also notes the C++ target using this
+# 'mutually_dependent_target' parameter.
+# rust_source_set("rust_stuff") {
+# mutually_dependent_target = "cpp_stuff"
+# # ..
+# }
+#
+# The resultant behavior:
+# * Downstream C++ targets should depend on the main C++ target
+# ("cpp_stuff" in the above example). They'll come to depend on
+# both Rust and C++ code (plus the Rust standard library,
+# noted above).
+# * Downstream Rust targets should depend on the Rust target
+# ("rust_stuff"). They'll come to depend on both Rust and C++
+# code, because this "mutually_dependent_target" will be put into
+# their dependency list.
+# * Rust unit tests, similarly, will depend on the Rust and C++
+# code.
+# Note that this arrangement carefully avoids actual circular
+# dependencies.
+#
+# skip_unit_tests (optional)
+# Avoids building unit tests associated with this Rust crate. Normally,
+# this template will create a `<name>_unittests` executable in the output
+# directory; this behavior is suppressed if this flag is set to true.
+#
+# crate_root (optional)
+# Location of the crate root.
+# This defaults to `./src/lib.rs` and should only be changed when
+# absolutely necessary (such as in the case of generated code).
+#
+# features (optional)
+# A list of conditional compilation flags to enable. This can be used
+# to set features for crates built in-tree which are also published to
+# crates.io. Each feature in the list will be passed to rustc as
+# '--cfg feature=XXX'
+#
+# Example of usage:
+#
+# rust_source_set("foo_bar") {
+# deps = [
+# "//boo/public/rust/bar",
+# "//third_party/rust/crates:argh",
+# "//third_party/rust/crates:serde",
+# "//third_party/rust/crates:slab",
+# ]
+# sources = [ "src/lib.rs" ]
+# }
+#
+# This template is intended to serve the same purpose as 'rustc_library'
+# in Fuchsia.
+
+template("rust_source_set") {
+ _target_name = target_name
+ if (defined(invoker.crate_root)) {
+ crate_root = invoker.crate_root
+ } else {
+ crate_root = "src/lib.rs"
+ }
+
+ _features = []
+ if (defined(invoker.features)) {
+ foreach(i, invoker.features) {
+ _features += [ "--cfg=feature=\"${i}\"" ]
+ }
+ }
+ if (defined(invoker.edition) && invoker.edition == "2015") {
+ _configs = [ "//build/rust:edition_2015" ]
+ } else {
+ _configs = [ "//build/rust:edition_2018" ]
+ }
+ if (defined(invoker.configs)) {
+ _configs += invoker.configs
+ }
+ assert(!defined(invoker.rustflags),
+ "rustflags not supported by rust_source_set")
+ _deps_for_rust_targets = []
+ if (defined(invoker.mutually_dependent_target)) {
+ _deps_for_rust_targets += [ invoker.mutually_depependent_peer ]
+ }
+ _deps = []
+ if (defined(invoker.deps)) {
+ _deps += invoker.deps
+ }
+ _build_unit_tests = build_rust_unit_tests
+ if (defined(invoker.skip_unit_tests) && invoker.skip_unit_tests == true) {
+ _build_unit_tests = false
+ }
+
+ # TODO(danakj): This could be a hash generated from the input crate, such as
+ # from its path, in which case the BUILD.gn would not need to specify
+ # anything. But GN doesn't give us a hash function to make that easy.
+ _metadata = "0"
+ if (defined(invoker.epoch)) {
+ _metadata = invoker.epoch
+ }
+
+ # We require that all source files are listed, even though this is
+ # not a requirement for rustc. The reason is to ensure that tools
+ # such as `gn deps` give the correct answer, and thus we trigger
+ # the right test suites etc. on code change.
+ # TODO(crbug.com/1256930) - verify this is correct
+ assert(defined(invoker.sources), "sources must be listed")
+
+ # Downstream Rust targets should include this in their deps.
+ group(target_name) {
+ deps = [ ":${_target_name}_rlib" ]
+ deps += _deps_for_rust_targets
+ }
+
+ # Downstream C++ targets should include this in their deps.
+ group("${_target_name}_cpp_bindings") {
+ deps = [
+ ":${_target_name}_rlib",
+ "//build/rust/std", # explanation: any C++ code depending on this
+ # target should also depend on the Rust standard
+ # library to ensure it's linked into the final
+ # binary by the C++ linker.
+ ]
+ if (defined(invoker.mutually_dependent_target)) {
+ visibility = invoker.mutually_dependent_target
+ }
+ }
+
+ rust_library("${target_name}_rlib") {
+ crate_name = _target_name
+ configs += _configs
+ deps = _deps
+ rustflags = _features
+ rustflags += [ string_join("",
+ [
+ "-Cmetadata=",
+ _metadata,
+ ]) ]
+ forward_variables_from(invoker,
+ "*",
+ [
+ "features",
+ "deps",
+ "rustflags",
+ "configs",
+ "output_name",
+ "crate_name",
+ "crate_root",
+ ])
+ visibility = [
+ ":${_target_name}",
+ ":${_target_name}_cpp_bindings",
+ ]
+ }
+
+ if (_build_unit_tests) {
+ # TODO(crbug.com/1229320): Arrange to run test executables on try bots.
+ # TODO(crbug.com/gn/146): Allow Rust executables to depend on C/C++ source sets.
+ # This is important in cases where Rust tests may depend upon C/C++
+ # dependencies.
+ executable("${_target_name}_unittests") {
+ forward_variables_from(invoker,
+ "*",
+ [
+ "features",
+ "deps",
+ "rustflags",
+ "configs",
+ "output_name",
+ "crate_name",
+ "crate_root",
+ ])
+ rustflags = [
+ "--cfg",
+ "feature=\"test\"",
+ "--test",
+ ]
+ rustflags += _features
+ configs += _configs
+ deps = _deps
+ if (defined(invoker.test_deps)) {
+ deps += invoker.test_deps
+ }
+ deps += _deps_for_rust_targets
+ }
+ }
+}
diff --git a/chromium/build/rust/std/BUILD.gn b/chromium/build/rust/std/BUILD.gn
new file mode 100644
index 00000000000..185ef6ade9a
--- /dev/null
+++ b/chromium/build/rust/std/BUILD.gn
@@ -0,0 +1,153 @@
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file provides the ability for our C++ toolchain to successfully
+# link binaries containing arbitrary Rust code.
+#
+# By "arbitrary Rust code" I mean .rlib archives full of Rust code, which
+# is actually a static archive.
+#
+# Those static libraries don't link as-is into a final executable because
+# they're designed for downstream processing by further invocations of rustc
+# which link into a final binary. That final invocation of rustc knows how
+# to do two things:
+# * Find the Rust standard library.
+# * Remap some generic allocator symbols to the specific allocator symbols
+# in use.
+# This file does both those things. Any C++ target containing Rust .rlibs
+# should simply depend on :std within this file and it will be taken care of.
+# In practice, this will in future be taken care of by a standard template
+# used for each Rust source set, so that a typical user of Rust need not
+# think about it.
+#
+# This is obviously a bit fragile - rustc might do other magic in future.
+# But, linking with a final C++ toolchain is something often needed, and
+# https://github.com/rust-lang/rust/issues/64191 aims to make this
+# officially possible.
+
+import("//build/config/compiler/compiler.gni")
+import("//build/config/rust.gni")
+
+stdlib_files = [
+ "std", # List first because it makes depfiles more debuggable (see below)
+ "addr2line",
+ "adler",
+ "alloc",
+ "cfg_if",
+ "compiler_builtins",
+ "core",
+ "getopts",
+ "gimli",
+ "hashbrown",
+ "libc",
+ "miniz_oxide",
+ "object",
+ "panic_abort",
+ "panic_unwind",
+ "proc_macro",
+ "rustc_demangle",
+ "std_detect",
+ "term",
+ "test",
+ "unicode_width",
+ "unwind",
+]
+
+if (!use_unverified_rust_toolchain) {
+ # rlib files which are distributed alongside Rust's prebuilt stdlib, but we
+ # don't need to pass to the C++ linker because they're used for specialized
+ # purposes.
+ skip_stdlib_files = [
+ "profiler_builtins",
+ "rustc_std_workspace_alloc",
+ "rustc_std_workspace_core",
+ "rustc_std_workspace_std",
+ ]
+}
+
+if (toolchain_has_rust) {
+ action("find_stdlib") {
+ # Specifics of what we're doing here.
+ #
+ # We are using prebuilt Rust rlibs supplied along with the toolchain.
+ # The Rust standard library consists of rlibs with roughly all the names
+ # above.
+ #
+ # However, their filenames are not predictable, and therefore we can't
+ # have ninja rules which depend upon them. (gn offers a facility to
+ # build rules dynamically, but it's frowned upon because a script needs
+ # to run each time).
+ #
+ # Instead therefore we copy these unpredictable .rlib paths to apredictable
+ # location. That's what this script does. Furthermore, it generates a
+ # .d file in order to teach Ninja that it only needs to do this copying
+ # once, unless the source .rlibs change.
+ #
+ # The script accepts the list of known libraries and will raise an
+ # exception if the list on disk differs. (Either 'Found stdlib rlib
+ # that wasn't expected' or 'We failed to find all expected stdlib
+ # rlibs').
+ script = "find_std_rlibs.py"
+ depfile = "$target_out_dir/stdlib.d"
+ out_libdir = rebase_path(target_out_dir, root_build_dir)
+ out_depfile = rebase_path(depfile, root_build_dir)
+ args = [
+ "--rust-bin-dir",
+ rust_prefix,
+ "--output",
+ out_libdir,
+ "--depfile",
+ out_depfile,
+
+ # Due to limitations in Ninja's handling of .d files, we have to pick
+ # *the first* of our outputs. To make diagnostics more obviously
+ # related to the Rust standard library, we ensure libstd.rlib is first.
+ "--depfile-target",
+ stdlib_files[0],
+ ]
+ if (!use_unverified_rust_toolchain) {
+ args += [
+ "--stdlibs",
+ string_join(",", stdlib_files),
+ "--skip",
+ string_join(",", skip_stdlib_files),
+ ]
+ }
+ if (rust_abi_target != "") {
+ args += [
+ "--target",
+ rust_abi_target,
+ ]
+ }
+
+ outputs = []
+ foreach(lib, stdlib_files) {
+ outputs += [ "$target_out_dir/lib$lib.rlib" ]
+ }
+ }
+
+ config("rust_stdlib_config") {
+ ldflags = []
+ out_libdir = rebase_path(target_out_dir, root_build_dir)
+ foreach(lib, stdlib_files) {
+ this_file = "$out_libdir/lib$lib.rlib"
+ ldflags += [ this_file ]
+ }
+ }
+
+ source_set("remap_alloc") {
+ sources = [
+ "immediate_crash.h",
+ "remap_alloc.cc",
+ ]
+ }
+
+ group("std") {
+ all_dependent_configs = [ ":rust_stdlib_config" ]
+ deps = [
+ ":find_stdlib",
+ ":remap_alloc",
+ ]
+ }
+}
diff --git a/chromium/build/rust/std/find_std_rlibs.py b/chromium/build/rust/std/find_std_rlibs.py
new file mode 100755
index 00000000000..c61ace6ac74
--- /dev/null
+++ b/chromium/build/rust/std/find_std_rlibs.py
@@ -0,0 +1,101 @@
+#!/usr/bin/env python3
+
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# See BUILD.gn in this directory for an explanation of what this script is for.
+
+import argparse
+import os
+import stat
+import sys
+import shutil
+import subprocess
+import re
+
+REMOVE_METADATA_SUFFIX_RE = re.compile(b"-[0-9a-f]*", re.I)
+
+
+def expand_name(concise_name):
+ return "lib%s.rlib" % concise_name
+
+
+def main():
+ parser = argparse.ArgumentParser("find_std_rlibs.py")
+ parser.add_argument("--rust-bin-dir",
+ help="Path to Rust binaries",
+ required=True),
+ parser.add_argument("--target", help="Rust target triple", required=False),
+ parser.add_argument("--output",
+ help="Path to rlibs without suffixes",
+ required=True)
+ parser.add_argument("--depfile", help="Path to write depfile", required=True)
+ parser.add_argument("--depfile-target",
+ help="Target to key depfile around",
+ required=True)
+ parser.add_argument("--stdlibs",
+ help="Expected list of standard library libraries")
+ parser.add_argument("--skip-stdlibs",
+ help="Standard library files to skip",
+ default="")
+ args = parser.parse_args()
+ # Ensure we handle each rlib in the expected list exactly once.
+ if args.stdlibs:
+ rlibs_expected = [expand_name(x) for x in args.stdlibs.split(',')]
+ else:
+ rlibs_expected = None
+ rlibs_to_skip = [expand_name(x) for x in args.skip_stdlibs.split(',')]
+ # Ask rustc where to find the stdlib for this target.
+ rustc = os.path.join(args.rust_bin_dir, "rustc")
+ rustc_args = [rustc, "--print", "target-libdir"]
+ if args.target:
+ rustc_args.extend(["--target", args.target])
+ rustlib_dir = subprocess.check_output(rustc_args).rstrip()
+ # Copy the rlibs to a predictable location. Whilst we're doing so,
+ # also write a .d file so that ninja knows it doesn't need to do this
+ # again unless the source rlibs change.
+ # Format:
+ # <output path to>/lib<lib name>.rlib: <path to each Rust stdlib rlib>
+ with open(args.depfile, 'w') as depfile:
+ # Ninja isn't versatile at understanding depfiles. We have to say that a
+ # single output depends on all the inputs. We choose any one of the
+ # output rlibs for that purpose. If any of the input rlibs change, ninja
+ # will run this script again and we'll copy them all afresh.
+ depfile.write("%s:" %
+ (os.path.join(args.output, expand_name(args.depfile_target))))
+ for f in os.listdir(rustlib_dir):
+ if f.endswith(b'.rlib'):
+ # As standard, Rust includes a hash on the end of each filename
+ # representing certain metadata, to ensure that clients will link
+ # against the correct version. As gn will be manually passing
+ # the correct file path to our linker invocations, we don't need
+ # that, and it would prevent us having the predictable filenames
+ # which we need for statically computable gn dependency rules.
+ (concise_name, count) = REMOVE_METADATA_SUFFIX_RE.subn(b"", f)
+ if count == 0:
+ raise Exception("Unable to remove suffix from %s" % f)
+ if concise_name.decode() in rlibs_to_skip:
+ continue
+ if rlibs_expected is not None:
+ if concise_name.decode() not in rlibs_expected:
+ raise Exception("Found stdlib rlib that wasn't expected: %s" %
+ concise_name)
+ rlibs_expected.remove(concise_name.decode())
+ infile = os.path.join(rustlib_dir, f)
+ outfile = os.path.join(str.encode(args.output), concise_name)
+ depfile.write(" %s" % (infile.decode()))
+ if (not os.path.exists(outfile)
+ or os.stat(infile).st_mtime > os.stat(outfile).st_mtime):
+ if os.path.exists(outfile):
+ st = os.stat(outfile)
+ os.chmod(outfile, st.st_mode | stat.S_IWUSR)
+ shutil.copy(infile, outfile)
+ depfile.write("\n")
+ if rlibs_expected:
+ raise Exception("We failed to find all expected stdlib rlibs: %s" %
+ ','.join(rlibs_expected))
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/chromium/build/rust/std/immediate_crash.h b/chromium/build/rust/std/immediate_crash.h
new file mode 100644
index 00000000000..9874bdcd486
--- /dev/null
+++ b/chromium/build/rust/std/immediate_crash.h
@@ -0,0 +1,170 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file is copied from //base/immediate_crash.h.
+
+#ifndef BUILD_RUST_STD_IMMEDIATE_CRASH_H_
+#define BUILD_RUST_STD_IMMEDIATE_CRASH_H_
+
+#include "build/build_config.h"
+
+// Crashes in the fastest possible way with no attempt at logging.
+// There are several constraints; see http://crbug.com/664209 for more context.
+//
+// - TRAP_SEQUENCE_() must be fatal. It should not be possible to ignore the
+// resulting exception or simply hit 'continue' to skip over it in a debugger.
+// - Different instances of TRAP_SEQUENCE_() must not be folded together, to
+// ensure crash reports are debuggable. Unlike __builtin_trap(), asm volatile
+// blocks will not be folded together.
+// Note: TRAP_SEQUENCE_() previously required an instruction with a unique
+// nonce since unlike clang, GCC folds together identical asm volatile
+// blocks.
+// - TRAP_SEQUENCE_() must produce a signal that is distinct from an invalid
+// memory access.
+// - TRAP_SEQUENCE_() must be treated as a set of noreturn instructions.
+// __builtin_unreachable() is used to provide that hint here. clang also uses
+// this as a heuristic to pack the instructions in the function epilogue to
+// improve code density.
+//
+// Additional properties that are nice to have:
+// - TRAP_SEQUENCE_() should be as compact as possible.
+// - The first instruction of TRAP_SEQUENCE_() should not change, to avoid
+// shifting crash reporting clusters. As a consequence of this, explicit
+// assembly is preferred over intrinsics.
+// Note: this last bullet point may no longer be true, and may be removed in
+// the future.
+
+// Note: TRAP_SEQUENCE is currently split into two macro helpers due to the fact
+// that clang emits an actual instruction for __builtin_unreachable() on certain
+// platforms (see https://crbug.com/958675). In addition, the int3/bkpt/brk will
+// be removed in followups, so splitting it up like this now makes it easy to
+// land the followups.
+
+#if defined(COMPILER_GCC)
+
+#if defined(OS_NACL)
+
+// Crash report accuracy is not guaranteed on NaCl.
+#define TRAP_SEQUENCE1_() __builtin_trap()
+#define TRAP_SEQUENCE2_() asm volatile("")
+
+#elif defined(ARCH_CPU_X86_FAMILY)
+
+// TODO(https://crbug.com/958675): In theory, it should be possible to use just
+// int3. However, there are a number of crashes with SIGILL as the exception
+// code, so it seems likely that there's a signal handler that allows execution
+// to continue after SIGTRAP.
+#define TRAP_SEQUENCE1_() asm volatile("int3")
+
+#if defined(OS_APPLE)
+// Intentionally empty: __builtin_unreachable() is always part of the sequence
+// (see IMMEDIATE_CRASH below) and already emits a ud2 on Mac.
+#define TRAP_SEQUENCE2_() asm volatile("")
+#else
+#define TRAP_SEQUENCE2_() asm volatile("ud2")
+#endif // defined(OS_APPLE)
+
+#elif defined(ARCH_CPU_ARMEL)
+
+// bkpt will generate a SIGBUS when running on armv7 and a SIGTRAP when running
+// as a 32 bit userspace app on arm64. There doesn't seem to be any way to
+// cause a SIGTRAP from userspace without using a syscall (which would be a
+// problem for sandboxing).
+// TODO(https://crbug.com/958675): Remove bkpt from this sequence.
+#define TRAP_SEQUENCE1_() asm volatile("bkpt #0")
+#define TRAP_SEQUENCE2_() asm volatile("udf #0")
+
+#elif defined(ARCH_CPU_ARM64)
+
+// This will always generate a SIGTRAP on arm64.
+// TODO(https://crbug.com/958675): Remove brk from this sequence.
+#define TRAP_SEQUENCE1_() asm volatile("brk #0")
+#define TRAP_SEQUENCE2_() asm volatile("hlt #0")
+
+#else
+
+// Crash report accuracy will not be guaranteed on other architectures, but at
+// least this will crash as expected.
+#define TRAP_SEQUENCE1_() __builtin_trap()
+#define TRAP_SEQUENCE2_() asm volatile("")
+
+#endif // ARCH_CPU_*
+
+#elif defined(COMPILER_MSVC)
+
+#if !defined(__clang__)
+
+// MSVC x64 doesn't support inline asm, so use the MSVC intrinsic.
+#define TRAP_SEQUENCE1_() __debugbreak()
+#define TRAP_SEQUENCE2_()
+
+#elif defined(ARCH_CPU_ARM64)
+
+// Windows ARM64 uses "BRK #F000" as its breakpoint instruction, and
+// __debugbreak() generates that in both VC++ and clang.
+#define TRAP_SEQUENCE1_() __debugbreak()
+// Intentionally empty: __builtin_unreachable() is always part of the sequence
+// (see IMMEDIATE_CRASH below) and already emits a ud2 on Win64,
+// https://crbug.com/958373
+#define TRAP_SEQUENCE2_() __asm volatile("")
+
+#else
+
+#define TRAP_SEQUENCE1_() asm volatile("int3")
+#define TRAP_SEQUENCE2_() asm volatile("ud2")
+
+#endif // __clang__
+
+#else
+
+#error No supported trap sequence!
+
+#endif // COMPILER_GCC
+
+#define TRAP_SEQUENCE_() \
+ do { \
+ TRAP_SEQUENCE1_(); \
+ TRAP_SEQUENCE2_(); \
+ } while (false)
+
+// CHECK() and the trap sequence can be invoked from a constexpr function.
+// This could make compilation fail on GCC, as it forbids directly using inline
+// asm inside a constexpr function. However, it allows calling a lambda
+// expression including the same asm.
+// The side effect is that the top of the stacktrace will not point to the
+// calling function, but to this anonymous lambda. This is still useful as the
+// full name of the lambda will typically include the name of the function that
+// calls CHECK() and the debugger will still break at the right line of code.
+#if !defined(COMPILER_GCC)
+
+#define WRAPPED_TRAP_SEQUENCE_() TRAP_SEQUENCE_()
+
+#else
+
+#define WRAPPED_TRAP_SEQUENCE_() \
+ do { \
+ [] { TRAP_SEQUENCE_(); }(); \
+ } while (false)
+
+#endif // !defined(COMPILER_GCC)
+
+#if defined(__clang__) || defined(COMPILER_GCC)
+
+// __builtin_unreachable() hints to the compiler that this is noreturn and can
+// be packed in the function epilogue.
+#define IMMEDIATE_CRASH() \
+ ({ \
+ WRAPPED_TRAP_SEQUENCE_(); \
+ __builtin_unreachable(); \
+ })
+
+#else
+
+// This is supporting non-chromium user of logging.h to build with MSVC, like
+// pdfium. On MSVC there is no __builtin_unreachable().
+#define IMMEDIATE_CRASH() WRAPPED_TRAP_SEQUENCE_()
+
+#endif // defined(__clang__) || defined(COMPILER_GCC)
+
+#endif // BUILD_RUST_STD_IMMEDIATE_CRASH_H_
diff --git a/chromium/build/rust/std/remap_alloc.cc b/chromium/build/rust/std/remap_alloc.cc
new file mode 100644
index 00000000000..799cf8b033a
--- /dev/null
+++ b/chromium/build/rust/std/remap_alloc.cc
@@ -0,0 +1,74 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stddef.h>
+#include <stdlib.h>
+
+#include "build/rust/std/immediate_crash.h"
+
+// When linking a final binary, rustc has to pick between either:
+// * The default Rust allocator
+// * Any #[global_allocator] defined in *any rlib in its dependency tree*
+// (https://doc.rust-lang.org/edition-guide/rust-2018/platform-and-target-support/global-allocators.html)
+//
+// In this latter case, this fact will be recorded in some of the metadata
+// within the .rlib file. (An .rlib file is just a .a file, but does have
+// additional metadata for use by rustc. This is, as far as I know, the only
+// such metadata we would ideally care about.)
+//
+// In all the linked rlibs,
+// * If 0 crates define a #[global_allocator], rustc uses its default allocator
+// * If 1 crate defines a #[global_allocator], rustc uses that
+// * If >1 crates define a #[global_allocator], rustc bombs out.
+//
+// Because rustc does these checks, it doesn't just have the __rust_alloc
+// symbols defined anywhere (neither in the stdlib nor in any of these
+// crates which have a #[global_allocator] defined.)
+//
+// Instead:
+// Rust's final linking stage invokes dynamic LLVM codegen to create symbols
+// for the basic heap allocation operations. It literally creates a
+// __rust_alloc symbol at link time. Unless any crate has specified a
+// #[global_allocator], it simply calls from __rust_alloc into
+// __rdl_alloc, which is the default Rust allocator. The same applies to a
+// few other symbols.
+//
+// We're not (always) using rustc for final linking. For cases where we're not
+// using rustc as the final linker, we'll define those symbols here instead.
+//
+// In future, we may wish to do something different from using the Rust
+// default allocator (e.g. explicitly redirect to PartitionAlloc). We could
+// do that here, or we could build a crate with a #[global_allocator] and
+// redirect these symbols to that crate instead. The advantage of the latter
+// is that it would work equally well for those cases where rustc is doing
+// the final linking.
+
+extern "C" {
+
+void* __rdl_alloc(size_t, size_t);
+void __rdl_dealloc(void*);
+void* __rdl_realloc(void*, size_t, size_t, size_t);
+void* __rdl_alloc_zeroed(size_t, size_t);
+
+void* __rust_alloc(size_t a, size_t b) {
+ return __rdl_alloc(a, b);
+}
+
+void __rust_dealloc(void* a) {
+ __rdl_dealloc(a);
+}
+
+void* __rust_realloc(void* a, size_t b, size_t c, size_t d) {
+ return __rdl_realloc(a, b, c, d);
+}
+
+void* __rust_alloc_zeroed(size_t a, size_t b) {
+ return __rdl_alloc_zeroed(a, b);
+}
+
+void __rust_alloc_error_handler(size_t a, size_t b) {
+ IMMEDIATE_CRASH();
+}
+
+} // extern "C"
diff --git a/chromium/build/rust/tests/BUILD.gn b/chromium/build/rust/tests/BUILD.gn
new file mode 100644
index 00000000000..4ccd89b2a92
--- /dev/null
+++ b/chromium/build/rust/tests/BUILD.gn
@@ -0,0 +1,29 @@
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/rust.gni")
+
+group("tests") {
+ # Build some minimal binaries to exercise the Rust toolchain
+ # only if that toolchain is enabled in gn args.
+ testonly = true
+ if (toolchain_has_rust) {
+ deps = [
+ "test_cpp_including_rust",
+ "test_rust_source_set",
+ ]
+ if (build_rust_unit_tests) {
+ deps += [
+ "test_cpp_including_rust:test_cpp_including_rust_unittests",
+ "test_rust_source_set:test_rust_source_set_unittests",
+ ]
+ }
+ if (rustc_can_link) {
+ deps += [
+ "test_rust_exe",
+ "test_rust_multiple_dep_versions_exe",
+ ]
+ }
+ }
+}
diff --git a/chromium/build/rust/tests/test_cpp_including_rust/BUILD.gn b/chromium/build/rust/tests/test_cpp_including_rust/BUILD.gn
new file mode 100644
index 00000000000..afe5ff4e836
--- /dev/null
+++ b/chromium/build/rust/tests/test_cpp_including_rust/BUILD.gn
@@ -0,0 +1,25 @@
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//base/allocator/allocator.gni")
+import("//testing/test.gni")
+
+executable("test_cpp_including_rust") {
+ sources = [ "main.cc" ]
+ deps = [
+ "//build/rust/tests/test_rust_source_set:test_rust_source_set_cpp_bindings",
+ ]
+}
+
+test("test_cpp_including_rust_unittests") {
+ sources = [ "unittests.cc" ]
+ deps = [
+ "//base",
+ "//base/allocator:buildflags",
+ "//base/test:run_all_unittests",
+ "//build/rust/tests/test_rust_source_set:test_rust_source_set_cpp_bindings",
+ "//testing/gmock",
+ "//testing/gtest",
+ ]
+}
diff --git a/chromium/build/rust/tests/test_rust_exe/BUILD.gn b/chromium/build/rust/tests/test_rust_exe/BUILD.gn
new file mode 100644
index 00000000000..0ce2e4a5078
--- /dev/null
+++ b/chromium/build/rust/tests/test_rust_exe/BUILD.gn
@@ -0,0 +1,12 @@
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+executable("test_rust_exe") {
+ crate_root = "main.rs"
+ deps = [ "//build/rust/tests/test_rust_source_set" ]
+ rustflags = [
+ "--edition",
+ "2018",
+ ]
+}
diff --git a/chromium/build/rust/tests/test_rust_multiple_dep_versions_exe/BUILD.gn b/chromium/build/rust/tests/test_rust_multiple_dep_versions_exe/BUILD.gn
new file mode 100644
index 00000000000..9ab2863c8d2
--- /dev/null
+++ b/chromium/build/rust/tests/test_rust_multiple_dep_versions_exe/BUILD.gn
@@ -0,0 +1,23 @@
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/rust/rust_source_set.gni")
+
+# The exe depends on lib v1. But it also transitively depends on lib v2.
+# The code in the exe should use v1, and the code in the transitive lib should
+# use v2.
+executable("test_rust_multiple_dep_versions_exe") {
+ crate_root = "main.rs"
+ deps = [
+ ":transitive_v2",
+ "//build/rust/tests/test_rust_multiple_dep_versions_exe/v1:test_lib",
+ ]
+}
+
+rust_source_set("transitive_v2") {
+ crate_root = "transitive_lib.rs"
+ sources = [ "transitive_lib.rs" ]
+ deps =
+ [ "//build/rust/tests/test_rust_multiple_dep_versions_exe/v2:test_lib" ]
+}
diff --git a/chromium/build/rust/tests/test_rust_multiple_dep_versions_exe/v1/BUILD.gn b/chromium/build/rust/tests/test_rust_multiple_dep_versions_exe/v1/BUILD.gn
new file mode 100644
index 00000000000..9a0963aa496
--- /dev/null
+++ b/chromium/build/rust/tests/test_rust_multiple_dep_versions_exe/v1/BUILD.gn
@@ -0,0 +1,14 @@
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/rust/rust_source_set.gni")
+
+rust_source_set("test_lib") {
+ epoch = "1"
+ sources = [ "src/lib.rs" ]
+
+ # Avoiding collision between v1 and v2 unit tests. The output binary should
+ # be versioned or renamed somehow.
+ skip_unit_tests = true
+}
diff --git a/chromium/build/rust/tests/test_rust_multiple_dep_versions_exe/v2/BUILD.gn b/chromium/build/rust/tests/test_rust_multiple_dep_versions_exe/v2/BUILD.gn
new file mode 100644
index 00000000000..42dbdd90b1f
--- /dev/null
+++ b/chromium/build/rust/tests/test_rust_multiple_dep_versions_exe/v2/BUILD.gn
@@ -0,0 +1,14 @@
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/rust/rust_source_set.gni")
+
+rust_source_set("test_lib") {
+ epoch = "2"
+ sources = [ "src/lib.rs" ]
+
+ # Avoiding collision between v1 and v2 unit tests. The output binary should
+ # be versioned or renamed somehow.
+ skip_unit_tests = true
+}
diff --git a/chromium/build/rust/tests/test_rust_source_set/BUILD.gn b/chromium/build/rust/tests/test_rust_source_set/BUILD.gn
new file mode 100644
index 00000000000..b0d0e5533bc
--- /dev/null
+++ b/chromium/build/rust/tests/test_rust_source_set/BUILD.gn
@@ -0,0 +1,10 @@
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/rust/rust_source_set.gni")
+
+# Dependent Rust targets should depend on this.
+rust_source_set("test_rust_source_set") {
+ sources = [ "src/lib.rs" ]
+}
diff --git a/chromium/build/sanitizers/lsan_suppressions.cc b/chromium/build/sanitizers/lsan_suppressions.cc
index 9c0fec46618..8b355500618 100644
--- a/chromium/build/sanitizers/lsan_suppressions.cc
+++ b/chromium/build/sanitizers/lsan_suppressions.cc
@@ -41,6 +41,7 @@ char kLSanDefaultSuppressions[] =
// http://crbug.com/431213, http://crbug.com/416665
"leak:gin/object_template_builder.h\n"
+ "leak:gin/function_template.h\n"
// Leaks in swrast_dri.so. http://crbug.com/540042
"leak:swrast_dri.so\n"
diff --git a/chromium/build/skia_gold_common/output_managerless_skia_gold_session.py b/chromium/build/skia_gold_common/output_managerless_skia_gold_session.py
index 95ebbb41fdc..a74b68fff61 100644
--- a/chromium/build/skia_gold_common/output_managerless_skia_gold_session.py
+++ b/chromium/build/skia_gold_common/output_managerless_skia_gold_session.py
@@ -8,7 +8,9 @@ Diff output is instead stored in a directory and pointed to with file:// URLs.
import os
import subprocess
-import tempfile
+import time
+
+import six
from skia_gold_common import skia_gold_session
@@ -35,11 +37,15 @@ class OutputManagerlessSkiaGoldSession(skia_gold_session.SkiaGoldSession):
optional_keys=optional_keys,
force_dryrun=force_dryrun)
- def _CreateDiffOutputDir(self):
- # We intentionally don't clean this up and don't put it in self._working_dir
- # since we need it to stick around after the test completes so the user
- # can look at its contents.
- return tempfile.mkdtemp()
+ def _CreateDiffOutputDir(self, name):
+ # Do this instead of just making a temporary directory so that it's easier
+ # for users to look through multiple results. We intentionally do not clean
+ # this directory up since the user might need to look at it later.
+ timestamp = int(time.time())
+ name = '%s_%d' % (name, timestamp)
+ filepath = os.path.join(self._local_png_directory, name)
+ os.makedirs(filepath)
+ return filepath
def _StoreDiffLinks(self, image_name, _, output_dir):
results = self._comparison_results.setdefault(image_name,
@@ -58,7 +64,11 @@ class OutputManagerlessSkiaGoldSession(skia_gold_session.SkiaGoldSession):
@staticmethod
def _RunCmdForRcAndOutput(cmd):
try:
- output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
+ output = subprocess.check_output(cmd,
+ stderr=subprocess.STDOUT).decode('utf-8')
return 0, output
except subprocess.CalledProcessError as e:
- return e.returncode, e.output
+ output = e.output
+ if not isinstance(output, six.string_types):
+ output = output.decode('utf-8')
+ return e.returncode, output
diff --git a/chromium/build/skia_gold_common/output_managerless_skia_gold_session_unittest.py b/chromium/build/skia_gold_common/output_managerless_skia_gold_session_unittest.py
index cdd87d960fd..80937bb8e87 100755
--- a/chromium/build/skia_gold_common/output_managerless_skia_gold_session_unittest.py
+++ b/chromium/build/skia_gold_common/output_managerless_skia_gold_session_unittest.py
@@ -6,11 +6,14 @@
#pylint: disable=protected-access
import os
+import re
import sys
import tempfile
import unittest
-if sys.version_info[0] == 2:
+import six
+
+if six.PY2:
import mock
else:
import unittest.mock as mock
@@ -67,6 +70,36 @@ class GpuSkiaGoldSessionDiffTest(fake_filesystem_unittest.TestCase):
# directory.
self.assertNotIn(self._working_dir, call_args[i + 1])
+ @mock.patch.object(omsgs.OutputManagerlessSkiaGoldSession, '_StoreDiffLinks')
+ @mock.patch.object(omsgs.OutputManagerlessSkiaGoldSession,
+ '_RunCmdForRcAndOutput')
+ def test_explicitLocalPngDirectory(self, cmd_mock, _):
+ cmd_mock.return_value = (0, '')
+ if sys.platform == 'win32':
+ local_png_dir = 'c:\\tmp\\foo'
+ else:
+ local_png_dir = '/tmp/foo'
+ args = createSkiaGoldArgs(git_revision='a',
+ skia_gold_local_png_write_directory=local_png_dir)
+ sgp = skia_gold_properties.SkiaGoldProperties(args)
+ session = omsgs.OutputManagerlessSkiaGoldSession(self._working_dir, sgp,
+ self._json_keys, None,
+ None)
+ _, _ = session.Diff('name', None, None)
+ self.assertEqual(cmd_mock.call_count, 1)
+ if six.PY3:
+ call_args = cmd_mock.call_args.args[0]
+ else:
+ call_args = cmd_mock.call_args[0][0]
+ self.assertIn('--out-dir', call_args)
+ output_dir = call_args[call_args.index('--out-dir') + 1]
+ # Directory should be a subdirectory of the directory we gave and be made
+ # up of the image name and a timestamp.
+ parent_dir, sub_dir = output_dir.rsplit(os.sep, 1)
+ self.assertEqual(parent_dir, local_png_dir)
+ sub_dir = os.path.normpath(sub_dir)
+ self.assertIsNotNone(re.match(r'^name_\d+$', sub_dir))
+
class OutputManagerlessSkiaGoldSessionStoreDiffLinksTest(
fake_filesystem_unittest.TestCase):
diff --git a/chromium/build/skia_gold_common/skia_gold_properties.py b/chromium/build/skia_gold_common/skia_gold_properties.py
index 6c1d22b6be9..c34146c699c 100644
--- a/chromium/build/skia_gold_common/skia_gold_properties.py
+++ b/chromium/build/skia_gold_common/skia_gold_properties.py
@@ -11,8 +11,6 @@ Examples:
import logging
import os
-import subprocess
-import sys
class SkiaGoldProperties(object):
@@ -31,6 +29,7 @@ class SkiaGoldProperties(object):
self._bypass_skia_gold_functionality = None
self._code_review_system = None
self._continuous_integration_system = None
+ self._local_png_directory = None
self._InitializeProperties(args)
@@ -62,6 +61,10 @@ class SkiaGoldProperties(object):
return self._IsLocalRun()
@property
+ def local_png_directory(self):
+ return self._local_png_directory
+
+ @property
def no_luci_auth(self):
return self._no_luci_auth
@@ -111,6 +114,9 @@ class SkiaGoldProperties(object):
# If not set, will be automatically determined later if needed.
self._local_pixel_tests = args.local_pixel_tests
+ if hasattr(args, 'skia_gold_local_png_write_directory'):
+ self._local_png_directory = args.skia_gold_local_png_write_directory
+
if hasattr(args, 'no_luci_auth'):
self._no_luci_auth = args.no_luci_auth
diff --git a/chromium/build/skia_gold_common/skia_gold_session.py b/chromium/build/skia_gold_common/skia_gold_session.py
index eb196ab3ebc..7e69a238cdf 100644
--- a/chromium/build/skia_gold_common/skia_gold_session.py
+++ b/chromium/build/skia_gold_common/skia_gold_session.py
@@ -6,9 +6,9 @@
import logging
import os
import shutil
-import subprocess
import sys
import tempfile
+import time
CHROMIUM_SRC = os.path.realpath(
os.path.join(os.path.dirname(__file__), '..', '..'))
@@ -73,6 +73,8 @@ class SkiaGoldSession(object):
self._corpus = corpus
self._instance = instance
self._bucket = bucket
+ self._local_png_directory = (self._gold_properties.local_png_directory
+ or tempfile.mkdtemp())
self._triage_link_file = tempfile.NamedTemporaryFile(suffix='.txt',
dir=working_dir,
delete=False).name
@@ -368,7 +370,7 @@ class SkiaGoldSession(object):
'--bypass-skia-gold-functionality is not supported when running '
'tests locally.')
- output_dir = self._CreateDiffOutputDir()
+ output_dir = self._CreateDiffOutputDir(name)
# TODO(skbug.com/10611): Remove this temporary work dir and instead just use
# self._working_dir once `goldctl diff` stops clobbering the auth files in
# the provided work directory.
@@ -503,7 +505,10 @@ class SkiaGoldSession(object):
"""
open(self._triage_link_file, 'w').close()
- def _CreateDiffOutputDir(self):
+ def _CreateDiffOutputDir(self, _):
+ # We don't use self._local_png_directory here since we want it to be
+ # automatically cleaned up with the working directory. Any subclasses that
+ # want to keep it around can override this method.
return tempfile.mkdtemp(dir=self._working_dir)
def _GetDiffGoldInstance(self):
diff --git a/chromium/build/skia_gold_common/skia_gold_session_unittest.py b/chromium/build/skia_gold_common/skia_gold_session_unittest.py
index b27438dcaf7..15b8a9924c6 100755
--- a/chromium/build/skia_gold_common/skia_gold_session_unittest.py
+++ b/chromium/build/skia_gold_common/skia_gold_session_unittest.py
@@ -49,7 +49,8 @@ class SkiaGoldSessionRunComparisonTest(fake_filesystem_unittest.TestCase):
auth_mock.return_value = (0, None)
init_mock.return_value = (0, None)
compare_mock.return_value = (0, None)
- session = skia_gold_session.SkiaGoldSession(self._working_dir, None,
+ sgp = skia_gold_properties.SkiaGoldProperties(createSkiaGoldArgs())
+ session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp,
self._json_keys, None, None)
status, _ = session.RunComparison(None, None, None)
self.assertEqual(status,
@@ -65,7 +66,8 @@ class SkiaGoldSessionRunComparisonTest(fake_filesystem_unittest.TestCase):
@mock.patch.object(skia_gold_session.SkiaGoldSession, 'Authenticate')
def test_authFailure(self, auth_mock, init_mock, compare_mock, diff_mock):
auth_mock.return_value = (1, 'Auth failed')
- session = skia_gold_session.SkiaGoldSession(self._working_dir, None,
+ sgp = skia_gold_properties.SkiaGoldProperties(createSkiaGoldArgs())
+ session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp,
self._json_keys, None, None)
status, error = session.RunComparison(None, None, None)
self.assertEqual(status,
@@ -83,7 +85,8 @@ class SkiaGoldSessionRunComparisonTest(fake_filesystem_unittest.TestCase):
def test_initFailure(self, auth_mock, init_mock, compare_mock, diff_mock):
auth_mock.return_value = (0, None)
init_mock.return_value = (1, 'Init failed')
- session = skia_gold_session.SkiaGoldSession(self._working_dir, None,
+ sgp = skia_gold_properties.SkiaGoldProperties(createSkiaGoldArgs())
+ session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp,
self._json_keys, None, None)
status, error = session.RunComparison(None, None, None)
self.assertEqual(status,
@@ -783,8 +786,9 @@ class SkiaGoldSessionTriageLinkOmissionTest(fake_filesystem_unittest.TestCase):
self._working_dir = tempfile.mkdtemp()
def _CreateSession(self):
+ sgp = skia_gold_properties.SkiaGoldProperties(createSkiaGoldArgs())
json_keys = tempfile.NamedTemporaryFile(delete=False).name
- session = skia_gold_session.SkiaGoldSession(self._working_dir, None,
+ session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp,
json_keys, None, None)
session._comparison_results = {
'foo': skia_gold_session.SkiaGoldSession.ComparisonResults(),
diff --git a/chromium/build/skia_gold_common/unittest_utils.py b/chromium/build/skia_gold_common/unittest_utils.py
index cd46ce8b43c..c4f77a14df1 100644
--- a/chromium/build/skia_gold_common/unittest_utils.py
+++ b/chromium/build/skia_gold_common/unittest_utils.py
@@ -15,6 +15,7 @@ _SkiaGoldArgs = collections.namedtuple('_SkiaGoldArgs', [
'gerrit_patchset',
'buildbucket_id',
'bypass_skia_gold_functionality',
+ 'skia_gold_local_png_write_directory',
])
@@ -26,8 +27,10 @@ def createSkiaGoldArgs(local_pixel_tests=None,
gerrit_issue=None,
gerrit_patchset=None,
buildbucket_id=None,
- bypass_skia_gold_functionality=None):
+ bypass_skia_gold_functionality=None,
+ skia_gold_local_png_write_directory=None):
return _SkiaGoldArgs(local_pixel_tests, no_luci_auth, code_review_system,
continuous_integration_system, git_revision,
gerrit_issue, gerrit_patchset, buildbucket_id,
- bypass_skia_gold_functionality)
+ bypass_skia_gold_functionality,
+ skia_gold_local_png_write_directory)
diff --git a/chromium/build/toolchain/BUILD.gn b/chromium/build/toolchain/BUILD.gn
index 6cf8f1b4039..ab51780af27 100644
--- a/chromium/build/toolchain/BUILD.gn
+++ b/chromium/build/toolchain/BUILD.gn
@@ -12,7 +12,7 @@ declare_args() {
}
if (current_toolchain == default_toolchain) {
- if (action_pool_depth == -1 || (use_goma || use_rbe)) {
+ if (action_pool_depth == -1 || (use_goma || use_remoteexec)) {
action_pool_depth = exec_script("get_cpu_count.py", [], "value")
}
diff --git a/chromium/build/toolchain/aix/BUILD.gn b/chromium/build/toolchain/aix/BUILD.gn
index 523b54ef61a..cdcfbdb56d1 100644
--- a/chromium/build/toolchain/aix/BUILD.gn
+++ b/chromium/build/toolchain/aix/BUILD.gn
@@ -18,7 +18,7 @@ gcc_toolchain("ppc64") {
current_os = "aix"
# reclient does not support gcc.
- use_rbe = false
+ use_remoteexec = false
is_clang = false
}
}
diff --git a/chromium/build/toolchain/android/BUILD.gn b/chromium/build/toolchain/android/BUILD.gn
index 728885e594d..8988f2dfad4 100644
--- a/chromium/build/toolchain/android/BUILD.gn
+++ b/chromium/build/toolchain/android/BUILD.gn
@@ -43,23 +43,25 @@ template("android_clang_toolchain") {
# Output linker map files for binary size analysis.
enable_linker_map = true
- _android_tool_prefix =
- "$android_toolchain_root/bin/${invoker.binary_prefix}-"
+ _android_tool_prefix = "$android_toolchain_root/bin/"
# The tools should be run relative to the build dir.
_tool_prefix = rebase_path("$_android_tool_prefix", root_build_dir)
-
_prefix = rebase_path("$clang_base_path/bin", root_build_dir)
cc = "$_prefix/clang"
cxx = "$_prefix/clang++"
ar = "$_prefix/llvm-ar"
ld = cxx
- readelf = _tool_prefix + "readelf"
+ readelf = _tool_prefix + "llvm-readobj"
nm = "$_prefix/llvm-nm"
strip = "$_prefix/llvm-strip"
if (_use_debug_fission) {
- dwp = _tool_prefix + "dwp"
+ _dwp_tool_prefix =
+ rebase_path("$android_toolchain_root/bin/${invoker.binary_prefix}-",
+ root_build_dir)
+ dwp = _dwp_tool_prefix + "dwp"
}
+
use_unstripped_as_runtime_outputs = android_unstripped_runtime_outputs
# Don't use .cr.so for loadable_modules since they are always loaded via
@@ -69,7 +71,9 @@ template("android_clang_toolchain") {
}
android_clang_toolchain("android_clang_x86") {
- binary_prefix = "i686-linux-android"
+ if (use_debug_fission && target_os == "android") {
+ binary_prefix = "i686-linux-android"
+ }
toolchain_args = {
current_cpu = "x86"
@@ -94,21 +98,27 @@ android_clang_toolchain("android_clang_x86") {
}
android_clang_toolchain("android_clang_arm") {
- binary_prefix = "arm-linux-androideabi"
+ if (use_debug_fission && target_os == "android") {
+ binary_prefix = "arm-linux-androideabi"
+ }
toolchain_args = {
current_cpu = "arm"
}
}
android_clang_toolchain("android_clang_mipsel") {
- binary_prefix = "mipsel-linux-android"
+ if (use_debug_fission && target_os == "android") {
+ binary_prefix = "mipsel-linux-android"
+ }
toolchain_args = {
current_cpu = "mipsel"
}
}
android_clang_toolchain("android_clang_x64") {
- binary_prefix = "x86_64-linux-android"
+ if (use_debug_fission && target_os == "android") {
+ binary_prefix = "x86_64-linux-android"
+ }
toolchain_args = {
current_cpu = "x64"
@@ -133,14 +143,18 @@ android_clang_toolchain("android_clang_x64") {
}
android_clang_toolchain("android_clang_arm64") {
- binary_prefix = "aarch64-linux-android"
+ if (use_debug_fission && target_os == "android") {
+ binary_prefix = "aarch64-linux-android"
+ }
toolchain_args = {
current_cpu = "arm64"
}
}
android_clang_toolchain("android_clang_arm64_hwasan") {
- binary_prefix = "aarch64-linux-android"
+ if (use_debug_fission && target_os == "android") {
+ binary_prefix = "aarch64-linux-android"
+ }
toolchain_args = {
current_cpu = "arm64"
is_hwasan = true
@@ -149,7 +163,9 @@ android_clang_toolchain("android_clang_arm64_hwasan") {
}
android_clang_toolchain("android_clang_mips64el") {
- binary_prefix = "mips64el-linux-android"
+ if (use_debug_fission && target_os == "android") {
+ binary_prefix = "mips64el-linux-android"
+ }
toolchain_args = {
current_cpu = "mips64el"
}
diff --git a/chromium/build/toolchain/android/DIR_METADATA b/chromium/build/toolchain/android/DIR_METADATA
new file mode 100644
index 00000000000..cdc2d6fb6eb
--- /dev/null
+++ b/chromium/build/toolchain/android/DIR_METADATA
@@ -0,0 +1 @@
+mixins: "//build/android/COMMON_METADATA"
diff --git a/chromium/build/toolchain/apple/.style.yapf b/chromium/build/toolchain/apple/.style.yapf
new file mode 100644
index 00000000000..557fa7bf84c
--- /dev/null
+++ b/chromium/build/toolchain/apple/.style.yapf
@@ -0,0 +1,2 @@
+[style]
+based_on_style = pep8
diff --git a/chromium/build/toolchain/apple/linker_driver.py b/chromium/build/toolchain/apple/linker_driver.py
index c21e18a0fb0..6c60f42fc96 100755
--- a/chromium/build/toolchain/apple/linker_driver.py
+++ b/chromium/build/toolchain/apple/linker_driver.py
@@ -10,14 +10,8 @@ import shutil
import subprocess
import sys
-# On mac, the values of these globals are modified when parsing -Wcrl, flags. On
-# ios, the script uses the defaults.
-DSYMUTIL_INVOKE = ['xcrun', 'dsymutil']
-STRIP_INVOKE = ['xcrun', 'strip']
-
-# Setting this flag will emit a deterministic binary by stripping dates from the
-# N_OSO field.
-DETERMINISTIC_FLAG = '--deterministic'
+# Prefix for all custom linker driver arguments.
+LINKER_DRIVER_ARG_PREFIX = '-Wcrl,'
# The linker_driver.py is responsible for forwarding a linker invocation to
# the compiler driver, while processing special arguments itself.
@@ -33,274 +27,266 @@ DETERMINISTIC_FLAG = '--deterministic'
# removal of the special driver arguments, described below). Then the driver
# performs additional actions, based on these arguments:
#
-# -Wcrl,dsym,<dsym_path_prefix>
-# After invoking the linker, this will run `dsymutil` on the linker's
-# output, producing a dSYM bundle, stored at dsym_path_prefix. As an
-# example, if the linker driver were invoked with:
-# "... -o out/gn/obj/foo/libbar.dylib ... -Wcrl,dsym,out/gn ..."
-# The resulting dSYM would be out/gn/libbar.dylib.dSYM/.
+# -Wcrl,dsym,<dsym_path_prefix>
+# After invoking the linker, this will run `dsymutil` on the linker's
+# output, producing a dSYM bundle, stored at dsym_path_prefix. As an
+# example, if the linker driver were invoked with:
+# "... -o out/gn/obj/foo/libbar.dylib ... -Wcrl,dsym,out/gn ..."
+# The resulting dSYM would be out/gn/libbar.dylib.dSYM/.
#
-# -Wcrl,dsymutilpath,<dsymutil_path>
-# Sets the path to the dsymutil to run with -Wcrl,dsym, in which case
-# `xcrun` is not used to invoke it.
+# -Wcrl,dsymutilpath,<dsymutil_path>
+# Sets the path to the dsymutil to run with -Wcrl,dsym, in which case
+# `xcrun` is not used to invoke it.
#
-# -Wcrl,unstripped,<unstripped_path_prefix>
-# After invoking the linker, and before strip, this will save a copy of
-# the unstripped linker output in the directory unstripped_path_prefix.
+# -Wcrl,unstripped,<unstripped_path_prefix>
+# After invoking the linker, and before strip, this will save a copy of
+# the unstripped linker output in the directory unstripped_path_prefix.
#
-# -Wcrl,strip,<strip_arguments>
-# After invoking the linker, and optionally dsymutil, this will run
-# the strip command on the linker's output. strip_arguments are
-# comma-separated arguments to be passed to the strip command.
+# -Wcrl,strip,<strip_arguments>
+# After invoking the linker, and optionally dsymutil, this will run
+# the strip command on the linker's output. strip_arguments are
+# comma-separated arguments to be passed to the strip command.
#
-# -Wcrl,strippath,<strip_path>
-# Sets the path to the strip to run with -Wcrl,strip, in which case
-# `xcrun` is not used to invoke it.
-
-
-def Main(args):
- """Main function for the linker driver. Separates out the arguments for
- the main compiler driver and the linker driver, then invokes all the
- required tools.
-
- Args:
- args: list of string, Arguments to the script.
- """
-
- if len(args) < 2:
- raise RuntimeError("Usage: linker_driver.py [linker-invocation]")
-
- # Collect arguments to the linker driver (this script) and remove them from
- # the arguments being passed to the compiler driver.
- linker_driver_actions = {}
- compiler_driver_args = []
- deterministic = False
- for arg in args[1:]:
- if arg.startswith(_LINKER_DRIVER_ARG_PREFIX):
- # Convert driver actions into a map of name => lambda to invoke.
- driver_action = ProcessLinkerDriverArg(arg)
- assert driver_action[0] not in linker_driver_actions
- linker_driver_actions[driver_action[0]] = driver_action[1]
- elif arg == DETERMINISTIC_FLAG:
- deterministic = True
- else:
- compiler_driver_args.append(arg)
-
- linker_driver_outputs = [_FindLinkerOutput(compiler_driver_args)]
-
- try:
- # Zero the mtime in OSO fields for deterministic builds.
- # https://crbug.com/330262.
- env = os.environ.copy()
- if deterministic:
- env['ZERO_AR_DATE'] = '1'
- # Run the linker by invoking the compiler driver.
- subprocess.check_call(compiler_driver_args, env=env)
-
- # Run the linker driver actions, in the order specified by the actions list.
- for action in _LINKER_DRIVER_ACTIONS:
- name = action[0]
- if name in linker_driver_actions:
- linker_driver_outputs += linker_driver_actions[name](args)
- except:
- # If a linker driver action failed, remove all the outputs to make the
- # build step atomic.
- map(_RemovePath, linker_driver_outputs)
-
- # Re-report the original failure.
- raise
-
-
-def ProcessLinkerDriverArg(arg):
- """Processes a linker driver argument and returns a tuple containing the
- name and unary lambda to invoke for that linker driver action.
-
- Args:
- arg: string, The linker driver argument.
-
- Returns:
- A 2-tuple:
- 0: The driver action name, as in _LINKER_DRIVER_ACTIONS.
- 1: An 1-ary lambda that takes the full list of arguments passed to
- Main(). The lambda should call the linker driver action that
- corresponds to the argument and return a list of outputs from the
- action.
- """
- if not arg.startswith(_LINKER_DRIVER_ARG_PREFIX):
- raise ValueError('%s is not a linker driver argument' % (arg, ))
-
- sub_arg = arg[len(_LINKER_DRIVER_ARG_PREFIX):]
-
- for driver_action in _LINKER_DRIVER_ACTIONS:
- (name, action) = driver_action
- if sub_arg.startswith(name):
- return (name, lambda full_args: action(sub_arg[len(name):], full_args))
-
- raise ValueError('Unknown linker driver argument: %s' % (arg, ))
-
-
-def RunDsymUtil(dsym_path_prefix, full_args):
- """Linker driver action for -Wcrl,dsym,<dsym-path-prefix>. Invokes dsymutil
- on the linker's output and produces a dsym file at |dsym_file| path.
-
- Args:
- dsym_path_prefix: string, The path at which the dsymutil output should be
- located.
- full_args: list of string, Full argument list for the linker driver.
-
- Returns:
- list of string, Build step outputs.
- """
- if not len(dsym_path_prefix):
- raise ValueError('Unspecified dSYM output file')
-
- linker_out = _FindLinkerOutput(full_args)
- base = os.path.basename(linker_out)
- dsym_out = os.path.join(dsym_path_prefix, base + '.dSYM')
-
- # Remove old dSYMs before invoking dsymutil.
- _RemovePath(dsym_out)
-
- tools_paths = _FindToolsPaths(full_args)
- if os.environ.get('PATH'):
- tools_paths.append(os.environ['PATH'])
- dsymutil_env = os.environ.copy()
- dsymutil_env['PATH'] = ':'.join(tools_paths)
- subprocess.check_call(DSYMUTIL_INVOKE + ['-o', dsym_out, linker_out],
- env=dsymutil_env)
- return [dsym_out]
-
-
-def SetDsymutilPath(dsymutil_path, full_args):
- """Linker driver action for -Wcrl,dsymutilpath,<dsymutil_path>.
-
- Sets the invocation command for dsymutil, which allows the caller to specify
- an alternate dsymutil. This action is always processed before the RunDsymUtil
- action.
-
- Args:
- dsymutil_path: string, The path to the dsymutil binary to run
- full_args: list of string, Full argument list for the linker driver.
-
- Returns:
- No output - this step is run purely for its side-effect.
- """
- global DSYMUTIL_INVOKE
- DSYMUTIL_INVOKE = [dsymutil_path]
- return []
-
-
-def RunSaveUnstripped(unstripped_path_prefix, full_args):
- """Linker driver action for -Wcrl,unstripped,<unstripped_path_prefix>. Copies
- the linker output to |unstripped_path_prefix| before stripping.
-
- Args:
- unstripped_path_prefix: string, The path at which the unstripped output
- should be located.
- full_args: list of string, Full argument list for the linker driver.
-
- Returns:
- list of string, Build step outputs.
- """
- if not len(unstripped_path_prefix):
- raise ValueError('Unspecified unstripped output file')
-
- linker_out = _FindLinkerOutput(full_args)
- base = os.path.basename(linker_out)
- unstripped_out = os.path.join(unstripped_path_prefix, base + '.unstripped')
-
- shutil.copyfile(linker_out, unstripped_out)
- return [unstripped_out]
-
-
-def RunStrip(strip_args_string, full_args):
- """Linker driver action for -Wcrl,strip,<strip_arguments>.
-
- Args:
- strip_args_string: string, Comma-separated arguments for `strip`.
- full_args: list of string, Full arguments for the linker driver.
-
- Returns:
- list of string, Build step outputs.
- """
- strip_command = list(STRIP_INVOKE)
- if len(strip_args_string) > 0:
- strip_command += strip_args_string.split(',')
- strip_command.append(_FindLinkerOutput(full_args))
- subprocess.check_call(strip_command)
- return []
-
-
-def SetStripPath(strip_path, full_args):
- """Linker driver action for -Wcrl,strippath,<strip_path>.
-
- Sets the invocation command for strip, which allows the caller to specify
- an alternate strip. This action is always processed before the RunStrip
- action.
-
- Args:
- strip_path: string, The path to the strip binary to run
- full_args: list of string, Full argument list for the linker driver.
-
- Returns:
- No output - this step is run purely for its side-effect.
- """
- global STRIP_INVOKE
- STRIP_INVOKE = [strip_path]
- return []
-
-
-def _FindLinkerOutput(full_args):
- """Finds the output of the linker by looking for the output flag in its
- argument list. As this is a required linker argument, raises an error if it
- cannot be found.
- """
- # The linker_driver.py script may be used to wrap either the compiler linker
- # (uses -o to configure the output) or lipo (uses -output to configure the
- # output). Since wrapping the compiler linker is the most likely possibility
- # use try/except and fallback to checking for -output if -o is not found.
- try:
- output_flag_index = full_args.index('-o')
- except ValueError:
- output_flag_index = full_args.index('-output')
- return full_args[output_flag_index + 1]
-
-
-def _FindToolsPaths(full_args):
- """Finds all paths where the script should look for additional tools."""
- paths = []
- for idx, arg in enumerate(full_args):
- if arg in ['-B', '--prefix']:
- paths.append(full_args[idx + 1])
- elif arg.startswith('-B'):
- paths.append(arg[2:])
- elif arg.startswith('--prefix='):
- paths.append(arg[9:])
- return paths
-
-
-def _RemovePath(path):
- """Removes the file or directory at |path| if it exists."""
- if os.path.exists(path):
- if os.path.isdir(path):
- shutil.rmtree(path)
- else:
- os.unlink(path)
-
-
-_LINKER_DRIVER_ARG_PREFIX = '-Wcrl,'
-"""List of linker driver actions. The sort order of this list affects the
-order in which the actions are invoked. The first item in the tuple is the
-argument's -Wcrl,<sub_argument> and the second is the function to invoke.
-"""
-_LINKER_DRIVER_ACTIONS = [
- ('dsymutilpath,', SetDsymutilPath),
- ('dsym,', RunDsymUtil),
- ('unstripped,', RunSaveUnstripped),
- ('strippath,', SetStripPath),
- ('strip,', RunStrip),
-]
+# -Wcrl,strippath,<strip_path>
+# Sets the path to the strip to run with -Wcrl,strip, in which case
+# `xcrun` is not used to invoke it.
+
+
+class LinkerDriver(object):
+ def __init__(self, args):
+ """Creates a new linker driver.
+
+ Args:
+ args: list of string, Arguments to the script.
+ """
+ if len(args) < 2:
+ raise RuntimeError("Usage: linker_driver.py [linker-invocation]")
+ self._args = args
+
+ # List of linker driver actions. **The sort order of this list affects
+ # the order in which the actions are invoked.**
+ # The first item in the tuple is the argument's -Wcrl,<sub_argument>
+ # and the second is the function to invoke.
+ self._actions = [
+ ('dsymutilpath,', self.set_dsymutil_path),
+ ('dsym,', self.run_dsymutil),
+ ('unstripped,', self.run_save_unstripped),
+ ('strippath,', self.set_strip_path),
+ ('strip,', self.run_strip),
+ ]
+
+ # Linker driver actions can modify the these values.
+ self._dsymutil_cmd = ['xcrun', 'dsymutil']
+ self._strip_cmd = ['xcrun', 'strip']
+
+ # The linker output file, lazily computed in self._get_linker_output().
+ self._linker_output = None
+
+ def run(self):
+ """Runs the linker driver, separating out the main compiler driver's
+ arguments from the ones handled by this class. It then invokes the
+ required tools, starting with the compiler driver to produce the linker
+ output.
+ """
+ # Collect arguments to the linker driver (this script) and remove them
+ # from the arguments being passed to the compiler driver.
+ linker_driver_actions = {}
+ compiler_driver_args = []
+ for index, arg in enumerate(self._args[1:]):
+ if arg.startswith(LINKER_DRIVER_ARG_PREFIX):
+ # Convert driver actions into a map of name => lambda to invoke.
+ driver_action = self._process_driver_arg(arg)
+ assert driver_action[0] not in linker_driver_actions
+ linker_driver_actions[driver_action[0]] = driver_action[1]
+ else:
+ compiler_driver_args.append(arg)
+
+ if self._get_linker_output() is None:
+ raise ValueError(
+ 'Could not find path to linker output (-o or --output)')
+
+ linker_driver_outputs = [self._get_linker_output()]
+
+ try:
+ # Zero the mtime in OSO fields for deterministic builds.
+ # https://crbug.com/330262.
+ env = os.environ.copy()
+ env['ZERO_AR_DATE'] = '1'
+ # Run the linker by invoking the compiler driver.
+ subprocess.check_call(compiler_driver_args, env=env)
+
+ # Run the linker driver actions, in the order specified by the
+ # actions list.
+ for action in self._actions:
+ name = action[0]
+ if name in linker_driver_actions:
+ linker_driver_outputs += linker_driver_actions[name]()
+ except:
+ # If a linker driver action failed, remove all the outputs to make
+ # the build step atomic.
+ map(_remove_path, linker_driver_outputs)
+
+ # Re-report the original failure.
+ raise
+
+ def _get_linker_output(self):
+ """Returns the value of the output argument to the linker."""
+ if not self._linker_output:
+ for index, arg in enumerate(self._args):
+ if arg in ('-o', '-output', '--output'):
+ self._linker_output = self._args[index + 1]
+ break
+ return self._linker_output
+
+ def _process_driver_arg(self, arg):
+ """Processes a linker driver argument and returns a tuple containing the
+ name and unary lambda to invoke for that linker driver action.
+
+ Args:
+ arg: string, The linker driver argument.
+
+ Returns:
+ A 2-tuple:
+ 0: The driver action name, as in |self._actions|.
+ 1: A lambda that calls the linker driver action with its direct
+ argument and returns a list of outputs from the action.
+ """
+ if not arg.startswith(LINKER_DRIVER_ARG_PREFIX):
+ raise ValueError('%s is not a linker driver argument' % (arg, ))
+
+ sub_arg = arg[len(LINKER_DRIVER_ARG_PREFIX):]
+
+ for driver_action in self._actions:
+ (name, action) = driver_action
+ if sub_arg.startswith(name):
+ return (name, lambda: action(sub_arg[len(name):]))
+
+ raise ValueError('Unknown linker driver argument: %s' % (arg, ))
+
+ def run_dsymutil(self, dsym_path_prefix):
+ """Linker driver action for -Wcrl,dsym,<dsym-path-prefix>. Invokes
+ dsymutil on the linker's output and produces a dsym file at |dsym_file|
+ path.
+
+ Args:
+ dsym_path_prefix: string, The path at which the dsymutil output
+ should be located.
+
+ Returns:
+ list of string, Build step outputs.
+ """
+ if not len(dsym_path_prefix):
+ raise ValueError('Unspecified dSYM output file')
+
+ linker_output = self._get_linker_output()
+ base = os.path.basename(linker_output)
+ dsym_out = os.path.join(dsym_path_prefix, base + '.dSYM')
+
+ # Remove old dSYMs before invoking dsymutil.
+ _remove_path(dsym_out)
+
+ tools_paths = _find_tools_paths(self._args)
+ if os.environ.get('PATH'):
+ tools_paths.append(os.environ['PATH'])
+ dsymutil_env = os.environ.copy()
+ dsymutil_env['PATH'] = ':'.join(tools_paths)
+ subprocess.check_call(self._dsymutil_cmd +
+ ['-o', dsym_out, linker_output],
+ env=dsymutil_env)
+ return [dsym_out]
+
+ def set_dsymutil_path(self, dsymutil_path):
+ """Linker driver action for -Wcrl,dsymutilpath,<dsymutil_path>.
+
+ Sets the invocation command for dsymutil, which allows the caller to
+ specify an alternate dsymutil. This action is always processed before
+ the RunDsymUtil action.
+
+ Args:
+ dsymutil_path: string, The path to the dsymutil binary to run
+
+ Returns:
+ No output - this step is run purely for its side-effect.
+ """
+ self._dsymutil_cmd = [dsymutil_path]
+ return []
+
+ def run_save_unstripped(self, unstripped_path_prefix):
+ """Linker driver action for -Wcrl,unstripped,<unstripped_path_prefix>.
+ Copies the linker output to |unstripped_path_prefix| before stripping.
+
+ Args:
+ unstripped_path_prefix: string, The path at which the unstripped
+ output should be located.
+
+ Returns:
+ list of string, Build step outputs.
+ """
+ if not len(unstripped_path_prefix):
+ raise ValueError('Unspecified unstripped output file')
+
+ base = os.path.basename(self._get_linker_output())
+ unstripped_out = os.path.join(unstripped_path_prefix,
+ base + '.unstripped')
+
+ shutil.copyfile(self._get_linker_output(), unstripped_out)
+ return [unstripped_out]
+
+ def run_strip(self, strip_args_string):
+ """Linker driver action for -Wcrl,strip,<strip_arguments>.
+
+ Args:
+ strip_args_string: string, Comma-separated arguments for `strip`.
+
+ Returns:
+ list of string, Build step outputs.
+ """
+ strip_command = list(self._strip_cmd)
+ if len(strip_args_string) > 0:
+ strip_command += strip_args_string.split(',')
+ strip_command.append(self._get_linker_output())
+ subprocess.check_call(strip_command)
+ return []
+
+ def set_strip_path(self, strip_path):
+ """Linker driver action for -Wcrl,strippath,<strip_path>.
+
+ Sets the invocation command for strip, which allows the caller to
+ specify an alternate strip. This action is always processed before the
+ RunStrip action.
+
+ Args:
+ strip_path: string, The path to the strip binary to run
+
+ Returns:
+ No output - this step is run purely for its side-effect.
+ """
+ self._strip_cmd = [strip_path]
+ return []
+
+
+def _find_tools_paths(full_args):
+ """Finds all paths where the script should look for additional tools."""
+ paths = []
+ for idx, arg in enumerate(full_args):
+ if arg in ['-B', '--prefix']:
+ paths.append(full_args[idx + 1])
+ elif arg.startswith('-B'):
+ paths.append(arg[2:])
+ elif arg.startswith('--prefix='):
+ paths.append(arg[9:])
+ return paths
+
+
+def _remove_path(path):
+ """Removes the file or directory at |path| if it exists."""
+ if os.path.exists(path):
+ if os.path.isdir(path):
+ shutil.rmtree(path)
+ else:
+ os.unlink(path)
+
if __name__ == '__main__':
- Main(sys.argv)
- sys.exit(0)
+ LinkerDriver(sys.argv).run()
+ sys.exit(0)
diff --git a/chromium/build/toolchain/apple/toolchain.gni b/chromium/build/toolchain/apple/toolchain.gni
index 50a968cf475..0d23e9ee55e 100644
--- a/chromium/build/toolchain/apple/toolchain.gni
+++ b/chromium/build/toolchain/apple/toolchain.gni
@@ -10,8 +10,10 @@ import("//build/config/apple/symbols.gni")
import("//build/config/clang/clang.gni")
import("//build/config/compiler/compiler.gni")
import("//build/config/coverage/coverage.gni")
+import("//build/config/rust.gni")
import("//build/toolchain/cc_wrapper.gni")
import("//build/toolchain/goma.gni")
+import("//build/toolchain/rbe.gni")
import("//build/toolchain/toolchain.gni")
assert((target_os == "ios" && host_os == "mac") || host_os != "win")
@@ -73,6 +75,11 @@ template("apple_toolchain") {
# toolchain args, use those values, otherwise default to the global one.
# This works because the only reasonable override that toolchains might
# supply for these values are to force-disable them.
+ if (defined(toolchain_args.use_remoteexec)) {
+ toolchain_uses_remoteexec = toolchain_args.use_remoteexec
+ } else {
+ toolchain_uses_remoteexec = use_remoteexec
+ }
if (defined(toolchain_args.use_goma)) {
toolchain_uses_goma = toolchain_args.use_goma
} else {
@@ -83,6 +90,18 @@ template("apple_toolchain") {
} else {
toolchain_cc_wrapper = cc_wrapper
}
+ assert(!(toolchain_uses_remoteexec && toolchain_uses_goma),
+ "Goma and re-client can't be used together.")
+ assert(!(toolchain_cc_wrapper != "" && toolchain_uses_remoteexec),
+ "re-client and cc_wrapper can't be used together.")
+ assert(!(toolchain_cc_wrapper != "" && toolchain_uses_goma),
+ "Goma and cc_wrapper can't be used together.")
+
+ if (defined(toolchain_args.use_lld)) {
+ toolchain_uses_lld = toolchain_args.use_lld
+ } else {
+ toolchain_uses_lld = use_lld
+ }
if (defined(toolchain_args.use_xcode_clang)) {
toolchain_uses_xcode_clang = toolchain_args.use_xcode_clang
} else {
@@ -103,10 +122,22 @@ template("apple_toolchain") {
swiftmodule_switch = "-Wl,-add_ast_path,"
# Compute the compiler prefix.
- if (toolchain_uses_goma) {
+ if (toolchain_uses_remoteexec) {
+ if (defined(toolchain_args.rbe_cc_cfg_file)) {
+ toolchain_rbe_cc_cfg_file = toolchain_args.rbe_cc_cfg_file
+ } else {
+ toolchain_rbe_cc_cfg_file = rbe_cc_cfg_file
+ }
+
+ # C/C++ (clang) rewrapper prefix to use when use_remoteexec is true.
+ compiler_prefix = "${rbe_bin_dir}/rewrapper -cfg=${toolchain_rbe_cc_cfg_file} -exec_root=${rbe_exec_root} "
+ } else if (toolchain_uses_goma) {
assert(toolchain_cc_wrapper == "",
"Goma and cc_wrapper can't be used together.")
compiler_prefix = "$goma_dir/gomacc "
+ if (use_goma_rust) {
+ rust_compiler_prefix = compiler_prefix
+ }
} else if (toolchain_cc_wrapper != "") {
compiler_prefix = toolchain_cc_wrapper + " "
} else {
@@ -119,7 +150,7 @@ template("apple_toolchain") {
# Set the explicit search path for clang++ so it uses the right linker
# binary.
- if (!use_lld) {
+ if (!toolchain_uses_lld) {
ld += " -B " + invoker.bin_path
}
@@ -150,9 +181,6 @@ template("apple_toolchain") {
_strippath = invoker.bin_path + "strip"
linker_driver += " -Wcrl,strippath," + _strippath
- # This makes the linker set timestamps in Mach-O files to 0.
- linker_driver += " --deterministic"
-
# On iOS, the final applications are assembled using lipo (to support fat
# builds). The correct flags are passed to the linker_driver.py script
# directly during the lipo call. The test is against the target_os because
@@ -196,6 +224,59 @@ template("apple_toolchain") {
_unstripped_output = "{{root_out_dir}}/{{target_output_name}}{{output_extension}}.unstripped"
}
+ if (toolchain_has_rust) {
+ if (!defined(rust_compiler_prefix)) {
+ rust_compiler_prefix = ""
+ }
+ rustc = "$rust_compiler_prefix${rust_prefix}rustc"
+
+ # Ideally, we'd add -Clink-args=\"{{ldflags}}\" to each of the Rust
+ # tools below which may link (i.e. rust_bin, rust_cdylib, rust_macro).
+ # However, it seems -fuse-ld=lld causes difficulties.
+
+ tool("rust_staticlib") {
+ rust_outfile = "{{target_out_dir}}/{{crate_name}}.a"
+ depfile = "{{output}}.d"
+ command = "$rustc $rustc_common_args --emit=dep-info=$depfile,link -o $rust_outfile"
+ description = "RUST $rust_outfile"
+ outputs = [ rust_outfile ]
+ }
+
+ tool("rust_rlib") {
+ rust_outfile = "{{target_out_dir}}/lib{{crate_name}}.rlib"
+ depfile = "{{output}}.d"
+ command = "$rustc $rustc_common_args --emit=dep-info=$depfile,link -o $rust_outfile"
+ description = "RUST $rust_outfile"
+ outputs = [ rust_outfile ]
+ }
+
+ if (rustc_can_link) {
+ tool("rust_bin") {
+ rust_outfile = "{{root_out_dir}}/{{crate_name}}"
+ depfile = "{{output}}.d"
+ command = "$rustc $rustc_common_args --emit=dep-info=$depfile,link -o $rust_outfile"
+ description = "RUST $rust_outfile"
+ outputs = [ rust_outfile ]
+ }
+
+ tool("rust_cdylib") {
+ rust_outfile = "{{target_out_dir}}/lib{{crate_name}}.dylib"
+ depfile = "{{output}}.d"
+ command = "$rustc $rustc_common_args --emit=dep-info=$depfile,link -o $rust_outfile"
+ description = "RUST $rust_outfile"
+ outputs = [ rust_outfile ]
+ }
+
+ tool("rust_macro") {
+ rust_outfile = "{{target_out_dir}}/lib{{crate_name}}.dylib"
+ depfile = "{{output}}.d"
+ command = "$rustc $rustc_common_args --emit=dep-info=$depfile,link -o $rust_outfile"
+ description = "RUST $rust_outfile"
+ outputs = [ rust_outfile ]
+ }
+ }
+ }
+
tool("cc") {
depfile = "{{output}}.d"
precompiled_header_type = "gcc"
@@ -244,7 +325,7 @@ template("apple_toolchain") {
tool("alink") {
rspfile = "{{output}}.rsp"
- if (!use_lld) {
+ if (!toolchain_uses_lld) {
# Note about -filelist: Apple's linker reads the file list file and
# interprets each newline-separated chunk of text as a file name. It
# doesn't do the things one would expect from the shell like unescaping
@@ -315,7 +396,7 @@ template("apple_toolchain") {
link_command += " -Wl,-install_name,@rpath/\"{{target_output_name}}{{output_extension}}\" "
}
link_command += dsym_switch
- link_command += "{{ldflags}} -o \"$dylib\" -Wl,-filelist,\"$rspfile\" {{frameworks}} {{swiftmodules}} {{solibs}} {{libs}}"
+ link_command += "{{ldflags}} -o \"$dylib\" -Wl,-filelist,\"$rspfile\" {{frameworks}} {{swiftmodules}} {{solibs}} {{libs}} {{rlibs}}"
replace_command = "if ! cmp -s \"$temporary_tocname\" \"$tocname\"; then mv \"$temporary_tocname\" \"$tocname\""
extract_toc_command = "{ $otool -l \"$dylib\" | grep LC_ID_DYLIB -A 5; $nm -gPp \"$dylib\" | cut -f1-2 -d' ' | grep -v U\$\$; true; }"
@@ -364,7 +445,8 @@ template("apple_toolchain") {
link_command = "$linker_driver $ld -bundle {{ldflags}} -o \"$sofile\" -Wl,-filelist,\"$rspfile\""
link_command += dsym_switch
- link_command += " {{frameworks}} {{swiftmodules}} {{solibs}} {{libs}}"
+ link_command +=
+ " {{frameworks}} {{swiftmodules}} {{solibs}} {{libs}} {{rlibs}}"
command = link_command
rspfile_content = "{{inputs_newline}}"
@@ -400,7 +482,7 @@ template("apple_toolchain") {
# do for command-line arguments. Thus any source names with spaces, or
# label names with spaces (which GN bases the output paths on) will be
# corrupted by this process. Don't use spaces for source files or labels.
- command = "$linker_driver $ld $dsym_switch {{ldflags}} -o \"$outfile\" -Wl,-filelist,\"$rspfile\" {{frameworks}} {{swiftmodules}} {{solibs}} {{libs}}"
+ command = "$linker_driver $ld $dsym_switch {{ldflags}} -o \"$outfile\" -Wl,-filelist,\"$rspfile\" {{frameworks}} {{swiftmodules}} {{solibs}} {{libs}} {{rlibs}}"
description = "LINK $outfile"
rspfile_content = "{{inputs_newline}}"
outputs = [ outfile ]
@@ -461,7 +543,7 @@ template("apple_toolchain") {
# order.
"{{target_gen_dir}}/{{module_name}}.swiftmodule",
- "{{target_gen_dir}}/{{module_name}}.h",
+ "{{target_gen_dir}}/{{target_output_name}}.h",
"{{target_gen_dir}}/{{module_name}}.swiftdoc",
"{{target_gen_dir}}/{{module_name}}.swiftsourceinfo",
]
@@ -487,7 +569,7 @@ template("apple_toolchain") {
"$_env_vars $python_path $_tool -module-name {{module_name}} " +
"-object-dir $_objects_dir " +
"-module-path {{target_gen_dir}}/{{module_name}}.swiftmodule " +
- "-header-path {{target_gen_dir}}/{{module_name}}.h " +
+ "-header-path {{target_gen_dir}}/{{target_output_name}}.h " +
"-depfile {{target_out_dir}}/{{module_name}}.d " +
"-depfile-filter {{target_gen_dir}}/{{module_name}}.swiftmodule " +
"-bridge-header {{bridge_header}} $_extra_flags " +
diff --git a/chromium/build/toolchain/cc_wrapper.gni b/chromium/build/toolchain/cc_wrapper.gni
index 4c6d7514f0a..6186070e3d0 100644
--- a/chromium/build/toolchain/cc_wrapper.gni
+++ b/chromium/build/toolchain/cc_wrapper.gni
@@ -39,5 +39,5 @@ declare_args() {
assert(!use_goma || cc_wrapper == "",
"use_goma and cc_wrapper can not be used together.")
-assert(!use_rbe || cc_wrapper == "",
- "use_rbe and cc_wrapper can not be used together.")
+assert(!use_remoteexec || cc_wrapper == "",
+ "use_remoteexec and cc_wrapper can not be used together.")
diff --git a/chromium/build/toolchain/concurrent_links.gni b/chromium/build/toolchain/concurrent_links.gni
index f1a42c9101a..c0342256a6a 100644
--- a/chromium/build/toolchain/concurrent_links.gni
+++ b/chromium/build/toolchain/concurrent_links.gni
@@ -36,7 +36,7 @@ if (concurrent_links == -1) {
# a little padding to account for future growth.
_args += [ "--mem_per_link_gb=45" ]
} else {
- _args += [ "--mem_per_link_gb=10" ]
+ _args += [ "--mem_per_link_gb=16" ]
}
} else if ((use_clang_coverage &&
# When coverage_instrumentation_input_file is not empty it means
diff --git a/chromium/build/toolchain/cros/BUILD.gn b/chromium/build/toolchain/cros/BUILD.gn
index c1e8c2109c2..cd1825cbebb 100644
--- a/chromium/build/toolchain/cros/BUILD.gn
+++ b/chromium/build/toolchain/cros/BUILD.gn
@@ -22,7 +22,7 @@ template("cros_toolchain") {
if (use_goma && toolchain_args.needs_gomacc_path_arg) {
extra_cppflags += " --gomacc-path $goma_dir/gomacc"
}
- if (use_rbe && toolchain_args.needs_gomacc_path_arg) {
+ if (use_remoteexec && toolchain_args.needs_gomacc_path_arg) {
extra_cppflags += " --gomacc-path $rbe_cros_cc_wrapper"
}
diff --git a/chromium/build/toolchain/gcc_toolchain.gni b/chromium/build/toolchain/gcc_toolchain.gni
index a303d472baf..8b2fe90d004 100644
--- a/chromium/build/toolchain/gcc_toolchain.gni
+++ b/chromium/build/toolchain/gcc_toolchain.gni
@@ -5,6 +5,7 @@
import("//build/config/clang/clang.gni")
import("//build/config/compiler/compiler.gni")
import("//build/config/coverage/coverage.gni")
+import("//build/config/rust.gni")
import("//build/config/sanitizers/sanitizers.gni")
import("//build/config/v8_target_cpu.gni")
import("//build/toolchain/cc_wrapper.gni")
@@ -145,14 +146,14 @@ template("gcc_toolchain") {
}
}
- # When the invoker has explicitly overridden user_rbe, use_goma or
+ # When the invoker has explicitly overridden use_remoteexec, use_goma or
# cc_wrapper in the toolchain args, use those values, otherwise default
# to the global one. This works because the only reasonable override
# that toolchains might supply for these values are to force-disable them.
- if (defined(toolchain_args.use_rbe)) {
- toolchain_uses_rbe = toolchain_args.use_rbe
+ if (defined(toolchain_args.use_remoteexec)) {
+ toolchain_uses_remoteexec = toolchain_args.use_remoteexec
} else {
- toolchain_uses_rbe = use_rbe
+ toolchain_uses_remoteexec = use_remoteexec
}
if (defined(toolchain_args.use_goma)) {
toolchain_uses_goma = toolchain_args.use_goma
@@ -172,10 +173,10 @@ template("gcc_toolchain") {
} else {
toolchain_cc_wrapper = cc_wrapper
}
- assert(!(toolchain_uses_rbe && toolchain_uses_goma),
- "Goma and RBE can't be used together.")
- assert(!(toolchain_cc_wrapper != "" && toolchain_uses_rbe),
- "RBE and cc_wrapper can't be used together.")
+ assert(!(toolchain_uses_remoteexec && toolchain_uses_goma),
+ "Goma and re-client can't be used together.")
+ assert(!(toolchain_cc_wrapper != "" && toolchain_uses_remoteexec),
+ "re-client and cc_wrapper can't be used together.")
assert(!(toolchain_cc_wrapper != "" && toolchain_uses_goma),
"Goma and cc_wrapper can't be used together.")
@@ -187,7 +188,7 @@ template("gcc_toolchain") {
# wrapper will have picked up gomacc via cmd-line arg. So need to prepend
# gomacc in that case.
goma_path = "$goma_dir/gomacc"
- if (toolchain_uses_rbe &&
+ if (toolchain_uses_remoteexec &&
(!defined(invoker_toolchain_args.needs_gomacc_path_arg) ||
!invoker_toolchain_args.needs_gomacc_path_arg)) {
if (defined(toolchain_args.rbe_cc_cfg_file)) {
@@ -196,26 +197,20 @@ template("gcc_toolchain") {
toolchain_rbe_cc_cfg_file = rbe_cc_cfg_file
}
- # C/C++ (clang) rewrapper prefix to use when use_rbe is true.
+ # C/C++ (clang) rewrapper prefix to use when use_remoteexec is true.
compiler_prefix = "${rbe_bin_dir}/rewrapper -cfg=${toolchain_rbe_cc_cfg_file} -exec_root=${rbe_exec_root} "
} else if (toolchain_uses_goma &&
(!defined(invoker_toolchain_args.needs_gomacc_path_arg) ||
!invoker_toolchain_args.needs_gomacc_path_arg)) {
compiler_prefix = "${goma_path} "
+ if (use_goma_rust) {
+ rust_compiler_prefix = compiler_prefix
+ }
} else {
compiler_prefix = "${toolchain_cc_wrapper} "
}
- if (defined(toolchain_args.use_rbe_links)) {
- toolchain_uses_rbe_links = toolchain_args.use_rbe_links
- } else {
- toolchain_uses_rbe_links = use_rbe_links
- }
-
- if (toolchain_uses_rbe_links) {
- link_prefix = "${rbe_bin_dir}/rewrapper -cfg=../../buildtools/reclient_cfgs/rewrapper_linux_link.cfg -exec_root=${rbe_exec_root} "
- not_needed([ "goma_path" ])
- } else if (use_goma_thin_lto && toolchain_uses_goma && use_thin_lto) {
+ if (use_goma_thin_lto && toolchain_uses_goma && use_thin_lto) {
# goma_ld.py uses autoninja in an attempt to set a reasonable
# number of jobs, but this results in too low a value on
# Chrome OS builders. So we pass in an explicit value.
@@ -419,9 +414,7 @@ template("gcc_toolchain") {
sofile = "{{output_dir}}/$soname" # Possibly including toolchain dir.
rspfile = sofile + ".rsp"
- if (!use_rbe_links) {
- pool = "//build/toolchain:link_pool($default_toolchain)"
- }
+ pool = "//build/toolchain:link_pool($default_toolchain)"
if (defined(invoker.strip)) {
unstripped_sofile = "{{root_out_dir}}/lib.unstripped/$soname"
@@ -435,7 +428,12 @@ template("gcc_toolchain") {
# .TOC file, overwrite it, otherwise, don't change it.
tocfile = sofile + ".TOC"
- link_command = "$ld -shared -Wl,-soname=\"$soname\" {{ldflags}}${extra_ldflags} -o \"$unstripped_sofile\" @\"$rspfile\""
+ soname_flag = ""
+ if (current_os != "aix") {
+ # -soname flag is not available on aix ld
+ soname_flag = "-Wl,-soname=\"$soname\""
+ }
+ link_command = "$ld -shared $soname_flag {{ldflags}}${extra_ldflags} -o \"$unstripped_sofile\" @\"$rspfile\""
# Generate a map file to be used for binary size analysis.
# Map file adds ~10% to the link time on a z620.
@@ -458,12 +456,20 @@ template("gcc_toolchain") {
# The host might not have a POSIX shell and utilities (e.g. Windows).
solink_wrapper =
rebase_path("//build/toolchain/gcc_solink_wrapper.py", root_build_dir)
- command = "$python_path \"$solink_wrapper\" --readelf=\"$readelf\" --nm=\"$nm\" $strip_switch$dwp_switch --sofile=\"$unstripped_sofile\" --tocfile=\"$tocfile\"$map_switch --output=\"$sofile\" -- $link_command"
+ solink_extra_flags = ""
+ if (current_os == "aix") {
+ # to be intercepted by solink_wrapper
+ solink_extra_flags = "--link-only"
+ }
+ command = "$python_path \"$solink_wrapper\" --readelf=\"$readelf\" --nm=\"$nm\" $strip_switch$dwp_switch --sofile=\"$unstripped_sofile\" --tocfile=\"$tocfile\"$map_switch --output=\"$sofile\" -- $link_command $solink_extra_flags"
if (target_cpu == "mipsel" && is_component_build && is_android) {
- rspfile_content = "-Wl,--start-group -Wl,--whole-archive {{inputs}} {{solibs}} -Wl,--no-whole-archive {{libs}} -Wl,--end-group"
+ rspfile_content = "-Wl,--start-group -Wl,--whole-archive {{inputs}} {{solibs}} -Wl,--no-whole-archive {{libs}} -Wl,--end-group {{rlibs}}"
+ } else if (current_os == "aix") {
+ # --whole-archive, --no-whole-archive flags are not available on the aix ld.
+ rspfile_content = "{{inputs}} {{solibs}} {{libs}} {{rlibs}}"
} else {
- rspfile_content = "-Wl,--whole-archive {{inputs}} {{solibs}} -Wl,--no-whole-archive {{libs}}"
+ rspfile_content = "-Wl,--whole-archive {{inputs}} {{solibs}} -Wl,--no-whole-archive {{libs}} {{rlibs}}"
}
description = "SOLINK $sofile"
@@ -518,9 +524,7 @@ template("gcc_toolchain") {
sofile = "{{output_dir}}/$soname"
rspfile = sofile + ".rsp"
- if (!use_rbe_links) {
- pool = "//build/toolchain:link_pool($default_toolchain)"
- }
+ pool = "//build/toolchain:link_pool($default_toolchain)"
if (defined(invoker.strip)) {
unstripped_sofile = "{{root_out_dir}}/lib.unstripped/$soname"
@@ -528,13 +532,22 @@ template("gcc_toolchain") {
unstripped_sofile = sofile
}
- command = "$ld -shared {{ldflags}}${extra_ldflags} -o \"$unstripped_sofile\" -Wl,-soname=\"$soname\" @\"$rspfile\""
+ soname_flag = ""
+ whole_archive_flag = ""
+ no_whole_archive_flag = ""
+ if (current_os != "aix") {
+ # -soname, --whole-archive, --no-whole-archive flags are not available on aix ld
+ soname_flag = "-Wl,-soname=\"$soname\""
+ whole_archive_flag = "-Wl,--whole-archive"
+ no_whole_archive_flag = "-Wl,--no-whole-archive"
+ }
+ command = "$ld -shared {{ldflags}}${extra_ldflags} -o \"$unstripped_sofile\" $soname_flag @\"$rspfile\""
if (defined(invoker.strip)) {
strip_command = "${invoker.strip} -o \"$sofile\" \"$unstripped_sofile\""
command += " && " + strip_command
}
- rspfile_content = "-Wl,--whole-archive {{inputs}} {{solibs}} -Wl,--no-whole-archive {{libs}}"
+ rspfile_content = "$whole_archive_flag {{inputs}} {{solibs}} $no_whole_archive_flag {{libs}} {{rlibs}}"
description = "SOLINK_MODULE $sofile"
@@ -567,9 +580,7 @@ template("gcc_toolchain") {
rspfile = "$outfile.rsp"
unstripped_outfile = outfile
- if (!use_rbe_links) {
- pool = "//build/toolchain:link_pool($default_toolchain)"
- }
+ pool = "//build/toolchain:link_pool($default_toolchain)"
# Use this for {{output_extension}} expansions unless a target manually
# overrides it (in which case {{output_extension}} will be what the target
@@ -610,7 +621,7 @@ template("gcc_toolchain") {
command = "$python_path \"$link_wrapper\" --output=\"$outfile\"$strip_switch$map_switch$dwp_switch -- $link_command"
description = "LINK $outfile"
- rspfile_content = "{{inputs}}"
+ rspfile_content = "{{inputs}} {{rlibs}}"
outputs = [ outfile ]
if (outfile != unstripped_outfile) {
outputs += [ unstripped_outfile ]
@@ -654,6 +665,58 @@ template("gcc_toolchain") {
pool = "//build/toolchain:action_pool($default_toolchain)"
}
+ if (toolchain_has_rust) {
+ rustc = ""
+ if (defined(rust_compiler_prefix)) {
+ rustc += rust_compiler_prefix
+ }
+ rustc += rust_prefix
+ rustc += "rustc"
+
+ # RSP files not used due to https://bugs.chromium.org/p/gn/issues/detail?id=249
+ tool("rust_staticlib") {
+ rust_outfile = "{{target_out_dir}}/{{crate_name}}.a"
+ depfile = "{{output}}.d"
+ command = "$rustc $rustc_common_args --emit=dep-info=$depfile,link -Clinker=$ld -o $rust_outfile"
+ description = "RUST $rust_outfile"
+ outputs = [ rust_outfile ]
+ }
+
+ tool("rust_rlib") {
+ rust_outfile = "{{target_out_dir}}/lib{{crate_name}}.rlib"
+ depfile = "{{output}}.d"
+ command = "$rustc $rustc_common_args --emit=dep-info=$depfile,link -Clinker=$ld -o $rust_outfile"
+ description = "RUST $rust_outfile"
+ outputs = [ rust_outfile ]
+ }
+
+ if (rustc_can_link) {
+ tool("rust_bin") {
+ rust_outfile = "{{root_out_dir}}/{{crate_name}}"
+ depfile = "{{output}}.d"
+ command = "$rustc $rustc_common_args --emit=dep-info=$depfile,link -Clinker=$ld -o $rust_outfile"
+ description = "RUST $rust_outfile"
+ outputs = [ rust_outfile ]
+ }
+
+ tool("rust_cdylib") {
+ rust_outfile = "{{target_out_dir}}/lib{{crate_name}}.so"
+ depfile = "{{output}}.d"
+ command = "$rustc $rustc_common_args --emit=dep-info=$depfile,link -Clinker=$ld -o $rust_outfile"
+ description = "RUST $rust_outfile"
+ outputs = [ rust_outfile ]
+ }
+
+ tool("rust_macro") {
+ rust_outfile = "{{target_out_dir}}/lib{{crate_name}}.so"
+ depfile = "{{output}}.d"
+ command = "$rustc $rustc_common_args --emit=dep-info=$depfile,link -Clinker=$ld -o $rust_outfile"
+ description = "RUST $rust_outfile"
+ outputs = [ rust_outfile ]
+ }
+ }
+ }
+
forward_variables_from(invoker, [ "deps" ])
}
}
diff --git a/chromium/build/toolchain/linux/BUILD.gn b/chromium/build/toolchain/linux/BUILD.gn
index 0c0b0a3321e..2f99002ac70 100644
--- a/chromium/build/toolchain/linux/BUILD.gn
+++ b/chromium/build/toolchain/linux/BUILD.gn
@@ -46,7 +46,7 @@ gcc_toolchain("arm64") {
current_os = "linux"
# reclient does not support gcc.
- use_rbe = false
+ use_remoteexec = false
is_clang = false
}
}
@@ -67,7 +67,7 @@ gcc_toolchain("arm") {
current_os = "linux"
# reclient does not support gcc.
- use_rbe = false
+ use_remoteexec = false
is_clang = false
}
}
@@ -123,7 +123,7 @@ gcc_toolchain("x86") {
current_os = "linux"
# reclient does not support gcc.
- use_rbe = false
+ use_remoteexec = false
is_clang = false
}
}
@@ -170,6 +170,14 @@ clang_toolchain("clang_x64_v8_riscv64") {
}
}
+clang_toolchain("clang_x64_v8_loong64") {
+ toolchain_args = {
+ current_cpu = "x64"
+ v8_current_cpu = "loong64"
+ current_os = "linux"
+ }
+}
+
# In a LaCrOS build, this toolchain is intended to be used as an alternate
# toolchain to build Ash-Chrome in a subdirectory.
clang_toolchain("ash_clang_x64") {
@@ -219,7 +227,7 @@ gcc_toolchain("x64") {
current_os = "linux"
# reclient does not support gcc.
- use_rbe = false
+ use_remoteexec = false
is_clang = false
}
}
@@ -254,7 +262,7 @@ gcc_toolchain("mipsel") {
current_os = "linux"
# reclient does not support gcc.
- use_rbe = false
+ use_remoteexec = false
is_clang = false
use_goma = false
}
@@ -276,7 +284,7 @@ gcc_toolchain("mips64el") {
current_os = "linux"
# reclient does not support gcc.
- use_rbe = false
+ use_remoteexec = false
is_clang = false
use_goma = false
}
@@ -322,7 +330,7 @@ gcc_toolchain("s390x") {
current_os = "linux"
# reclient does not support gcc.
- use_rbe = false
+ use_remoteexec = false
is_clang = false
}
}
@@ -341,7 +349,7 @@ gcc_toolchain("ppc64") {
current_os = "linux"
# reclient does not support gcc.
- use_rbe = false
+ use_remoteexec = false
is_clang = false
}
}
@@ -362,7 +370,7 @@ gcc_toolchain("mips") {
current_os = "linux"
# reclient does not support gcc.
- use_rbe = false
+ use_remoteexec = false
is_clang = false
}
}
@@ -383,7 +391,32 @@ gcc_toolchain("mips64") {
current_os = "linux"
# reclient does not support gcc.
- use_rbe = false
+ use_remoteexec = false
+ is_clang = false
+ }
+}
+
+clang_toolchain("clang_loong64") {
+ toolchain_args = {
+ current_cpu = "loong64"
+ current_os = "linux"
+ }
+}
+
+gcc_toolchain("loong64") {
+ toolprefix = "loongarch64-linux-gnu"
+
+ cc = "${toolprefix}-gcc"
+ cxx = "${toolprefix}-g++"
+
+ readelf = "${toolprefix}-readelf"
+ nm = "${toolprefix}-nm"
+ ar = "${toolprefix}-ar"
+ ld = cxx
+
+ toolchain_args = {
+ current_cpu = "loong64"
+ current_os = "linux"
is_clang = false
}
}
diff --git a/chromium/build/toolchain/mac/BUILD.gn b/chromium/build/toolchain/mac/BUILD.gn
index 1d0f2a2c228..70274fac5ad 100644
--- a/chromium/build/toolchain/mac/BUILD.gn
+++ b/chromium/build/toolchain/mac/BUILD.gn
@@ -21,6 +21,9 @@ template("mac_toolchain") {
current_os = "mac"
if (target_os == "ios") {
+ # Use LLD for the host part of a chrome/ios build.
+ use_lld = true
+
# TODO(crbug.com/753445): the use_sanitizer_coverage arg is currently
# not supported by the Chromium mac_clang_x64 toolchain on iOS
# distribution.
diff --git a/chromium/build/toolchain/nacl/BUILD.gn b/chromium/build/toolchain/nacl/BUILD.gn
index 66a88bedb5e..383d2b545f0 100644
--- a/chromium/build/toolchain/nacl/BUILD.gn
+++ b/chromium/build/toolchain/nacl/BUILD.gn
@@ -18,12 +18,16 @@ revisions = exec_script("//native_client/build/get_toolchain_revision.py",
"nacl_x86_glibc",
"nacl_arm_glibc",
"pnacl_newlib",
+ "saigo_newlib",
],
"trim list lines")
nacl_x86_glibc_rev = revisions[0]
nacl_arm_glibc_rev = revisions[1]
pnacl_newlib_rev = revisions[2]
+saigo_newlib_rev = revisions[3]
+
+use_saigo = true
if (host_os == "win") {
toolsuffix = ".exe"
@@ -49,7 +53,7 @@ if (host_os == "win") {
# When the compilers are run via goma, rbe or ccache rather than directly by
# GN/Ninja, the rbe/goma/ccache wrapper handles .bat files but gets confused
# by being given the scriptprefix.
-if (host_os == "win" && !use_goma && !use_rbe && cc_wrapper == "") {
+if (host_os == "win" && !use_goma && !use_remoteexec && cc_wrapper == "") {
compiler_scriptprefix = scriptprefix
} else {
compiler_scriptprefix = ""
@@ -155,7 +159,7 @@ template("nacl_glibc_toolchain") {
current_cpu = toolchain_cpu
# reclient does not support gcc.
- use_rbe = false
+ use_remoteexec = false
is_clang = false
is_nacl_glibc = true
use_lld = false
@@ -226,6 +230,10 @@ template("nacl_irt_toolchain") {
toolchain_package = "pnacl_newlib"
toolchain_revision = pnacl_newlib_rev
+ if (use_saigo) {
+ toolchain_package = "saigo_newlib"
+ toolchain_revision = saigo_newlib_rev
+ }
toolprefix = rebase_path("${nacl_toolchain_dir}/${toolchain_package}/bin/" +
invoker.toolchain_tuple + "-",
root_build_dir)
@@ -261,6 +269,7 @@ template("nacl_irt_toolchain") {
current_cpu = toolchain_cpu
is_clang = true
use_lld = false
+ is_nacl_saigo = use_saigo
}
# TODO(ncbray): depend on link script
diff --git a/chromium/build/toolchain/nacl_toolchain.gni b/chromium/build/toolchain/nacl_toolchain.gni
index 4d3257fc477..25fbd6405bc 100644
--- a/chromium/build/toolchain/nacl_toolchain.gni
+++ b/chromium/build/toolchain/nacl_toolchain.gni
@@ -58,17 +58,16 @@ template("nacl_toolchain") {
use_clang_coverage = false
coverage_instrumentation_input_file = ""
- if (use_rbe) {
+ if (use_remoteexec) {
if (is_win) {
- rbe_cc_cfg_file = "${rbe_cfg_dir}/rewrapper_windows_nacl.cfg"
+ rbe_cc_cfg_file = "${rbe_cfg_dir}/nacl/rewrapper_windows.cfg"
+ } else if (is_mac) {
+ rbe_cc_cfg_file = "${rbe_cfg_dir}/nacl/rewrapper_mac.cfg"
} else {
# TODO(ukai): non linux?
- rbe_cc_cfg_file = "${rbe_cfg_dir}/rewrapper_linux_nacl.cfg"
+ rbe_cc_cfg_file = "${rbe_cfg_dir}/nacl/rewrapper_linux.cfg"
}
}
-
- # Nacl Links are not yet supported.
- use_rbe_links = false
}
}
}
diff --git a/chromium/build/toolchain/rbe.gni b/chromium/build/toolchain/rbe.gni
index 97d5518e79a..baf7462bb5a 100644
--- a/chromium/build/toolchain/rbe.gni
+++ b/chromium/build/toolchain/rbe.gni
@@ -3,18 +3,18 @@
# The directory where the re-client tooling binaries are.
rbe_bin_dir = rebase_path("//buildtools/reclient", root_build_dir)
-# RBE Execution root - this should be the root of the source tree.
+# Execution root - this should be the root of the source tree.
# This is defined here instead of in the config file because
# this will vary depending on where the user has placed the
# chromium source on their system.
rbe_exec_root = rebase_path("//")
declare_args() {
- # Set to true to enable remote compilation using RBE.
- use_rbe = false
+ # Set to true to enable remote compilation using reclient.
+ use_remoteexec = false
- # Set to true to enable remote compilation of links using RBE.
- use_rbe_links = false
+ # Set to true to enable remote compilation using reclient (deprecated).
+ use_rbe = false
# The directory where the re-client configuration files are.
rbe_cfg_dir = rebase_path("//buildtools/reclient_cfgs", root_build_dir)
@@ -26,13 +26,20 @@ declare_args() {
rbe_cros_cc_wrapper = ""
}
+# Set use_remoteexec if use_rbe is set. Remove this once use_rbe is no longer
+# used to configure builds.
+if (!use_remoteexec && use_rbe) {
+ use_remoteexec = true
+}
+
# Configuration file selection based on operating system.
if (is_linux || is_android || is_chromeos) {
- rbe_cc_cfg_file = "${rbe_cfg_dir}/rewrapper_linux.cfg"
+ rbe_cc_cfg_file = "${rbe_cfg_dir}/chromium-browser-clang/rewrapper_linux.cfg"
}
if (is_win) {
- rbe_cc_cfg_file = "${rbe_cfg_dir}/rewrapper_windows.cfg"
+ rbe_cc_cfg_file =
+ "${rbe_cfg_dir}/chromium-browser-clang/rewrapper_windows.cfg"
}
if (is_mac || is_ios) {
- rbe_cc_cfg_file = "${rbe_cfg_dir}/rewrapper_mac.cfg"
+ rbe_cc_cfg_file = "${rbe_cfg_dir}/chromium-browser-clang/rewrapper_mac.cfg"
}
diff --git a/chromium/build/toolchain/win/BUILD.gn b/chromium/build/toolchain/win/BUILD.gn
index c3def9ece7a..28923e66196 100644
--- a/chromium/build/toolchain/win/BUILD.gn
+++ b/chromium/build/toolchain/win/BUILD.gn
@@ -4,6 +4,7 @@
import("//build/config/clang/clang.gni")
import("//build/config/compiler/compiler.gni")
+import("//build/config/rust.gni")
import("//build/config/sanitizers/sanitizers.gni")
import("//build/config/win/visual_studio_version.gni")
import("//build/toolchain/cc_wrapper.gni")
@@ -23,7 +24,7 @@ assert(is_win)
# This tool will is used as a wrapper for various commands below.
tool_wrapper_path = rebase_path("tool_wrapper.py", root_build_dir)
-if (use_rbe) {
+if (use_remoteexec) {
goma_prefix = ""
rbe_prefix = "${rbe_bin_dir}/rewrapper -cfg=${rbe_cc_cfg_file} -exec_root=${rbe_exec_root} "
clang_prefix = rbe_prefix
@@ -299,6 +300,50 @@ template("msvc_toolchain") {
outputs = [ "$object_subdir/{{source_name_part}}.obj" ]
}
+ if (toolchain_has_rust) {
+ tool("rust_staticlib") {
+ rust_outfile = "{{target_out_dir}}/{{crate_name}}.lib"
+ depfile = "{{crate_name}}.d"
+ command = "${rust_prefix}/rustc $rustc_common_args --emit=dep-info={{target_out_dir}}/$depfile,link -o $rust_outfile"
+ description = "RUST $rust_outfile"
+ outputs = [ rust_outfile ]
+ }
+
+ tool("rust_rlib") {
+ rust_outfile = "{{target_out_dir}}/lib{{crate_name}}.rlib"
+ depfile = "{{crate_name}}.d"
+ command = "${rust_prefix}/rustc $rustc_common_args --emit=dep-info={{target_out_dir}}/$depfile,link -o $rust_outfile"
+ description = "RUST $rust_outfile"
+ outputs = [ rust_outfile ]
+ }
+
+ if (rustc_can_link) {
+ tool("rust_bin") {
+ rust_outfile = "{{root_out_dir}}/{{crate_name}}.exe"
+ depfile = "{{crate_name}}.d"
+ command = "${rust_prefix}/rustc $rustc_common_args --emit=dep-info={{target_out_dir}}/$depfile,link -o $rust_outfile"
+ description = "RUST $rust_outfile"
+ outputs = [ rust_outfile ]
+ }
+
+ tool("rust_cdylib") {
+ rust_outfile = "{{target_out_dir}}/lib{{crate_name}}.dll"
+ depfile = "{{crate_name}}.d"
+ command = "${rust_prefix}/rustc $rustc_common_args --emit=dep-info={{target_out_dir}}/$depfile,link -o $rust_outfile"
+ description = "RUST $rust_outfile"
+ outputs = [ rust_outfile ]
+ }
+
+ tool("rust_macro") {
+ rust_outfile = "{{target_out_dir}}/lib{{crate_name}}.dll"
+ depfile = "{{crate_name}}.d"
+ command = "${rust_prefix}/rustc $rustc_common_args --emit=dep-info={{target_out_dir}}/$depfile,link -o $rust_outfile"
+ description = "RUST $rust_outfile"
+ outputs = [ rust_outfile ]
+ }
+ }
+ }
+
tool("alink") {
rspfile = "{{output}}.rsp"
command = "$linker_wrapper$lib /OUT:{{output}} /nologo ${sys_lib_flags}{{arflags}} @$rspfile"
@@ -348,7 +393,8 @@ template("msvc_toolchain") {
# The use of inputs_newline is to work around a fixed per-line buffer
# size in the linker.
- rspfile_content = "{{libs}} {{solibs}} {{inputs_newline}} {{ldflags}}"
+ rspfile_content =
+ "{{libs}} {{solibs}} {{inputs_newline}} {{ldflags}} {{rlibs}}"
}
tool("solink_module") {
@@ -371,7 +417,8 @@ template("msvc_toolchain") {
# The use of inputs_newline is to work around a fixed per-line buffer
# size in the linker.
- rspfile_content = "{{libs}} {{solibs}} {{inputs_newline}} {{ldflags}}"
+ rspfile_content =
+ "{{libs}} {{solibs}} {{inputs_newline}} {{ldflags}} {{rlibs}}"
}
tool("link") {
@@ -393,7 +440,8 @@ template("msvc_toolchain") {
# The use of inputs_newline is to work around a fixed per-line buffer
# size in the linker.
- rspfile_content = "{{inputs_newline}} {{libs}} {{solibs}} {{ldflags}}"
+ rspfile_content =
+ "{{inputs_newline}} {{libs}} {{solibs}} {{ldflags}} {{rlibs}}"
}
# These two are really entirely generic, but have to be repeated in
diff --git a/chromium/build/util/BUILD.gn b/chromium/build/util/BUILD.gn
index a97a1ffb755..2745449ea7f 100644
--- a/chromium/build/util/BUILD.gn
+++ b/chromium/build/util/BUILD.gn
@@ -30,6 +30,8 @@ action("chromium_git_revision") {
group("test_results") {
data = [
+ "//.vpython",
+ "//.vpython3",
"//build/util/lib/__init__.py",
"//build/util/lib/results/",
]
diff --git a/chromium/build/util/LASTCHANGE b/chromium/build/util/LASTCHANGE
index 781e3cd9c9e..24510aeea0f 100644
--- a/chromium/build/util/LASTCHANGE
+++ b/chromium/build/util/LASTCHANGE
@@ -1,2 +1,2 @@
-LASTCHANGE=a3a88852f68349f07722a79f1f72376ea6ba5828-refs/branch-heads/4606@{#1448}
-LASTCHANGE_YEAR=2021
+LASTCHANGE=5871e0c772fb1596c16cc4f693ed6e492a85e9a1-refs/branch-heads/4664@{#1425}
+LASTCHANGE_YEAR=2022
diff --git a/chromium/build/util/LASTCHANGE.committime b/chromium/build/util/LASTCHANGE.committime
index b60ebdaa32a..137eadec640 100644
--- a/chromium/build/util/LASTCHANGE.committime
+++ b/chromium/build/util/LASTCHANGE.committime
@@ -1 +1 @@
-1637169872 \ No newline at end of file
+1643082090 \ No newline at end of file
diff --git a/chromium/build/util/PRESUBMIT.py b/chromium/build/util/PRESUBMIT.py
index df4d718148c..575c806d3c8 100644
--- a/chromium/build/util/PRESUBMIT.py
+++ b/chromium/build/util/PRESUBMIT.py
@@ -40,7 +40,10 @@ def _GetPythonUnitTests(input_api, output_api):
output_api,
input_api.PresubmitLocalPath(),
files_to_check=['.*_test\\.py$'],
- files_to_skip=files_to_skip)
+ files_to_skip=files_to_skip,
+ run_on_python2=False,
+ run_on_python3=True,
+ skip_shebang_check=True)
def CommonChecks(input_api, output_api):
diff --git a/chromium/build/util/lib/common/chrome_test_server_spawner.py b/chromium/build/util/lib/common/chrome_test_server_spawner.py
index 9810215e811..aab0d0796d1 100644
--- a/chromium/build/util/lib/common/chrome_test_server_spawner.py
+++ b/chromium/build/util/lib/common/chrome_test_server_spawner.py
@@ -44,10 +44,6 @@ os.environ['PYTHONPATH'] = os.environ.get('PYTHONPATH', '') + (':%s:%s:%s'
os.path.join(_DIR_SOURCE_ROOT, 'net', 'tools', 'testserver')))
-# The timeout (in seconds) of starting up the Python test server.
-_TEST_SERVER_STARTUP_TIMEOUT = 10
-
-
def _GetServerTypeCommandLine(server_type):
"""Returns the command-line by the given server type.
@@ -108,30 +104,25 @@ class TestServerThread(threading.Thread):
self.port_forwarder = port_forwarder
self.test_server_process = None
self.is_ready = False
- self.host_port = self.arguments['port']
+ self.host_port = 0
self.host_ocsp_port = 0
assert isinstance(self.host_port, int)
# The forwarder device port now is dynamically allocated.
self.forwarder_device_port = 0
self.forwarder_ocsp_device_port = 0
- # Anonymous pipe in order to get port info from test server.
- self.pipe_in = None
- self.pipe_out = None
self.process = None
self.command_line = []
- def _WaitToStartAndGetPortFromTestServer(self):
+ def _WaitToStartAndGetPortFromTestServer(self, pipe_in):
"""Waits for the Python test server to start and gets the port it is using.
The port information is passed by the Python test server with a pipe given
- by self.pipe_out. It is written as a result to |self.host_port|.
+ by |pipe_in|. It is written as a result to |self.host_port|.
Returns:
Whether the port used by the test server was successfully fetched.
"""
- assert self.host_port == 0 and self.pipe_out and self.pipe_in
- (in_fds, _, _) = select.select([self.pipe_in, ], [], [],
- _TEST_SERVER_STARTUP_TIMEOUT)
+ (in_fds, _, _) = select.select([pipe_in], [], [])
if len(in_fds) == 0:
_logger.error('Failed to wait to the Python test server to be started.')
return False
@@ -141,14 +132,14 @@ class TestServerThread(threading.Thread):
# configured to use little-endian.
# TODO(jnd): Change the Python test server and local_test_server_*.cc to
# use a unified byte order (either big-endian or little-endian).
- data_length = os.read(self.pipe_in, struct.calcsize('=L'))
+ data_length = os.read(pipe_in, struct.calcsize('=L'))
if data_length:
(data_length,) = struct.unpack('=L', data_length)
assert data_length
if not data_length:
_logger.error('Failed to get length of server data.')
return False
- server_data_json = os.read(self.pipe_in, data_length)
+ server_data_json = os.read(pipe_in, data_length)
if not server_data_json:
_logger.error('Failed to get server data.')
return False
@@ -173,7 +164,7 @@ class TestServerThread(threading.Thread):
return self.port_forwarder.WaitPortNotAvailable(self.host_port)
- def _GenerateCommandLineArguments(self):
+ def _GenerateCommandLineArguments(self, pipe_out):
"""Generates the command line to run the test server.
Note that all options are processed by following the definitions in
@@ -189,12 +180,8 @@ class TestServerThread(threading.Thread):
if type_cmd:
self.command_line.append(type_cmd)
- # Use a pipe to get the port given by the instance of Python test server
- # if the test does not specify the port.
- assert self.host_port == args_copy['port']
- if self.host_port == 0:
- (self.pipe_in, self.pipe_out) = os.pipe()
- self.command_line.append('--startup-pipe=%d' % self.pipe_out)
+ # Use a pipe to get the port given by the Python test server.
+ self.command_line.append('--startup-pipe=%d' % pipe_out)
# Pass the remaining arguments as-is.
for key, values in args_copy.iteritems():
@@ -206,12 +193,15 @@ class TestServerThread(threading.Thread):
else:
self.command_line.append('--%s=%s' % (key, value))
- def _CloseUnnecessaryFDsForTestServerProcess(self):
+ def _CloseUnnecessaryFDsForTestServerProcess(self, pipe_out):
# This is required to avoid subtle deadlocks that could be caused by the
# test server child process inheriting undesirable file descriptors such as
- # file lock file descriptors.
- for fd in xrange(0, 1024):
- if fd != self.pipe_out:
+ # file lock file descriptors. Note stdin, stdout, and stderr (0-2) are left
+ # alone and redirected with subprocess.Popen. It is important to leave those
+ # fds filled, or the test server will accidentally open other fds at those
+ # numbers.
+ for fd in xrange(3, 1024):
+ if fd != pipe_out:
try:
os.close(fd)
except:
@@ -220,63 +210,79 @@ class TestServerThread(threading.Thread):
def run(self):
_logger.info('Start running the thread!')
self.wait_event.clear()
- self._GenerateCommandLineArguments()
- command = [sys.executable,
- os.path.join(_DIR_SOURCE_ROOT, 'net', 'tools', 'testserver',
- 'testserver.py')] + self.command_line
- _logger.info('Running: %s', command)
-
- # Disable PYTHONUNBUFFERED because it has a bad interaction with the
- # testserver. Remove once this interaction is fixed.
- unbuf = os.environ.pop('PYTHONUNBUFFERED', None)
-
- # Pass _DIR_SOURCE_ROOT as the child's working directory so that relative
- # paths in the arguments are resolved correctly.
- self.process = subprocess.Popen(
- command, preexec_fn=self._CloseUnnecessaryFDsForTestServerProcess,
- cwd=_DIR_SOURCE_ROOT)
- if unbuf:
- os.environ['PYTHONUNBUFFERED'] = unbuf
- if self.process:
- if self.pipe_out:
- self.is_ready = self._WaitToStartAndGetPortFromTestServer()
- else:
- self.is_ready = self.port_forwarder.WaitPortNotAvailable(self.host_port)
-
- if self.is_ready:
- port_map = [(0, self.host_port)]
- if self.host_ocsp_port:
- port_map.extend([(0, self.host_ocsp_port)])
- self.port_forwarder.Map(port_map)
-
- self.forwarder_device_port = \
- self.port_forwarder.GetDevicePortForHostPort(self.host_port)
- if self.host_ocsp_port:
- self.forwarder_ocsp_device_port = \
- self.port_forwarder.GetDevicePortForHostPort(self.host_ocsp_port)
-
- # Check whether the forwarder is ready on the device.
- self.is_ready = self.forwarder_device_port and \
- self.port_forwarder.WaitDevicePortReady(self.forwarder_device_port)
-
- # Wake up the request handler thread.
- self.ready_event.set()
- # Keep thread running until Stop() gets called.
- self.stop_event.wait()
- if self.process.poll() is None:
- self.process.kill()
- # Wait for process to actually terminate.
- # (crbug.com/946475)
- self.process.wait()
- self.port_forwarder.Unmap(self.forwarder_device_port)
- self.process = None
- self.is_ready = False
- if self.pipe_out:
- os.close(self.pipe_in)
- os.close(self.pipe_out)
- self.pipe_in = None
- self.pipe_out = None
+ # Set up a pipe for the server to report when it has started.
+ pipe_in, pipe_out = os.pipe()
+ try:
+ self._GenerateCommandLineArguments(pipe_out)
+ # TODO(crbug.com/941669): When this script is ported to Python 3, replace
+ # 'vpython3' below with sys.executable.
+ command = [
+ 'vpython3',
+ os.path.join(_DIR_SOURCE_ROOT, 'net', 'tools', 'testserver',
+ 'testserver.py')
+ ] + self.command_line
+ _logger.info('Running: %s', command)
+
+ # Disable PYTHONUNBUFFERED because it has a bad interaction with the
+ # testserver. Remove once this interaction is fixed.
+ unbuf = os.environ.pop('PYTHONUNBUFFERED', None)
+
+ # Pass _DIR_SOURCE_ROOT as the child's working directory so that relative
+ # paths in the arguments are resolved correctly. devnull can be replaced
+ # with subprocess.DEVNULL in Python 3.
+ with open(os.devnull, 'r+b') as devnull:
+ self.process = subprocess.Popen(
+ command,
+ preexec_fn=lambda: self._CloseUnnecessaryFDsForTestServerProcess(
+ pipe_out),
+ stdin=devnull,
+ # Preserve stdout and stderr from the test server.
+ stdout=None,
+ stderr=None,
+ cwd=_DIR_SOURCE_ROOT)
+ # Close pipe_out early. If self.process crashes, this will be visible
+ # in _WaitToStartAndGetPortFromTestServer's select loop.
+ os.close(pipe_out)
+ pipe_out = -1
+ if unbuf:
+ os.environ['PYTHONUNBUFFERED'] = unbuf
+ self.is_ready = self._WaitToStartAndGetPortFromTestServer(pipe_in)
+
+ if self.is_ready:
+ port_map = [(0, self.host_port)]
+ if self.host_ocsp_port:
+ port_map.extend([(0, self.host_ocsp_port)])
+ self.port_forwarder.Map(port_map)
+
+ self.forwarder_device_port = \
+ self.port_forwarder.GetDevicePortForHostPort(self.host_port)
+ if self.host_ocsp_port:
+ self.forwarder_ocsp_device_port = \
+ self.port_forwarder.GetDevicePortForHostPort(self.host_ocsp_port)
+
+ # Check whether the forwarder is ready on the device.
+ self.is_ready = self.forwarder_device_port and \
+ self.port_forwarder.WaitDevicePortReady(self.forwarder_device_port)
+
+ # Wake up the request handler thread.
+ self.ready_event.set()
+ # Keep thread running until Stop() gets called.
+ self.stop_event.wait()
+ if self.process.poll() is None:
+ self.process.kill()
+ # Wait for process to actually terminate.
+ # (crbug.com/946475)
+ self.process.wait()
+
+ self.port_forwarder.Unmap(self.forwarder_device_port)
+ self.process = None
+ self.is_ready = False
+ finally:
+ if pipe_in >= 0:
+ os.close(pipe_in)
+ if pipe_out >= 0:
+ os.close(pipe_out)
_logger.info('Test-server has died.')
self.wait_event.set()
diff --git a/chromium/build/util/lib/results/result_sink.py b/chromium/build/util/lib/results/result_sink.py
index 86818e46361..5f4de26d85e 100644
--- a/chromium/build/util/lib/results/result_sink.py
+++ b/chromium/build/util/lib/results/result_sink.py
@@ -65,7 +65,8 @@ class ResultSinkClient(object):
test_log,
test_file,
artifacts=None,
- failure_reason=None):
+ failure_reason=None,
+ html_artifact=None):
"""Uploads the test result to the ResultSink server.
This assumes that the rdb stream has been called already and that
@@ -80,6 +81,9 @@ class ResultSinkClient(object):
artifacts: An optional dict of artifacts to attach to the test.
failure_reason: An optional string with the reason why the test failed.
Should be None if the test did not fail.
+ html_artifact: An optional html-formatted string to prepend to the test's
+ log. Useful to encode click-able URL links in the test log, since that
+ won't be formatted in the test_log.
Returns:
N/A
@@ -100,20 +104,24 @@ class ResultSinkClient(object):
},
{
# Status before getting mapped to result_db statuses.
- 'key': 'android_test_runner_status',
+ 'key': 'raw_status',
'value': status,
}
],
'testId':
test_id,
+ 'testMetadata': {
+ 'name': test_id,
+ }
}
artifacts = artifacts or {}
+ tr['summaryHtml'] = html_artifact if html_artifact else ''
if test_log:
# Upload the original log without any modifications.
b64_log = six.ensure_str(base64.b64encode(six.ensure_binary(test_log)))
artifacts.update({'Test Log': {'contents': b64_log}})
- tr['summaryHtml'] = '<text-artifact artifact-id="Test Log" />'
+ tr['summaryHtml'] += '<text-artifact artifact-id="Test Log" />'
if artifacts:
tr['artifacts'] = artifacts
if failure_reason:
@@ -128,12 +136,9 @@ class ResultSinkClient(object):
tr['duration'] = '%.9fs' % float(duration / 1000.0)
if test_file and str(test_file).startswith('//'):
- tr['testMetadata'] = {
- 'name': test_id,
- 'location': {
- 'file_name': test_file,
- 'repo': 'https://chromium.googlesource.com/chromium/src',
- }
+ tr['testMetadata']['location'] = {
+ 'file_name': test_file,
+ 'repo': 'https://chromium.googlesource.com/chromium/src',
}
res = requests.post(url=self.test_results_url,
diff --git a/chromium/build/util/lib/results/result_sink_test.py b/chromium/build/util/lib/results/result_sink_test.py
index f9b8b0b7005..3486ad90d11 100755
--- a/chromium/build/util/lib/results/result_sink_test.py
+++ b/chromium/build/util/lib/results/result_sink_test.py
@@ -79,6 +79,8 @@ class ClientTest(unittest.TestCase):
failure_reason='omg test failure')
data = json.loads(mock_post.call_args[1]['data'])
self.assertEqual(data['testResults'][0]['status'], 'FAIL')
+ self.assertEqual(data['testResults'][0]['testMetadata']['name'],
+ 'some-test')
self.assertEqual(
data['testResults'][0]['failureReason']['primaryErrorMessage'],
'omg test failure')
@@ -91,6 +93,8 @@ class ClientTest(unittest.TestCase):
self.assertEqual(
data['testResults'][0]['testMetadata']['location']['file_name'],
'//some/test.cc')
+ self.assertEqual(data['testResults'][0]['testMetadata']['name'],
+ 'some-test')
self.assertIsNotNone(data['testResults'][0]['summaryHtml'])
diff --git a/chromium/build/whitespace_file.txt b/chromium/build/whitespace_file.txt
index c6768a3aa22..de293c0f5ff 100644
--- a/chromium/build/whitespace_file.txt
+++ b/chromium/build/whitespace_file.txt
@@ -188,3 +188,8 @@ Vestibulum rhoncus neque sodales nibh lobortis, non fringilla odio aliquet.
Praesent ultrices quam eu pretium ultrices.
Quisque et consequat ex. Curabitur sed nunc neque.
foo
+
+And if you go chasing rabbits
+And you know you're going to fall
+Tell 'em a hookah-smoking caterpillar
+