author     Allan Sandfeld Jensen <allan.jensen@qt.io>    2017-07-12 14:07:37 +0200
committer  Allan Sandfeld Jensen <allan.jensen@qt.io>    2017-07-17 10:29:26 +0000
commit     ec02ee4181c49b61fce1c8fb99292dbb8139cc90 (patch)
tree       25cde714b2b71eb639d1cd53f5a22e9ba76e14ef /chromium/build
parent     bb09965444b5bb20b096a291445170876225268d (diff)
download   qtwebengine-chromium-ec02ee4181c49b61fce1c8fb99292dbb8139cc90.tar.gz
BASELINE: Update Chromium to 59.0.3071.134
Change-Id: Id02ef6fb2204c5fd21668a1c3e6911c83b17585a
Reviewed-by: Alexandru Croitor <alexandru.croitor@qt.io>
Diffstat (limited to 'chromium/build')
-rw-r--r--  chromium/build/OWNERS | 9
-rw-r--r--  chromium/build/OWNERS.status | 12
-rw-r--r--  chromium/build/android/BUILD.gn | 23
-rwxr-xr-x  chromium/build/android/adb_install_apk.py | 4
-rw-r--r--  chromium/build/android/android.isolate | 28
-rw-r--r--  chromium/build/android/binary_size/OWNERS | 4
-rw-r--r--  chromium/build/android/binary_size/__init__.py | 3
-rwxr-xr-x  chromium/build/android/binary_size/apk_downloader.py | 75
-rw-r--r--  chromium/build/android/binary_size/apks/Android_Builder/56/ChromePublic.apk.sha1 | 1
-rw-r--r--  chromium/build/android/binary_size/apks/Android_Builder/57/ChromeModernPublic.apk.sha1 | 1
-rw-r--r--  chromium/build/android/binary_size/apks/Android_Builder/57/ChromePublic.apk.sha1 | 1
-rw-r--r--  chromium/build/android/binary_size/apks/Android_Builder/58/ChromeModernPublic.apk.sha1 | 1
-rw-r--r--  chromium/build/android/binary_size/apks/Android_Builder/58/ChromePublic.apk.sha1 | 1
-rw-r--r--  chromium/build/android/binary_size/apks/Android_Builder/58/MonochromePublic.apk.sha1 | 1
-rw-r--r--  chromium/build/android/binary_size/apks/Android_arm64_Builder/56/ChromePublic.apk.sha1 | 1
-rw-r--r--  chromium/build/android/binary_size/apks/Android_arm64_Builder/57/ChromeModernPublic.apk.sha1 | 1
-rw-r--r--  chromium/build/android/binary_size/apks/Android_arm64_Builder/57/ChromePublic.apk.sha1 | 1
-rw-r--r--  chromium/build/android/binary_size/apks/Android_arm64_Builder/58/ChromeModernPublic.apk.sha1 | 1
-rw-r--r--  chromium/build/android/binary_size/apks/Android_arm64_Builder/58/ChromePublic.apk.sha1 | 1
-rw-r--r--  chromium/build/android/binary_size/apks/README.md | 24
-rw-r--r--  chromium/build/android/devil_chromium.json | 6
-rwxr-xr-x  chromium/build/android/enable_asserts.py | 4
-rw-r--r--  chromium/build/android/findbugs_filter/findbugs_exclude.xml | 8
-rw-r--r--  chromium/build/android/gradle/OWNERS | 4
-rwxr-xr-x  chromium/build/android/gradle/generate_gradle.py | 18
-rwxr-xr-x  chromium/build/android/gradle/gn_to_cmake.py | 680
-rw-r--r--  chromium/build/android/gradle/root.jinja | 2
-rwxr-xr-x  chromium/build/android/gyp/java_cpp_enum.py | 20
-rwxr-xr-x  chromium/build/android/gyp/java_cpp_enum_tests.py | 15
-rwxr-xr-x  chromium/build/android/gyp/push_libraries.py | 3
-rw-r--r--  chromium/build/android/gyp/util/proguard_util.py | 14
-rwxr-xr-x  chromium/build/android/gyp/write_build_config.py | 30
-rw-r--r--  chromium/build/android/incremental_install/BUILD.gn | 1
-rw-r--r--  chromium/build/android/incremental_install/README.md | 81
-rwxr-xr-x  chromium/build/android/incremental_install/generate_android_manifest.py | 19
-rwxr-xr-x  chromium/build/android/incremental_install/installer.py | 5
-rw-r--r--  chromium/build/android/java_assertion_enabler/OWNERS | 2
-rw-r--r--  chromium/build/android/lint/suppressions.xml | 13
-rw-r--r--  chromium/build/android/main_dex_classes.flags | 14
-rw-r--r--  chromium/build/android/multidex.flags | 12
-rw-r--r--  chromium/build/android/play_services/config.json | 9
-rw-r--r--  chromium/build/android/play_services/google_play_services_library.zip.sha1 | 2
-rwxr-xr-x  chromium/build/android/provision_devices.py | 3
-rw-r--r--  chromium/build/android/pylib/android/logdog_logcat_monitor.py | 8
-rw-r--r--  chromium/build/android/pylib/constants/__init__.py | 2
-rw-r--r--  chromium/build/android/pylib/gtest/filter/unit_tests_disabled | 2
-rw-r--r--  chromium/build/android/pylib/gtest/gtest_test_instance.py | 47
-rw-r--r--  chromium/build/android/pylib/instrumentation/instrumentation_test_instance.py | 45
-rwxr-xr-x  chromium/build/android/pylib/instrumentation/instrumentation_test_instance_test.py | 60
-rw-r--r--  chromium/build/android/pylib/local/device/local_device_environment.py | 11
-rw-r--r--  chromium/build/android/pylib/local/device/local_device_gtest_run.py | 36
-rw-r--r--  chromium/build/android/pylib/local/device/local_device_instrumentation_test_run.py | 196
-rw-r--r--  chromium/build/android/pylib/local/device/local_device_monkey_test_run.py | 2
-rw-r--r--  chromium/build/android/pylib/local/device/local_device_perf_test_run.py | 7
-rw-r--r--  chromium/build/android/pylib/local/device/local_device_test_run.py | 26
-rw-r--r--  chromium/build/android/pylib/local/machine/local_machine_environment.py | 5
-rw-r--r--  chromium/build/android/pylib/local/machine/local_machine_junit_test_run.py | 13
-rw-r--r--  chromium/build/android/pylib/perf/perf_test_instance.py | 2
-rw-r--r--  chromium/build/android/pylib/results/presentation/javascript/main_html.js | 38
-rw-r--r--  chromium/build/android/pylib/results/presentation/template/main.html | 5
-rw-r--r--  chromium/build/android/pylib/results/presentation/template/table.html | 4
-rwxr-xr-x  chromium/build/android/pylib/results/presentation/test_results_presentation.py | 86
-rw-r--r--  chromium/build/android/pylib/utils/emulator.py | 1
-rw-r--r--  chromium/build/android/pylib/utils/google_storage_helper.py | 57
-rw-r--r--  chromium/build/android/pylib/utils/logdog_helper.py | 3
-rwxr-xr-x  chromium/build/android/render_tests/process_render_test_results.py | 214
-rw-r--r--chromium/build/android/render_tests/render_webpage.html.jinja284
-rwxr-xr-x  chromium/build/android/resource_sizes.py | 101
-rwxr-xr-x  chromium/build/android/test_runner.py | 1071
-rw-r--r--  chromium/build/android/test_runner.pydeps | 4
-rwxr-xr-x  chromium/build/android/test_wrapper/logdog_wrapper.py | 119
-rw-r--r--  chromium/build/android/test_wrapper/logdog_wrapper.pydeps | 11
-rwxr-xr-x  chromium/build/android/tombstones.py | 2
-rw-r--r--  chromium/build/args/headless.gn | 1
-rw-r--r--  chromium/build/config/BUILD.gn | 36
-rw-r--r--  chromium/build/config/android/BUILD.gn | 1
-rw-r--r--  chromium/build/config/android/OWNERS | 2
-rw-r--r--  chromium/build/config/android/config.gni | 22
-rw-r--r--  chromium/build/config/android/internal_rules.gni | 7
-rw-r--r--  chromium/build/config/android/rules.gni | 27
-rw-r--r--  chromium/build/config/chromecast/BUILD.gn | 30
-rw-r--r--  chromium/build/config/clang/BUILD.gn | 3
-rw-r--r--  chromium/build/config/compiler/BUILD.gn | 125
-rw-r--r--  chromium/build/config/compiler/compiler.gni | 55
-rw-r--r--  chromium/build/config/features.gni | 15
-rw-r--r--  chromium/build/config/freetype/BUILD.gn | 15
-rw-r--r--  chromium/build/config/freetype/OWNERS | 2
-rw-r--r--  chromium/build/config/ios/rules.gni | 29
-rw-r--r--  chromium/build/config/linux/gtk/gtk.gni | 7
-rw-r--r--  chromium/build/config/linux/gtk2/BUILD.gn | 1
-rw-r--r--  chromium/build/config/posix/BUILD.gn | 13
-rw-r--r--  chromium/build/config/sanitizers/BUILD.gn | 12
-rw-r--r--  chromium/build/config/sanitizers/sanitizers.gni | 5
-rw-r--r--  chromium/build/config/sysroot.gni | 60
-rw-r--r--  chromium/build/config/ui.gni | 10
-rw-r--r--  chromium/build/config/win/BUILD.gn | 20
-rw-r--r--  chromium/build/config/win/msvs_dependencies.isolate | 179
-rw-r--r--  chromium/build/dotfile_settings.gni | 1
-rwxr-xr-x  chromium/build/experimental/install-build-deps.py | 5
-rwxr-xr-x  chromium/build/fix_gn_headers.py | 154
-rwxr-xr-x  chromium/build/get_landmines.py | 5
-rwxr-xr-x  chromium/build/install-build-deps-android.sh | 103
-rwxr-xr-x  chromium/build/install-build-deps.sh | 34
-rw-r--r--  chromium/build/linux/BUILD.gn | 13
-rwxr-xr-x  chromium/build/linux/install-chromeos-fonts.py | 6
-rw-r--r--  chromium/build/linux/sysroot_scripts/debian-archive-wheezy-stable.gpg | bin 29655 -> 0 bytes
-rwxr-xr-x  chromium/build/linux/sysroot_scripts/install-sysroot.py | 64
-rw-r--r--  chromium/build/linux/sysroot_scripts/packagelist.precise.amd64 | 179
-rw-r--r--  chromium/build/linux/sysroot_scripts/packagelist.wheezy.amd64 | 180
-rw-r--r--  chromium/build/linux/sysroot_scripts/packagelist.wheezy.arm | 179
-rw-r--r--  chromium/build/linux/sysroot_scripts/packagelist.wheezy.i386 | 180
-rw-r--r--  chromium/build/linux/sysroot_scripts/packagelist.wheezy.mipsel | 178
-rwxr-xr-x  chromium/build/linux/sysroot_scripts/sysroot-creator-precise.sh | 212
-rwxr-xr-x  chromium/build/linux/sysroot_scripts/sysroot-creator-wheezy.sh | 208
-rw-r--r--  chromium/build/linux/sysroot_scripts/sysroots.json | 30
-rw-r--r--  chromium/build/linux/unbundle/README | 37
-rw-r--r--  chromium/build/linux/unbundle/ffmpeg.gn | 9
-rw-r--r--  chromium/build/linux/unbundle/libdrm.gn | 22
-rw-r--r--  chromium/build/linux/unbundle/libjpeg.gn | 3
-rwxr-xr-x  chromium/build/linux/unbundle/replace_gn_files.py | 1
-rwxr-xr-x  chromium/build/mac/copy_asan_runtime_dylib.sh | 76
-rwxr-xr-x  chromium/build/mac/copy_framework_unversioned.sh | 118
-rwxr-xr-x  chromium/build/mac/strip_from_xcode | 62
-rwxr-xr-x  chromium/build/mac/strip_save_dsym | 335
-rw-r--r--  chromium/build/sample_arg_file.gn | 6
-rw-r--r--  chromium/build/sanitizers/lsan_suppressions.cc | 158
-rw-r--r--  chromium/build/sanitizers/sanitizer_options.cc | 8
-rw-r--r--  chromium/build/sanitizers/tsan_suppressions.cc | 35
-rw-r--r--  chromium/build/secondary/third_party/android_tools/BUILD.gn | 78
-rw-r--r--  chromium/build/secondary/third_party/crashpad/crashpad/handler/BUILD.gn | 2
-rw-r--r--  chromium/build/secondary/third_party/crashpad/crashpad/minidump/BUILD.gn | 2
-rw-r--r--  chromium/build/secondary/third_party/crashpad/crashpad/util/BUILD.gn | 11
-rw-r--r--  chromium/build/toolchain/android/BUILD.gn | 3
-rwxr-xr-x  chromium/build/toolchain/clang_static_analyzer_wrapper.py | 106
-rwxr-xr-x  chromium/build/toolchain/gcc_link_wrapper.py | 10
-rwxr-xr-x  chromium/build/toolchain/gcc_solink_wrapper.py | 10
-rw-r--r--  chromium/build/toolchain/gcc_toolchain.gni | 122
-rw-r--r--  chromium/build/toolchain/linux/BUILD.gn | 12
-rw-r--r--  chromium/build/toolchain/nacl/BUILD.gn | 11
-rw-r--r--  chromium/build/toolchain/nacl_toolchain.gni | 1
-rw-r--r--  chromium/build/toolchain/win/BUILD.gn | 9
-rw-r--r--  chromium/build/toolchain/win/setup_toolchain.py | 10
-rw-r--r--  chromium/build/toolchain/win/tool_wrapper.py | 10
-rw-r--r--  chromium/build/toolchain/wrapper_utils.py | 46
-rw-r--r--  chromium/build/util/LASTCHANGE | 2
-rw-r--r--  chromium/build/util/LASTCHANGE.blink | 2
-rwxr-xr-x  chromium/build/util/lastchange.py | 111
-rwxr-xr-x  chromium/build/vs_toolchain.py | 91
-rw-r--r--  chromium/build/whitespace_file.txt | 4
-rw-r--r--  chromium/build/win/BUILD.gn | 64
-rw-r--r--  chromium/build/win/run_pgo_profiling_benchmarks.py | 2
151 files changed, 3523 insertions, 3894 deletions
diff --git a/chromium/build/OWNERS b/chromium/build/OWNERS
index e1325381e4a..c644ca7d5fd 100644
--- a/chromium/build/OWNERS
+++ b/chromium/build/OWNERS
@@ -6,8 +6,17 @@ scottmg@chromium.org
thakis@chromium.org
brucedawson@chromium.org
+per-file .gitignore=*
per-file mac_toolchain.py=erikchen@chromium.org
per-file mac_toolchain.py=justincohen@chromium.org
per-file package_mac_toolchain.py=erikchen@chromium.org
per-file package_mac_toolchain.py=justincohen@chromium.org
per-file whitespace_file.txt=*
+per-file OWNERS.status=*
+
+# gn-dev is probably a better team here, but the tooling won't let us
+# have more than one team per component, and infra-dev is a catch-all
+# for other build-related lists.
+#
+# TEAM: infra-dev@chromium.org
+# COMPONENT: Build
diff --git a/chromium/build/OWNERS.status b/chromium/build/OWNERS.status
new file mode 100644
index 00000000000..f5cc1fc8bab
--- /dev/null
+++ b/chromium/build/OWNERS.status
@@ -0,0 +1,12 @@
+# Use this file to set a global status message that should be shown whenever
+# git cl owners proposes to add you as a reviewer.
+#
+# The status messages should be somewhat stable, so please don't use this for
+# short term, or frequently changing updates.
+#
+# The format of the file is
+#
+# you@chromium.org: Single line status message.
+#
+
+jochen@chromium.org: EMEA based reviewer.
diff --git a/chromium/build/android/BUILD.gn b/chromium/build/android/BUILD.gn
index 53366db1934..a09c0f5ddca 100644
--- a/chromium/build/android/BUILD.gn
+++ b/chromium/build/android/BUILD.gn
@@ -52,7 +52,8 @@ if (enable_java_templates) {
_data +=
"android_sdk_root=" + rebase_path(android_sdk_root, root_build_dir) + CR
_data += "android_sdk_version=$android_sdk_version$CR"
- _data += "android_tool_prefix=$android_tool_prefix"
+ _data += "android_tool_prefix=" +
+ rebase_path(android_tool_prefix, root_build_dir) + CR
write_file("$root_build_dir/build_vars.txt", _data)
}
@@ -114,17 +115,27 @@ group("test_runner_py") {
"devil_chromium.json",
"pylib/gtest/filter/",
"test_wrapper/logdog_wrapper.py",
- "//third_party/android_tools/sdk/build-tools/24.0.2/aapt",
- "//third_party/android_tools/sdk/build-tools/24.0.2/dexdump",
- "//third_party/android_tools/sdk/build-tools/24.0.2/lib64/libc++.so",
- "//third_party/android_tools/sdk/build-tools/24.0.2/split-select",
- "//third_party/android_tools/sdk/platform-tools/adb",
+ "${android_sdk_build_tools}/aapt",
+ "${android_sdk_build_tools}/dexdump",
+ "${android_sdk_build_tools}/lib64/libc++.so",
+ "${android_sdk_build_tools}/split-select",
+ "${android_sdk_root}/platform-tools/adb",
"//third_party/catapult/third_party/gsutil/",
"//third_party/catapult/devil/devil/devil_dependencies.json",
"//third_party/proguard/lib/proguard.jar",
]
}
+group("logdog_wrapper_py") {
+ _py_files = read_file("test_wrapper/logdog_wrapper.pydeps", "list lines")
+
+ # Filter out comments.
+ set_sources_assignment_filter([ "#*" ])
+ sources = _py_files
+
+ data = sources
+}
+
# Create wrapper scripts in out/bin that takes care of setting the
# --output-directory.
_scripts_to_wrap = [
diff --git a/chromium/build/android/adb_install_apk.py b/chromium/build/android/adb_install_apk.py
index 7904b41a531..fd8b8d7630b 100755
--- a/chromium/build/android/adb_install_apk.py
+++ b/chromium/build/android/adb_install_apk.py
@@ -113,7 +113,8 @@ def main():
device.Install(apk, reinstall=args.keep_data,
allow_downgrade=args.downgrade,
timeout=args.timeout)
- except device_errors.CommandFailedError:
+ except (device_errors.CommandFailedError,
+ device_errors.DeviceUnreachableError):
logging.exception('Failed to install %s', args.apk_name)
if blacklist:
blacklist.Extend([str(device)], reason='install_failure')
@@ -129,4 +130,3 @@ def main():
if __name__ == '__main__':
sys.exit(main())
-
diff --git a/chromium/build/android/android.isolate b/chromium/build/android/android.isolate
deleted file mode 100644
index 568ad8be4d2..00000000000
--- a/chromium/build/android/android.isolate
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
- 'variables': {
- 'files': [
- '../../build/util/lib/common/',
- '../../third_party/android_tools/sdk/build-tools/',
- '../../third_party/android_tools/sdk/platform-tools/',
- '../../third_party/catapult/',
- '../../third_party/requests/',
- '../../tools/swarming_client/',
- '<(PRODUCT_DIR)/icudtl.dat',
- '<(PRODUCT_DIR)/lib.java/chromium_commands.dex.jar',
- '<(PRODUCT_DIR)/host_forwarder',
- '<(PRODUCT_DIR)/forwarder_dist/',
- '<(PRODUCT_DIR)/md5sum_bin_host',
- '<(PRODUCT_DIR)/md5sum_dist/',
- 'devil_chromium.json',
- 'devil_chromium.py',
- 'gyp/util/',
- 'incremental_install/',
- 'lighttpd_server.py',
- 'pylib/',
- 'test_runner.py',
- ]
- }
-}
diff --git a/chromium/build/android/binary_size/OWNERS b/chromium/build/android/binary_size/OWNERS
new file mode 100644
index 00000000000..c964495a78c
--- /dev/null
+++ b/chromium/build/android/binary_size/OWNERS
@@ -0,0 +1,4 @@
+agrieve@chromium.org
+estevenson@chromium.org
+
+# COMPONENT: Build
diff --git a/chromium/build/android/binary_size/__init__.py b/chromium/build/android/binary_size/__init__.py
new file mode 100644
index 00000000000..a22a6ee39a9
--- /dev/null
+++ b/chromium/build/android/binary_size/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/chromium/build/android/binary_size/apk_downloader.py b/chromium/build/android/binary_size/apk_downloader.py
new file mode 100755
index 00000000000..b90fe5f92c9
--- /dev/null
+++ b/chromium/build/android/binary_size/apk_downloader.py
@@ -0,0 +1,75 @@
+#!/usr/bin/python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import sys
+
+_BUILD_ANDROID = os.path.join(os.path.dirname(__file__), os.pardir)
+sys.path.append(_BUILD_ANDROID)
+from pylib.constants import host_paths
+
+sys.path.append(os.path.join(host_paths.DIR_SOURCE_ROOT, 'build'))
+import find_depot_tools # pylint: disable=import-error,unused-import
+import download_from_google_storage
+
+CURRENT_MILESTONE = '58'
+DEFAULT_BUCKET = 'gs://chromium-android-tools/apks'
+DEFAULT_DOWNLOAD_PATH = os.path.join(os.path.dirname(__file__), 'apks')
+DEFAULT_BUILDER = 'Android_Builder'
+DEFAULT_APK = 'MonochromePublic.apk'
+
+
+def MaybeDownloadApk(builder, milestone, apk, download_path, bucket):
+ """Returns path to the downloaded APK or None if not found."""
+ apk_path = os.path.join(download_path, builder, milestone, apk)
+ sha1_path = apk_path + '.sha1'
+ base_url = os.path.join(bucket, builder, milestone)
+ if os.path.exists(apk_path):
+ print '%s already exists' % apk_path
+ return apk_path
+ elif not os.path.exists(sha1_path):
+ print 'Skipping %s, file not found' % sha1_path
+ return None
+ else:
+ download_from_google_storage.download_from_google_storage(
+ input_filename=sha1_path,
+ sha1_file=sha1_path,
+ base_url=base_url,
+ gsutil=download_from_google_storage.Gsutil(
+ download_from_google_storage.GSUTIL_DEFAULT_PATH),
+ num_threads=1,
+ directory=False,
+ recursive=False,
+ force=False,
+ output=apk_path,
+ ignore_errors=False,
+ verbose=True,
+ auto_platform=False,
+ extract=False)
+ return apk_path
+
+
+def main():
+ argparser = argparse.ArgumentParser(
+ description='Utility for downloading archived APKs used for measuring '
+ 'per-milestone patch size growth.',
+ formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+ argparser.add_argument('--download-path', default=DEFAULT_DOWNLOAD_PATH,
+ help='Directory to store downloaded APKs.')
+ argparser.add_argument('--milestone', default=CURRENT_MILESTONE,
+ help='Download reference APK for this milestone.')
+ argparser.add_argument('--apk', default=DEFAULT_APK, help='APK name.')
+ argparser.add_argument('--builder', default=DEFAULT_BUILDER,
+ help='Builder name.')
+ argparser.add_argument('--bucket', default=DEFAULT_BUCKET,
+ help='Google storage bucket where APK is stored.')
+ args = argparser.parse_args()
+ MaybeDownloadApk(
+ args.builder, args.milestone, args.apk, args.download_path, args.bucket)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/chromium/build/android/binary_size/apks/Android_Builder/56/ChromePublic.apk.sha1 b/chromium/build/android/binary_size/apks/Android_Builder/56/ChromePublic.apk.sha1
new file mode 100644
index 00000000000..c2629a666af
--- /dev/null
+++ b/chromium/build/android/binary_size/apks/Android_Builder/56/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+f6a9731abe16df80a4026843a850d3c721414b96
\ No newline at end of file
diff --git a/chromium/build/android/binary_size/apks/Android_Builder/57/ChromeModernPublic.apk.sha1 b/chromium/build/android/binary_size/apks/Android_Builder/57/ChromeModernPublic.apk.sha1
new file mode 100644
index 00000000000..8c0ab5d5cd6
--- /dev/null
+++ b/chromium/build/android/binary_size/apks/Android_Builder/57/ChromeModernPublic.apk.sha1
@@ -0,0 +1 @@
+a168708620b6370e0325a00c0bc3b4b53ad86a18
\ No newline at end of file
diff --git a/chromium/build/android/binary_size/apks/Android_Builder/57/ChromePublic.apk.sha1 b/chromium/build/android/binary_size/apks/Android_Builder/57/ChromePublic.apk.sha1
new file mode 100644
index 00000000000..119dc0ec48f
--- /dev/null
+++ b/chromium/build/android/binary_size/apks/Android_Builder/57/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+75bc1faae7eff3c3781d1e0343414c1e42d8aeef
\ No newline at end of file
diff --git a/chromium/build/android/binary_size/apks/Android_Builder/58/ChromeModernPublic.apk.sha1 b/chromium/build/android/binary_size/apks/Android_Builder/58/ChromeModernPublic.apk.sha1
new file mode 100644
index 00000000000..28ddb430134
--- /dev/null
+++ b/chromium/build/android/binary_size/apks/Android_Builder/58/ChromeModernPublic.apk.sha1
@@ -0,0 +1 @@
+3e9673008a930aa8bb2bcd7e26f8da91a0448ec3
\ No newline at end of file
diff --git a/chromium/build/android/binary_size/apks/Android_Builder/58/ChromePublic.apk.sha1 b/chromium/build/android/binary_size/apks/Android_Builder/58/ChromePublic.apk.sha1
new file mode 100644
index 00000000000..935e09d2bac
--- /dev/null
+++ b/chromium/build/android/binary_size/apks/Android_Builder/58/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+ec034225a5e637fc83944b5ada634aba8075d1b2
\ No newline at end of file
diff --git a/chromium/build/android/binary_size/apks/Android_Builder/58/MonochromePublic.apk.sha1 b/chromium/build/android/binary_size/apks/Android_Builder/58/MonochromePublic.apk.sha1
new file mode 100644
index 00000000000..8ac82edc403
--- /dev/null
+++ b/chromium/build/android/binary_size/apks/Android_Builder/58/MonochromePublic.apk.sha1
@@ -0,0 +1 @@
+68925703102a2ff5a55e3b00e90a086dfd6d7ee6
\ No newline at end of file
diff --git a/chromium/build/android/binary_size/apks/Android_arm64_Builder/56/ChromePublic.apk.sha1 b/chromium/build/android/binary_size/apks/Android_arm64_Builder/56/ChromePublic.apk.sha1
new file mode 100644
index 00000000000..432f6aef87e
--- /dev/null
+++ b/chromium/build/android/binary_size/apks/Android_arm64_Builder/56/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+c980519b19f3eb010fe0e54a436272e3c94656be
\ No newline at end of file
diff --git a/chromium/build/android/binary_size/apks/Android_arm64_Builder/57/ChromeModernPublic.apk.sha1 b/chromium/build/android/binary_size/apks/Android_arm64_Builder/57/ChromeModernPublic.apk.sha1
new file mode 100644
index 00000000000..aa40702ea6b
--- /dev/null
+++ b/chromium/build/android/binary_size/apks/Android_arm64_Builder/57/ChromeModernPublic.apk.sha1
@@ -0,0 +1 @@
+c1f4797decdd33465d671cf2fb5f266f4c9e1c7a
\ No newline at end of file
diff --git a/chromium/build/android/binary_size/apks/Android_arm64_Builder/57/ChromePublic.apk.sha1 b/chromium/build/android/binary_size/apks/Android_arm64_Builder/57/ChromePublic.apk.sha1
new file mode 100644
index 00000000000..7e7ddf5b81f
--- /dev/null
+++ b/chromium/build/android/binary_size/apks/Android_arm64_Builder/57/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+47f902113feb297714ba22d25d7cb51754990923
\ No newline at end of file
diff --git a/chromium/build/android/binary_size/apks/Android_arm64_Builder/58/ChromeModernPublic.apk.sha1 b/chromium/build/android/binary_size/apks/Android_arm64_Builder/58/ChromeModernPublic.apk.sha1
new file mode 100644
index 00000000000..b97041ab125
--- /dev/null
+++ b/chromium/build/android/binary_size/apks/Android_arm64_Builder/58/ChromeModernPublic.apk.sha1
@@ -0,0 +1 @@
+255104059ee2e51541d67a504ff22327b945768b
\ No newline at end of file
diff --git a/chromium/build/android/binary_size/apks/Android_arm64_Builder/58/ChromePublic.apk.sha1 b/chromium/build/android/binary_size/apks/Android_arm64_Builder/58/ChromePublic.apk.sha1
new file mode 100644
index 00000000000..bd8ffec6780
--- /dev/null
+++ b/chromium/build/android/binary_size/apks/Android_arm64_Builder/58/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+0a18193a6534ccc31deaffecb817b8b6c991647a
\ No newline at end of file
diff --git a/chromium/build/android/binary_size/apks/README.md b/chromium/build/android/binary_size/apks/README.md
new file mode 100644
index 00000000000..474ba93c432
--- /dev/null
+++ b/chromium/build/android/binary_size/apks/README.md
@@ -0,0 +1,24 @@
+### Updating APKs in this folder (for new milestones, builders, or APKs)
+
+1. Find the commit as close as possible to the current branch point (i.e. if the
+latest builds are m59, we want to compare to the commit before the m58 branch
+point).
+
+2. Download and unzip build artifacts from the relevant perf builder. You can
+use this link:
+[https<nolink>://storage.cloud.google.com/chrome-perf/**Android%20Builder**/full-build-linux_**3a87aecc31cd1ffe751dd72c04e5a96a1fc8108a**.zip](https://storage.cloud.google.com/chrome-perf/Android%20Builder/full-build-linux_3a87aecc31cd1ffe751dd72c04e5a96a1fc8108a.zip)
+, replacing the bolded parts with your info OR from the
+"gsutil upload_build_product" step on the bot page (both are Googlers only).
+
+3. Upload the apk: _upload_to_google_storage.py --bucket
+'chromium-android-tools/apks/**Android_Builder**/**58**'
+**path/to/ApkTarget.apk**_ replacing the bolded parts again.
+ * Note that we use **Android_Builder** instead of **Android Builder** (replace
+spaces with underscores)
+
+4. Move the generated .sha1 file to the corresponding place in
+//build/android/binary_size/apks/. In this case, the path would be
+//build/android/binary_size/apks/Android_Builder/58
+
+5. Commit the added .sha1 files and (optionally) update the `CURRENT_MILESTONE`
+in apk_downloader.py
diff --git a/chromium/build/android/devil_chromium.json b/chromium/build/android/devil_chromium.json
index 1f9080d115d..6b87fa2c821 100644
--- a/chromium/build/android/devil_chromium.json
+++ b/chromium/build/android/devil_chromium.json
@@ -5,7 +5,7 @@
"file_info": {
"linux2_x86_64": {
"local_paths": [
- "../../third_party/android_tools/sdk/build-tools/24.0.2/aapt"
+ "../../third_party/android_tools/sdk/build-tools/25.0.2/aapt"
]
}
}
@@ -32,7 +32,7 @@
"file_info": {
"linux2_x86_64": {
"local_paths": [
- "../../third_party/android_tools/sdk/build-tools/24.0.2/dexdump"
+ "../../third_party/android_tools/sdk/build-tools/25.0.2/dexdump"
]
}
}
@@ -41,7 +41,7 @@
"file_info": {
"linux2_x86_64": {
"local_paths": [
- "../../third_party/android_tools/sdk/build-tools/24.0.2/split-select"
+ "../../third_party/android_tools/sdk/build-tools/25.0.2/split-select"
]
}
}
diff --git a/chromium/build/android/enable_asserts.py b/chromium/build/android/enable_asserts.py
index b303edad9a5..c4b07239604 100755
--- a/chromium/build/android/enable_asserts.py
+++ b/chromium/build/android/enable_asserts.py
@@ -42,8 +42,8 @@ def main():
def set_java_asserts_and_restart(device):
if device.SetJavaAsserts(args.set_asserts):
- device.RunShellCommand('stop')
- device.RunShellCommand('start')
+ device.RunShellCommand(['stop'], check_return=True)
+ device.RunShellCommand(['start'], check_return=True)
devices.pMap(set_java_asserts_and_restart)
return 0
diff --git a/chromium/build/android/findbugs_filter/findbugs_exclude.xml b/chromium/build/android/findbugs_filter/findbugs_exclude.xml
index f6a1394f2df..3f0316c32e5 100644
--- a/chromium/build/android/findbugs_filter/findbugs_exclude.xml
+++ b/chromium/build/android/findbugs_filter/findbugs_exclude.xml
@@ -26,4 +26,12 @@ In particular, ~ at the start of a string means it's a regex.
https://developer.android.com/reference/java/security/AccessController.html
-->
<Bug pattern="DP_CREATE_CLASSLOADER_INSIDE_DO_PRIVILEGED" />
+
+ <!-- Ignore unused public Rule in instrumentation tests -->
+ <Match>
+ <Class name="~.*\.*Test" />
+ <Field type="android.support.test.rule.UiThreadTestRule" />
+ <Bug pattern="URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD" />
+ </Match>
+
</FindBugsFilter>
diff --git a/chromium/build/android/gradle/OWNERS b/chromium/build/android/gradle/OWNERS
new file mode 100644
index 00000000000..d1f94845f4d
--- /dev/null
+++ b/chromium/build/android/gradle/OWNERS
@@ -0,0 +1,4 @@
+agrieve@chromium.org
+wnwen@chromium.org
+
+# COMPONENT: Build
diff --git a/chromium/build/android/gradle/generate_gradle.py b/chromium/build/android/gradle/generate_gradle.py
index ab3bd63ceb5..d57dfa9fb14 100755
--- a/chromium/build/android/gradle/generate_gradle.py
+++ b/chromium/build/android/gradle/generate_gradle.py
@@ -249,7 +249,7 @@ class _ProjectContextGenerator(object):
def _GenJniLibs(self, root_entry):
libraries = []
for entry in self._GetEntries(root_entry):
- libraries += entry.BuildConfig().get('native', [])
+ libraries += entry.BuildConfig().get('native', {}).get('libraries', [])
if libraries:
return _CreateJniLibsDir(constants.GetOutDirectory(),
self.EntryOutputDir(root_entry), libraries)
@@ -297,7 +297,8 @@ class _ProjectContextGenerator(object):
def _Srcjars(self, entry):
srcjars = _RebasePath(entry.Gradle().get('bundled_srcjars', []))
if not self.use_gradle_process_resources:
- srcjars += _RebasePath(entry.BuildConfig()['javac']['srcjars'])
+ srcjars += _RebasePath(entry.Javac()['srcjars'])
+ srcjars += _RebasePath(entry.Gradle().get('srcjars'))
return srcjars
def _GetEntries(self, entry):
@@ -632,8 +633,13 @@ def main():
parser.add_argument('--target',
dest='targets',
action='append',
- help='GN target to generate project for. '
- 'May be repeated.')
+ help='GN target to generate project for. Replaces set of '
+ 'default targets. May be repeated.')
+ parser.add_argument('--extra-target',
+ dest='extra_targets',
+ action='append',
+ help='GN target to generate project for, in addition to '
+ 'the default ones. May be repeated.')
parser.add_argument('--project-dir',
help='Root of the output project.',
default=os.path.join('$CHROMIUM_OUTPUT_DIR', 'gradle'))
@@ -675,6 +681,8 @@ def main():
targets = _QueryForAllGnTargets(output_dir)
else:
targets = args.targets or _DEFAULT_TARGETS
+ if args.extra_targets:
+ targets.extend(args.extra_targets)
targets = [re.sub(r'_test_apk$', '_test_apk__apk', t) for t in targets]
# TODO(wnwen): Utilize Gradle's test constructs for our junit tests?
targets = [re.sub(r'_junit_tests$', '_junit_tests__java_binary', t)
@@ -737,7 +745,7 @@ def main():
_ExtractZips(generator.project_dir, zip_tuples)
logging.warning('Project created! (%d subprojects)', len(project_entries))
- logging.warning('Generated projects work best with Android Studio 2.2')
+ logging.warning('Generated projects work with Android Studio 2.3')
logging.warning('For more tips: https://chromium.googlesource.com/chromium'
'/src.git/+/master/docs/android_studio.md')
diff --git a/chromium/build/android/gradle/gn_to_cmake.py b/chromium/build/android/gradle/gn_to_cmake.py
new file mode 100755
index 00000000000..a790d63493b
--- /dev/null
+++ b/chromium/build/android/gradle/gn_to_cmake.py
@@ -0,0 +1,680 @@
+#!/usr/bin/env python
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Usage: gn_to_cmake.py <json_file_name>
+
+gn gen out/config --ide=json --json-ide-script=../../gn/gn_to_cmake.py
+
+or
+
+gn gen out/config --ide=json
+python gn/gn_to_cmake.py out/config/project.json
+
+The first is recommended, as it will auto-update.
+"""
+
+import functools
+import json
+import posixpath
+import string
+import sys
+
+
+def CMakeStringEscape(a):
+ """Escapes the string 'a' for use inside a CMake string.
+
+ This means escaping
+ '\' otherwise it may be seen as modifying the next character
+ '"' otherwise it will end the string
+ ';' otherwise the string becomes a list
+
+ The following do not need to be escaped
+ '#' when the lexer is in string state, this does not start a comment
+ """
+ return a.replace('\\', '\\\\').replace(';', '\\;').replace('"', '\\"')
+
+
+def CMakeTargetEscape(a):
+ """Escapes the string 'a' for use as a CMake target name.
+
+ CMP0037 in CMake 3.0 restricts target names to "^[A-Za-z0-9_.:+-]+$"
+ The ':' is only allowed for imported targets.
+ """
+ def Escape(c):
+ if c in string.ascii_letters or c in string.digits or c in '_.+-':
+ return c
+ else:
+ return '__'
+ return ''.join([Escape(c) for c in a])
+
+
+def SetVariable(out, variable_name, value):
+ """Sets a CMake variable."""
+ out.write('set("')
+ out.write(CMakeStringEscape(variable_name))
+ out.write('" "')
+ out.write(CMakeStringEscape(value))
+ out.write('")\n')
+
+
+def SetVariableList(out, variable_name, values):
+ """Sets a CMake variable to a list."""
+ if not values:
+ return SetVariable(out, variable_name, "")
+ if len(values) == 1:
+ return SetVariable(out, variable_name, values[0])
+ out.write('list(APPEND "')
+ out.write(CMakeStringEscape(variable_name))
+ out.write('"\n "')
+ out.write('"\n "'.join([CMakeStringEscape(value) for value in values]))
+ out.write('")\n')
+
+
+def SetFilesProperty(output, variable, property_name, values, sep):
+ """Given a set of source files, sets the given property on them."""
+ output.write('set_source_files_properties(')
+ WriteVariable(output, variable)
+ output.write(' PROPERTIES ')
+ output.write(property_name)
+ output.write(' "')
+ for value in values:
+ output.write(CMakeStringEscape(value))
+ output.write(sep)
+ output.write('")\n')
+
+
+def SetCurrentTargetProperty(out, property_name, values, sep=''):
+ """Given a target, sets the given property."""
+ out.write('set_target_properties("${target}" PROPERTIES ')
+ out.write(property_name)
+ out.write(' "')
+ for value in values:
+ out.write(CMakeStringEscape(value))
+ out.write(sep)
+ out.write('")\n')
+
+
+def WriteVariable(output, variable_name, prepend=None):
+ if prepend:
+ output.write(prepend)
+ output.write('${')
+ output.write(variable_name)
+ output.write('}')
+
+
+# See GetSourceFileType in gn
+source_file_types = {
+ '.cc': 'cxx',
+ '.cpp': 'cxx',
+ '.cxx': 'cxx',
+ '.c': 'c',
+ '.s': 'asm',
+ '.S': 'asm',
+ '.asm': 'asm',
+ '.o': 'obj',
+ '.obj': 'obj',
+}
+
+
+class CMakeTargetType(object):
+ def __init__(self, command, modifier, property_modifier, is_linkable):
+ self.command = command
+ self.modifier = modifier
+ self.property_modifier = property_modifier
+ self.is_linkable = is_linkable
+CMakeTargetType.custom = CMakeTargetType('add_custom_target', 'SOURCES',
+ None, False)
+
+# See GetStringForOutputType in gn
+cmake_target_types = {
+ 'unknown': CMakeTargetType.custom,
+ 'group': CMakeTargetType.custom,
+ 'executable': CMakeTargetType('add_executable', None, 'RUNTIME', True),
+ 'loadable_module': CMakeTargetType('add_library', 'MODULE', 'LIBRARY', True),
+ 'shared_library': CMakeTargetType('add_library', 'SHARED', 'LIBRARY', True),
+ 'static_library': CMakeTargetType('add_library', 'STATIC', 'ARCHIVE', False),
+ 'source_set': CMakeTargetType('add_library', 'OBJECT', None, False),
+ 'copy': CMakeTargetType.custom,
+ 'action': CMakeTargetType.custom,
+ 'action_foreach': CMakeTargetType.custom,
+ 'bundle_data': CMakeTargetType.custom,
+ 'create_bundle': CMakeTargetType.custom,
+}
+
+
+def FindFirstOf(s, a):
+ return min(s.find(i) for i in a if i in s)
+
+
+def GetCMakeTargetName(gn_target_name):
+ # See <chromium>/src/tools/gn/label.cc#Resolve
+ # //base/test:test_support(//build/toolchain/win:msvc)
+ path_separator = FindFirstOf(gn_target_name, (':', '('))
+ location = None
+ name = None
+ toolchain = None
+ if not path_separator:
+ location = gn_target_name[2:]
+ else:
+ location = gn_target_name[2:path_separator]
+ toolchain_separator = gn_target_name.find('(', path_separator)
+ if toolchain_separator == -1:
+ name = gn_target_name[path_separator + 1:]
+ else:
+ if toolchain_separator > path_separator:
+ name = gn_target_name[path_separator + 1:toolchain_separator]
+ assert gn_target_name.endswith(')')
+ toolchain = gn_target_name[toolchain_separator + 1:-1]
+ assert location or name
+
+ cmake_target_name = None
+ if location.endswith('/' + name):
+ cmake_target_name = location
+ elif location:
+ cmake_target_name = location + '_' + name
+ else:
+ cmake_target_name = name
+ if toolchain:
+ cmake_target_name += '--' + toolchain
+ return CMakeTargetEscape(cmake_target_name)
+
+
+class Project(object):
+ def __init__(self, project_json):
+ self.targets = project_json['targets']
+ build_settings = project_json['build_settings']
+ self.root_path = build_settings['root_path']
+ self.build_path = posixpath.join(self.root_path,
+ build_settings['build_dir'][2:])
+
+ def GetAbsolutePath(self, path):
+ if path.startswith("//"):
+ return self.root_path + "/" + path[2:]
+ else:
+ return path
+
+ def GetObjectSourceDependencies(self, gn_target_name, object_dependencies):
+ """All OBJECT libraries whose sources have not been absorbed."""
+ dependencies = self.targets[gn_target_name].get('deps', [])
+ for dependency in dependencies:
+ dependency_type = self.targets[dependency].get('type', None)
+ if dependency_type == 'source_set':
+ object_dependencies.add(dependency)
+ if dependency_type not in gn_target_types_that_absorb_objects:
+ self.GetObjectSourceDependencies(dependency, object_dependencies)
+
+ def GetObjectLibraryDependencies(self, gn_target_name, object_dependencies):
+ """All OBJECT libraries whose libraries have not been absorbed."""
+ dependencies = self.targets[gn_target_name].get('deps', [])
+ for dependency in dependencies:
+ dependency_type = self.targets[dependency].get('type', None)
+ if dependency_type == 'source_set':
+ object_dependencies.add(dependency)
+ self.GetObjectLibraryDependencies(dependency, object_dependencies)
+
+
+class Target(object):
+ def __init__(self, gn_target_name, project):
+ self.gn_name = gn_target_name
+ self.properties = project.targets[self.gn_name]
+ self.cmake_name = GetCMakeTargetName(self.gn_name)
+ self.gn_type = self.properties.get('type', None)
+ self.cmake_type = cmake_target_types.get(self.gn_type, None)
+
+
+def WriteAction(out, target, project, sources, synthetic_dependencies):
+ outputs = []
+ output_directories = set()
+ for output in target.properties.get('outputs', []):
+ output_abs_path = project.GetAbsolutePath(output)
+ outputs.append(output_abs_path)
+ output_directory = posixpath.dirname(output_abs_path)
+ if output_directory:
+ output_directories.add(output_directory)
+ outputs_name = '${target}__output'
+ SetVariableList(out, outputs_name, outputs)
+
+ out.write('add_custom_command(OUTPUT ')
+ WriteVariable(out, outputs_name)
+ out.write('\n')
+
+ if output_directories:
+ out.write(' COMMAND ${CMAKE_COMMAND} -E make_directory "')
+ out.write('" "'.join([CMakeStringEscape(d) for d in output_directories]))
+ out.write('"\n')
+
+ script = target.properties['script']
+ arguments = target.properties['args']
+ out.write(' COMMAND python "')
+ out.write(CMakeStringEscape(project.GetAbsolutePath(script)))
+ out.write('"')
+ if arguments:
+ out.write('\n "')
+ out.write('"\n "'.join([CMakeStringEscape(a) for a in arguments]))
+ out.write('"')
+ out.write('\n')
+
+ out.write(' DEPENDS ')
+ for sources_type_name in sources.values():
+ WriteVariable(out, sources_type_name, ' ')
+ out.write('\n')
+
+ #TODO: CMake 3.7 is introducing DEPFILE
+
+ out.write(' WORKING_DIRECTORY "')
+ out.write(CMakeStringEscape(project.build_path))
+ out.write('"\n')
+
+ out.write(' COMMENT "Action: ${target}"\n')
+
+ out.write(' VERBATIM)\n')
+
+ synthetic_dependencies.add(outputs_name)
+
+
+def ExpandPlaceholders(source, a):
+ source_dir, source_file_part = posixpath.split(source)
+ source_name_part, _ = posixpath.splitext(source_file_part)
+ #TODO: {{source_gen_dir}}, {{source_out_dir}}, {{response_file_name}}
+ return a.replace('{{source}}', source) \
+ .replace('{{source_file_part}}', source_file_part) \
+ .replace('{{source_name_part}}', source_name_part) \
+ .replace('{{source_dir}}', source_dir) \
+ .replace('{{source_root_relative_dir}}', source_dir)
+
+
+def WriteActionForEach(out, target, project, sources, synthetic_dependencies):
+ all_outputs = target.properties.get('outputs', [])
+ inputs = target.properties.get('sources', [])
+ # TODO: consider expanding 'output_patterns' instead.
+ outputs_per_input = len(all_outputs) / len(inputs)
+ for count, source in enumerate(inputs):
+ source_abs_path = project.GetAbsolutePath(source)
+
+ outputs = []
+ output_directories = set()
+ for output in all_outputs[outputs_per_input * count:
+ outputs_per_input * (count+1)]:
+ output_abs_path = project.GetAbsolutePath(output)
+ outputs.append(output_abs_path)
+ output_directory = posixpath.dirname(output_abs_path)
+ if output_directory:
+ output_directories.add(output_directory)
+ outputs_name = '${target}__output_' + str(count)
+ SetVariableList(out, outputs_name, outputs)
+
+ out.write('add_custom_command(OUTPUT ')
+ WriteVariable(out, outputs_name)
+ out.write('\n')
+
+ if output_directories:
+ out.write(' COMMAND ${CMAKE_COMMAND} -E make_directory "')
+ out.write('" "'.join([CMakeStringEscape(d) for d in output_directories]))
+ out.write('"\n')
+
+ script = target.properties['script']
+ # TODO: need to expand {{xxx}} in arguments
+ arguments = target.properties['args']
+ out.write(' COMMAND python "')
+ out.write(CMakeStringEscape(project.GetAbsolutePath(script)))
+ out.write('"')
+ if arguments:
+ out.write('\n "')
+ expand = functools.partial(ExpandPlaceholders, source_abs_path)
+ out.write('"\n "'.join(
+ [CMakeStringEscape(expand(a)) for a in arguments]))
+ out.write('"')
+ out.write('\n')
+
+ out.write(' DEPENDS')
+ if 'input' in sources:
+ WriteVariable(out, sources['input'], ' ')
+ out.write(' "')
+ out.write(CMakeStringEscape(source_abs_path))
+ out.write('"\n')
+
+ #TODO: CMake 3.7 is introducing DEPFILE
+
+ out.write(' WORKING_DIRECTORY "')
+ out.write(CMakeStringEscape(project.build_path))
+ out.write('"\n')
+
+ out.write(' COMMENT "Action ${target} on ')
+ out.write(CMakeStringEscape(source_abs_path))
+ out.write('"\n')
+
+ out.write(' VERBATIM)\n')
+
+ synthetic_dependencies.add(outputs_name)
+
+
+def WriteCopy(out, target, project, sources, synthetic_dependencies):
+ inputs = target.properties.get('sources', [])
+ raw_outputs = target.properties.get('outputs', [])
+
+ # TODO: consider expanding 'output_patterns' instead.
+ outputs = []
+ for output in raw_outputs:
+ output_abs_path = project.GetAbsolutePath(output)
+ outputs.append(output_abs_path)
+ outputs_name = '${target}__output'
+ SetVariableList(out, outputs_name, outputs)
+
+ out.write('add_custom_command(OUTPUT ')
+ WriteVariable(out, outputs_name)
+ out.write('\n')
+
+ for src, dst in zip(inputs, outputs):
+ out.write(' COMMAND ${CMAKE_COMMAND} -E copy "')
+ out.write(CMakeStringEscape(project.GetAbsolutePath(src)))
+ out.write('" "')
+ out.write(CMakeStringEscape(dst))
+ out.write('"\n')
+
+ out.write(' DEPENDS ')
+ for sources_type_name in sources.values():
+ WriteVariable(out, sources_type_name, ' ')
+ out.write('\n')
+
+ out.write(' WORKING_DIRECTORY "')
+ out.write(CMakeStringEscape(project.build_path))
+ out.write('"\n')
+
+ out.write(' COMMENT "Copy ${target}"\n')
+
+ out.write(' VERBATIM)\n')
+
+ synthetic_dependencies.add(outputs_name)
+
+
+def WriteCompilerFlags(out, target, project, sources):
+ # Hack, set linker language to c if no c or cxx files present.
+ if not 'c' in sources and not 'cxx' in sources:
+ SetCurrentTargetProperty(out, 'LINKER_LANGUAGE', ['C'])
+
+ # Mark uncompiled sources as uncompiled.
+ if 'input' in sources:
+ SetFilesProperty(out, sources['input'], 'HEADER_FILE_ONLY', ('True',), '')
+ if 'other' in sources:
+ SetFilesProperty(out, sources['other'], 'HEADER_FILE_ONLY', ('True',), '')
+
+ # Mark object sources as linkable.
+ if 'obj' in sources:
+ SetFilesProperty(out, sources['obj'], 'EXTERNAL_OBJECT', ('True',), '')
+
+ # TODO: 'output_name', 'output_dir', 'output_extension'
+ # This includes using 'source_outputs' to direct compiler output.
+
+ # Includes
+ includes = target.properties.get('include_dirs', [])
+ if includes:
+ out.write('set_property(TARGET "${target}" ')
+ out.write('APPEND PROPERTY INCLUDE_DIRECTORIES')
+ for include_dir in includes:
+ out.write('\n "')
+ out.write(project.GetAbsolutePath(include_dir))
+ out.write('"')
+ out.write(')\n')
+
+ # Defines
+ defines = target.properties.get('defines', [])
+ if defines:
+ SetCurrentTargetProperty(out, 'COMPILE_DEFINITIONS', defines, ';')
+
+ # Compile flags
+ # "arflags", "asmflags", "cflags",
+ # "cflags_c", "clfags_cc", "cflags_objc", "clfags_objcc"
+ # CMake does not have per target lang compile flags.
+ # TODO: $<$<COMPILE_LANGUAGE:CXX>:cflags_cc style generator expression.
+ # http://public.kitware.com/Bug/view.php?id=14857
+ flags = []
+ flags.extend(target.properties.get('cflags', []))
+ cflags_asm = target.properties.get('asmflags', [])
+ cflags_c = target.properties.get('cflags_c', [])
+ cflags_cxx = target.properties.get('cflags_cc', [])
+ if 'c' in sources and not any(k in sources for k in ('asm', 'cxx')):
+ flags.extend(cflags_c)
+ elif 'cxx' in sources and not any(k in sources for k in ('asm', 'c')):
+ flags.extend(cflags_cxx)
+ else:
+ # TODO: This is broken, one cannot generally set properties on files,
+ # as other targets may require different properties on the same files.
+ if 'asm' in sources and cflags_asm:
+ SetFilesProperty(out, sources['asm'], 'COMPILE_FLAGS', cflags_asm, ' ')
+ if 'c' in sources and cflags_c:
+ SetFilesProperty(out, sources['c'], 'COMPILE_FLAGS', cflags_c, ' ')
+ if 'cxx' in sources and cflags_cxx:
+ SetFilesProperty(out, sources['cxx'], 'COMPILE_FLAGS', cflags_cxx, ' ')
+ if flags:
+ SetCurrentTargetProperty(out, 'COMPILE_FLAGS', flags, ' ')
+
+ # Linker flags
+ ldflags = target.properties.get('ldflags', [])
+ if ldflags:
+ SetCurrentTargetProperty(out, 'LINK_FLAGS', ldflags, ' ')
+
+
+gn_target_types_that_absorb_objects = (
+ 'executable',
+ 'loadable_module',
+ 'shared_library',
+ 'static_library'
+)
+
+
+def WriteSourceVariables(out, target, project):
+ # gn separates the sheep from the goats based on file extensions.
+ # A full separation is done here because of flag handing (see Compile flags).
+ source_types = {'cxx':[], 'c':[], 'asm':[],
+ 'obj':[], 'obj_target':[], 'input':[], 'other':[]}
+
+ # TODO .def files on Windows
+ for source in target.properties.get('sources', []):
+ _, ext = posixpath.splitext(source)
+ source_abs_path = project.GetAbsolutePath(source)
+ source_types[source_file_types.get(ext, 'other')].append(source_abs_path)
+
+ for input_path in target.properties.get('inputs', []):
+ input_abs_path = project.GetAbsolutePath(input_path)
+ source_types['input'].append(input_abs_path)
+
+ # OBJECT library dependencies need to be listed as sources.
+ # Only executables and non-OBJECT libraries may reference an OBJECT library.
+ # https://gitlab.kitware.com/cmake/cmake/issues/14778
+ if target.gn_type in gn_target_types_that_absorb_objects:
+ object_dependencies = set()
+ project.GetObjectSourceDependencies(target.gn_name, object_dependencies)
+ for dependency in object_dependencies:
+ cmake_dependency_name = GetCMakeTargetName(dependency)
+ obj_target_sources = '$<TARGET_OBJECTS:' + cmake_dependency_name + '>'
+ source_types['obj_target'].append(obj_target_sources)
+
+ sources = {}
+ for source_type, sources_of_type in source_types.items():
+ if sources_of_type:
+ sources[source_type] = '${target}__' + source_type + '_srcs'
+ SetVariableList(out, sources[source_type], sources_of_type)
+ return sources
+
+
+def WriteTarget(out, target, project):
+ out.write('\n#')
+ out.write(target.gn_name)
+ out.write('\n')
+
+ if target.cmake_type is None:
+ print 'Target {} has unknown target type {}, skipping.'.format(
+ target.gn_name, target.gn_type)
+ return
+
+ SetVariable(out, 'target', target.cmake_name)
+
+ sources = WriteSourceVariables(out, target, project)
+
+ synthetic_dependencies = set()
+ if target.gn_type == 'action':
+ WriteAction(out, target, project, sources, synthetic_dependencies)
+ if target.gn_type == 'action_foreach':
+ WriteActionForEach(out, target, project, sources, synthetic_dependencies)
+ if target.gn_type == 'copy':
+ WriteCopy(out, target, project, sources, synthetic_dependencies)
+
+ out.write(target.cmake_type.command)
+ out.write('("${target}"')
+ if target.cmake_type.modifier is not None:
+ out.write(' ')
+ out.write(target.cmake_type.modifier)
+ for sources_type_name in sources.values():
+ WriteVariable(out, sources_type_name, ' ')
+ if synthetic_dependencies:
+ out.write(' DEPENDS')
+ for synthetic_dependencie in synthetic_dependencies:
+ WriteVariable(out, synthetic_dependencie, ' ')
+ out.write(')\n')
+
+ if target.cmake_type.command != 'add_custom_target':
+ WriteCompilerFlags(out, target, project, sources)
+
+ libraries = set()
+ nonlibraries = set()
+
+ dependencies = set(target.properties.get('deps', []))
+ # Transitive OBJECT libraries are in sources.
+ # Those sources are dependent on the OBJECT library dependencies.
+ # Those sources cannot bring in library dependencies.
+ object_dependencies = set()
+ if target.gn_type != 'source_set':
+ project.GetObjectLibraryDependencies(target.gn_name, object_dependencies)
+ for object_dependency in object_dependencies:
+ dependencies.update(project.targets.get(object_dependency).get('deps', []))
+
+ for dependency in dependencies:
+ gn_dependency_type = project.targets.get(dependency, {}).get('type', None)
+ cmake_dependency_type = cmake_target_types.get(gn_dependency_type, None)
+ cmake_dependency_name = GetCMakeTargetName(dependency)
+ if cmake_dependency_type.command != 'add_library':
+ nonlibraries.add(cmake_dependency_name)
+ elif cmake_dependency_type.modifier != 'OBJECT':
+ if target.cmake_type.is_linkable:
+ libraries.add(cmake_dependency_name)
+ else:
+ nonlibraries.add(cmake_dependency_name)
+
+ # Non-library dependencies.
+ if nonlibraries:
+ out.write('add_dependencies("${target}"')
+ for nonlibrary in nonlibraries:
+ out.write('\n "')
+ out.write(nonlibrary)
+ out.write('"')
+ out.write(')\n')
+
+ # Non-OBJECT library dependencies.
+ external_libraries = target.properties.get('libs', [])
+ if target.cmake_type.is_linkable and (external_libraries or libraries):
+ library_dirs = target.properties.get('lib_dirs', [])
+ if library_dirs:
+ SetVariableList(out, '${target}__library_directories', library_dirs)
+
+ system_libraries = []
+ for external_library in external_libraries:
+ if '/' in external_library:
+ libraries.add(project.GetAbsolutePath(external_library))
+ else:
+ if external_library.endswith('.framework'):
+ external_library = external_library[:-len('.framework')]
+ system_library = 'library__' + external_library
+ if library_dirs:
+ system_library = system_library + '__for_${target}'
+ out.write('find_library("')
+ out.write(CMakeStringEscape(system_library))
+ out.write('" "')
+ out.write(CMakeStringEscape(external_library))
+ out.write('"')
+ if library_dirs:
+ out.write(' PATHS "')
+ WriteVariable(out, '${target}__library_directories')
+ out.write('"')
+ out.write(')\n')
+ system_libraries.append(system_library)
+ out.write('target_link_libraries("${target}"')
+ for library in libraries:
+ out.write('\n "')
+ out.write(CMakeStringEscape(library))
+ out.write('"')
+ for system_library in system_libraries:
+ WriteVariable(out, system_library, '\n "')
+ out.write('"')
+ out.write(')\n')
+
+
+def WriteProject(project):
+ out = open(posixpath.join(project.build_path, 'CMakeLists.txt'), 'w+')
+ out.write('# Generated by gn_to_cmake.py.\n')
+ out.write('cmake_minimum_required(VERSION 2.8.8 FATAL_ERROR)\n')
+ out.write('cmake_policy(VERSION 2.8.8)\n\n')
+
+ # Update the gn generated ninja build.
+ # If a build file has changed, this will update CMakeLists.ext if
+ # gn gen out/config --ide=json --json-ide-script=../../gn/gn_to_cmake.py
+ # style was used to create this config.
+ out.write('execute_process(COMMAND ninja -C "')
+ out.write(CMakeStringEscape(project.build_path))
+ out.write('" build.ninja)\n')
+
+ out.write('include(CMakeLists.ext)\n')
+ out.close()
+
+ out = open(posixpath.join(project.build_path, 'CMakeLists.ext'), 'w+')
+ out.write('# Generated by gn_to_cmake.py.\n')
+ out.write('cmake_minimum_required(VERSION 2.8.8 FATAL_ERROR)\n')
+ out.write('cmake_policy(VERSION 2.8.8)\n')
+
+ # The following appears to be as-yet undocumented.
+ # http://public.kitware.com/Bug/view.php?id=8392
+ out.write('enable_language(ASM)\n\n')
+ # ASM-ATT does not support .S files.
+ # output.write('enable_language(ASM-ATT)\n')
+
+ # Current issues with automatic re-generation:
+ # The gn generated build.ninja target uses build.ninja.d
+ # but build.ninja.d does not contain the ide or gn.
+ # Currently the ide is not run if the project.json file is not changed
+ # but the ide needs to be run anyway if it has itself changed.
+ # This can be worked around by deleting the project.json file.
+ out.write('file(READ "')
+ gn_deps_file = posixpath.join(project.build_path, 'build.ninja.d')
+ out.write(CMakeStringEscape(gn_deps_file))
+ out.write('" "gn_deps_string" OFFSET ')
+ out.write(str(len('build.ninja: ')))
+ out.write(')\n')
+ # One would think this would need to worry about escaped spaces
+ # but gn doesn't escape spaces here (it generates invalid .d files).
+ out.write('string(REPLACE " " ";" "gn_deps" ${gn_deps_string})\n')
+ out.write('foreach("gn_dep" ${gn_deps})\n')
+ out.write(' configure_file(${gn_dep} "CMakeLists.devnull" COPYONLY)\n')
+ out.write('endforeach("gn_dep")\n')
+
+ for target_name in project.targets.keys():
+ out.write('\n')
+ WriteTarget(out, Target(target_name, project), project)
+
+
+def main():
+ if len(sys.argv) != 2:
+ print 'Usage: ' + sys.argv[0] + ' <json_file_name>'
+ exit(1)
+
+ json_path = sys.argv[1]
+ project = None
+ with open(json_path, 'r') as json_file:
+ project = json.loads(json_file.read())
+
+ WriteProject(Project(project))
+
+
+if __name__ == "__main__":
+ main()
diff --git a/chromium/build/android/gradle/root.jinja b/chromium/build/android/gradle/root.jinja
index 52f0edae3a0..d3730e7bc16 100644
--- a/chromium/build/android/gradle/root.jinja
+++ b/chromium/build/android/gradle/root.jinja
@@ -8,6 +8,6 @@ buildscript {
jcenter()
}
dependencies {
- classpath "com.android.tools.build:gradle:2.2.3"
+ classpath "com.android.tools.build:gradle:2.3.0"
}
}
diff --git a/chromium/build/android/gyp/java_cpp_enum.py b/chromium/build/android/gyp/java_cpp_enum.py
index f6d7ced4f43..3cb70ce8cf1 100755
--- a/chromium/build/android/gyp/java_cpp_enum.py
+++ b/chromium/build/android/gyp/java_cpp_enum.py
@@ -329,17 +329,16 @@ import android.support.annotation.IntDef;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
-public class ${CLASS_NAME} {
- @IntDef({
+@IntDef({
${INT_DEF}
- })
- @Retention(RetentionPolicy.SOURCE)
- public @interface ${ANNOTATION} {}
+})
+@Retention(RetentionPolicy.SOURCE)
+public @interface ${CLASS_NAME} {
${ENUM_ENTRIES}
}
""")
- enum_template = Template(' public static final int ${NAME} = ${VALUE};')
+ enum_template = Template(' int ${NAME} = ${VALUE};')
enum_entries_string = []
enum_names = []
for enum_name, enum_value in enum_definition.entries.iteritems():
@@ -359,25 +358,20 @@ ${ENUM_ENTRIES}
'\n'.join(comments_line_wrapper.wrap(enum_comments)))
enum_entries_string.append(' */')
enum_entries_string.append(enum_template.substitute(values))
- enum_names.append(enum_name)
+ enum_names.append(enum_definition.class_name + '.' + enum_name)
enum_entries_string = '\n'.join(enum_entries_string)
- enum_names_indent = ' ' * 6
+ enum_names_indent = ' ' * 4
wrapper = textwrap.TextWrapper(initial_indent = enum_names_indent,
subsequent_indent = enum_names_indent,
width = 100)
enum_names_string = '\n'.join(wrapper.wrap(', '.join(enum_names)))
- annotation_template = Template('${NAME}Enum')
- annotation_values = { 'NAME': enum_definition.class_name, }
- annotation_name = annotation_template.substitute(annotation_values)
-
values = {
'CLASS_NAME': enum_definition.class_name,
'ENUM_ENTRIES': enum_entries_string,
'PACKAGE': enum_definition.enum_package,
'INT_DEF': enum_names_string,
- 'ANNOTATION': annotation_name,
'SCRIPT_NAME': GetScriptName(),
'SOURCE_PATH': source_path,
'YEAR': str(date.today().year)
diff --git a/chromium/build/android/gyp/java_cpp_enum_tests.py b/chromium/build/android/gyp/java_cpp_enum_tests.py
index f0018a18f9a..8d9e60de698 100755
--- a/chromium/build/android/gyp/java_cpp_enum_tests.py
+++ b/chromium/build/android/gyp/java_cpp_enum_tests.py
@@ -51,21 +51,20 @@ import android.support.annotation.IntDef;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
-public class ClassName {
- @IntDef({
- E1, E2
- })
- @Retention(RetentionPolicy.SOURCE)
- public @interface ClassNameEnum {}
+@IntDef({
+ ClassName.E1, ClassName.E2
+})
+@Retention(RetentionPolicy.SOURCE)
+public @interface ClassName {
/**
* %s
* really really long.
*/
- public static final int E1 = 1;
+ int E1 = 1;
/**
* This is a comment.
*/
- public static final int E2 = 2 << 2;
+ int E2 = 2 << 2;
}
"""
long_comment = ('This is a multiple line comment that is really long. '
diff --git a/chromium/build/android/gyp/push_libraries.py b/chromium/build/android/gyp/push_libraries.py
index 7d904430ccc..1a64f3dc9b0 100755
--- a/chromium/build/android/gyp/push_libraries.py
+++ b/chromium/build/android/gyp/push_libraries.py
@@ -40,7 +40,8 @@ def DoPush(options):
def Push():
if needs_directory:
- device.RunShellCommand('mkdir -p ' + options.device_dir)
+ device.RunShellCommand(
+ ['mkdir', '-p', options.device_dir], check_return=True)
needs_directory[:] = [] # = False
device.PushChangedFiles([(os.path.abspath(host_path), device_path)])
diff --git a/chromium/build/android/gyp/util/proguard_util.py b/chromium/build/android/gyp/util/proguard_util.py
index a965ff33271..6fc57d9f160 100644
--- a/chromium/build/android/gyp/util/proguard_util.py
+++ b/chromium/build/android/gyp/util/proguard_util.py
@@ -15,15 +15,24 @@ class _ProguardOutputFilter(object):
"""
IGNORE_RE = re.compile(
- r'(?:Pro.*version|Note:|Reading|Preparing|.*:.*(?:MANIFEST\.MF|\.empty))')
+ r'(?:Pro.*version|Note:|Reading|Preparing|ProgramClass:|'
+ '.*:.*(?:MANIFEST\.MF|\.empty))')
def __init__(self):
self._last_line_ignored = False
+ self._ignore_next_line = False
def __call__(self, output):
ret = []
for line in output.splitlines(True):
- if not line.startswith(' '):
+ if self._ignore_next_line:
+ self._ignore_next_line = False
+ continue
+
+ if '***BINARY RUN STATS***' in line:
+ self._last_line_ignored = True
+ self._ignore_next_line = True
+ elif not line.startswith(' '):
self._last_line_ignored = bool(self.IGNORE_RE.match(line))
elif 'You should check if you need to specify' in line:
self._last_line_ignored = True
@@ -199,4 +208,3 @@ class ProguardCmdBuilder(object):
}
build_utils.WriteJson(this_info, self._outjar + '.info')
-
diff --git a/chromium/build/android/gyp/write_build_config.py b/chromium/build/android/gyp/write_build_config.py
index b90f06f8d58..2303dc54afc 100755
--- a/chromium/build/android/gyp/write_build_config.py
+++ b/chromium/build/android/gyp/write_build_config.py
@@ -374,6 +374,7 @@ def main(argv):
direct_library_deps = deps.Direct('java_library')
all_library_deps = deps.All('java_library')
+ direct_resources_deps = deps.Direct('android_resources')
all_resources_deps = deps.All('android_resources')
# Resources should be ordered with the highest-level dependency first so that
# overrides are done correctly.
@@ -432,23 +433,23 @@ def main(argv):
gradle['dependent_java_projects'].append(c['path'])
- if (options.type in ('java_binary', 'java_library') and
- not options.bypass_platform_checks):
+ if (options.type in ('java_binary', 'java_library')):
deps_info['requires_android'] = options.requires_android
deps_info['supports_android'] = options.supports_android
- deps_require_android = (all_resources_deps +
- [d['name'] for d in all_library_deps if d['requires_android']])
- deps_not_support_android = (
- [d['name'] for d in all_library_deps if not d['supports_android']])
+ if not options.bypass_platform_checks:
+ deps_require_android = (all_resources_deps +
+ [d['name'] for d in all_library_deps if d['requires_android']])
+ deps_not_support_android = (
+ [d['name'] for d in all_library_deps if not d['supports_android']])
- if deps_require_android and not options.requires_android:
- raise Exception('Some deps require building for the Android platform: ' +
- str(deps_require_android))
+ if deps_require_android and not options.requires_android:
+ raise Exception('Some deps require building for the Android platform: '
+ + str(deps_require_android))
- if deps_not_support_android and options.supports_android:
- raise Exception('Not all deps support the Android platform: ' +
- str(deps_not_support_android))
+ if deps_not_support_android and options.supports_android:
+ raise Exception('Not all deps support the Android platform: '
+ + str(deps_not_support_android))
if options.type in ('java_binary', 'java_library', 'android_apk'):
deps_info['jar_path'] = options.jar_path
@@ -478,8 +479,11 @@ def main(argv):
c['package_name'] for c in all_resources_deps if 'package_name' in c]
if options.type == 'android_apk':
- # Apks will get their resources srcjar explicitly passed to the java step.
+ # Apks will get their resources srcjar explicitly passed to the java step
config['javac']['srcjars'] = []
+ # Gradle may need to generate resources for some apks.
+ gradle['srcjars'] = [
+ c['srcjar'] for c in direct_resources_deps if 'srcjar' in c]
if options.type == 'android_assets':
all_asset_sources = []
diff --git a/chromium/build/android/incremental_install/BUILD.gn b/chromium/build/android/incremental_install/BUILD.gn
index 3bb4696ea4b..3bab39b255c 100644
--- a/chromium/build/android/incremental_install/BUILD.gn
+++ b/chromium/build/android/incremental_install/BUILD.gn
@@ -13,6 +13,7 @@ android_library("bootstrap_java") {
"java/org/chromium/incrementalinstall/ClassLoaderPatcher.java",
"java/org/chromium/incrementalinstall/LockFile.java",
"java/org/chromium/incrementalinstall/Reflect.java",
+ "java/org/chromium/incrementalinstall/SecondInstrumentation.java",
]
emma_never_instrument = true
run_findbugs_override = false
diff --git a/chromium/build/android/incremental_install/README.md b/chromium/build/android/incremental_install/README.md
new file mode 100644
index 00000000000..0916e07d23c
--- /dev/null
+++ b/chromium/build/android/incremental_install/README.md
@@ -0,0 +1,81 @@
+# Incremental Install
+
+Incremental Install is a way of building & deploying an APK that tries to
+minimize the time it takes to make a change and see that change running on
+device. It works best with `is_component_build=true`, and does *not* require a
+rooted device.
+
+## Building
+
+**Option 1:** Add the gn arg:
+
+ incremental_apk_by_default = true
+
+This causes all apks to be built as incremental (except for blacklisted ones).
+
+**Option 2:** Add `_incremental` to the apk target name. E.g.:
+
+ ninja -C out/Debug chrome_public_apk_incremental
+ ninja -C out/Debug chrome_public_test_apk_incremental
+
+## Running
+
+It is not enough to `adb install` them. You must use a generated wrapper script:
+
+ out/Debug/bin/install_chrome_public_apk_incremental
+ out/Debug/bin/run_chrome_public_test_apk_incremental # Automatically sets --fast-local-dev
+
+## Caveats
+
+Isolated processes (on L+) are incompatible with incremental install. As a
+work-around, you can disable isolated processes only for incremental apks using
+the gn arg:
+
+ disable_incremental_isolated_processes = true
+
+# How it Works
+
+## Overview
+
+The basic idea is to side-load .dex and .so files to `/data/local/tmp` rather
+than bundling them in the .apk. Then, when making a change, only the changed
+.dex / .so needs to be pushed to the device.
+
+Faster Builds:
+
+ * No `final_dex` step (where all .dex files are merged into one)
+ * No need to rebuild .apk for code-only changes (but required for resources)
+ * Apks sign faster because they are smaller.
+
+Faster Installs:
+
+ * The .apk is smaller, and so faster to verify.
+ * No need to run `adb install` for code-only changes.
+ * Only changed .so / .dex files are pushed. MD5s of existing on-device files
+   are cached on the host computer.
+
+Slower Initial Runs:
+
+ * The first time you run an incremental .apk, `DexOpt` needs to run on all
+ .dex files. This step is normally done during `adb install`, but is done on
+ start-up for incremental apks.
+ * DexOpt results are cached, so subsequent runs are much faster.
+
+## The Code
+
+All incremental apks have the same classes.dex, which is built from:
+
+ //build/android/incremental_install:bootstrap_java
+
+They also have a transformed `AndroidManifest.xml`, which overrides the
+main application class and any instrumentation classes so that they instead
+point to `BootstrapApplication`. This is built by:
+
+ //build/android/incremental_install/generate_android_manifest.py
+
+Wrapper scripts and install logic are contained in:
+
+ //build/android/incremental_install/create_install_script.py
+ //build/android/incremental_install/installer.py
+
+Finally, GN logic for incremental apks is sprinkled throughout.
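
The manifest rewrite described above boils down to pointing the `<application>` element at `BootstrapApplication` and stashing the real class in a `<meta-data>` entry (the diff below extends the same trick to `<instrumentation>` tags). A minimal sketch of that idea in Python, assuming an ElementTree-based helper; `rewrite_manifest` and its structure are illustrative, not the actual contents of `generate_android_manifest.py`:

    # Illustrative only: the real logic lives in generate_android_manifest.py.
    from xml.etree import ElementTree

    _ANDROID_NS = 'http://schemas.android.com/apk/res/android'
    ElementTree.register_namespace('android', _ANDROID_NS)

    def _attr(name):
      return '{%s}%s' % (_ANDROID_NS, name)

    def rewrite_manifest(manifest_xml):
      """Swaps in BootstrapApplication and records the real app class."""
      doc = ElementTree.fromstring(manifest_xml)
      app_node = doc.find('application')
      real_app = app_node.get(_attr('name'), 'android.app.Application')
      # Boot through BootstrapApplication, which later loads the side-loaded
      # .dex/.so files and hands control to the real application class.
      app_node.set(_attr('name'),
                   'org.chromium.incrementalinstall.BootstrapApplication')
      meta = ElementTree.SubElement(app_node, 'meta-data')
      meta.set(_attr('name'), 'incremental-install-real-app')
      meta.set(_attr('value'), real_app)
      return ElementTree.tostring(doc, encoding='UTF-8')
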
diff --git a/chromium/build/android/incremental_install/generate_android_manifest.py b/chromium/build/android/incremental_install/generate_android_manifest.py
index 15c758dcc6c..a2677166563 100755
--- a/chromium/build/android/incremental_install/generate_android_manifest.py
+++ b/chromium/build/android/incremental_install/generate_android_manifest.py
@@ -22,9 +22,15 @@ ElementTree.register_namespace('android', _ANDROID_NAMESPACE)
_INCREMENTAL_APP_NAME = 'org.chromium.incrementalinstall.BootstrapApplication'
_META_DATA_APP_NAME = 'incremental-install-real-app'
-_META_DATA_INSTRUMENTATION_NAME = 'incremental-install-real-instrumentation'
_DEFAULT_APPLICATION_CLASS = 'android.app.Application'
-_DEFAULT_INSTRUMENTATION_CLASS = 'android.app.Instrumentation'
+_META_DATA_INSTRUMENTATION_NAMES = [
+ 'incremental-install-real-instrumentation-0',
+ 'incremental-install-real-instrumentation-1',
+]
+_INCREMENTAL_INSTRUMENTATION_CLASSES = [
+ 'android.app.Instrumentation',
+ 'org.chromium.incrementalinstall.SecondInstrumentation',
+]
def _AddNamespace(name):
@@ -84,12 +90,13 @@ def _ProcessManifest(main_manifest, disable_isolated_processes):
# Seems to be a bug in ElementTree, as doc.find() doesn't work here.
instrumentation_nodes = doc.findall('instrumentation')
- if instrumentation_nodes:
- instrumentation_node = instrumentation_nodes[0]
+ assert len(instrumentation_nodes) <= 2, (
+ 'Need to update incremental install to support >2 <instrumentation> tags')
+ for i, instrumentation_node in enumerate(instrumentation_nodes):
real_instrumentation_class = instrumentation_node.get(_AddNamespace('name'))
instrumentation_node.set(_AddNamespace('name'),
- _DEFAULT_INSTRUMENTATION_CLASS)
- _CreateMetaData(app_node, _META_DATA_INSTRUMENTATION_NAME,
+ _INCREMENTAL_INSTRUMENTATION_CLASSES[i])
+ _CreateMetaData(app_node, _META_DATA_INSTRUMENTATION_NAMES[i],
real_instrumentation_class)
return ElementTree.tostring(doc, encoding='UTF-8')
diff --git a/chromium/build/android/incremental_install/installer.py b/chromium/build/android/incremental_install/installer.py
index e48f8de23b5..a35cc93d199 100755
--- a/chromium/build/android/incremental_install/installer.py
+++ b/chromium/build/android/incremental_install/installer.py
@@ -196,12 +196,13 @@ def Install(device, apk, split_globs=None, native_libs=None, dex_files=None,
cmd = ('D="%s";'
'mkdir -p $D &&'
'echo -n >$D/install.lock 2>$D/firstrun.lock')
- device.RunShellCommand(cmd % device_incremental_dir, check_return=True)
+ device.RunShellCommand(
+ cmd % device_incremental_dir, shell=True, check_return=True)
# The firstrun.lock is released by the app itself.
def release_installer_lock():
device.RunShellCommand('echo > %s/install.lock' % device_incremental_dir,
- check_return=True)
+ check_return=True, shell=True)
# Concurrency here speeds things up quite a bit, but DeviceUtils hasn't
# been designed for multi-threading. Enabling only because this is a
diff --git a/chromium/build/android/java_assertion_enabler/OWNERS b/chromium/build/android/java_assertion_enabler/OWNERS
index e96e054238c..ea2bde431e3 100644
--- a/chromium/build/android/java_assertion_enabler/OWNERS
+++ b/chromium/build/android/java_assertion_enabler/OWNERS
@@ -1,2 +1,4 @@
agrieve@chromium.org
zpeng@chromium.org
+
+# COMPONENT: Build
diff --git a/chromium/build/android/lint/suppressions.xml b/chromium/build/android/lint/suppressions.xml
index 33882a2d261..b020fe1b052 100644
--- a/chromium/build/android/lint/suppressions.xml
+++ b/chromium/build/android/lint/suppressions.xml
@@ -94,7 +94,7 @@ Still reading?
</issue>
<issue id="IconDensities">
<!-- The large assets below only include a few densities to reduce APK size. -->
- <ignore regexp=": data_reduction_illustration.png, google_icon_sprite.png, physical_web_logo.png, physical_web_logo_anim1.png, physical_web_logo_anim2.png$"/>
+ <ignore regexp=": data_reduction_illustration.png, physical_web_logo.png, physical_web_logo_anim1.png, physical_web_logo_anim2.png$"/>
<!-- crbug.com/457918 is tracking missing assets -->
<ignore regexp="chrome/android/java/res/drawable-xxhdpi"/>
<ignore regexp="chrome/android/java/res/drawable-xxxhdpi"/>
@@ -108,7 +108,6 @@ Still reading?
</issue>
<issue id="IconDipSize">
<ignore regexp="chromecast/internal"/>
- <ignore regexp="google_icon_sprite.png.*"/>
</issue>
<issue id="IconDuplicates" severity="Error">
<ignore regexp="chromecast/internal"/>
@@ -184,6 +183,7 @@ Still reading?
<issue id="NewApi">
<ignore regexp="Attribute `paddingStart` referenced here can result in a crash on some specific devices older than API 17"/>
<ignore regexp="chrome/android/java/res/drawable/downloads_big.xml"/>
+ <ignore regexp="chrome/android/java/res/drawable/ic_bluetooth_connected.xml"/>
<ignore regexp="chrome/android/java/res/values-v17/styles.xml"/>
<ignore regexp="chromecast/internal"/>
<ignore regexp="com/android/tv"/>
@@ -310,7 +310,6 @@ Still reading?
<ignore regexp="chrome/android/java/res/drawable-hdpi/infobar_protected_media_identifier.png"/>
<ignore regexp="chrome/android/java/res/drawable-hdpi/infobar_restore.png"/>
<ignore regexp="chrome/android/java/res/drawable-hdpi/infobar_screen_share.png"/>
- <ignore regexp="chrome/android/java/res/drawable-hdpi/infobar_subresource_filtering.png"/>
<ignore regexp="chrome/android/java/res/drawable-hdpi/mir_card.png"/>
<ignore regexp="chrome/android/java/res/drawable-hdpi/pageinfo_bad.png"/>
<ignore regexp="chrome/android/java/res/drawable-hdpi/pageinfo_good.png"/>
@@ -323,12 +322,6 @@ Still reading?
<ignore regexp="chrome/android/java/res/drawable-hdpi/signin_promo_illustration.png"/>
<ignore regexp="chrome/android/java/res/drawable-nodpi/missing.png"/>
<ignore regexp="chrome/android/java/res/values-v17/styles.xml"/>
- <!--
- This file isn't used if the target SDK version is less than 24.
- TODO(estevenson) remove this and the conditional inclusion in
- AndroidManifest.xml after rolling to SDK 24.
- -->
- <ignore regexp="chrome/android/java/res/xml/network_security_config.xml"/>
<ignore regexp="chromecast/browser/android/apk/res/values/strings.xml"/>
<ignore regexp="chromecast/internal"/>
<ignore regexp="clank"/>
@@ -433,7 +426,7 @@ Still reading?
</issue>
<issue id="UselessParent">
<ignore regexp="android_webview/tools/system_webview_shell/apk/res/layout/activity_webview_browser.xml"/>
- <ignore regexp="chrome/android/java/res/layout/data_reduction_promo_screen.xml"/>
+ <ignore regexp="chrome/android/java/res/layout/data_usage_breakdown.xml"/>
<ignore regexp="chromecast/internal"/>
<ignore regexp="tools/android/kerberos/SpnegoAuthenticator/res/layout/activity_account_authenticator.xml"/>
</issue>
diff --git a/chromium/build/android/main_dex_classes.flags b/chromium/build/android/main_dex_classes.flags
index 81152dcc1ea..33583d5e890 100644
--- a/chromium/build/android/main_dex_classes.flags
+++ b/chromium/build/android/main_dex_classes.flags
@@ -1,3 +1,10 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Proguard flags for what should be kept in the main dex. Only used
+# during main dex list determination, not during actual proguarding.
+
-keep @**.MainDex class * {
*;
}
@@ -8,5 +15,10 @@
# Required when code coverage is enabled.
-keep class com.vladium.** {
- *;
+ *;
+}
+
+# Used by tests for secondary dex extraction.
+-keep class android.support.v4.content.ContextCompat {
+ *;
}
diff --git a/chromium/build/android/multidex.flags b/chromium/build/android/multidex.flags
new file mode 100644
index 00000000000..59e7e856ce5
--- /dev/null
+++ b/chromium/build/android/multidex.flags
@@ -0,0 +1,12 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Proguard flags for what to keep through proguarding when multidex is
+# enabled. Not used during main dex list determination.
+
+-keepattributes *Annotations*
+-keep @interface org.chromium.base.annotations.MainDex
+-keep @**.MainDex class * {
+ *;
+}
diff --git a/chromium/build/android/play_services/config.json b/chromium/build/android/play_services/config.json
index d9a1e817a43..8a755f6495e 100644
--- a/chromium/build/android/play_services/config.json
+++ b/chromium/build/android/play_services/config.json
@@ -1,14 +1,15 @@
{
"clients": [
- "play-services-basement",
- "play-services-tasks",
- "play-services-base",
"play-services-auth-base",
"play-services-auth",
+ "play-services-base",
+ "play-services-basement",
"play-services-cast",
- "play-services-iid",
"play-services-gcm",
+ "play-services-iid",
+ "play-services-location",
"play-services-nearby",
+ "play-services-tasks",
"play-services-vision"
],
"version_number": "10.2.0",
diff --git a/chromium/build/android/play_services/google_play_services_library.zip.sha1 b/chromium/build/android/play_services/google_play_services_library.zip.sha1
index 70df3c6e69a..442310edab8 100644
--- a/chromium/build/android/play_services/google_play_services_library.zip.sha1
+++ b/chromium/build/android/play_services/google_play_services_library.zip.sha1
@@ -1 +1 @@
-7bef387dc3f3fa6fb62d29f26fd18814b3d51ecf \ No newline at end of file
+c0867d3d4daf1fad7d460bcfd27effdf15bbd450 \ No newline at end of file
diff --git a/chromium/build/android/provision_devices.py b/chromium/build/android/provision_devices.py
index 1e8ed92b305..ecf22c9a563 100755
--- a/chromium/build/android/provision_devices.py
+++ b/chromium/build/android/provision_devices.py
@@ -137,7 +137,8 @@ def ProvisionDevice(device, blacklist, options):
if blacklist:
blacklist.Extend([str(device)], reason='provision_timeout')
- except device_errors.CommandFailedError:
+ except (device_errors.CommandFailedError,
+ device_errors.DeviceUnreachableError):
logging.exception('Failed to provision device %s. Adding to blacklist.',
str(device))
if blacklist:
diff --git a/chromium/build/android/pylib/android/logdog_logcat_monitor.py b/chromium/build/android/pylib/android/logdog_logcat_monitor.py
index a89b9f4ea53..5a51cdb7f86 100644
--- a/chromium/build/android/pylib/android/logdog_logcat_monitor.py
+++ b/chromium/build/android/pylib/android/logdog_logcat_monitor.py
@@ -59,10 +59,14 @@ class LogdogLogcatMonitor(logcat_monitor.LogcatMonitor):
def record_to_stream():
if self._logdog_stream:
for data in self._adb.Logcat(filter_specs=self._filter_specs,
- logcat_format='threadtime'):
+ logcat_format='threadtime',
+ iter_timeout=0.08):
+ if self._stop_recording_event.isSet():
+ return
+ if data:
+ self._logdog_stream.write(data + '\n')
if self._stop_recording_event.isSet():
return
- self._logdog_stream.write(data + '\n')
self._stop_recording_event.clear()
if not self._record_thread:
diff --git a/chromium/build/android/pylib/constants/__init__.py b/chromium/build/android/pylib/constants/__init__.py
index 916ee275564..1187f696c17 100644
--- a/chromium/build/android/pylib/constants/__init__.py
+++ b/chromium/build/android/pylib/constants/__init__.py
@@ -96,7 +96,7 @@ DEVICE_PERF_OUTPUT_DIR = (
SCREENSHOTS_DIR = os.path.join(DIR_SOURCE_ROOT, 'out_screenshots')
ANDROID_SDK_VERSION = version_codes.MARSHMALLOW
-ANDROID_SDK_BUILD_TOOLS_VERSION = '24.0.2'
+ANDROID_SDK_BUILD_TOOLS_VERSION = '25.0.2'
ANDROID_SDK_ROOT = os.path.join(DIR_SOURCE_ROOT,
'third_party', 'android_tools', 'sdk')
ANDROID_SDK_TOOLS = os.path.join(ANDROID_SDK_ROOT,
diff --git a/chromium/build/android/pylib/gtest/filter/unit_tests_disabled b/chromium/build/android/pylib/gtest/filter/unit_tests_disabled
index d6547b7872d..93a0b4b80fa 100644
--- a/chromium/build/android/pylib/gtest/filter/unit_tests_disabled
+++ b/chromium/build/android/pylib/gtest/filter/unit_tests_disabled
@@ -26,7 +26,7 @@ DownloadItemModelTest.InterruptTooltip
# l10n_util.cc(655)] Check failed: std::string::npos != pos
DownloadItemModelTest.InterruptStatus
# l10n_util.cc(655)] Check failed: std::string::npos != pos
-WebsiteSettingsTest.OnSiteDataAccessed
+PageInfoTest.OnSiteDataAccessed
# crbug.com/139423
ValueStoreFrontendTest.GetExistingData
diff --git a/chromium/build/android/pylib/gtest/gtest_test_instance.py b/chromium/build/android/pylib/gtest/gtest_test_instance.py
index 678f68b17ea..26288be6ecb 100644
--- a/chromium/build/android/pylib/gtest/gtest_test_instance.py
+++ b/chromium/build/android/pylib/gtest/gtest_test_instance.py
@@ -138,12 +138,11 @@ def ParseGTestOutput(output):
def handle_possibly_unknown_test():
if test_name is not None:
results.append(base_test_result.BaseTestResult(
- test_name,
+ TestNameWithoutDisabledPrefix(test_name),
fallback_result_type or base_test_result.ResultType.UNKNOWN,
duration, log=('\n'.join(log) if log else '')))
for l in output:
- logging.info(l)
matcher = _RE_TEST_STATUS.match(l)
if matcher:
if matcher.group(1) == 'RUN':
@@ -175,7 +174,7 @@ def ParseGTestOutput(output):
if result_type and test_name:
results.append(base_test_result.BaseTestResult(
- test_name, result_type, duration,
+ TestNameWithoutDisabledPrefix(test_name), result_type, duration,
log=('\n'.join(log) if log else '')))
test_name = None
@@ -203,7 +202,7 @@ def ParseGTestXML(xml_content):
log.append(html.unescape(failure.attrib['message']))
results.append(base_test_result.BaseTestResult(
- '%s.%s' % (suite_name, case_name),
+ '%s.%s' % (suite_name, TestNameWithoutDisabledPrefix(case_name)),
result_type,
int(float(testcase.attrib['time']) * 1000),
log=('\n'.join(log) if log else '')))
@@ -262,11 +261,13 @@ class GtestTestInstance(test_instance.TestInstance):
if len(args.suite_name) > 1:
raise ValueError('Platform mode currently supports only 1 gtest suite')
self._exe_dist_dir = None
+ self._external_shard_index = args.test_launcher_shard_index
self._extract_test_list_from_filter = args.extract_test_list_from_filter
+ self._filter_tests_lock = threading.Lock()
self._shard_timeout = args.shard_timeout
self._store_tombstones = args.store_tombstones
+ self._total_external_shards = args.test_launcher_total_shards
self._suite = args.suite_name[0]
- self._filter_tests_lock = threading.Lock()
# GYP:
if args.executable_dist_dir:
@@ -332,11 +333,25 @@ class GtestTestInstance(test_instance.TestInstance):
self._app_data_files = None
self._app_data_file_dir = None
- self._test_arguments = args.test_arguments
+ self._flags = None
+ self._initializeCommandLineFlags(args)
# TODO(jbudorick): Remove this once it's deployed.
self._enable_xml_result_parsing = args.enable_xml_result_parsing
+ def _initializeCommandLineFlags(self, args):
+ self._flags = []
+ if args.command_line_flags:
+ self._flags.extend(args.command_line_flags)
+ if args.device_flags_file:
+ with open(args.device_flags_file) as f:
+ stripped_lines = (l.strip() for l in f)
+ self._flags.extend(flag for flag in stripped_lines if flag)
+ if args.run_disabled:
+ self._flags.append('--gtest_also_run_disabled_tests')
+ if args.test_arguments:
+ self._flags.extend(args.test_arguments.split())
+
@property
def activity(self):
return self._apk_helper and self._apk_helper.GetActivityName()
@@ -366,12 +381,20 @@ class GtestTestInstance(test_instance.TestInstance):
return self._exe_dist_dir
@property
+ def external_shard_index(self):
+ return self._external_shard_index
+
+ @property
+ def extract_test_list_from_filter(self):
+ return self._extract_test_list_from_filter
+
+ @property
def extras(self):
return self._extras
@property
- def gtest_also_run_disabled_tests(self):
- return self._run_disabled
+ def flags(self):
+ return self._flags
@property
def gtest_filter(self):
@@ -406,12 +429,8 @@ class GtestTestInstance(test_instance.TestInstance):
return self._test_apk_incremental_install_script
@property
- def test_arguments(self):
- return self._test_arguments
-
- @property
- def extract_test_list_from_filter(self):
- return self._extract_test_list_from_filter
+ def total_external_shards(self):
+ return self._total_external_shards
#override
def TestType(self):
diff --git a/chromium/build/android/pylib/instrumentation/instrumentation_test_instance.py b/chromium/build/android/pylib/instrumentation/instrumentation_test_instance.py
index ef260f7a51a..d01e36eeeee 100644
--- a/chromium/build/android/pylib/instrumentation/instrumentation_test_instance.py
+++ b/chromium/build/android/pylib/instrumentation/instrumentation_test_instance.py
@@ -321,8 +321,8 @@ def _GetTestsFromPickle(pickle_path, jar_path):
raise TestListPickleException(
'%s newer than %s.' % (jar_path, pickle_path))
- with open(pickle_path, 'r') as pickle_file:
- pickle_data = pickle.loads(pickle_file.read())
+ with open(pickle_path, 'r') as f:
+ pickle_data = pickle.load(f)
jar_md5 = md5sum.CalculateHostMd5Sums(jar_path)[jar_path]
if pickle_data['VERSION'] != _PICKLE_FORMAT_VERSION:
@@ -498,13 +498,15 @@ class InstrumentationTestInstance(test_instance.TestInstance):
self._store_tombstones = False
self._initializeTombstonesAttributes(args)
- self._should_save_images = None
self._should_save_logcat = None
self._initializeLogAttributes(args)
self._edit_shared_prefs = []
self._initializeEditPrefsAttributes(args)
+ self._external_shard_index = args.test_launcher_shard_index
+ self._total_external_shards = args.test_launcher_total_shards
+
def _initializeApkAttributes(self, args, error_func):
if args.apk_under_test:
apk_under_test_path = args.apk_under_test
@@ -640,22 +642,24 @@ class InstrumentationTestInstance(test_instance.TestInstance):
def _initializeFlagAttributes(self, args):
self._flags = ['--enable-test-intents']
- # TODO(jbudorick): Transition "--device-flags" to "--device-flags-file"
- if hasattr(args, 'device_flags') and args.device_flags:
- with open(args.device_flags) as device_flags_file:
- stripped_lines = (l.strip() for l in device_flags_file)
- self._flags.extend([flag for flag in stripped_lines if flag])
- if hasattr(args, 'device_flags_file') and args.device_flags_file:
+ if args.command_line_flags:
+ self._flags.extend(args.command_line_flags)
+ if args.device_flags_file:
with open(args.device_flags_file) as device_flags_file:
stripped_lines = (l.strip() for l in device_flags_file)
- self._flags.extend([flag for flag in stripped_lines if flag])
- if (hasattr(args, 'strict_mode') and
- args.strict_mode and
- args.strict_mode != 'off'):
+ self._flags.extend(flag for flag in stripped_lines if flag)
+ if args.strict_mode and args.strict_mode != 'off':
self._flags.append('--strict-mode=' + args.strict_mode)
- if hasattr(args, 'regenerate_goldens') and args.regenerate_goldens:
+ if args.regenerate_goldens:
self._flags.append('--regenerate-goldens')
+ if args.test_arguments:
+ # --test-arguments is deprecated for gtests and is in the process of
+ # being removed.
+ raise Exception(
+ '--test-arguments is not supported for instrumentation '
+ 'tests. Pass command-line flags directly instead.')
+
def _initializeDriverAttributes(self):
self._driver_apk = os.path.join(
constants.GetOutDirectory(), constants.SDK_BUILD_APKS_DIR,
@@ -679,7 +683,6 @@ class InstrumentationTestInstance(test_instance.TestInstance):
def _initializeLogAttributes(self, args):
self._should_save_logcat = bool(args.json_results_file)
- self._should_save_images = bool(args.json_results_file)
def _initializeEditPrefsAttributes(self, args):
if not hasattr(args, 'shared_prefs_file'):
@@ -736,12 +739,12 @@ class InstrumentationTestInstance(test_instance.TestInstance):
return self._edit_shared_prefs
@property
- def flags(self):
- return self._flags
+ def external_shard_index(self):
+ return self._external_shard_index
@property
- def should_save_images(self):
- return self._should_save_images
+ def flags(self):
+ return self._flags
@property
def should_save_logcat(self):
@@ -795,6 +798,10 @@ class InstrumentationTestInstance(test_instance.TestInstance):
def timeout_scale(self):
return self._timeout_scale
+ @property
+ def total_external_shards(self):
+ return self._total_external_shards
+
#override
def TestType(self):
return 'instrumentation'
diff --git a/chromium/build/android/pylib/instrumentation/instrumentation_test_instance_test.py b/chromium/build/android/pylib/instrumentation/instrumentation_test_instance_test.py
index 386f2897540..2ff8a36eb58 100755
--- a/chromium/build/android/pylib/instrumentation/instrumentation_test_instance_test.py
+++ b/chromium/build/android/pylib/instrumentation/instrumentation_test_instance_test.py
@@ -7,6 +7,8 @@
# pylint: disable=protected-access
+import collections
+import tempfile
import unittest
from pylib.base import base_test_result
@@ -38,6 +40,64 @@ class InstrumentationTestInstanceTest(unittest.TestCase):
return instrumentation_test_instance.InstrumentationTestInstance(
mock.MagicMock(), mock.MagicMock(), lambda s: None)
+ _FlagAttributesArgs = collections.namedtuple(
+ '_FlagAttributesArgs',
+ [
+ 'command_line_flags',
+ 'device_flags_file',
+ 'strict_mode',
+ 'regenerate_goldens',
+ 'test_arguments',
+ ])
+
+ def createFlagAttributesArgs(
+ self, command_line_flags=None, device_flags_file=None,
+ strict_mode=None, regenerate_goldens=None, test_arguments=None):
+ return self._FlagAttributesArgs(
+ command_line_flags, device_flags_file, strict_mode,
+ regenerate_goldens, test_arguments)
+
+ def test_initializeFlagAttributes_commandLineFlags(self):
+ o = self.createTestInstance()
+ args = self.createFlagAttributesArgs(command_line_flags=['--foo', '--bar'])
+ o._initializeFlagAttributes(args)
+ self.assertEquals(o._flags, ['--enable-test-intents', '--foo', '--bar'])
+
+ def test_initializeFlagAttributes_deviceFlagsFile(self):
+ o = self.createTestInstance()
+ with tempfile.NamedTemporaryFile() as flags_file:
+ flags_file.write('\n'.join(['--foo', '--bar']))
+ flags_file.flush()
+
+ args = self.createFlagAttributesArgs(device_flags_file=flags_file.name)
+ o._initializeFlagAttributes(args)
+ self.assertEquals(o._flags, ['--enable-test-intents', '--foo', '--bar'])
+
+ def test_initializeFlagAttributes_strictModeOn(self):
+ o = self.createTestInstance()
+ args = self.createFlagAttributesArgs(strict_mode='on')
+ o._initializeFlagAttributes(args)
+ self.assertEquals(o._flags, ['--enable-test-intents', '--strict-mode=on'])
+
+ def test_initializeFlagAttributes_strictModeOff(self):
+ o = self.createTestInstance()
+ args = self.createFlagAttributesArgs(strict_mode='off')
+ o._initializeFlagAttributes(args)
+ self.assertEquals(o._flags, ['--enable-test-intents'])
+
+ def test_initializeFlagAttributes_regenerateGoldens(self):
+ o = self.createTestInstance()
+ args = self.createFlagAttributesArgs(regenerate_goldens=True)
+ o._initializeFlagAttributes(args)
+ self.assertEquals(
+ o._flags, ['--enable-test-intents', '--regenerate-goldens'])
+
+ def test_initializeFlagAttributes_testArgumentsRaisesException(self):
+ o = self.createTestInstance()
+ args = self.createFlagAttributesArgs(test_arguments='--foo --bar')
+ with self.assertRaises(Exception):
+ o._initializeFlagAttributes(args)
+
def testGetTests_noFilter(self):
o = self.createTestInstance()
raw_tests = [
diff --git a/chromium/build/android/pylib/local/device/local_device_environment.py b/chromium/build/android/pylib/local/device/local_device_environment.py
index 461223147b0..86ffc5f59da 100644
--- a/chromium/build/android/pylib/local/device/local_device_environment.py
+++ b/chromium/build/android/pylib/local/device/local_device_environment.py
@@ -10,12 +10,14 @@ import shutil
import tempfile
import threading
+import devil_chromium
from devil import base_error
from devil.android import device_blacklist
from devil.android import device_errors
from devil.android import device_list
from devil.android import device_utils
from devil.android import logcat_monitor
+from devil.android.sdk import adb_wrapper
from devil.utils import file_utils
from devil.utils import parallelizer
from pylib import constants
@@ -94,6 +96,15 @@ class LocalDeviceEnvironment(environment.Environment):
self._tool_name = args.tool
self._trace_output = args.trace_output
+ devil_chromium.Initialize(
+ output_directory=constants.GetOutDirectory(),
+ adb_path=args.adb_path)
+
+ # Some things such as Forwarder require ADB to be in the environment path.
+ adb_dir = os.path.dirname(adb_wrapper.AdbWrapper.GetAdbPath())
+ if adb_dir and adb_dir not in os.environ['PATH'].split(os.pathsep):
+ os.environ['PATH'] = adb_dir + os.pathsep + os.environ['PATH']
+
#override
def SetUp(self):
if self.trace_output:
diff --git a/chromium/build/android/pylib/local/device/local_device_gtest_run.py b/chromium/build/android/pylib/local/device/local_device_gtest_run.py
index 82da5354627..84a37fc1240 100644
--- a/chromium/build/android/pylib/local/device/local_device_gtest_run.py
+++ b/chromium/build/android/pylib/local/device/local_device_gtest_run.py
@@ -172,6 +172,8 @@ class _ApkDelegate(object):
logging.exception('gtest shard failed.')
except device_errors.CommandTimeoutError:
logging.exception('gtest shard timed out.')
+ except device_errors.DeviceUnreachableError:
+ logging.exception('gtest shard device unreachable.')
except Exception:
device.ForceStop(self._package)
raise
@@ -234,6 +236,8 @@ class _ExeDelegate(object):
except (device_errors.CommandFailedError, KeyError):
pass
+ # Executable tests return a nonzero exit code on test failure, which is
+ # fine from the test runner's perspective; thus check_return=False.
output = device.RunShellCommand(
cmd, cwd=cwd, env=env, check_return=False, large_output=True, **kwargs)
return output
@@ -359,7 +363,6 @@ class LocalDeviceGtestRun(local_device_test_run.LocalDeviceTestRun):
logging.info('No tests found. Output:')
for l in raw_test_list:
logging.info(' %s', l)
- tests = self._test_instance.FilterTests(tests)
return tests
# Query all devices in case one fails.
@@ -370,7 +373,12 @@ class LocalDeviceGtestRun(local_device_test_run.LocalDeviceTestRun):
if all(not tl for tl in test_lists):
raise device_errors.CommandFailedError(
'Failed to list tests on any device')
- return list(sorted(set().union(*[set(tl) for tl in test_lists if tl])))
+ tests = list(sorted(set().union(*[set(tl) for tl in test_lists if tl])))
+ tests = self._test_instance.FilterTests(tests)
+ tests = self._ApplyExternalSharding(
+ tests, self._test_instance.external_shard_index,
+ self._test_instance.total_external_shards)
+ return tests
#override
def _RunTest(self, device, test):
@@ -384,17 +392,19 @@ class LocalDeviceGtestRun(local_device_test_run.LocalDeviceTestRun):
dir=self._delegate.ResultsDirectory(device),
suffix='.xml') as device_tmp_results_file:
- flags = self._test_instance.test_arguments or ''
+ flags = list(self._test_instance.flags)
if self._test_instance.enable_xml_result_parsing:
- flags += ' --gtest_output=xml:%s' % device_tmp_results_file.name
- if self._test_instance.gtest_also_run_disabled_tests:
- flags += ' --gtest_also_run_disabled_tests'
+ flags.append('--gtest_output=xml:%s' % device_tmp_results_file.name)
+
+ logging.info('flags:')
+ for f in flags:
+ logging.info(' %s', f)
with contextlib_ext.Optional(
trace_event.trace(str(test)),
self._env.trace_output):
output = self._delegate.Run(
- test, device, flags=flags,
+ test, device, flags=' '.join(flags),
timeout=timeout, retries=0)
if self._test_instance.enable_xml_result_parsing:
@@ -410,6 +420,9 @@ class LocalDeviceGtestRun(local_device_test_run.LocalDeviceTestRun):
if not self._env.skip_clear_data:
self._delegate.Clear(device)
+ for l in output:
+ logging.info(l)
+
# Parse the output.
# TODO(jbudorick): Transition test scripts away from parsing stdout.
if self._test_instance.enable_xml_result_parsing:
@@ -435,10 +448,15 @@ class LocalDeviceGtestRun(local_device_test_run.LocalDeviceTestRun):
time.strftime('%Y%m%dT%H%M%S', time.localtime()),
device.serial)
tombstones_url = logdog_helper.text(
- stream_name, resolved_tombstones)
+ stream_name, '\n'.join(resolved_tombstones))
result.SetLink('tombstones', tombstones_url)
- not_run_tests = set(test).difference(set(r.GetName() for r in results))
+ tests_stripped_disabled_prefix = set()
+ for t in test:
+ tests_stripped_disabled_prefix.add(
+ gtest_test_instance.TestNameWithoutDisabledPrefix(t))
+ not_run_tests = tests_stripped_disabled_prefix.difference(
+ set(r.GetName() for r in results))
return results, list(not_run_tests) if results else None
#override
diff --git a/chromium/build/android/pylib/local/device/local_device_instrumentation_test_run.py b/chromium/build/android/pylib/local/device/local_device_instrumentation_test_run.py
index 7bfe3f5974e..c6a0c9df943 100644
--- a/chromium/build/android/pylib/local/device/local_device_instrumentation_test_run.py
+++ b/chromium/build/android/pylib/local/device/local_device_instrumentation_test_run.py
@@ -6,7 +6,6 @@ import logging
import os
import posixpath
import re
-import tempfile
import time
from devil.android import device_errors
@@ -15,25 +14,15 @@ from devil.android.sdk import shared_prefs
from devil.utils import reraiser_thread
from pylib import valgrind_tools
from pylib.android import logdog_logcat_monitor
-from pylib.constants import host_paths
from pylib.base import base_test_result
from pylib.instrumentation import instrumentation_test_instance
from pylib.local.device import local_device_environment
from pylib.local.device import local_device_test_run
-from pylib.utils import google_storage_helper
from pylib.utils import logdog_helper
from py_trace_event import trace_event
from py_utils import contextlib_ext
-from py_utils import tempfile_ext
import tombstones
-try:
- from PIL import Image # pylint: disable=import-error
- from PIL import ImageChops # pylint: disable=import-error
- can_compute_diffs = True
-except ImportError:
- can_compute_diffs = False
-
_TAG = 'test_runner_py'
TIMEOUT_ANNOTATIONS = [
@@ -46,15 +35,7 @@ TIMEOUT_ANNOTATIONS = [
('SmallTest', 1 * 60),
]
-_RE_RENDER_IMAGE_NAME = re.compile(
- r'(?P<test_class>\w+)\.'
- r'(?P<description>\w+)\.'
- r'(?P<device_model>\w+)\.'
- r'(?P<orientation>port|land)\.png')
-
-RENDER_TESTS_RESULTS_DIR = {
- 'ChromePublicTest': 'chrome/test/data/android/render_tests'
-}
+LOGCAT_FILTERS = ['*:e', 'chromium:v', 'cr_*:v']
# TODO(jbudorick): Make this private once the instrumentation test_runner is
# deprecated.
@@ -94,30 +75,40 @@ class LocalDeviceInstrumentationTestRun(
self._env.BlacklistDevice)
@trace_event.traced
def individual_device_set_up(dev, host_device_tuples):
- def install_apk():
- if self._test_instance.apk_under_test:
- if self._test_instance.apk_under_test_incremental_install_script:
- local_device_test_run.IncrementalInstall(
- dev,
- self._test_instance.apk_under_test,
- self._test_instance.apk_under_test_incremental_install_script)
- else:
- permissions = self._test_instance.apk_under_test.GetPermissions()
- dev.Install(self._test_instance.apk_under_test,
- permissions=permissions)
-
- if self._test_instance.test_apk_incremental_install_script:
- local_device_test_run.IncrementalInstall(
- dev,
- self._test_instance.test_apk,
- self._test_instance.test_apk_incremental_install_script)
+ steps = []
+ def install_helper(apk, permissions):
+ return lambda: dev.Install(apk, permissions=permissions)
+ def incremental_install_helper(dev, apk, script):
+ return lambda: local_device_test_run.IncrementalInstall(
+ dev, apk, script)
+
+ if self._test_instance.apk_under_test:
+ if self._test_instance.apk_under_test_incremental_install_script:
+ steps.append(incremental_install_helper(
+ dev,
+ self._test_instance.apk_under_test,
+ self._test_instance.
+ apk_under_test_incremental_install_script))
else:
- permissions = self._test_instance.test_apk.GetPermissions()
- dev.Install(self._test_instance.test_apk, permissions=permissions)
+ permissions = self._test_instance.apk_under_test.GetPermissions()
+ steps.append(install_helper(self._test_instance.apk_under_test,
+ permissions))
+
+ if self._test_instance.test_apk_incremental_install_script:
+ steps.append(incremental_install_helper(
+ dev,
+ self._test_instance.test_apk,
+ self._test_instance.
+ test_apk_incremental_install_script))
+ else:
+ permissions = self._test_instance.test_apk.GetPermissions()
+ steps.append(install_helper(self._test_instance.test_apk,
+ permissions))
- for apk in self._test_instance.additional_apks:
- dev.Install(apk)
+ steps.extend(install_helper(apk, None)
+ for apk in self._test_instance.additional_apks)
+ def set_debug_app():
# Set debug app in order to enable reading command line flags on user
# builds
if self._test_instance.flags:
@@ -184,8 +175,8 @@ class LocalDeviceInstrumentationTestRun(
valgrind_tools.SetChromeTimeoutScale(
dev, self._test_instance.timeout_scale)
- steps = (install_apk, edit_shared_prefs, push_test_data,
- create_flag_changer)
+ steps += [set_debug_app, edit_shared_prefs, push_test_data,
+ create_flag_changer]
if self._env.concurrent_adb:
reraiser_thread.RunAsync(steps)
else:
@@ -225,7 +216,11 @@ class LocalDeviceInstrumentationTestRun(
#override
def _GetTests(self):
- return self._test_instance.GetTests()
+ tests = self._test_instance.GetTests()
+ tests = self._ApplyExternalSharding(
+ tests, self._test_instance.external_shard_index,
+ self._test_instance.total_external_shards)
+ return tests
#override
def _GetUniqueTestName(self, test):
@@ -311,7 +306,8 @@ class LocalDeviceInstrumentationTestRun(
time.strftime('%Y%m%dT%H%M%S', time.localtime()),
device.serial)
logmon = logdog_logcat_monitor.LogdogLogcatMonitor(
- device.adb, stream_name)
+ device.adb, stream_name, filter_specs=LOGCAT_FILTERS)
+
with contextlib_ext.Optional(
logmon, self._test_instance.should_save_logcat):
with contextlib_ext.Optional(
@@ -341,8 +337,6 @@ class LocalDeviceInstrumentationTestRun(
if logcat_url:
result.SetLink('logcat', logcat_url)
- self._ProcessRenderTestResults(device, results)
-
# Update the result name if the test used flags.
if flags:
for r in results:
@@ -374,19 +368,11 @@ class LocalDeviceInstrumentationTestRun(
file_name = '%s-%s.png' % (
test_display_name,
time.strftime('%Y%m%dT%H%M%S', time.localtime()))
- screenshot_file = device.TakeScreenshot(
+ saved_dir = device.TakeScreenshot(
os.path.join(self._test_instance.screenshot_dir, file_name))
logging.info(
'Saved screenshot for %s to %s.',
- test_display_name, screenshot_file)
- if self._test_instance.should_save_images:
- link = google_storage_helper.upload(
- google_storage_helper.unique_name('screenshot', device=device),
- screenshot_file,
- bucket='chromium-render-tests')
- for result in results:
- result.SetLink('failure_screenshot', link)
-
+ test_display_name, saved_dir)
logging.info('detected failure in %s. raw output:', test_display_name)
for l in output:
logging.info(' %s', l)
@@ -398,6 +384,7 @@ class LocalDeviceInstrumentationTestRun(
else None)
device.ClearApplicationState(self._test_instance.package_info.package,
permissions=permissions)
+
else:
logging.debug('raw output from %s:', test_display_name)
for l in output:
@@ -405,8 +392,9 @@ class LocalDeviceInstrumentationTestRun(
if self._test_instance.coverage_directory:
device.PullFile(coverage_directory,
self._test_instance.coverage_directory)
- device.RunShellCommand('rm -f %s' % os.path.join(coverage_directory,
- '*'))
+ device.RunShellCommand(
+ 'rm -f %s' % posixpath.join(coverage_directory, '*'),
+ check_return=True, shell=True)
if self._test_instance.store_tombstones:
tombstones_url = None
for result in results:
@@ -425,95 +413,6 @@ class LocalDeviceInstrumentationTestRun(
result.SetLink('tombstones', tombstones_url)
return results, None
- def _ProcessRenderTestResults(self, device, results):
- render_results_dir = RENDER_TESTS_RESULTS_DIR.get(self._test_instance.suite)
- if not render_results_dir:
- return
-
- failure_images_device_dir = posixpath.join(
- device.GetExternalStoragePath(),
- 'chromium_tests_root', render_results_dir, 'failures')
- if not device.FileExists(failure_images_device_dir):
- return
-
- if self._test_instance.should_save_images:
- with tempfile_ext.NamedTemporaryDirectory() as temp_dir:
- device.PullFile(failure_images_device_dir, temp_dir)
- device.RemovePath(failure_images_device_dir, recursive=True)
-
- for failure_filename in os.listdir(
- os.path.join(temp_dir, 'failures')):
-
- m = _RE_RENDER_IMAGE_NAME.match(failure_filename)
- if not m:
- logging.warning('Unexpected file in render test failures: %s',
- failure_filename)
- continue
-
- failure_filepath = os.path.join(
- temp_dir, 'failures', failure_filename)
- failure_link = google_storage_helper.upload(
- google_storage_helper.unique_name(
- failure_filename, device=device),
- failure_filepath,
- bucket='chromium-render-tests')
-
- golden_filepath = os.path.join(
- host_paths.DIR_SOURCE_ROOT, render_results_dir,
- failure_filename)
- if not os.path.exists(golden_filepath):
- logging.error('Cannot find golden image for %s', failure_filename)
- continue
- golden_link = google_storage_helper.upload(
- google_storage_helper.unique_name(
- failure_filename, device=device),
- golden_filepath,
- bucket='chromium-render-tests')
-
- if can_compute_diffs:
- diff_filename = '_diff'.join(
- os.path.splitext(failure_filename))
- diff_filepath = os.path.join(temp_dir, diff_filename)
- (ImageChops.difference(
- Image.open(failure_filepath), Image.open(golden_filepath))
- .convert('L')
- .point(lambda i: 255 if i else 0)
- .save(diff_filepath))
- diff_link = google_storage_helper.upload(
- google_storage_helper.unique_name(
- diff_filename, device=device),
- diff_filepath,
- bucket='chromium-render-tests')
- else:
- diff_link = ''
- logging.error('Error importing PIL library. Image diffs for '
- 'render test results will not be computed.')
-
- with tempfile.NamedTemporaryFile(suffix='.html') as temp_html:
- temp_html.write('''
- <html>
- <table>
- <tr>
- <th>Failure</th>
- <th>Golden</th>
- <th>Diff</th>
- </tr>
- <tr>
- <td><img src="%s"/></td>
- <td><img src="%s"/></td>
- <td><img src="%s"/></td>
- </tr>
- </table>
- </html>
- ''' % (failure_link, golden_link, diff_link))
- html_results_link = google_storage_helper.upload(
- google_storage_helper.unique_name(
- 'render_html', device=device),
- temp_html.name,
- bucket='chromium-render-tests')
- for result in results:
- result.SetLink(failure_filename, html_results_link)
-
#override
def _ShouldRetry(self, test):
if 'RetryOnFailure' in test.get('annotations', {}):
@@ -549,4 +448,3 @@ class LocalDeviceInstrumentationTestRun(
timeout *= cls._GetTimeoutScaleFromAnnotations(annotations)
return timeout
-
diff --git a/chromium/build/android/pylib/local/device/local_device_monkey_test_run.py b/chromium/build/android/pylib/local/device/local_device_monkey_test_run.py
index 31c7c8133d1..a59b67249c9 100644
--- a/chromium/build/android/pylib/local/device/local_device_monkey_test_run.py
+++ b/chromium/build/android/pylib/local/device/local_device_monkey_test_run.py
@@ -115,7 +115,7 @@ class LocalDeviceMonkeyTestRun(local_device_test_run.LocalDeviceTestRun):
cmd.append('-v')
cmd.append(str(self._test_instance.event_count))
return device.RunShellCommand(
- cmd, timeout=self._test_instance.timeout)
+ cmd, timeout=self._test_instance.timeout, check_return=True)
finally:
try:
# Kill the monkey test process on the device. If you manually
diff --git a/chromium/build/android/pylib/local/device/local_device_perf_test_run.py b/chromium/build/android/pylib/local/device/local_device_perf_test_run.py
index 19e20030e93..2ac8b0fd6d5 100644
--- a/chromium/build/android/pylib/local/device/local_device_perf_test_run.py
+++ b/chromium/build/android/pylib/local/device/local_device_perf_test_run.py
@@ -206,10 +206,10 @@ class TestShard(object):
pickled = os.path.join(constants.PERF_OUTPUT_DIR, result['name'])
if os.path.exists(pickled):
with file(pickled, 'r') as f:
- previous = pickle.loads(f.read())
+ previous = pickle.load(f)
result['output'] = previous['output'] + result['output']
with file(pickled, 'w') as f:
- f.write(pickle.dumps(result))
+ pickle.dump(result, f)
def _TestTearDown(self):
if self._output_dir:
@@ -245,7 +245,8 @@ class DeviceTestShard(TestShard):
result_type = self._RunSingleTest(test)
except device_errors.CommandTimeoutError:
result_type = base_test_result.ResultType.TIMEOUT
- except device_errors.CommandFailedError:
+ except (device_errors.CommandFailedError,
+ device_errors.DeviceUnreachableError):
logging.exception('Exception when executing %s.', test)
result_type = base_test_result.ResultType.FAIL
finally:
diff --git a/chromium/build/android/pylib/local/device/local_device_test_run.py b/chromium/build/android/pylib/local/device/local_device_test_run.py
index 1d7ebdf8028..14c4366b051 100644
--- a/chromium/build/android/pylib/local/device/local_device_test_run.py
+++ b/chromium/build/android/pylib/local/device/local_device_test_run.py
@@ -54,6 +54,17 @@ def SubstituteDeviceRoot(device_path, device_root):
return device_path
+class TestsTerminated(Exception):
+ pass
+
+
+class InvalidShardingSettings(Exception):
+ def __init__(self, shard_index, total_shards):
+ super(InvalidShardingSettings, self).__init__(
+ 'Invalid sharding settings. shard_index: %d total_shards: %d'
+ % (shard_index, total_shards))
+
+
class LocalDeviceTestRun(test_run.TestRun):
def __init__(self, env, test_instance):
@@ -95,9 +106,6 @@ class LocalDeviceTestRun(test_run.TestRun):
logging.info('Finished running tests on this device.')
- class TestsTerminated(Exception):
- pass
-
def stop_tests(_signum, _frame):
logging.critical('Received SIGTERM. Stopping test execution.')
exit_now.set()
@@ -177,6 +185,17 @@ class LocalDeviceTestRun(test_run.TestRun):
return [t for t in failed_tests if self._ShouldRetry(t)]
+ def _ApplyExternalSharding(self, tests, shard_index, total_shards):
+ logging.info('Using external sharding settings. This is shard %d/%d',
+ shard_index, total_shards)
+
+ if total_shards < 0 or shard_index < 0 or total_shards <= shard_index:
+ raise InvalidShardingSettings(shard_index, total_shards)
+
+ return [
+ t for t in tests
+ if hash(self._GetUniqueTestName(t)) % total_shards == shard_index]
+
def GetTool(self, device):
if not str(device) in self._tools:
self._tools[str(device)] = valgrind_tools.CreateTool(
@@ -196,7 +215,6 @@ class LocalDeviceTestRun(test_run.TestRun):
def _GetTests(self):
raise NotImplementedError
-
def _RunTest(self, device, test):
raise NotImplementedError
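
The `_ApplyExternalSharding` helper added above partitions tests across external shards by hashing each test's unique name modulo the total shard count, so each shard deterministically picks a disjoint subset without any coordination between shards. A standalone sketch of the same idea (the function and test names below are invented for illustration; determinism relies on a stable string hash across processes, which is the Python 2 default):

    # Invented names; mirrors the hash-based sharding shown in the diff above.
    def apply_external_sharding(tests, shard_index, total_shards):
      if total_shards <= 0 or not 0 <= shard_index < total_shards:
        raise ValueError('Invalid sharding settings: shard %d of %d'
                         % (shard_index, total_shards))
      # A test belongs to the shard its name hashes onto.
      return [t for t in tests if hash(t) % total_shards == shard_index]

    tests = ['FooTest.Basic', 'FooTest.Advanced', 'BarTest.Smoke', 'BazTest.Io']
    for shard in range(3):
      print('shard %d: %s' % (shard, apply_external_sharding(tests, shard, 3)))
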
diff --git a/chromium/build/android/pylib/local/machine/local_machine_environment.py b/chromium/build/android/pylib/local/machine/local_machine_environment.py
index b9f6acad37a..a816e7f4ad6 100644
--- a/chromium/build/android/pylib/local/machine/local_machine_environment.py
+++ b/chromium/build/android/pylib/local/machine/local_machine_environment.py
@@ -2,6 +2,8 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+import devil_chromium
+from pylib import constants
from pylib.base import environment
@@ -10,6 +12,9 @@ class LocalMachineEnvironment(environment.Environment):
def __init__(self, _args, _error_func):
super(LocalMachineEnvironment, self).__init__()
+ devil_chromium.Initialize(
+ output_directory=constants.GetOutDirectory())
+
#override
def SetUp(self):
pass
diff --git a/chromium/build/android/pylib/local/machine/local_machine_junit_test_run.py b/chromium/build/android/pylib/local/machine/local_machine_junit_test_run.py
index ef8bef4f1ae..b4ce4a9a4df 100644
--- a/chromium/build/android/pylib/local/machine/local_machine_junit_test_run.py
+++ b/chromium/build/android/pylib/local/machine/local_machine_junit_test_run.py
@@ -28,9 +28,8 @@ class LocalMachineJunitTestRun(test_run.TestRun):
#override
def RunTests(self):
with tempfile.NamedTemporaryFile() as json_file:
- java_script = os.path.join(
- constants.GetOutDirectory(), 'bin', 'helper',
- self._test_instance.suite)
+ java_script = os.path.join(constants.GetOutDirectory(), 'bin', 'helper',
+ self._test_instance.suite)
command = [java_script]
# Add Jar arguments.
@@ -50,9 +49,11 @@ class LocalMachineJunitTestRun(test_run.TestRun):
# TODO(mikecase): Add a --robolectric-dep-dir arg to test runner.
# Have this arg set by GN in the generated test runner scripts.
jvm_args += [
- '-Drobolectric.dependency.dir=%s' %
- os.path.join(constants.GetOutDirectory(),
- 'lib.java', 'third_party', 'robolectric')]
+ '-Drobolectric.dependency.dir=%s' % os.path.join(
+ constants.GetOutDirectory(), 'lib.java', 'third_party',
+ 'robolectric'),
+ '-Ddir.source.root=%s' % constants.DIR_SOURCE_ROOT,
+ ]
if self._test_instance.coverage_dir:
if not os.path.exists(self._test_instance.coverage_dir):
os.makedirs(self._test_instance.coverage_dir)
diff --git a/chromium/build/android/pylib/perf/perf_test_instance.py b/chromium/build/android/pylib/perf/perf_test_instance.py
index c897f189a1a..69ecf4c262a 100644
--- a/chromium/build/android/pylib/perf/perf_test_instance.py
+++ b/chromium/build/android/pylib/perf/perf_test_instance.py
@@ -26,7 +26,7 @@ def _GetPersistedResult(test_name):
return None
with file(file_name, 'r') as f:
- return pickle.loads(f.read())
+ return pickle.load(f)
def _GetChromiumRevision():
diff --git a/chromium/build/android/pylib/results/presentation/javascript/main_html.js b/chromium/build/android/pylib/results/presentation/javascript/main_html.js
index 2106fcd98b0..ea75a8479e9 100644
--- a/chromium/build/android/pylib/results/presentation/javascript/main_html.js
+++ b/chromium/build/android/pylib/results/presentation/javascript/main_html.js
@@ -46,12 +46,34 @@ function showTestsOfOneSuiteOnly(suite_name) {
});
showTestTable(true);
showSuiteTable(false);
+ window.scrollTo(0, 0);
+}
+
+function showTestsOfOneSuiteOnlyWithNewState(suite_name) {
+ showTestsOfOneSuiteOnly(suite_name);
+ history.pushState({suite: suite_name}, suite_name, '');
}
function showSuiteTableOnly() {
setTitle('Suites Summary')
showTestTable(false);
showSuiteTable(true);
+ window.scrollTo(0, 0);
+}
+
+function showSuiteTableOnlyWithReplaceState() {
+ showSuiteTableOnly();
+ history.replaceState({}, 'suite_table', '');
+}
+
+function setBrowserBackButtonLogic() {
+ window.onpopstate = function(event) {
+ if (!event.state || !event.state.suite) {
+ showSuiteTableOnly();
+ } else {
+ showTestsOfOneSuiteOnly(event.state.suite);
+ }
+ };
}
function setTitle(title) {
@@ -166,21 +188,11 @@ function sortByColumn(head) {
}
}
-function loadPage() {
- var args = getArguments();
- if ('suite' in args) {
- // The user wants to visit detailed 'subpage' of that suite.
- showTestsOfOneSuiteOnly(args['suite']);
- } else {
- // The user wants to visit the summary of all suites.
- showSuiteTableOnly();
- }
-}
-
function reportIssues() {
var url = 'https://bugs.chromium.org/p/chromium/issues/entry?' +
- 'labels=Pri-2,Type-Bug&summary=Result Details Feedback:&' +
+ 'labels=Pri-2,Type-Bug,Restrict-View-Google&' +
+ 'summary=Result Details Feedback:&' +
'comment=Please check out: ' + window.location;
var newWindow = window.open(url, '_blank');
newWindow.focus();
-} \ No newline at end of file
+}
diff --git a/chromium/build/android/pylib/results/presentation/template/main.html b/chromium/build/android/pylib/results/presentation/template/main.html
index cd8c103cb2b..b21d4c9b110 100644
--- a/chromium/build/android/pylib/results/presentation/template/main.html
+++ b/chromium/build/android/pylib/results/presentation/template/main.html
@@ -2,7 +2,7 @@
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
- <link rel="stylesheet" href="../../../../../../../default.css" type="text/css">
+ <link rel="stylesheet" href="{{server_url}}/{{bucket}}/css/default.css" type="text/css">
<style>
table, th, td {
border: 1px solid black;
@@ -52,7 +52,7 @@
<a onclick="reportIssues();"><b>Feedback</b></a>
</body>
<script>
- loadPage();
+ showSuiteTableOnlyWithReplaceState();
// Enable sorting for each column of tables.
Array.prototype.slice.call(document.getElementsByTagName('th'))
.forEach(function(head) {
@@ -61,5 +61,6 @@
function() { sortByColumn(head); });
}
);
+ setBrowserBackButtonLogic();
</script>
</html> \ No newline at end of file
diff --git a/chromium/build/android/pylib/results/presentation/template/table.html b/chromium/build/android/pylib/results/presentation/template/table.html
index 3ed498e678f..5c2a2666d60 100644
--- a/chromium/build/android/pylib/results/presentation/template/table.html
+++ b/chromium/build/android/pylib/results/presentation/template/table.html
@@ -26,6 +26,8 @@
{% for link in cell.links %}
<a href="{{link.href}}" target="{{link.target}}">{{link.data}}</a>
{% endfor %}
+ {%- elif cell.cell_type == 'action' -%}
+ <a onclick="{{cell.action}}">{{cell.data}}</a>
{%- else -%}
{{cell.data}}
{%- endif %}
@@ -43,6 +45,8 @@
{% for link in cell.links %}
<a href="{{link.href}}" target="{{link.target}}"><b>{{link.data}}</b></a>
{% endfor %}
+ {%- elif cell.cell_type == 'action' -%}
+ <a onclick="{{cell.action}}">{{cell.data}}</a>
{%- else -%}
<b>{{cell.data}}</b>
{%- endif %}
diff --git a/chromium/build/android/pylib/results/presentation/test_results_presentation.py b/chromium/build/android/pylib/results/presentation/test_results_presentation.py
index e8a49086f75..3324784a404 100755
--- a/chromium/build/android/pylib/results/presentation/test_results_presentation.py
+++ b/chromium/build/android/pylib/results/presentation/test_results_presentation.py
@@ -7,7 +7,10 @@
import argparse
import collections
import json
+import tempfile
+import time
import os
+import subprocess
import sys
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
@@ -75,6 +78,22 @@ def links_cell(links, html_class='center', rowspan=None):
}
+def action_cell(action, data, html_class):
+ """Formats table cell with javascript actions.
+
+ Args:
+    action: Javascript action.
+    data: Data in cell.
+    html_class: Class for table cell.
+ """
+ return {
+ 'cell_type': 'action',
+ 'action': action,
+ 'data': data,
+ 'class': html_class,
+ }
+
+
def logs_cell(result):
"""Formats result logs data for processing in jinja template."""
link_list = []
@@ -162,13 +181,11 @@ def create_suite_table(results_dict):
]
footer_row = [
- links_cell(
- links=[
- link(href=('?suite=%s' % 'TOTAL'),
- target=LinkTarget.CURRENT_TAB,
- data='TOTAL')
- ],
- ), # suite_name
+ action_cell(
+ 'showTestsOfOneSuiteOnlyWithNewState("TOTAL")',
+ 'TOTAL',
+ 'center'
+ ), # TOTAL
cell(data=0), # number_success_tests
cell(data=0), # number_fail_tests
cell(data=0), # all_tests
@@ -188,12 +205,10 @@ def create_suite_table(results_dict):
suite_row = suite_row_dict[suite_name]
else:
suite_row = [
- links_cell(
- links=[
- link(href=('?suite=%s' % suite_name),
- target=LinkTarget.CURRENT_TAB,
- data=suite_name)],
- html_class='left'
+ action_cell(
+ 'showTestsOfOneSuiteOnlyWithNewState("%s")' % suite_name,
+ suite_name,
+ 'left'
), # suite_name
cell(data=0), # number_success_tests
cell(data=0), # number_fail_tests
@@ -232,7 +247,7 @@ def create_suite_table(results_dict):
footer_row)
-def results_to_html(results_dict, cs_base_url, master_name):
+def results_to_html(results_dict, cs_base_url, bucket, server_url):
"""Convert list of test results into html format."""
test_rows_header, test_rows = create_test_table(results_dict, cs_base_url)
@@ -256,10 +271,10 @@ def results_to_html(results_dict, cs_base_url, master_name):
os.path.join('template', 'main.html'))
return main_template.render( # pylint: disable=no-member
{'tb_values': [suite_table_values, test_table_values],
- 'master_name': master_name})
+ 'bucket': bucket, 'server_url': server_url})
-def result_details(json_path, cs_base_url, master_name):
+def result_details(json_path, cs_base_url, bucket, server_url):
"""Get result details from json path and then convert results to html."""
with open(json_path) as json_file:
@@ -272,21 +287,52 @@ def result_details(json_path, cs_base_url, master_name):
for testsuite_run in json_object['per_iteration_data']:
for test, test_runs in testsuite_run.iteritems():
results_dict[test].extend(test_runs)
- return results_to_html(results_dict, cs_base_url, master_name)
+ return results_to_html(results_dict, cs_base_url, bucket, server_url)
+
+
+def upload_to_google_bucket(html, test_name, builder_name, build_number,
+ bucket, server_url, content_type):
+ with tempfile.NamedTemporaryFile(suffix='.html') as temp_file:
+ temp_file.write(html)
+ temp_file.flush()
+ dest = 'html/%s_%s_%s_%s.html' % (
+ test_name, builder_name, build_number,
+ time.strftime('%Y_%m_%d_T%H_%M_%S'))
+ gsutil_path = os.path.join(BASE_DIR, 'third_party', 'catapult',
+ 'third_party', 'gsutil', 'gsutil.py')
+ subprocess.check_call([
+ sys.executable, gsutil_path, '-h', "Content-Type:%s" % content_type,
+ 'cp', temp_file.name, 'gs://%s/%s' % (bucket, dest)])
+ return '%s/%s/%s' % (server_url, bucket, dest)
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--json-file', help='Path of json file.', required=True)
parser.add_argument('--cs-base-url', help='Base url for code search.',
default='http://cs.chromium.org')
- parser.add_argument('--master-name', help='Master name in urls.')
+ parser.add_argument('--bucket', help='Google storage bucket.', required=True)
+ parser.add_argument('--builder-name', help='Builder name.', required=True)
+ parser.add_argument('--build-number', help='Build number.', required=True)
+ parser.add_argument('--test-name', help='The name of the test.',
+ required=True)
+ parser.add_argument('--server-url', help='The url of the server.',
+ default='https://storage.cloud.google.com')
+ parser.add_argument(
+ '--content-type',
+ help=('Content type, which is used to determine '
+            'whether to download the file or view it in the browser.'),
+ default='text/html',
+ choices=['text/html', 'application/octet-stream'])
args = parser.parse_args()
if os.path.exists(args.json_file):
result_html_string = result_details(args.json_file, args.cs_base_url,
- args.master_name)
- print result_html_string.encode('UTF-8')
+ args.bucket, args.server_url)
+ print upload_to_google_bucket(result_html_string.encode('UTF-8'),
+ args.test_name, args.builder_name,
+ args.build_number, args.bucket,
+ args.server_url, args.content_type)
else:
raise IOError('--json-file %s not found.' % args.json_file)
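The change above stops printing the HTML and instead uploads the generated report to Google Storage via gsutil, printing the viewing URL. A minimal sketch of driving the two new pieces directly, assuming the module is importable as test_results_presentation and using hypothetical bucket, builder, and test names in place of the new required flags:

import test_results_presentation as trp

# Build the HTML report from a JSON results file containing per_iteration_data.
html = trp.result_details(
    json_path='output.json',                  # hypothetical results file
    cs_base_url='http://cs.chromium.org',
    bucket='example-results-bucket',          # hypothetical bucket
    server_url='https://storage.cloud.google.com')

# Upload it and print the URL, mirroring what main() now does.
print trp.upload_to_google_bucket(
    html.encode('UTF-8'),
    test_name='content_shell_test_apk',       # hypothetical values mirroring
    builder_name='android-builder',           # the new required flags
    build_number='1234',
    bucket='example-results-bucket',
    server_url='https://storage.cloud.google.com',
    content_type='text/html')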
diff --git a/chromium/build/android/pylib/utils/emulator.py b/chromium/build/android/pylib/utils/emulator.py
index e2a5fea35df..a5aa544b4c7 100644
--- a/chromium/build/android/pylib/utils/emulator.py
+++ b/chromium/build/android/pylib/utils/emulator.py
@@ -417,7 +417,6 @@ class Emulator(object):
if self.headless:
emulator_command.extend([
'-no-skin',
- '-no-audio',
'-no-window'
])
else:
diff --git a/chromium/build/android/pylib/utils/google_storage_helper.py b/chromium/build/android/pylib/utils/google_storage_helper.py
deleted file mode 100644
index 2fbbc1513eb..00000000000
--- a/chromium/build/android/pylib/utils/google_storage_helper.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Helper functions to upload data to Google Storage.
-
-Text data should be streamed to logdog using |logdog_helper| module.
-Due to logdog not having image or HTML viewer, those instead should be uploaded
-to Google Storage directly using this module.
-"""
-
-import logging
-import os
-import sys
-import time
-
-from devil.utils import cmd_helper
-from pylib.constants import host_paths
-from pylib.utils import decorators
-
-sys.path.append(os.path.join(host_paths.DIR_SOURCE_ROOT, 'build'))
-import find_depot_tools # pylint: disable=import-error
-
-_URL_TEMPLATE = 'https://storage.googleapis.com/%s/'
-
-
-@decorators.NoRaiseException(default_return_value='')
-def upload(name, filepath, bucket):
- """Uploads data to Google Storage.
-
- Args:
- name: Name of the file on Google Storage.
- filepath: Path to file you want to upload.
- bucket: Bucket to upload file to.
- """
- gs_path = os.path.join('gs://%s/' % bucket, name)
- logging.info('Uploading %s to %s', filepath, gs_path)
- cmd_helper.RunCmd(
- [os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'gsutil.py'), 'cp',
- filepath, gs_path])
-
- return os.path.join(_URL_TEMPLATE % bucket, name)
-
-
-def unique_name(basename, timestamp=True, device=None):
- """Helper function for creating a unique name for a logdog stream.
-
- Args:
- basename: Base of the unique name.
- timestamp: Whether or not to add a timestamp to name.
- device: Device to add device serial of to name.
- """
- return '%s%s%s' % (
- basename,
- '_%s' % time.strftime('%Y%m%dT%H%M%S', time.localtime())
- if timestamp else '',
- '_%s' % device.serial if device else '')
diff --git a/chromium/build/android/pylib/utils/logdog_helper.py b/chromium/build/android/pylib/utils/logdog_helper.py
index fc933d32cc9..f5e4137a68c 100644
--- a/chromium/build/android/pylib/utils/logdog_helper.py
+++ b/chromium/build/android/pylib/utils/logdog_helper.py
@@ -80,5 +80,6 @@ def get_viewer_url(name):
@decorators.Memoize
def get_logdog_client():
- logging.debug('Getting logdog client.')
+ logging.info('Getting logdog client.')
return bootstrap.ButlerBootstrap.probe().stream_client()
+
diff --git a/chromium/build/android/render_tests/process_render_test_results.py b/chromium/build/android/render_tests/process_render_test_results.py
new file mode 100755
index 00000000000..9ab0d1b8677
--- /dev/null
+++ b/chromium/build/android/render_tests/process_render_test_results.py
@@ -0,0 +1,214 @@
+#!/usr/bin/env python
+#
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import collections
+import logging
+import os
+import posixpath
+import re
+import shutil
+import sys
+import tempfile
+import zipfile
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir))
+import devil_chromium
+from devil.android import device_utils
+from devil.utils import cmd_helper
+from pylib.constants import host_paths
+
+sys.path.append(os.path.join(host_paths.DIR_SOURCE_ROOT, 'build'))
+import find_depot_tools # pylint: disable=import-error
+
+sys.path.append(os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party'))
+import jinja2 # pylint: disable=import-error
+
+try:
+ from PIL import Image # pylint: disable=import-error
+ from PIL import ImageChops # pylint: disable=import-error
+ can_compute_diffs = True
+except ImportError:
+ can_compute_diffs = False
+ logging.exception('Error importing PIL library. Image diffs will not be '
+ 'displayed properly unless PIL module is installed.')
+
+_RE_IMAGE_NAME = re.compile(
+ r'(?P<test_class>\w+)\.'
+ r'(?P<description>\w+)\.'
+ r'(?P<device_model>\w+)\.'
+ r'(?P<orientation>port|land)\.png')
+
+_RENDER_TEST_BASE_URL = 'https://storage.googleapis.com/chromium-render-tests/'
+_RENDER_TEST_BUCKET = 'gs://chromium-render-tests/'
+
+_JINJA_TEMPLATE_DIR = os.path.dirname(os.path.abspath(__file__))
+_JINJA_TEMPLATE_FILENAME = 'render_webpage.html.jinja2'
+
+
+def _UploadFiles(upload_dir, files):
+ """Upload files to the render tests GS bucket."""
+ if files:
+ google_storage_upload_dir = os.path.join(_RENDER_TEST_BUCKET, upload_dir)
+ cmd = [os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'gsutil.py'),
+ '-m', 'cp']
+ cmd.extend(files)
+ cmd.append(google_storage_upload_dir)
+ cmd_helper.RunCmd(cmd)
+
+
+def _GoogleStorageUrl(upload_dir, filename):
+ return os.path.join(
+ _RENDER_TEST_BASE_URL, upload_dir, os.path.basename(filename))
+
+
+def _ComputeImageDiff(failure_image, golden_image):
+ """Compute mask showing which pixels are different between two images."""
+ return (ImageChops.difference(failure_image, golden_image)
+ .convert('L')
+ .point(lambda i: 255 if i else 0))
+
+
+def ProcessRenderTestResults(devices, render_results_dir,
+ upload_dir, html_file):
+ """Grabs render results from device and generates webpage displaying results.
+
+ Args:
+ devices: List of DeviceUtils objects to grab results from.
+    render_results_dir: Path where render test results are stored.
+        Will look for failed render test results on the device in
+        /sdcard/chromium_tests_root/<render_results_dir>/failures/
+        and will look for golden images at Chromium src/<render_results_dir>/.
+ upload_dir: Directory to upload the render test results to.
+ html_file: File to write the test results to.
+ """
+ results_dict = collections.defaultdict(lambda: collections.defaultdict(list))
+
+ diff_upload_dir = os.path.join(upload_dir, 'diffs')
+ failure_upload_dir = os.path.join(upload_dir, 'failures')
+ golden_upload_dir = os.path.join(upload_dir, 'goldens')
+
+ diff_images = []
+ failure_images = []
+ golden_images = []
+
+ temp_dir = None
+ try:
+ temp_dir = tempfile.mkdtemp()
+
+ for device in devices:
+ failures_device_dir = posixpath.join(
+ device.GetExternalStoragePath(),
+ 'chromium_tests_root', render_results_dir, 'failures')
+ device.PullFile(failures_device_dir, temp_dir)
+
+ for failure_filename in os.listdir(os.path.join(temp_dir, 'failures')):
+ m = _RE_IMAGE_NAME.match(failure_filename)
+ if not m:
+ logging.warning(
+ 'Unexpected file in render test failures, %s', failure_filename)
+ continue
+ failure_file = os.path.join(temp_dir, 'failures', failure_filename)
+
+ # Check to make sure we have golden image for this failure.
+ golden_file = os.path.join(
+ host_paths.DIR_SOURCE_ROOT, render_results_dir, failure_filename)
+ if not os.path.exists(golden_file):
+ logging.error('Cannot find golden image for %s', failure_filename)
+ continue
+
+ # Compute image diff between failure and golden.
+ if can_compute_diffs:
+ diff_image = _ComputeImageDiff(
+ Image.open(failure_file), Image.open(golden_file))
+ diff_filename = '_diff'.join(
+ os.path.splitext(os.path.basename(failure_file)))
+ diff_file = os.path.join(temp_dir, diff_filename)
+ diff_image.save(diff_file)
+ diff_images.append(diff_file)
+
+ failure_images.append(failure_file)
+ golden_images.append(golden_file)
+
+ test_class = m.group('test_class')
+ device_model = m.group('device_model')
+
+ results_entry = {
+ 'description': m.group('description'),
+ 'orientation': m.group('orientation'),
+ 'failure_image': _GoogleStorageUrl(failure_upload_dir, failure_file),
+ 'golden_image': _GoogleStorageUrl(golden_upload_dir, golden_file),
+ }
+ if can_compute_diffs:
+ results_entry.update(
+ {'diff_image': _GoogleStorageUrl(diff_upload_dir, diff_file)})
+ results_dict[test_class][device_model].append(results_entry)
+
+ if can_compute_diffs:
+ _UploadFiles(diff_upload_dir, diff_images)
+ _UploadFiles(failure_upload_dir, failure_images)
+ _UploadFiles(golden_upload_dir, golden_images)
+
+ if failure_images:
+ failures_zipfile = os.path.join(temp_dir, 'failures.zip')
+ with zipfile.ZipFile(failures_zipfile, mode='w') as zf:
+ for failure_file in failure_images:
+ zf.write(failure_file, os.path.join(
+ render_results_dir, os.path.basename(failure_file)))
+ failure_zip_url = _GoogleStorageUrl(upload_dir, failures_zipfile)
+ _UploadFiles(upload_dir, [failures_zipfile])
+ else:
+ failure_zip_url = None
+
+ jinja2_env = jinja2.Environment(
+ loader=jinja2.FileSystemLoader(_JINJA_TEMPLATE_DIR),
+ trim_blocks=True)
+ template = jinja2_env.get_template(_JINJA_TEMPLATE_FILENAME)
+ # pylint: disable=no-member
+ processed_template_output = template.render(
+ full_results=dict(results_dict),
+ failure_zip_url=failure_zip_url, show_diffs=can_compute_diffs)
+ # pylint: enable=no-member
+ with open(html_file, 'wb') as f:
+ f.write(processed_template_output)
+ finally:
+ if temp_dir:
+ shutil.rmtree(temp_dir)
+
+
+def main():
+ parser = argparse.ArgumentParser()
+
+ parser.add_argument('--render-results-dir',
+ required=True,
+ help='Path on device to look for render test images')
+ parser.add_argument('--output-html-file',
+ required=True,
+ help='File to output the results webpage.')
+ parser.add_argument('-d', '--device', dest='devices', action='append',
+ default=[],
+ help='Device to look for render test results on. '
+ 'Default is to look on all connected devices.')
+ parser.add_argument('--adb-path', type=os.path.abspath,
+ help='Absolute path to the adb binary to use.')
+ parser.add_argument('--buildername', type=str, required=True,
+ help='Bot buildername. Used to generate path to upload '
+ 'render test results')
+ parser.add_argument('--build-number', type=str, required=True,
+ help='Bot build number. Used to generate path to upload '
+ 'render test results')
+
+ args = parser.parse_args()
+ devil_chromium.Initialize(adb_path=args.adb_path)
+ devices = device_utils.DeviceUtils.HealthyDevices(device_arg=args.devices)
+
+ upload_dir = os.path.join(args.buildername, args.build_number)
+ ProcessRenderTestResults(
+ devices, args.render_results_dir, upload_dir, args.output_html_file)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
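The script keys everything off the failure image filename and, when PIL is available, the diff mask built by _ComputeImageDiff. A minimal sketch of both pieces, using a hypothetical image name and file paths, and requiring PIL/Pillow:

import re
from PIL import Image, ImageChops

# Same pattern as _RE_IMAGE_NAME above: TestClass.description.DeviceModel.orientation.png
name_re = re.compile(
    r'(?P<test_class>\w+)\.'
    r'(?P<description>\w+)\.'
    r'(?P<device_model>\w+)\.'
    r'(?P<orientation>port|land)\.png')
m = name_re.match('RenderTest.SimplePage.Nexus5.port.png')  # hypothetical name
print m.group('test_class'), m.group('device_model'), m.group('orientation')

# Same mask as _ComputeImageDiff: pixels that differ between the failure and
# golden images become white (255) in the saved mask.
failure = Image.open('failure.png')   # hypothetical paths
golden = Image.open('golden.png')
mask = (ImageChops.difference(failure, golden)
        .convert('L')
        .point(lambda i: 255 if i else 0))
mask.save('failure_diff.png')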
diff --git a/chromium/build/android/render_tests/render_webpage.html.jinja2 b/chromium/build/android/render_tests/render_webpage.html.jinja2
new file mode 100644
index 00000000000..b5ea6039cd1
--- /dev/null
+++ b/chromium/build/android/render_tests/render_webpage.html.jinja2
@@ -0,0 +1,84 @@
+<!--
+ * Copyright 2016 The Chromium Authors. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+-->
+<!DOCTYPE html>
+<html>
+ <head>
+ <link rel="stylesheet" href="https://fonts.googleapis.com/icon?family=Material+Icons">
+ <link rel="stylesheet" href="https://code.getmdl.io/1.2.1/material.blue-indigo.min.css">
+ <script defer src="https://code.getmdl.io/1.2.1/material.min.js"></script>
+
+ <style>
+ div.text-element {
+ text-align: center;
+ }
+ body {
+ background-color: #efefef;
+ }
+ </style>
+ </head>
+
+ <body>
+ {% if failure_zip_url is not none %}
+ <a href="{{ failure_zip_url }}">
+ <div class="mdl-color--primary" width="100%">
+ <h3>Download Image Zip</h3>
+ </div>
+ </a>
+ {% endif %}
+
+ {% for test_class, device_results in full_results.iteritems() %}
+ <div class="mdl-color--primary" width="100%">
+ <h3>{{ test_class }}</h3>
+ </div>
+
+ <div class="mdl-tabs mdl-js-tabs mdl-js-ripple-effect">
+ <div class="mdl-tabs__tab-bar">
+ {% for device_model, _ in device_results.iteritems() %}
+ <a href="#{{ device_model }}-panel" class="mdl-tabs__tab">{{ device_model }}</a>
+ {% endfor %}
+ </div>
+
+ {% for device_model, test_results in device_results.iteritems() %}
+ <div class="mdl-tabs__panel" id="{{ device_model }}-panel">
+
+ <div class="mdl-grid">
+ <div class="mdl-cell mdl-cell--3-col text-element"><b>Description</b></div>
+ <div class="mdl-cell mdl-cell--3-col text-element"><b>Golden</b></div>
+ <div class="mdl-cell mdl-cell--3-col text-element"><b>Failure</b></div>
+ {% if show_diffs %}
+ <div class="mdl-cell mdl-cell--3-col text-element"><b>Diff</b></div>
+ {% endif %}
+ </div>
+ {% for result in test_results %}
+ <div class="mdl-grid">
+ <div class="mdl-cell mdl-cell--3-col text-element">
+ {{ result['description'] }}
+ </div>
+ <div class="mdl-cell mdl-cell--3-col">
+ <a href="{{ result['golden_image'] }}">
+ <img class="mdl-shadow--2dp" src="{{ result['golden_image'] }}" width="100%">
+ </a>
+ </div>
+ <div class="mdl-cell mdl-cell--3-col mdl-shadow--2dp">
+ <a href="{{ result['failure_image'] }}">
+ <img src="{{ result['failure_image'] }}" width="100%">
+ </a>
+ </div>
+ {% if show_diffs %}
+ <div class="mdl-cell mdl-cell--3-col mdl-shadow--2dp">
+ <a href="{{ result['diff_image'] }}">
+ <img src="{{ result['diff_image'] }}" width="100%">
+ </a>
+ </div>
+ {% endif %}
+ </div>
+ {% endfor %}
+ </div>
+ {% endfor %}
+ </div>
+ {% endfor %}
+ </body>
+</html>
diff --git a/chromium/build/android/resource_sizes.py b/chromium/build/android/resource_sizes.py
index 976f888a6e8..c11588413b6 100755
--- a/chromium/build/android/resource_sizes.py
+++ b/chromium/build/android/resource_sizes.py
@@ -23,6 +23,7 @@ import tempfile
import zipfile
import zlib
+from binary_size import apk_downloader
import devil_chromium
from devil.android.sdk import build_tools
from devil.utils import cmd_helper
@@ -35,6 +36,8 @@ _AAPT_PATH = lazy.WeakConstant(lambda: build_tools.GetPath('aapt'))
_GRIT_PATH = os.path.join(host_paths.DIR_SOURCE_ROOT, 'tools', 'grit')
_BUILD_UTILS_PATH = os.path.join(
host_paths.DIR_SOURCE_ROOT, 'build', 'android', 'gyp')
+_APK_PATCH_SIZE_ESTIMATOR_PATH = os.path.join(
+ host_paths.DIR_SOURCE_ROOT, 'third_party', 'apk-patch-size-estimator')
# Prepend the grit module from the source tree so it takes precedence over other
# grit versions that might be present in the search path.
@@ -47,6 +50,9 @@ with host_paths.SysPath(host_paths.BUILD_COMMON_PATH):
with host_paths.SysPath(_BUILD_UTILS_PATH, 1):
from util import build_utils # pylint: disable=import-error
+with host_paths.SysPath(_APK_PATCH_SIZE_ESTIMATOR_PATH):
+ import apk_patch_size_estimator # pylint: disable=import-error
+
# Python had a bug in zipinfo parsing that triggers on ChromeModern.apk
# https://bugs.python.org/issue14315
@@ -154,7 +160,7 @@ def _CreateSectionNameSizeMap(so_path, tools_prefix):
def _ParseLibBuildId(so_path, tools_prefix):
"""Returns the Build ID of the given native library."""
- stdout = _RunReadelf(so_path, ['n'], tools_prefix)
+ stdout = _RunReadelf(so_path, ['-n'], tools_prefix)
match = re.search(r'Build ID: (\w+)', stdout)
return match.group(1) if match else None
@@ -293,6 +299,8 @@ class _FileGroup(object):
None)
def FindLargest(self):
+ if not self._zip_infos:
+ return None
return max(self._zip_infos, key=lambda i: i.file_size)
def ComputeZippedSize(self):
@@ -586,22 +594,26 @@ def _AnnotatePakResources():
def _PrintStaticInitializersCountFromApk(apk_filename, tools_prefix,
chartjson=None):
- print 'Finding static initializers (can take a minute)'
with zipfile.ZipFile(apk_filename) as z:
- infolist = z.infolist()
+ so_files = [f for f in z.infolist()
+ if f.filename.endswith('.so') and f.file_size > 0]
+ # Skip checking static initializers for 32 bit .so files when 64 bit .so files
+ # are present since the 32 bit versions will be checked by bots that only
+ # build the 32 bit version. This avoids the complexity of finding 32 bit .so
+ # files in the output directory in 64 bit builds.
+ has_64 = any('64' in f.filename for f in so_files)
+ files_to_check = [f for f in so_files if not has_64 or '64' in f.filename]
out_dir = constants.GetOutDirectory()
si_count = 0
- for zip_info in infolist:
- # Check file size to account for placeholder libraries.
- if zip_info.filename.endswith('.so') and zip_info.file_size > 0:
- lib_name = os.path.basename(zip_info.filename).replace('crazy.', '')
- unstripped_path = os.path.join(out_dir, 'lib.unstripped', lib_name)
- if os.path.exists(unstripped_path):
- si_count += _PrintStaticInitializersCount(
- apk_filename, zip_info.filename, unstripped_path, tools_prefix)
- else:
- raise Exception('Unstripped .so not found. Looked here: %s',
- unstripped_path)
+ for so_info in files_to_check:
+ lib_name = os.path.basename(so_info.filename).replace('crazy.', '')
+ unstripped_path = os.path.join(out_dir, 'lib.unstripped', lib_name)
+ if os.path.exists(unstripped_path):
+ si_count += _PrintStaticInitializersCount(
+ apk_filename, so_info.filename, unstripped_path, tools_prefix)
+ else:
+      raise Exception('Unstripped .so not found. Looked here: %s' %
+                      unstripped_path)
ReportPerfResult(chartjson, 'StaticInitializersCount', 'count', si_count,
'count')
@@ -622,19 +634,15 @@ def _PrintStaticInitializersCount(apk_path, apk_so_name, so_with_symbols_path,
"""
# GetStaticInitializers uses get-static-initializers.py to get a list of all
# static initializers. This does not work on all archs (particularly arm).
- # TODO(rnephew): Get rid of warning when crbug.com/585588 is fixed.
+ # This mostly copies infra/scripts/legacy/scripts/slave/chromium/sizes.py.
+ print 'Finding static initializers in %s (can take a minute)' % apk_so_name
with Unzip(apk_path, filename=apk_so_name) as unzipped_so:
_VerifyLibBuildIdsMatch(tools_prefix, unzipped_so, so_with_symbols_path)
readelf_si_count = CountStaticInitializers(unzipped_so, tools_prefix)
- sis, dump_si_count = GetStaticInitializers(
- so_with_symbols_path, tools_prefix)
- if readelf_si_count != dump_si_count:
- print ('There are %d files with static initializers, but '
- 'dump-static-initializers found %d: files' %
- (readelf_si_count, dump_si_count))
- else:
- print '%s - Found %d files with static initializers:' % (
- os.path.basename(so_with_symbols_path), dump_si_count)
+ sis, dump_si_count = GetStaticInitializers(so_with_symbols_path, tools_prefix)
+ print ('Found %s files with static initializers using readelf\n'
+ 'Found %s files with static initializers using '
+ 'dump-static-initializers') % (readelf_si_count, dump_si_count)
print '\n'.join(sis)
return readelf_si_count
@@ -674,6 +682,26 @@ def _PrintDexAnalysis(apk_filename, chartjson=None):
'bytes')
+def _PrintPatchSizeEstimate(new_apk, builder, bucket, chartjson=None):
+ apk_name = os.path.basename(new_apk)
+ title = apk_name + '_PatchSizeEstimate'
+ # Reference APK paths have spaces replaced by underscores.
+ builder = builder.replace(' ', '_')
+ old_apk = apk_downloader.MaybeDownloadApk(
+ builder, apk_downloader.CURRENT_MILESTONE, apk_name,
+ apk_downloader.DEFAULT_DOWNLOAD_PATH, bucket)
+ if old_apk:
+ # Use a temp dir in case patch size functions fail to clean up temp files.
+ with build_utils.TempDir() as tmp:
+ tmp_name = os.path.join(tmp, 'patch.tmp')
+ bsdiff = apk_patch_size_estimator.calculate_bsdiff(
+ old_apk, new_apk, None, tmp_name)
+ ReportPerfResult(chartjson, title, 'BSDiff (gzipped)', bsdiff, 'bytes')
+ fbf = apk_patch_size_estimator.calculate_filebyfile(
+ old_apk, new_apk, None, tmp_name)
+ ReportPerfResult(chartjson, title, 'FileByFile (gzipped)', fbf, 'bytes')
+
+
@contextmanager
def Unzip(zip_file, filename=None):
"""Utility for temporary use of a single file in a zip archive."""
@@ -710,8 +738,22 @@ def main():
argparser.add_argument('--no-output-dir', action='store_true',
help='Skip all measurements that rely on having '
'output-dir')
+ argparser.add_argument('--no-static-initializer-check', action='store_false',
+ dest='static_initializer_check', default=True,
+ help='Skip checking for static initializers')
argparser.add_argument('-d', '--device',
help='Dummy option for perf runner.')
+ argparser.add_argument('--estimate-patch-size', action='store_true',
+ help='Include patch size estimates. Useful for perf '
+ 'builders where a reference APK is available but adds '
+ '~3 mins to run time.')
+ argparser.add_argument('--reference-apk-builder',
+ default=apk_downloader.DEFAULT_BUILDER,
+ help='Builder name to use for reference APK for patch '
+ 'size estimates.')
+ argparser.add_argument('--reference-apk-bucket',
+ default=apk_downloader.DEFAULT_BUCKET,
+ help='Storage bucket holding reference APKs.')
argparser.add_argument('apk', help='APK file path.')
args = argparser.parse_args()
@@ -723,16 +765,21 @@ def main():
constants.CheckOutputDirectory()
devil_chromium.Initialize()
build_vars = _ReadBuildVars(constants.GetOutDirectory())
- tools_prefix = build_vars['android_tool_prefix']
+ tools_prefix = os.path.join(constants.GetOutDirectory(),
+ build_vars['android_tool_prefix'])
else:
tools_prefix = ''
PrintApkAnalysis(args.apk, tools_prefix, chartjson=chartjson)
_PrintDexAnalysis(args.apk, chartjson=chartjson)
+ if args.estimate_patch_size:
+ _PrintPatchSizeEstimate(args.apk, args.reference_apk_builder,
+ args.reference_apk_bucket, chartjson=chartjson)
if not args.no_output_dir:
PrintPakAnalysis(args.apk, args.min_pak_resource_size)
- _PrintStaticInitializersCountFromApk(
- args.apk, tools_prefix, chartjson=chartjson)
+ if args.static_initializer_check:
+ _PrintStaticInitializersCountFromApk(
+ args.apk, tools_prefix, chartjson=chartjson)
if chartjson:
results_path = os.path.join(args.output_dir, 'results-chart.json')
logging.critical('Dumping json to %s', results_path)
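Two behavioral changes above are easy to miss: static initializer checks now skip 32-bit libraries whenever a 64-bit .so is packaged, and --estimate-patch-size adds bsdiff and file-by-file estimates against a downloaded reference APK. A minimal sketch of the 64-bit filtering rule, using hypothetical library paths:

# When any 64-bit .so is present, only 64-bit libraries are checked; on a
# pure 32-bit APK every non-placeholder .so is checked.
so_names = ['lib/arm64-v8a/libchrome.so', 'lib/armeabi-v7a/libchrome.so']
has_64 = any('64' in name for name in so_names)
to_check = [name for name in so_names if not has_64 or '64' in name]
print to_check  # ['lib/arm64-v8a/libchrome.so']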
diff --git a/chromium/build/android/test_runner.py b/chromium/build/android/test_runner.py
index 99711722442..cb2767e6624 100755
--- a/chromium/build/android/test_runner.py
+++ b/chromium/build/android/test_runner.py
@@ -12,19 +12,19 @@ import contextlib
import itertools
import logging
import os
+import shutil
import signal
import sys
import threading
import traceback
import unittest
-import devil_chromium
+from pylib.constants import host_paths
+
+if host_paths.DEVIL_PATH not in sys.path:
+ sys.path.append(host_paths.DEVIL_PATH)
+
from devil import base_error
-from devil.android import device_blacklist
-from devil.android import device_errors
-from devil.android import device_utils
-from devil.android import forwarder
-from devil.android import ports
from devil.utils import reraiser_thread
from devil.utils import run_tests_helper
@@ -33,9 +33,9 @@ from pylib.base import base_test_result
from pylib.base import environment_factory
from pylib.base import test_instance_factory
from pylib.base import test_run_factory
-from pylib.constants import host_paths
from pylib.results import json_results
from pylib.results import report_results
+from pylib.utils import logdog_helper
from py_utils import contextlib_ext
@@ -44,87 +44,102 @@ _DEVIL_STATIC_CONFIG_FILE = os.path.abspath(os.path.join(
host_paths.DIR_SOURCE_ROOT, 'build', 'android', 'devil_config.json'))
+def AddTestLauncherOptions(parser):
+ """Adds arguments mirroring //base/test/launcher.
+
+ Args:
+ parser: The parser to which arguments should be added.
+ Returns:
+ The given parser.
+ """
+ parser.add_argument(
+ '--test-launcher-retry-limit',
+ '--test_launcher_retry_limit',
+ '--num_retries', '--num-retries',
+ dest='num_retries', type=int, default=2,
+ help='Number of retries for a test before '
+ 'giving up (default: %(default)s).')
+ parser.add_argument(
+ '--test-launcher-summary-output',
+ '--json-results-file',
+ dest='json_results_file', type=os.path.realpath,
+ help='If set, will dump results in JSON form '
+ 'to specified file.')
+ parser.add_argument(
+ '--test-launcher-shard-index',
+ type=int, default=os.environ.get('GTEST_SHARD_INDEX', 0),
+ help='Index of the external shard to run.')
+ parser.add_argument(
+ '--test-launcher-total-shards',
+ type=int, default=os.environ.get('GTEST_TOTAL_SHARDS', 1),
+ help='Total number of external shards.')
+
+ return parser
+
+
+def AddCommandLineOptions(parser):
+ """Adds arguments to support passing command-line flags to the device."""
+ parser.add_argument(
+ '--device-flags-file',
+ type=os.path.realpath,
+ help='The relative filepath to a file containing '
+ 'command-line flags to set on the device')
+ # TODO(jbudorick): This is deprecated. Remove once clients have switched
+ # to passing command-line flags directly.
+ parser.add_argument(
+ '-a', '--test-arguments',
+ dest='test_arguments', default='',
+ help=argparse.SUPPRESS)
+ parser.set_defaults(allow_unknown=True)
+ parser.set_defaults(command_line_flags=None)
+
+
+def AddTracingOptions(parser):
+ # TODO(shenghuazhang): Move this into AddCommonOptions once it's supported
+ # for all test types.
+ parser.add_argument(
+ '--trace-output',
+ metavar='FILENAME', type=os.path.realpath,
+ help='Path to save test_runner trace data to.')
+
+
def AddCommonOptions(parser):
"""Adds all common options to |parser|."""
- group = parser.add_argument_group('Common Options')
-
default_build_type = os.environ.get('BUILDTYPE', 'Debug')
- debug_or_release_group = group.add_mutually_exclusive_group()
+ debug_or_release_group = parser.add_mutually_exclusive_group()
debug_or_release_group.add_argument(
- '--debug', action='store_const', const='Debug', dest='build_type',
+ '--debug',
+ action='store_const', const='Debug', dest='build_type',
default=default_build_type,
- help=('If set, run test suites under out/Debug. '
- 'Default is env var BUILDTYPE or Debug.'))
+ help='If set, run test suites under out/Debug. '
+ 'Default is env var BUILDTYPE or Debug.')
debug_or_release_group.add_argument(
- '--release', action='store_const', const='Release', dest='build_type',
- help=('If set, run test suites under out/Release. '
- 'Default is env var BUILDTYPE or Debug.'))
-
- # TODO(jbudorick): Remove --build-directory once no bots use it.
- group.add_argument('--build-directory', dest='build_directory',
- help='DEPRECATED')
- group.add_argument('--output-directory', dest='output_directory',
- type=os.path.realpath,
- help=('Path to the directory in which build files are'
- ' located (must include build type). This will take'
- ' precedence over --debug, --release and'
- ' --build-directory'))
- group.add_argument('--num_retries', '--num-retries',
- '--test_launcher_retry_limit',
- '--test-launcher-retry-limit',
- dest='num_retries',
- type=int, default=2,
- help=('Number of retries for a test before '
- 'giving up (default: %(default)s).'))
- group.add_argument('--repeat', '--gtest_repeat', '--gtest-repeat',
- dest='repeat', type=int, default=0,
- help='Number of times to repeat the specified set of '
- 'tests.')
- group.add_argument('--break-on-failure', '--break_on_failure',
- dest='break_on_failure', action='store_true',
- help='Whether to break on failure.')
- group.add_argument('-v',
- '--verbose',
- dest='verbose_count',
- default=0,
- action='count',
- help='Verbose level (multiple times for more)')
- group.add_argument('--flakiness-dashboard-server',
- dest='flakiness_dashboard_server',
- help=('Address of the server that is hosting the '
- 'Chrome for Android flakiness dashboard.'))
- group.add_argument('--enable-platform-mode', action='store_true',
- help=('Run the test scripts in platform mode, which '
- 'conceptually separates the test runner from the '
- '"device" (local or remote, real or emulated) on '
- 'which the tests are running. [experimental]'))
- group.add_argument('-e', '--environment', default='local',
- choices=constants.VALID_ENVIRONMENTS,
- help='Test environment to run in (default: %(default)s).')
- group.add_argument('--adb-path', type=os.path.realpath,
- help=('Specify the absolute path of the adb binary that '
- 'should be used.'))
- group.add_argument('--json-results-file', '--test-launcher-summary-output',
- dest='json_results_file', type=os.path.realpath,
- help='If set, will dump results in JSON form '
- 'to specified file.')
- group.add_argument('--trace-output', metavar='FILENAME',
- type=os.path.realpath,
- help='Path to save test_runner trace data to. This option '
- 'has been implemented for gtest, instrumentation '
- 'test and perf test.')
-
- logcat_output_group = group.add_mutually_exclusive_group()
- logcat_output_group.add_argument(
- '--logcat-output-dir', type=os.path.realpath,
- help='If set, will dump logcats recorded during test run to directory. '
- 'File names will be the device ids with timestamps.')
- logcat_output_group.add_argument(
- '--logcat-output-file', type=os.path.realpath,
- help='If set, will merge logcats recorded during test run and dump them '
- 'to the specified file.')
+ '--release',
+ action='store_const', const='Release', dest='build_type',
+ help='If set, run test suites under out/Release. '
+ 'Default is env var BUILDTYPE or Debug.')
+
+ parser.add_argument(
+ '--break-on-failure', '--break_on_failure',
+ dest='break_on_failure', action='store_true',
+ help='Whether to break on failure.')
+
+ # TODO(jbudorick): Remove this once everything has switched to platform
+ # mode.
+ parser.add_argument(
+ '--enable-platform-mode',
+ action='store_true',
+ help='Run the test scripts in platform mode, which '
+ 'conceptually separates the test runner from the '
+ '"device" (local or remote, real or emulated) on '
+ 'which the tests are running. [experimental]')
+
+ parser.add_argument(
+ '-e', '--environment',
+ default='local', choices=constants.VALID_ENVIRONMENTS,
+ help='Test environment to run in (default: %(default)s).')
class FastLocalDevAction(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
@@ -135,343 +150,373 @@ def AddCommonOptions(parser):
namespace.skip_clear_data = True
namespace.extract_test_list_from_filter = True
- group.add_argument('--fast-local-dev', type=bool, nargs=0,
- action=FastLocalDevAction,
- help='Alias for: --verbose --num-retries=0 '
- '--enable-device-cache --enable-concurrent-adb '
- '--skip-clear-data --extract-test-list-from-filter')
+ parser.add_argument(
+ '--fast-local-dev',
+ type=bool, nargs=0, action=FastLocalDevAction,
+ help='Alias for: --verbose --num-retries=0 '
+ '--enable-device-cache --enable-concurrent-adb '
+ '--skip-clear-data --extract-test-list-from-filter')
+
+ # TODO(jbudorick): Remove this once downstream bots have switched to
+ # api.test_results.
+ parser.add_argument(
+ '--flakiness-dashboard-server',
+ dest='flakiness_dashboard_server',
+ help=argparse.SUPPRESS)
+
+ parser.add_argument(
+ '--output-directory',
+ dest='output_directory', type=os.path.realpath,
+ help='Path to the directory in which build files are'
+ ' located (must include build type). This will take'
+ ' precedence over --debug and --release')
+ parser.add_argument(
+ '--repeat', '--gtest_repeat', '--gtest-repeat',
+ dest='repeat', type=int, default=0,
+ help='Number of times to repeat the specified set of tests.')
+ parser.add_argument(
+ '-v', '--verbose',
+ dest='verbose_count', default=0, action='count',
+ help='Verbose level (multiple times for more)')
+
+ AddTestLauncherOptions(parser)
+
def ProcessCommonOptions(args):
"""Processes and handles all common options."""
run_tests_helper.SetLogLevel(args.verbose_count)
constants.SetBuildType(args.build_type)
- if args.build_directory:
- constants.SetBuildDirectory(args.build_directory)
if args.output_directory:
constants.SetOutputDirectory(args.output_directory)
- devil_chromium.Initialize(
- output_directory=constants.GetOutDirectory(),
- adb_path=args.adb_path)
-
- # Some things such as Forwarder require ADB to be in the environment path.
- adb_dir = os.path.dirname(constants.GetAdbPath())
- if adb_dir and adb_dir not in os.environ['PATH'].split(os.pathsep):
- os.environ['PATH'] = adb_dir + os.pathsep + os.environ['PATH']
-
def AddDeviceOptions(parser):
"""Adds device options to |parser|."""
- group = parser.add_argument_group(title='Device Options')
- group.add_argument('--tool',
- dest='tool',
- help=('Run the test under a tool '
- '(use --tool help to list them)'))
- group.add_argument('-d', '--device', dest='test_device',
- help=('Target device for the test suite '
- 'to run on.'))
- group.add_argument('--blacklist-file', type=os.path.realpath,
- help='Device blacklist file.')
- group.add_argument('--enable-device-cache', action='store_true',
- help='Cache device state to disk between runs')
- group.add_argument('--enable-concurrent-adb', action='store_true',
- help='Run multiple adb commands at the same time, even '
- 'for the same device.')
- group.add_argument('--skip-clear-data', action='store_true',
- help='Do not wipe app data between tests. Use this to '
- 'speed up local development and never on bots '
- '(increases flakiness)')
- group.add_argument('--target-devices-file', type=os.path.realpath,
- help='Path to file with json list of device serials to '
- 'run tests on. When not specified, all available '
- 'devices are used.')
-
-def AddGTestOptions(parser):
- """Adds gtest options to |parser|."""
-
- group = parser.add_argument_group('GTest Options')
- group.add_argument('-s', '--suite', dest='suite_name',
- nargs='+', metavar='SUITE_NAME', required=True,
- help='Executable name of the test suite to run.')
- group.add_argument('--executable-dist-dir', type=os.path.realpath,
- help="Path to executable's dist directory for native"
- " (non-apk) tests.")
- group.add_argument('--test-apk-incremental-install-script',
- type=os.path.realpath,
- help='Path to install script for the test apk.')
- group.add_argument('--gtest_also_run_disabled_tests',
- '--gtest-also-run-disabled-tests',
- dest='run_disabled', action='store_true',
- help='Also run disabled tests if applicable.')
- group.add_argument('-a', '--test-arguments', dest='test_arguments',
- default='',
- help='Additional arguments to pass to the test.')
- group.add_argument('-t', '--shard-timeout',
- dest='shard_timeout', type=int, default=120,
- help='Timeout to wait for each test '
- '(default: %(default)s).')
- # TODO(jbudorick): Remove this after ensuring nothing else uses it.
- group.add_argument('--isolate_file_path',
- '--isolate-file-path',
- dest='isolate_file_path',
- type=os.path.realpath,
- help=argparse.SUPPRESS)
- group.add_argument('--runtime-deps-path',
- dest='runtime_deps_path',
- type=os.path.realpath,
- help='Runtime data dependency file from GN.')
- group.add_argument('--app-data-file', action='append', dest='app_data_files',
- help='A file path relative to the app data directory '
- 'that should be saved to the host.')
- group.add_argument('--app-data-file-dir',
- help='Host directory to which app data files will be'
- ' saved. Used with --app-data-file.')
- group.add_argument('--delete-stale-data', dest='delete_stale_data',
- action='store_true',
- help='Delete stale test data on the device.')
- group.add_argument('--extract-test-list-from-filter',
- action='store_true',
- help='When a test filter is specified, and the list of '
- 'tests can be determined from it, skip querying the '
- 'device for the list of all tests. Speeds up local '
- 'development, but is not safe to use on bots ('
- 'http://crbug.com/549214')
- group.add_argument('--enable-xml-result-parsing',
- action='store_true',
- help=argparse.SUPPRESS)
- group.add_argument('--store-tombstones', dest='store_tombstones',
- action='store_true',
- help='Add tombstones in results if crash.')
-
- filter_group = group.add_mutually_exclusive_group()
- filter_group.add_argument('-f', '--gtest_filter', '--gtest-filter',
- dest='test_filter',
- help='googletest-style filter string.')
- filter_group.add_argument('--gtest-filter-file', dest='test_filter_file',
- type=os.path.realpath,
- help='Path to file that contains googletest-style '
- 'filter strings. See also '
- '//testing/buildbot/filters/README.md.')
-
- AddDeviceOptions(parser)
- AddCommonOptions(parser)
+ parser = parser.add_argument_group('device arguments')
+ parser.add_argument(
+ '--adb-path',
+ type=os.path.realpath,
+ help='Specify the absolute path of the adb binary that '
+ 'should be used.')
+ parser.add_argument(
+ '--blacklist-file',
+ type=os.path.realpath,
+ help='Device blacklist file.')
+ parser.add_argument(
+ '-d', '--device',
+ dest='test_device',
+ help='Target device for the test suite to run on.')
+ parser.add_argument(
+ '--enable-concurrent-adb',
+ action='store_true',
+ help='Run multiple adb commands at the same time, even '
+ 'for the same device.')
+ parser.add_argument(
+ '--enable-device-cache',
+ action='store_true',
+ help='Cache device state to disk between runs')
+ parser.add_argument(
+ '--skip-clear-data',
+ action='store_true',
+ help='Do not wipe app data between tests. Use this to '
+ 'speed up local development and never on bots '
+ '(increases flakiness)')
+ parser.add_argument(
+ '--target-devices-file',
+ type=os.path.realpath,
+ help='Path to file with json list of device serials to '
+ 'run tests on. When not specified, all available '
+ 'devices are used.')
+ parser.add_argument(
+ '--tool',
+ dest='tool',
+ help='Run the test under a tool '
+ '(use --tool help to list them)')
+
+ parser.add_argument(
+ '--upload-logcats-file',
+ action='store_true',
+ dest='upload_logcats_file',
+ help='Whether to upload logcat file to logdog.')
-def AddLinkerTestOptions(parser):
- group = parser.add_argument_group('Linker Test Options')
- group.add_argument('-f', '--gtest-filter', dest='test_filter',
- help='googletest-style filter string.')
- group.add_argument('--test-apk', type=os.path.realpath,
- help='Path to the linker test APK.')
- AddCommonOptions(parser)
- AddDeviceOptions(parser)
+ logcat_output_group = parser.add_mutually_exclusive_group()
+ logcat_output_group.add_argument(
+ '--logcat-output-dir', type=os.path.realpath,
+ help='If set, will dump logcats recorded during test run to directory. '
+ 'File names will be the device ids with timestamps.')
+ logcat_output_group.add_argument(
+ '--logcat-output-file', type=os.path.realpath,
+ help='If set, will merge logcats recorded during test run and dump them '
+ 'to the specified file.')
-def AddJavaTestOptions(argument_group):
- """Adds the Java test options to |option_parser|."""
+def AddGTestOptions(parser):
+ """Adds gtest options to |parser|."""
- argument_group.add_argument(
- '-f', '--test-filter', '--gtest_filter', '--gtest-filter',
- dest='test_filter',
- help=('Test filter (if not fully qualified, will run all matches).'))
- argument_group.add_argument(
- '-A', '--annotation', dest='annotation_str',
- help=('Comma-separated list of annotations. Run only tests with any of '
- 'the given annotations. An annotation can be either a key or a '
- 'key-values pair. A test that has no annotation is considered '
- '"SmallTest".'))
- argument_group.add_argument(
- '-E', '--exclude-annotation', dest='exclude_annotation_str',
- help=('Comma-separated list of annotations. Exclude tests with these '
- 'annotations.'))
- argument_group.add_argument(
- '--screenshot-directory', dest='screenshot_dir', type=os.path.realpath,
- help='Capture screenshots of test failures')
- argument_group.add_argument(
- '--save-perf-json', action='store_true',
- help='Saves the JSON file for each UI Perf test.')
- argument_group.add_argument(
- '--official-build', action='store_true', help='Run official build tests.')
- argument_group.add_argument(
- '--disable-dalvik-asserts', dest='set_asserts', action='store_false',
- default=True, help='Removes the dalvik.vm.enableassertions property')
- argument_group.add_argument(
+ parser = parser.add_argument_group('gtest arguments')
+
+ parser.add_argument(
+ '--app-data-file',
+ action='append', dest='app_data_files',
+ help='A file path relative to the app data directory '
+ 'that should be saved to the host.')
+ parser.add_argument(
+ '--app-data-file-dir',
+ help='Host directory to which app data files will be'
+ ' saved. Used with --app-data-file.')
+ parser.add_argument(
+ '--delete-stale-data',
+ dest='delete_stale_data', action='store_true',
+ help='Delete stale test data on the device.')
+ parser.add_argument(
+ '--enable-xml-result-parsing',
+ action='store_true', help=argparse.SUPPRESS)
+ parser.add_argument(
+ '--executable-dist-dir',
+ type=os.path.realpath,
+ help="Path to executable's dist directory for native"
+ " (non-apk) tests.")
+ parser.add_argument(
+ '--extract-test-list-from-filter',
+ action='store_true',
+ help='When a test filter is specified, and the list of '
+ 'tests can be determined from it, skip querying the '
+ 'device for the list of all tests. Speeds up local '
+ 'development, but is not safe to use on bots ('
+           'http://crbug.com/549214)')
+ parser.add_argument(
'--gtest_also_run_disabled_tests', '--gtest-also-run-disabled-tests',
dest='run_disabled', action='store_true',
help='Also run disabled tests if applicable.')
+ parser.add_argument(
+ '--runtime-deps-path',
+ dest='runtime_deps_path', type=os.path.realpath,
+ help='Runtime data dependency file from GN.')
+ parser.add_argument(
+ '-t', '--shard-timeout',
+ dest='shard_timeout', type=int, default=120,
+ help='Timeout to wait for each test (default: %(default)s).')
+ parser.add_argument(
+ '--store-tombstones',
+ dest='store_tombstones', action='store_true',
+ help='Add tombstones in results if crash.')
+ parser.add_argument(
+ '-s', '--suite',
+ dest='suite_name', nargs='+', metavar='SUITE_NAME', required=True,
+ help='Executable name of the test suite to run.')
+ parser.add_argument(
+ '--test-apk-incremental-install-script',
+ type=os.path.realpath,
+ help='Path to install script for the test apk.')
-
-
-def ProcessJavaTestOptions(args):
- """Processes options/arguments and populates |options| with defaults."""
-
- # TODO(jbudorick): Handle most of this function in argparse.
- if args.annotation_str:
- args.annotations = args.annotation_str.split(',')
- elif args.test_filter:
- args.annotations = []
- else:
- args.annotations = ['SmallTest', 'MediumTest', 'LargeTest', 'EnormousTest',
- 'IntegrationTest']
-
- if args.exclude_annotation_str:
- args.exclude_annotations = args.exclude_annotation_str.split(',')
- else:
- args.exclude_annotations = []
+ filter_group = parser.add_mutually_exclusive_group()
+ filter_group.add_argument(
+ '-f', '--gtest_filter', '--gtest-filter',
+ dest='test_filter',
+ help='googletest-style filter string.')
+ filter_group.add_argument(
+ '--gtest-filter-file',
+ dest='test_filter_file', type=os.path.realpath,
+ help='Path to file that contains googletest-style filter strings. '
+ 'See also //testing/buildbot/filters/README.md.')
def AddInstrumentationTestOptions(parser):
"""Adds Instrumentation test options to |parser|."""
- parser.usage = '%(prog)s [options]'
-
- group = parser.add_argument_group('Instrumentation Test Options')
- AddJavaTestOptions(group)
-
- java_or_python_group = group.add_mutually_exclusive_group()
- java_or_python_group.add_argument(
- '-j', '--java-only', action='store_false',
- dest='run_python_tests', default=True, help='Run only the Java tests.')
- java_or_python_group.add_argument(
- '-p', '--python-only', action='store_false',
- dest='run_java_tests', default=True,
- help='DEPRECATED')
-
- group.add_argument('--host-driven-root',
- help='DEPRECATED')
- group.add_argument('-w', '--wait_debugger', dest='wait_for_debugger',
- action='store_true',
- help='Wait for debugger.')
+ parser.add_argument_group('instrumentation arguments')
+
+ parser.add_argument(
+ '--additional-apk',
+ action='append', dest='additional_apks', default=[],
+ type=os.path.realpath,
+ help='Additional apk that must be installed on '
+ 'the device when the tests are run')
+ parser.add_argument(
+ '-A', '--annotation',
+ dest='annotation_str',
+ help='Comma-separated list of annotations. Run only tests with any of '
+ 'the given annotations. An annotation can be either a key or a '
+ 'key-values pair. A test that has no annotation is considered '
+ '"SmallTest".')
# TODO(jbudorick): Remove support for name-style APK specification once
# bots are no longer doing it.
- group.add_argument('--apk-under-test',
- help='Path or name of the apk under test.')
- group.add_argument('--apk-under-test-incremental-install-script',
- help='Path to install script for the --apk-under-test.')
- group.add_argument('--test-apk', required=True,
- help='Path or name of the apk containing the tests '
- '(name is without the .apk extension; '
- 'e.g. "ContentShellTest").')
- group.add_argument('--test-jar',
- help='Path of jar containing test java files.')
- group.add_argument('--test-apk-incremental-install-script',
- type=os.path.realpath,
- help='Path to install script for the --test-apk.')
- group.add_argument('--additional-apk', action='append',
- dest='additional_apks', default=[],
- type=os.path.realpath,
- help='Additional apk that must be installed on '
- 'the device when the tests are run')
- group.add_argument('--coverage-dir', type=os.path.realpath,
- help=('Directory in which to place all generated '
- 'EMMA coverage files.'))
- group.add_argument('--device-flags', dest='device_flags',
- type=os.path.realpath,
- help='The relative filepath to a file containing '
- 'command-line flags to set on the device')
- group.add_argument('--device-flags-file', type=os.path.realpath,
- help='The relative filepath to a file containing '
- 'command-line flags to set on the device')
- # TODO(jbudorick): Remove this after ensuring nothing else uses it.
- group.add_argument('--isolate_file_path',
- '--isolate-file-path',
- dest='isolate_file_path',
- type=os.path.realpath,
- help=argparse.SUPPRESS)
- group.add_argument('--runtime-deps-path',
- dest='runtime_deps_path',
- type=os.path.realpath,
- help='Runtime data dependency file from GN.')
- group.add_argument('--delete-stale-data', dest='delete_stale_data',
- action='store_true',
- help='Delete stale test data on the device.')
- group.add_argument('--timeout-scale', type=float,
- help='Factor by which timeouts should be scaled.')
- group.add_argument('--strict-mode', dest='strict_mode', default='testing',
- help='StrictMode command-line flag set on the device, '
- 'death/testing to kill the process, off to stop '
- 'checking, flash to flash only. Default testing.')
- group.add_argument('--regenerate-goldens', dest='regenerate_goldens',
- action='store_true',
- help='Causes the render tests to not fail when a check'
- 'fails or the golden image is missing but to render'
- 'the view and carry on.')
- group.add_argument('--store-tombstones', dest='store_tombstones',
- action='store_true',
- help='Add tombstones in results if crash.')
- group.add_argument('--shared-prefs-file', dest='shared_prefs_file',
- type=os.path.realpath,
- help='The relative path to a file containing JSON list '
- 'of shared preference files to edit and how to do '
- 'so. Example list: '
- '[{'
- ' "package": "com.package.example",'
- ' "filename": "ExampleSettings.xml",'
- ' "set": {'
- ' "boolean_key_in_xml": true,'
- ' "string_key_in_xml": "string_value"'
- ' },'
- ' "remove": ['
- ' "key_in_xml_to_remove"'
- ' ]'
- '}]')
-
- AddCommonOptions(parser)
- AddDeviceOptions(parser)
+ parser.add_argument(
+ '--apk-under-test',
+ help='Path or name of the apk under test.')
+ parser.add_argument(
+ '--coverage-dir',
+ type=os.path.realpath,
+ help='Directory in which to place all generated '
+ 'EMMA coverage files.')
+ parser.add_argument(
+ '--delete-stale-data',
+ action='store_true', dest='delete_stale_data',
+ help='Delete stale test data on the device.')
+ parser.add_argument(
+ '--disable-dalvik-asserts',
+ dest='set_asserts', action='store_false', default=True,
+ help='Removes the dalvik.vm.enableassertions property')
+ parser.add_argument(
+ '-E', '--exclude-annotation',
+ dest='exclude_annotation_str',
+ help='Comma-separated list of annotations. Exclude tests with these '
+ 'annotations.')
+ parser.add_argument(
+ '-f', '--test-filter', '--gtest_filter', '--gtest-filter',
+ dest='test_filter',
+ help='Test filter (if not fully qualified, will run all matches).')
+ parser.add_argument(
+ '--gtest_also_run_disabled_tests', '--gtest-also-run-disabled-tests',
+ dest='run_disabled', action='store_true',
+ help='Also run disabled tests if applicable.')
+ parser.add_argument(
+ '--regenerate-goldens',
+ action='store_true', dest='regenerate_goldens',
+      help='Causes the render tests to not fail when a check '
+           'fails or the golden image is missing but to render '
+           'the view and carry on.')
+ parser.add_argument(
+ '--runtime-deps-path',
+ dest='runtime_deps_path', type=os.path.realpath,
+ help='Runtime data dependency file from GN.')
+ parser.add_argument(
+ '--save-perf-json',
+ action='store_true',
+ help='Saves the JSON file for each UI Perf test.')
+ parser.add_argument(
+ '--screenshot-directory',
+ dest='screenshot_dir', type=os.path.realpath,
+ help='Capture screenshots of test failures')
+ parser.add_argument(
+ '--shared-prefs-file',
+ dest='shared_prefs_file', type=os.path.realpath,
+ help='The relative path to a file containing JSON list of shared '
+ 'preference files to edit and how to do so. Example list: '
+ '[{'
+ ' "package": "com.package.example",'
+ ' "filename": "ExampleSettings.xml",'
+ ' "set": {'
+ ' "boolean_key_in_xml": true,'
+ ' "string_key_in_xml": "string_value"'
+ ' },'
+ ' "remove": ['
+ ' "key_in_xml_to_remove"'
+ ' ]'
+ '}]')
+ parser.add_argument(
+ '--store-tombstones',
+ action='store_true', dest='store_tombstones',
+ help='Add tombstones in results if crash.')
+ parser.add_argument(
+ '--strict-mode',
+ dest='strict_mode', default='testing',
+ help='StrictMode command-line flag set on the device, '
+ 'death/testing to kill the process, off to stop '
+ 'checking, flash to flash only. (default: %(default)s)')
+ parser.add_argument(
+ '--test-apk',
+ required=True,
+ help='Path or name of the apk containing the tests.')
+ parser.add_argument(
+ '--test-jar',
+ help='Path of jar containing test java files.')
+ parser.add_argument(
+ '--timeout-scale',
+ type=float,
+ help='Factor by which timeouts should be scaled.')
+ parser.add_argument(
+ '-w', '--wait_debugger',
+ action='store_true', dest='wait_for_debugger',
+ help='Wait for debugger.')
+
+ # These arguments are suppressed from the help text because they should
+ # only ever be specified by an intermediate script.
+ parser.add_argument(
+ '--apk-under-test-incremental-install-script',
+ help=argparse.SUPPRESS)
+ parser.add_argument(
+ '--test-apk-incremental-install-script',
+ type=os.path.realpath,
+ help=argparse.SUPPRESS)
def AddJUnitTestOptions(parser):
"""Adds junit test options to |parser|."""
- group = parser.add_argument_group('JUnit Test Options')
- group.add_argument(
- '-s', '--test-suite', dest='test_suite', required=True,
- help=('JUnit test suite to run.'))
- group.add_argument(
- '-f', '--test-filter', dest='test_filter',
- help='Filters tests googletest-style.')
- group.add_argument(
- '--package-filter', dest='package_filter',
+ parser = parser.add_argument_group('junit arguments')
+
+ parser.add_argument(
+ '--coverage-dir',
+ dest='coverage_dir', type=os.path.realpath,
+ help='Directory to store coverage info.')
+ parser.add_argument(
+ '--package-filter',
+ dest='package_filter',
help='Filters tests by package.')
- group.add_argument(
- '--runner-filter', dest='runner_filter',
+ parser.add_argument(
+ '--runner-filter',
+ dest='runner_filter',
help='Filters tests by runner class. Must be fully qualified.')
- group.add_argument(
- '--coverage-dir', dest='coverage_dir', type=os.path.realpath,
- help='Directory to store coverage info.')
- AddCommonOptions(parser)
+ parser.add_argument(
+ '-f', '--test-filter',
+ dest='test_filter',
+ help='Filters tests googletest-style.')
+ parser.add_argument(
+ '-s', '--test-suite',
+ dest='test_suite', required=True,
+ help='JUnit test suite to run.')
+
+
+def AddLinkerTestOptions(parser):
+
+ parser.add_argument_group('linker arguments')
+
+ parser.add_argument(
+ '-f', '--gtest-filter',
+ dest='test_filter',
+ help='googletest-style filter string.')
+ parser.add_argument(
+ '--test-apk',
+ type=os.path.realpath,
+ help='Path to the linker test APK.')
def AddMonkeyTestOptions(parser):
"""Adds monkey test options to |parser|."""
- group = parser.add_argument_group('Monkey Test Options')
- group.add_argument(
- '--browser', required=True, choices=constants.PACKAGE_INFO.keys(),
+ parser = parser.add_argument_group('monkey arguments')
+
+ parser.add_argument(
+ '--browser',
+ required=True, choices=constants.PACKAGE_INFO.keys(),
metavar='BROWSER', help='Browser under test.')
- group.add_argument(
- '--event-count', default=10000, type=int,
- help='Number of events to generate (default: %(default)s).')
- group.add_argument(
- '--category', nargs='*', dest='categories', default=[],
+ parser.add_argument(
+ '--category',
+ nargs='*', dest='categories', default=[],
help='A list of allowed categories. Monkey will only visit activities '
'that are listed with one of the specified categories.')
- group.add_argument(
- '--throttle', default=100, type=int,
- help='Delay between events (ms) (default: %(default)s). ')
- group.add_argument(
- '--seed', type=int,
+ parser.add_argument(
+ '--event-count',
+ default=10000, type=int,
+ help='Number of events to generate (default: %(default)s).')
+ parser.add_argument(
+ '--seed',
+ type=int,
help='Seed value for pseudo-random generator. Same seed value generates '
'the same sequence of events. Seed is randomized by default.')
- AddCommonOptions(parser)
- AddDeviceOptions(parser)
+ parser.add_argument(
+ '--throttle',
+ default=100, type=int,
+ help='Delay between events (ms) (default: %(default)s). ')
def AddPerfTestOptions(parser):
"""Adds perf test options to |parser|."""
- group = parser.add_argument_group('Perf Test Options')
+ parser = parser.add_argument_group('perf arguments')
class SingleStepAction(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
@@ -483,92 +528,106 @@ def AddPerfTestOptions(parser):
'but no single step command provided.')
setattr(namespace, self.dest, values)
- step_group = group.add_mutually_exclusive_group(required=True)
+ step_group = parser.add_mutually_exclusive_group(required=True)
# TODO(jbudorick): Revise --single-step to use argparse.REMAINDER.
# This requires removing "--" from client calls.
step_group.add_argument(
- '--single-step', action='store_true',
+ '--print-step',
+ help='The name of a previously executed perf step to print.')
+ step_group.add_argument(
+ '--single-step',
+ action='store_true',
help='Execute the given command with retries, but only print the result '
'for the "most successful" round.')
step_group.add_argument(
'--steps',
help='JSON file containing the list of commands to run.')
- step_group.add_argument(
- '--print-step',
- help='The name of a previously executed perf step to print.')
- group.add_argument(
- '--output-json-list', type=os.path.realpath,
- help='Writes a JSON list of information for each --steps into the given '
- 'file. Information includes runtime and device affinity for each '
- '--steps.')
- group.add_argument(
+ parser.add_argument(
'--collect-chartjson-data',
action='store_true',
help='Cache the telemetry chartjson output from each step for later use.')
- group.add_argument(
- '--output-chartjson-data',
- type=os.path.realpath,
- help='Writes telemetry chartjson formatted output into the given file.')
- group.add_argument(
+ parser.add_argument(
'--collect-json-data',
action='store_true',
help='Cache the telemetry JSON output from each step for later use.')
- group.add_argument(
- '--output-json-data',
+ parser.add_argument(
+ '--dry-run',
+ action='store_true',
+ help='Just print the steps without executing.')
+ parser.add_argument(
+ '--flaky-steps',
type=os.path.realpath,
- help='Writes telemetry JSON formatted output into the given file.')
+      help='A JSON file containing steps that are flaky '
+           'and will have their exit codes ignored.')
# TODO(rnephew): Remove this when everything moves to new option in platform
# mode.
- group.add_argument(
- '--get-output-dir-archive', metavar='FILENAME', type=os.path.realpath,
+ parser.add_argument(
+ '--get-output-dir-archive',
+ metavar='FILENAME', type=os.path.realpath,
help='Write the cached output directory archived by a step into the'
' given ZIP file.')
- group.add_argument(
- '--output-dir-archive-path', metavar='FILENAME', type=os.path.realpath,
- help='Write the cached output directory archived by a step into the'
- ' given ZIP file.')
- group.add_argument(
- '--flaky-steps', type=os.path.realpath,
- help=('A JSON file containing steps that are flaky '
- 'and will have its exit code ignored.'))
- group.add_argument(
- '--no-timeout', action='store_true',
- help=('Do not impose a timeout. Each perf step is responsible for '
- 'implementing the timeout logic.'))
- group.add_argument(
- '-f', '--test-filter',
- help=('Test filter (will match against the names listed in --steps).'))
- group.add_argument(
- '--dry-run', action='store_true',
- help='Just print the steps without executing.')
+ parser.add_argument(
+ '--known-devices-file',
+ help='Path to known device list.')
# Uses 0.1 degrees C because that's what Android does.
- group.add_argument(
- '--max-battery-temp', type=int,
+ parser.add_argument(
+ '--max-battery-temp',
+ type=int,
help='Only start tests when the battery is at or below the given '
'temperature (0.1 C)')
- group.add_argument(
- 'single_step_command', nargs='*', action=SingleStepAction,
- help='If --single-step is specified, the command to run.')
- group.add_argument(
- '--min-battery-level', type=int,
+ parser.add_argument(
+ '--min-battery-level',
+ type=int,
help='Only starts tests when the battery is charged above '
- 'given level.')
- group.add_argument('--known-devices-file', help='Path to known device list.')
- group.add_argument(
- '--write-buildbot-json', action='store_true',
+ 'given level.')
+ parser.add_argument(
+ '--no-timeout',
+ action='store_true',
+ help='Do not impose a timeout. Each perf step is responsible for '
+ 'implementing the timeout logic.')
+ parser.add_argument(
+ '--output-chartjson-data',
+ type=os.path.realpath,
+ help='Writes telemetry chartjson formatted output into the given file.')
+ parser.add_argument(
+ '--output-dir-archive-path',
+ metavar='FILENAME', type=os.path.realpath,
+ help='Write the cached output directory archived by a step into the'
+ ' given ZIP file.')
+ parser.add_argument(
+ '--output-json-data',
+ type=os.path.realpath,
+ help='Writes telemetry JSON formatted output into the given file.')
+ parser.add_argument(
+ '--output-json-list',
+ type=os.path.realpath,
+ help='Writes a JSON list of information for each --steps into the given '
+ 'file. Information includes runtime and device affinity for each '
+ '--steps.')
+ parser.add_argument(
+ '-f', '--test-filter',
+ help='Test filter (will match against the names listed in --steps).')
+ parser.add_argument(
+ '--write-buildbot-json',
+ action='store_true',
help='Whether to output buildbot json.')
- AddCommonOptions(parser)
- AddDeviceOptions(parser)
+
+ parser.add_argument(
+ 'single_step_command',
+ nargs='*', action=SingleStepAction,
+ help='If --single-step is specified, the command to run.')
def AddPythonTestOptions(parser):
- group = parser.add_argument_group('Python Test Options')
- group.add_argument(
- '-s', '--suite', dest='suite_name', metavar='SUITE_NAME',
+
+ parser = parser.add_argument_group('python arguments')
+
+ parser.add_argument(
+ '-s', '--suite',
+ dest='suite_name', metavar='SUITE_NAME',
choices=constants.PYTHON_UNIT_TEST_SUITES.keys(),
help='Name of the test suite to run.')
- AddCommonOptions(parser)
def _RunPythonTests(args):
@@ -588,43 +647,11 @@ def _RunPythonTests(args):
sys.path = sys.path[1:]
-def _GetAttachedDevices(blacklist_file, test_device, enable_cache, num_retries):
- """Get all attached devices.
-
- Args:
- blacklist_file: Path to device blacklist.
- test_device: Name of a specific device to use.
- enable_cache: Whether to enable checksum caching.
-
- Returns:
- A list of attached devices.
- """
- blacklist = (device_blacklist.Blacklist(blacklist_file)
- if blacklist_file
- else None)
-
- attached_devices = device_utils.DeviceUtils.HealthyDevices(
- blacklist, enable_device_files_cache=enable_cache,
- default_retries=num_retries)
- if test_device:
- test_device = [d for d in attached_devices if d == test_device]
- if not test_device:
- raise device_errors.DeviceUnreachableError(
- 'Did not find device %s among attached device. Attached devices: %s'
- % (test_device, ', '.join(attached_devices)))
- return test_device
-
- else:
- if not attached_devices:
- raise device_errors.NoDevicesError()
- return sorted(attached_devices)
-
-
_DEFAULT_PLATFORM_MODE_TESTS = ['gtest', 'instrumentation', 'junit',
'linker', 'monkey', 'perf']
-def RunTestsCommand(args): # pylint: disable=too-many-return-statements
+def RunTestsCommand(args):
"""Checks test type and dispatches to the appropriate function.
Args:
@@ -644,15 +671,6 @@ def RunTestsCommand(args): # pylint: disable=too-many-return-statements
if args.enable_platform_mode or command in _DEFAULT_PLATFORM_MODE_TESTS:
return RunTestsInPlatformMode(args)
- forwarder.Forwarder.RemoveHostLog()
- if not ports.ResetTestServerPortAllocation():
- raise Exception('Failed to reset test server port.')
-
- # pylint: disable=protected-access
- if os.path.exists(ports._TEST_SERVER_PORT_LOCKFILE):
- os.unlink(ports._TEST_SERVER_PORT_LOCKFILE)
- # pylint: enable=protected-access
-
if command == 'python':
return _RunPythonTests(args)
else:
@@ -725,6 +743,28 @@ def RunTestsInPlatformMode(args):
write_json_file(),
args.json_results_file)
+ @contextlib.contextmanager
+ def upload_logcats_file():
+ try:
+ yield
+ finally:
+ if not args.logcat_output_file:
+ logging.critical('Cannot upload logcats file. '
+ 'File to save logcat is not specified.')
+ else:
+ with open(args.logcat_output_file) as src:
+ dst = logdog_helper.open_text('unified_logcats')
+ if dst:
+ shutil.copyfileobj(src, dst)
+ dst.close()
+ logging.critical(
+ 'Logcat: %s', logdog_helper.get_viewer_url('unified_logcats'))
+
+
+ logcats_uploader = contextlib_ext.Optional(
+ upload_logcats_file(),
+ 'upload_logcats_file' in args and args.upload_logcats_file)
+
### Set up test objects.
env = environment_factory.CreateEnvironment(args, infra_error)
@@ -734,7 +774,7 @@ def RunTestsInPlatformMode(args):
### Run.
- with json_writer, env, test_instance, test_run:
+ with json_writer, logcats_uploader, env, test_instance, test_run:
repetitions = (xrange(args.repeat + 1) if args.repeat >= 0
else itertools.count())
@@ -798,34 +838,6 @@ def RunTestsInPlatformMode(args):
else constants.ERROR_EXIT_CODE)
-CommandConfigTuple = collections.namedtuple(
- 'CommandConfigTuple',
- ['add_options_func', 'help_txt'])
-VALID_COMMANDS = {
- 'gtest': CommandConfigTuple(
- AddGTestOptions,
- 'googletest-based C++ tests'),
- 'instrumentation': CommandConfigTuple(
- AddInstrumentationTestOptions,
- 'InstrumentationTestCase-based Java tests'),
- 'junit': CommandConfigTuple(
- AddJUnitTestOptions,
- 'JUnit4-based Java tests'),
- 'monkey': CommandConfigTuple(
- AddMonkeyTestOptions,
- "Tests based on Android's monkey"),
- 'perf': CommandConfigTuple(
- AddPerfTestOptions,
- 'Performance tests'),
- 'python': CommandConfigTuple(
- AddPythonTestOptions,
- 'Python tests based on unittest.TestCase'),
- 'linker': CommandConfigTuple(
- AddLinkerTestOptions,
- 'Linker tests'),
-}
-
-
def DumpThreadStacks(_signal, _frame):
for thread in threading.enumerate():
reraiser_thread.LogThreadStack(thread)
@@ -835,16 +847,67 @@ def main():
signal.signal(signal.SIGUSR1, DumpThreadStacks)
parser = argparse.ArgumentParser()
- command_parsers = parser.add_subparsers(title='test types',
- dest='command')
-
- for test_type, config in sorted(VALID_COMMANDS.iteritems(),
- key=lambda x: x[0]):
- subparser = command_parsers.add_parser(
- test_type, usage='%(prog)s [options]', help=config.help_txt)
- config.add_options_func(subparser)
-
- args = parser.parse_args()
+ command_parsers = parser.add_subparsers(
+ title='test types', dest='command')
+
+ subp = command_parsers.add_parser(
+ 'gtest',
+ help='googletest-based C++ tests')
+ AddCommonOptions(subp)
+ AddDeviceOptions(subp)
+ AddGTestOptions(subp)
+ AddTracingOptions(subp)
+ AddCommandLineOptions(subp)
+
+ subp = command_parsers.add_parser(
+ 'instrumentation',
+ help='InstrumentationTestCase-based Java tests')
+ AddCommonOptions(subp)
+ AddDeviceOptions(subp)
+ AddInstrumentationTestOptions(subp)
+ AddTracingOptions(subp)
+ AddCommandLineOptions(subp)
+
+ subp = command_parsers.add_parser(
+ 'junit',
+ help='JUnit4-based Java tests')
+ AddCommonOptions(subp)
+ AddJUnitTestOptions(subp)
+
+ subp = command_parsers.add_parser(
+ 'linker',
+ help='linker tests')
+ AddCommonOptions(subp)
+ AddDeviceOptions(subp)
+ AddLinkerTestOptions(subp)
+
+ subp = command_parsers.add_parser(
+ 'monkey',
+ help="tests based on Android's monkey command")
+ AddCommonOptions(subp)
+ AddDeviceOptions(subp)
+ AddMonkeyTestOptions(subp)
+
+ subp = command_parsers.add_parser(
+ 'perf',
+ help='performance tests')
+ AddCommonOptions(subp)
+ AddDeviceOptions(subp)
+ AddPerfTestOptions(subp)
+ AddTracingOptions(subp)
+
+ subp = command_parsers.add_parser(
+ 'python',
+ help='python tests based on unittest.TestCase')
+ AddCommonOptions(subp)
+ AddPythonTestOptions(subp)
+
+ args, unknown_args = parser.parse_known_args()
+ if unknown_args:
+ if hasattr(args, 'allow_unknown') and args.allow_unknown:
+ args.command_line_flags = unknown_args
+ else:
+ parser.error('unrecognized arguments: %s' % ' '.join(unknown_args))
try:
return RunTestsCommand(args)
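
The rewritten main() above drops the old VALID_COMMANDS table in favor of explicit per-command subparsers that compose shared option groups, and switches to parse_known_args() so leftover flags can be forwarded when a command opts in. A minimal sketch of that wiring pattern follows; the helper names and the --allow-unknown flag are illustrative assumptions, not the real Chromium helpers.

```python
# Minimal sketch of per-command subparsers plus parse_known_args() forwarding.
import argparse
import sys


def add_common_options(subparser):
  group = subparser.add_argument_group('common arguments')
  group.add_argument('-v', '--verbose', action='count', default=0,
                     help='Verbosity level (repeat for more output).')


def add_gtest_options(subparser):
  group = subparser.add_argument_group('gtest arguments')
  group.add_argument('-f', '--gtest-filter', dest='test_filter',
                     help='googletest-style filter string.')


def parse_args(argv):
  parser = argparse.ArgumentParser()
  commands = parser.add_subparsers(title='test types', dest='command')

  gtest = commands.add_parser('gtest', help='googletest-based C++ tests')
  add_common_options(gtest)
  add_gtest_options(gtest)
  # Commands that opt in keep unrecognized flags so they can be forwarded to
  # the test binary; everything else treats them as errors.
  gtest.add_argument('--allow-unknown', action='store_true',
                     dest='allow_unknown', help=argparse.SUPPRESS)

  args, unknown = parser.parse_known_args(argv)
  if unknown:
    if getattr(args, 'allow_unknown', False):
      args.command_line_flags = unknown
    else:
      parser.error('unrecognized arguments: %s' % ' '.join(unknown))
  return args


if __name__ == '__main__':
  print(parse_args(sys.argv[1:]))
```
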
diff --git a/chromium/build/android/test_runner.pydeps b/chromium/build/android/test_runner.pydeps
index 1cb513af08f..31b3ef53181 100644
--- a/chromium/build/android/test_runner.pydeps
+++ b/chromium/build/android/test_runner.pydeps
@@ -13,7 +13,6 @@
../../third_party/catapult/common/py_utils/py_utils/cloud_storage_global_lock.py
../../third_party/catapult/common/py_utils/py_utils/contextlib_ext.py
../../third_party/catapult/common/py_utils/py_utils/lock.py
-../../third_party/catapult/common/py_utils/py_utils/tempfile_ext.py
../../third_party/catapult/dependency_manager/dependency_manager/__init__.py
../../third_party/catapult/dependency_manager/dependency_manager/archive_info.py
../../third_party/catapult/dependency_manager/dependency_manager/base_config.py
@@ -48,7 +47,6 @@
../../third_party/catapult/devil/devil/android/sdk/aapt.py
../../third_party/catapult/devil/devil/android/sdk/adb_wrapper.py
../../third_party/catapult/devil/devil/android/sdk/build_tools.py
-../../third_party/catapult/devil/devil/android/sdk/gce_adb_wrapper.py
../../third_party/catapult/devil/devil/android/sdk/intent.py
../../third_party/catapult/devil/devil/android/sdk/keyevent.py
../../third_party/catapult/devil/devil/android/sdk/shared_prefs.py
@@ -121,7 +119,6 @@
../../tools/swarming_client/libs/logdog/stream.py
../../tools/swarming_client/libs/logdog/streamname.py
../../tools/swarming_client/libs/logdog/varint.py
-../find_depot_tools.py
../util/lib/common/unittest_util.py
devil_chromium.py
pylib/__init__.py
@@ -179,7 +176,6 @@ pylib/utils/__init__.py
pylib/utils/decorators.py
pylib/utils/device_dependencies.py
pylib/utils/dexdump.py
-pylib/utils/google_storage_helper.py
pylib/utils/logdog_helper.py
pylib/utils/proguard.py
pylib/utils/repo_utils.py
diff --git a/chromium/build/android/test_wrapper/logdog_wrapper.py b/chromium/build/android/test_wrapper/logdog_wrapper.py
index 14ed7d138b4..5213cb367d3 100755
--- a/chromium/build/android/test_wrapper/logdog_wrapper.py
+++ b/chromium/build/android/test_wrapper/logdog_wrapper.py
@@ -11,81 +11,82 @@ import os
import signal
import subprocess
import sys
-import urllib
+_SRC_PATH = os.path.abspath(os.path.join(
+ os.path.dirname(__file__), '..', '..', '..'))
+sys.path.append(os.path.join(_SRC_PATH, 'third_party', 'catapult', 'devil'))
+sys.path.append(os.path.join(_SRC_PATH, 'third_party', 'catapult', 'common',
+ 'py_utils'))
+
+from devil.utils import signal_handler
+from py_utils import tempfile_ext
+
+PROJECT = 'chromium'
+OUTPUT = 'logdog'
+COORDINATOR_HOST = 'luci-logdog.appspot.com'
+SERVICE_ACCOUNT_JSON = ('/creds/service_accounts'
+ '/service-account-luci-logdog-publisher.json')
def CommandParser():
# Parses the command line arguments being passed in
parser = argparse.ArgumentParser()
+ parser.add_argument('--target', required=True,
+ help='The test target to be run.')
parser.add_argument('--logdog-bin-cmd', required=True,
- help='Command for running logdog butler binary')
- parser.add_argument('--project', required=True,
- help='Name of logdog project')
- parser.add_argument('--logdog-server',
- default='services-dot-luci-logdog.appspot.com',
- help='URL of logdog server, https:// is assumed.')
- parser.add_argument('--service-account-json', required=True,
- help='Location of authentication json')
- parser.add_argument('--prefix', required=True,
- help='Prefix to be used for logdog stream')
- parser.add_argument('--source', required=True,
- help='Location of file for logdog to stream')
- parser.add_argument('--name', required=True,
- help='Name to be used for logdog stream')
+ help='The logdog bin cmd.')
+ parser.add_argument('--target-devices-file', required=False,
+ help='The target devices file.')
+ parser.add_argument('--logcat-output-file',
+ help='The logcat output file.')
return parser
-
-def CreateUrl(server, project, prefix, name):
- stream_name = '%s/%s/+/%s' % (project, prefix, name)
- return 'https://%s/v/?s=%s' % (server, urllib.quote_plus(stream_name))
-
-
-def CreateSignalForwarder(proc):
- def handler(signum, _frame):
+def CreateStopTestsMethod(proc):
+ def StopTests(signum, _frame):
logging.error('Forwarding signal %s to test process', str(signum))
proc.send_signal(signum)
-
- return handler
-
+ return StopTests
def main():
parser = CommandParser()
- args, test_cmd = parser.parse_known_args(sys.argv[1:])
+ args, extra_cmd_args = parser.parse_known_args(sys.argv[1:])
+
logging.basicConfig(level=logging.INFO)
- if not test_cmd:
- parser.error('Must specify command to run after the logdog flags')
- test_proc = subprocess.Popen(test_cmd)
- original_sigterm_handler = signal.signal(
- signal.SIGTERM, CreateSignalForwarder(test_proc))
- try:
- result = test_proc.wait()
- finally:
- signal.signal(signal.SIGTERM, original_sigterm_handler)
- if '${SWARMING_TASK_ID}' in args.prefix:
- args.prefix = args.prefix.replace('${SWARMING_TASK_ID}',
- os.environ.get('SWARMING_TASK_ID'))
- url = CreateUrl('luci-logdog.appspot.com', args.project, args.prefix,
- args.name)
- logdog_cmd = [args.logdog_bin_cmd, '-project', args.project,
- '-output', 'logdog,host=%s' % args.logdog_server,
- '-prefix', args.prefix,
- '-service-account-json', args.service_account_json,
- 'stream', '-source', args.source,
- '-stream', '-name=%s' % args.name]
+ with tempfile_ext.NamedTemporaryDirectory() as logcat_output_dir:
+ test_cmd = [
+ os.path.join('bin', 'run_%s' % args.target),
+ '--logcat-output-file',
+ (args.logcat_output_file if args.logcat_output_file
+ else os.path.join(logcat_output_dir, 'logcats')),
+ '--upload-logcats-file',
+ '--target-devices-file', args.target_devices_file,
+ '-v'] + extra_cmd_args
+
+ with tempfile_ext.NamedTemporaryDirectory(
+ prefix='tmp_android_logdog_wrapper') as temp_directory:
+ if not os.path.exists(args.logdog_bin_cmd):
+ logging.error(
+ 'Logdog binary %s unavailable. Unable to create logdog client',
+ args.logdog_bin_cmd)
+ else:
+ streamserver_uri = 'unix:%s' % os.path.join(temp_directory,
+ 'butler.sock')
+ prefix = os.path.join('android', 'swarming', 'logcats',
+ os.environ.get('SWARMING_TASK_ID'))
- if not os.path.exists(args.logdog_bin_cmd):
- logging.error(
- 'Logdog binary %s unavailable. Unable to upload logcats.',
- args.logdog_bin_cmd)
- elif not os.path.exists(args.source):
- logging.error(
- 'Logcat sources not found at %s. Unable to upload logcats.',
- args.source)
- else:
- subprocess.call(logdog_cmd)
- logging.info('Logcats are located at: %s', url)
- return result
+ # Call test_cmdline through logdog butler subcommand.
+ test_cmd = [
+ args.logdog_bin_cmd, '-project', PROJECT,
+ '-output', OUTPUT,
+ '-prefix', prefix,
+ '--service-account-json', SERVICE_ACCOUNT_JSON,
+ '-coordinator-host', COORDINATOR_HOST,
+ 'run', '-streamserver-uri', streamserver_uri, '--'] + test_cmd
+ test_proc = subprocess.Popen(test_cmd)
+ with signal_handler.SignalHandler(signal.SIGTERM,
+ CreateStopTestsMethod(test_proc)):
+ result = test_proc.wait()
+ return result
if __name__ == '__main__':
sys.exit(main())
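
The new logdog_wrapper.py no longer uploads a finished log file; it builds the test command, prefixes it with the logdog butler's run subcommand so output is streamed live, and forwards SIGTERM to the child process. A rough sketch of that pattern is shown below, using only butler flags that appear in the patch, placeholder paths, and a plain signal.signal() call in place of devil's SignalHandler.

```python
# Rough sketch of wrapping a test command with the logdog butler 'run'
# subcommand; binary and script paths are placeholders.
import os
import signal
import subprocess
import sys


def wrap_with_logdog(logdog_bin, streamserver_uri, test_cmd):
  # Prefix the test command so the child's output is streamed to logdog.
  prefix = os.path.join('android', 'swarming', 'logcats',
                        os.environ.get('SWARMING_TASK_ID', 'local'))
  return [logdog_bin, '-project', 'chromium',
          '-output', 'logdog',
          '-prefix', prefix,
          '--service-account-json', '/creds/service_accounts/example.json',
          '-coordinator-host', 'luci-logdog.appspot.com',
          'run', '-streamserver-uri', streamserver_uri, '--'] + test_cmd


def main():
  logdog_bin = './logdog_butler'  # placeholder path
  test_cmd = [os.path.join('bin', 'run_example_target'), '-v']
  if os.path.exists(logdog_bin):
    # Mirror the patch: only wrap when the butler binary is available.
    test_cmd = wrap_with_logdog(logdog_bin, 'unix:/tmp/butler.sock', test_cmd)
  proc = subprocess.Popen(test_cmd)
  # Forward SIGTERM so the wrapped test is stopped cleanly (e.g. by swarming).
  signal.signal(signal.SIGTERM,
                lambda signum, _frame: proc.send_signal(signum))
  return proc.wait()


if __name__ == '__main__':
  sys.exit(main())
```
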
diff --git a/chromium/build/android/test_wrapper/logdog_wrapper.pydeps b/chromium/build/android/test_wrapper/logdog_wrapper.pydeps
new file mode 100644
index 00000000000..cd57f2fd2f3
--- /dev/null
+++ b/chromium/build/android/test_wrapper/logdog_wrapper.pydeps
@@ -0,0 +1,11 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android --output build/android/test_wrapper/logdog_wrapper.pydeps build/android/test_wrapper/logdog_wrapper.py
+../../third_party/catapult/common/py_utils/py_utils/__init__.py
+../../third_party/catapult/common/py_utils/py_utils/tempfile_ext.py
+../../third_party/catapult/devil/devil/__init__.py
+../../third_party/catapult/devil/devil/utils/__init__.py
+../../third_party/catapult/devil/devil/utils/reraiser_thread.py
+../../third_party/catapult/devil/devil/utils/signal_handler.py
+../../third_party/catapult/devil/devil/utils/timeout_retry.py
+../../third_party/catapult/devil/devil/utils/watchdog_timer.py
+test_wrapper/logdog_wrapper.py
diff --git a/chromium/build/android/tombstones.py b/chromium/build/android/tombstones.py
index 9838accbf48..dbc6281c2f5 100755
--- a/chromium/build/android/tombstones.py
+++ b/chromium/build/android/tombstones.py
@@ -49,6 +49,8 @@ def _ListTombstones(device):
datetime.datetime.fromtimestamp(entry['st_mtime']))
except device_errors.CommandFailedError:
logging.exception('Could not retrieve tombstones.')
+ except device_errors.DeviceUnreachableError:
+ logging.exception('Device unreachable retrieving tombstones.')
except device_errors.CommandTimeoutError:
logging.exception('Timed out retrieving tombstones.')
diff --git a/chromium/build/args/headless.gn b/chromium/build/args/headless.gn
index 6d073ae1a0d..639c4f70926 100644
--- a/chromium/build/args/headless.gn
+++ b/chromium/build/args/headless.gn
@@ -23,7 +23,6 @@ icu_use_data_file = false
# to simplify deployment.
v8_use_external_startup_data = false
-enable_basic_printing = false
enable_nacl = false
enable_print_preview = false
enable_remoting = false
diff --git a/chromium/build/config/BUILD.gn b/chromium/build/config/BUILD.gn
index b9eb2ef50aa..890785c34dd 100644
--- a/chromium/build/config/BUILD.gn
+++ b/chromium/build/config/BUILD.gn
@@ -13,14 +13,6 @@ import("//build/config/sanitizers/sanitizers.gni")
import("//build/config/ui.gni")
import("//build/toolchain/goma.gni")
-# One common error that happens is that GYP-generated headers within gen/ get
-# included rather than the GN-generated ones within gen/ subdirectories.
-# TODO(GYP_GONE): Remove once GYP is gone (as well as exec_script exception).
-assert(
- exec_script("//build/dir_exists.py", [ "obj.host" ], "string") == "False",
- "GYP artifacts detected in $root_build_dir.$0x0A" +
- "You must wipe this directory before building with GN.")
-
declare_args() {
# When set (the default) enables C++ iterator debugging in debug builds.
# Iterator debugging is always off in release builds (technically, this flag
@@ -65,10 +57,6 @@ config("feature_flags") {
# TODO(brettw) should probably be "=1".
defines += [ "USE_UDEV" ]
}
- if (ui_compositor_image_transport) {
- # TODO(brettw) should probably be "=1".
- defines += [ "UI_COMPOSITOR_IMAGE_TRANSPORT" ]
- }
if (use_ash) {
defines += [ "USE_ASH=1" ]
}
@@ -109,10 +97,7 @@ config("feature_flags") {
defines += [ "ADDRESS_SANITIZER" ]
}
if (is_lsan) {
- defines += [
- "LEAK_SANITIZER",
- "WTF_USE_LEAK_SANITIZER=1",
- ]
+ defines += [ "LEAK_SANITIZER" ]
}
if (is_tsan) {
defines += [
@@ -127,15 +112,9 @@ config("feature_flags") {
if (is_ubsan || is_ubsan_null || is_ubsan_vptr || is_ubsan_security) {
defines += [ "UNDEFINED_SANITIZER" ]
}
- if (use_external_popup_menu) {
- defines += [ "USE_EXTERNAL_POPUP_MENU=1" ]
- }
if (!enable_nacl) {
defines += [ "DISABLE_NACL" ]
}
- if (enable_wayland_server) {
- defines += [ "ENABLE_WAYLAND_SERVER=1" ]
- }
if (safe_browsing_mode == 1) {
defines += [ "FULL_SAFE_BROWSING" ]
defines += [ "SAFE_BROWSING_CSD" ]
@@ -154,9 +133,6 @@ config("feature_flags") {
if (enable_media_router) {
defines += [ "ENABLE_MEDIA_ROUTER=1" ]
}
- if (enable_webvr) {
- defines += [ "ENABLE_WEBVR" ]
- }
if (is_syzyasan) {
defines += [
"SYZYASAN",
@@ -218,6 +194,12 @@ config("release") {
defines += [ "DYNAMIC_ANNOTATIONS_ENABLED=0" ]
}
}
+
+ if (is_ios) {
+ # Disable NSAssert and GTMDevAssert (from Google Toolbox for Mac). This
+ # follows XCode's default behavior for Release builds.
+ defines += [ "NS_BLOCK_ASSERTIONS=1" ]
+ }
}
# Default libraries ------------------------------------------------------------
@@ -300,6 +282,9 @@ config("executable_config") {
if (is_win) {
configs += _windows_linker_configs
+
+ # Currently only turn on linker CFI for executables.
+ configs += [ "//build/config/win:cfi_linker" ]
} else if (is_mac) {
configs += [
"//build/config/mac:mac_dynamic_flags",
@@ -392,7 +377,6 @@ config("precompiled_headers") {
# precompiled configurations.
cflags_c = [ "/wd4206" ]
} else if (is_mac) {
- precompiled_header = "build/precompile.h"
precompiled_source = "//build/precompile.h"
}
}
diff --git a/chromium/build/config/android/BUILD.gn b/chromium/build/config/android/BUILD.gn
index fddca1be0b3..ea55411920e 100644
--- a/chromium/build/config/android/BUILD.gn
+++ b/chromium/build/config/android/BUILD.gn
@@ -39,7 +39,6 @@ config("compiler") {
}
ldflags = [
- "-Wl,--build-id=sha1",
"-Wl,--no-undefined",
# Don't allow visible symbols from libgcc or libc++ to be
diff --git a/chromium/build/config/android/OWNERS b/chromium/build/config/android/OWNERS
index 39f58e9eb69..2dd8a673646 100644
--- a/chromium/build/config/android/OWNERS
+++ b/chromium/build/config/android/OWNERS
@@ -1 +1,3 @@
agrieve@chromium.org
+
+# COMPONENT: Build
diff --git a/chromium/build/config/android/config.gni b/chromium/build/config/android/config.gni
index 2f68c894eb1..1d276ffcedc 100644
--- a/chromium/build/config/android/config.gni
+++ b/chromium/build/config/android/config.gni
@@ -12,7 +12,19 @@ if (is_android) {
[ rebase_path("//clank", root_build_dir) ],
"string") == "True"
- if (has_chrome_android_internal) {
+ # We are using a separate declare_args block for only this argument so that
+ # we can decide if we have to pull in definitions from the internal config
+ # early.
+ declare_args() {
+ # Enables using the internal Chrome for Android repository. The default
+ # value depends on whether the repository is available, and if it's not but
+ # this argument is manually set to True, the generation will fail.
+ # The main purpose of this argument is to avoid having to maintain 2
+ # repositories to support both public only and internal builds.
+ enable_chrome_android_internal = has_chrome_android_internal
+ }
+
+ if (enable_chrome_android_internal) {
import("//clank/config.gni")
}
@@ -31,15 +43,15 @@ if (is_android) {
if (!defined(default_android_sdk_root)) {
default_android_sdk_root = "//third_party/android_tools/sdk"
- default_android_sdk_version = "24"
- default_android_sdk_build_tools_version = "24.0.2"
+ default_android_sdk_version = "25"
+ default_android_sdk_build_tools_version = "25.0.2"
}
if (!defined(default_lint_android_sdk_root)) {
# Purposefully repeated so that downstream can change
# default_android_sdk_root without changing lint version.
default_lint_android_sdk_root = "//third_party/android_tools/sdk"
- default_lint_android_sdk_version = "24"
+ default_lint_android_sdk_version = "25"
}
if (!defined(default_extras_android_sdk_root)) {
@@ -75,7 +87,7 @@ if (is_android) {
}
webview_public_framework_jar =
- "//third_party/android_platform/webview/frameworks_7.0.0_r1.jar"
+ "//third_party/android_platform/webview/frameworks_7.1.1_r28.jar"
if (!defined(webview_framework_jar)) {
webview_framework_jar = webview_public_framework_jar
}
diff --git a/chromium/build/config/android/internal_rules.gni b/chromium/build/config/android/internal_rules.gni
index d2d9f998b3d..e4a4e2d1f6f 100644
--- a/chromium/build/config/android/internal_rules.gni
+++ b/chromium/build/config/android/internal_rules.gni
@@ -38,7 +38,6 @@ _java_target_whitelist = [
# Targets that match the whitelist but are not actually java targets.
_java_target_blacklist = [
"//chrome:packed_resources",
- "//remoting/android:remoting_android_raw_resources",
"*:*_unpack_aar",
]
@@ -483,7 +482,11 @@ template("test_runner_script") {
script = "//build/android/gyp/create_test_runner_script.py"
depfile = "$target_gen_dir/$target_name.d"
- data_deps += [ "//build/android:test_runner_py" ]
+ data_deps += [
+ "//build/android:test_runner_py",
+ "//build/android:logdog_wrapper_py",
+ ]
+
data = []
test_runner_args = [
diff --git a/chromium/build/config/android/rules.gni b/chromium/build/config/android/rules.gni
index c265b56908d..e6e26c06d35 100644
--- a/chromium/build/config/android/rules.gni
+++ b/chromium/build/config/android/rules.gni
@@ -1926,6 +1926,9 @@ if (enable_java_templates) {
if (defined(invoker.proguard_configs)) {
_proguard_configs += invoker.proguard_configs
}
+ if (enable_multidex) {
+ _proguard_configs += [ "//build/android/multidex.flags" ]
+ }
assert(_proguard_configs != []) # Mark as used.
_proguard_target = "${_template_name}__proguard"
proguard(_proguard_target) {
@@ -2620,20 +2623,22 @@ if (enable_java_templates) {
"--srcjar",
rebase_path(srcjar_path, root_build_dir),
]
- if (defined(invoker.import_include) && invoker.import_include != "") {
+ if (defined(invoker.import_include) && invoker.import_include != []) {
# TODO(cjhopman): aidl supports creating a depfile. We should be able to
# switch to constructing a depfile for the overall action from that
# instead of having all the .java files in the include paths as inputs.
- rebased_import_includes =
- rebase_path([ invoker.import_include ], root_build_dir)
- args += [ "--includes=$rebased_import_includes" ]
-
- _java_files_build_rel =
- exec_script("//build/android/gyp/find.py",
- rebase_path([ invoker.import_include ], root_build_dir),
- "list lines")
- _java_files = rebase_path(_java_files_build_rel, ".", root_build_dir)
- inputs += _java_files
+ rebased_import_paths = []
+ foreach(import_path, invoker.import_include) {
+ _rebased_import_path = []
+ _rebased_import_path += rebase_path([ import_path ], root_build_dir)
+ rebased_import_paths += _rebased_import_path
+ _java_files_build_rel = []
+ _java_files_build_rel = exec_script("//build/android/gyp/find.py",
+ _rebased_import_path,
+ "list lines")
+ inputs += rebase_path(_java_files_build_rel, ".", root_build_dir)
+ }
+ args += [ "--includes=$rebased_import_paths" ]
}
args += rebase_path(sources, root_build_dir)
}
diff --git a/chromium/build/config/chromecast/BUILD.gn b/chromium/build/config/chromecast/BUILD.gn
index 811582f162f..3c99eba50f0 100644
--- a/chromium/build/config/chromecast/BUILD.gn
+++ b/chromium/build/config/chromecast/BUILD.gn
@@ -7,20 +7,22 @@ import("//build/config/chromecast_build.gni")
assert(is_chromecast)
config("static_config") {
- ldflags = [
- # Don't allow visible symbols from libraries that contain
- # assembly code with symbols that aren't hidden properly.
- # http://b/26390825
- "-Wl,--exclude-libs=libffmpeg.a",
- ]
-
- if (!is_android) {
- ldflags += [
- # We want to statically link libstdc++/libgcc on Linux.
- # (On Android, libstdc++ and libgcc aren't used.)
- "-static-libstdc++",
- "-static-libgcc",
+ if (!is_clang) {
+ ldflags = [
+ # Don't allow visible symbols from libraries that contain
+ # assembly code with symbols that aren't hidden properly.
+ # http://b/26390825
+ "-Wl,--exclude-libs=libffmpeg.a",
]
+
+ if (!is_android) {
+ ldflags += [
+ # We want to statically link libstdc++/libgcc on Linux.
+ # (On Android, libstdc++ and libgcc aren't used.)
+ "-static-libstdc++",
+ "-static-libgcc",
+ ]
+ }
}
}
@@ -49,7 +51,7 @@ config("ldconfig") {
config("executable_config") {
configs = [ ":ldconfig" ]
- if (current_cpu == "arm") {
+ if (!is_clang && current_cpu == "arm") {
ldflags = [
# Export stdlibc++ and libgcc symbols to force shlibs to refer to these
# symbols from the executable.
diff --git a/chromium/build/config/clang/BUILD.gn b/chromium/build/config/clang/BUILD.gn
index 0aeccd07da1..940c1829d51 100644
--- a/chromium/build/config/clang/BUILD.gn
+++ b/chromium/build/config/clang/BUILD.gn
@@ -3,7 +3,6 @@
# found in the LICENSE file.
import("clang.gni")
-import("//build/config/chromecast_build.gni")
config("find_bad_constructs") {
if (clang_use_chrome_plugins) {
@@ -41,7 +40,7 @@ config("find_bad_constructs") {
"check-auto-raw-pointer",
]
- if ((is_linux || is_android) && !is_chromecast) {
+ if (is_linux || is_android) {
cflags += [
"-Xclang",
"-plugin-arg-find-bad-constructs",
diff --git a/chromium/build/config/compiler/BUILD.gn b/chromium/build/config/compiler/BUILD.gn
index 91370289130..3b77c055108 100644
--- a/chromium/build/config/compiler/BUILD.gn
+++ b/chromium/build/config/compiler/BUILD.gn
@@ -4,6 +4,7 @@
import("//build/config/android/config.gni")
import("//build/config/chrome_build.gni")
+import("//build/config/chromecast_build.gni")
import("//build/config/compiler/compiler.gni")
import("//build/config/nacl/config.gni")
import("//build/toolchain/cc_wrapper.gni")
@@ -61,11 +62,8 @@ declare_args() {
# Omit unwind support in official builds to save space.
# We can use breakpad for these builds.
- exclude_unwind_tables = is_chrome_branded && is_official_build
-
- # If true, gold linker will save symbol table inside object files.
- # This speeds up gdb startup by 60%
- gdb_index = false
+ exclude_unwind_tables = (is_chrome_branded && is_official_build) ||
+ (is_chromecast && !is_cast_desktop_build && !is_debug)
# If true, optimize for size. Does not affect windows builds.
# Linux & Mac favor speed over size.
@@ -166,11 +164,6 @@ config("compiler") {
configs += [ "//build/config/mac:compiler" ]
}
- # Applies to all Posix systems.
- if (is_posix) {
- configs += [ "//build/config/posix:compiler" ]
- }
-
# See the definitions below.
configs += [
":compiler_cpu_abi",
@@ -277,6 +270,16 @@ config("compiler") {
}
}
+ if (is_official_build) {
+ # Explicitly pass --build-id to ld. Compilers used to always pass this
+ # implicitly but don't any more (in particular clang when built without
+ # ENABLE_LINKER_BUILD_ID=ON). The crash infrastructure does need a build
+ # id, so explicitly enable it in official builds. It's not needed in
+ # unofficial builds and computing it does slow down the link, so go with
+ # faster links in unofficial builds.
+ ldflags += [ "-Wl,--build-id=sha1" ]
+ }
+
defines += [ "_FILE_OFFSET_BITS=64" ]
if (!is_android) {
@@ -302,13 +305,15 @@ config("compiler") {
# Linux/Android common flags setup.
# ---------------------------------
if (is_linux || is_android) {
- cflags += [
- "-fPIC",
- "-pipe", # Use pipes for communicating between sub-processes. Faster.
- ]
+ if (use_pic) {
+ cflags += [ "-fPIC" ]
+ ldflags += [ "-fPIC" ]
+ }
+
+ # Use pipes for communicating between sub-processes. Faster.
+ cflags += [ "-pipe" ]
ldflags += [
- "-fPIC",
"-Wl,-z,noexecstack",
"-Wl,-z,now",
"-Wl,-z,relro",
@@ -372,10 +377,6 @@ config("compiler") {
}
}
- if (gdb_index) {
- ldflags += [ "-Wl,--gdb-index" ]
- }
-
# TODO(thestig): Make this flag work with GN.
#if (!is_official_build && !is_chromeos && !(is_asan || is_lsan || is_tsan || is_msan)) {
# ldflags += [
@@ -471,7 +472,11 @@ config("compiler") {
# linker jobs. This is still suboptimal to a potential dynamic
# resource allocation scheme, but should be good enough.
if (use_lld) {
- ldflags += [ "-Wl,--thinlto-jobs=8" ]
+ ldflags += [
+ "-Wl,--thinlto-jobs=8",
+ "-Wl,--thinlto-cache-dir=" +
+ rebase_path("$root_out_dir/thinlto-cache", root_build_dir),
+ ]
} else {
ldflags += [ "-Wl,-plugin-opt,jobs=8" ]
}
@@ -894,6 +899,10 @@ config("default_warnings") {
# This is necessary for the shared library build.
"/wd4251",
+ # C4312 is a VS 2015 64-bit warning for integer to larger pointer.
+ # TODO(brucedawson): fix warnings, crbug.com/554200
+ "/wd4312",
+
# C4351: new behavior: elements of array 'array' will be default
# initialized
# This is a silly "warning" that basically just alerts you that the
@@ -956,10 +965,10 @@ config("default_warnings") {
"/wd4459",
]
- cflags += [
- # C4312 is a VS 2015 64-bit warning for integer to larger pointer.
- # TODO(brucedawson): fix warnings, crbug.com/554200
- "/wd4312",
+ cflags_cc += [
+ # Allow "noexcept" annotations even though we compile with exceptions
+ # disabled.
+ "/wd4577",
]
if (current_cpu == "x86") {
@@ -1036,13 +1045,9 @@ config("default_warnings") {
]
}
- if (is_chromeos) {
- # TODO(thakis): Remove, http://crbug.com/263960
- if (is_clang) {
- cflags_cc += [ "-Wno-reserved-user-defined-literal" ]
- } else {
- cflags_cc += [ "-Wno-literal-suffix" ]
- }
+ if (is_chromeos && is_clang && !is_nacl) {
+ # TODO(thakis): Enable this, crbug.com/507717
+ cflags += [ "-Wno-shift-negative-value" ]
}
if (is_clang) {
@@ -1060,11 +1065,6 @@ config("default_warnings") {
# also contain a default: branch. Chrome is full of that.
"-Wno-covered-switch-default",
- # Clang considers the `register` keyword as deprecated, but e.g.
- # code generated by flex (used in angle) contains that keyword.
- # http://crbug.com/255186
- "-Wno-deprecated-register",
-
# TODO(thakis): This used to be implied by -Wno-unused-function,
# which we no longer use. Check if it makes sense to remove
# this as well. http://crbug.com/316352
@@ -1074,14 +1074,10 @@ config("default_warnings") {
"-Wno-inconsistent-missing-override",
]
- # Chrome's hermetic Clang compiler, NaCl's Clang compiler and Xcode's Clang
- # compiler will almost always have different versions. Certain flags may not
- # be recognized by one version or the other.
- if (!is_nacl) {
- # Flags NaCl (Clang 3.7) does not recognize.
+ if (is_linux && target_cpu == "x86") {
cflags += [
- # TODO(thakis): Enable this, crbug.com/507717
- "-Wno-shift-negative-value",
+      # TODO(thakis): Remove from 32-bit Linux eventually, https://crbug.com/707084
+ "-Wno-deprecated-register",
]
}
@@ -1100,9 +1096,6 @@ config("default_warnings") {
# TODO(hans): https://crbug.com/637306
"-Wno-address-of-packed-member",
- # TODO(thakis): Consider turning this on, https://crbug.com/691120
- "-Wno-block-capture-autoreleasing",
-
# TODO(hans): https://crbug.com/681136
"-Wno-unused-lambda-capture",
@@ -1388,18 +1381,22 @@ if (is_win) {
}
config("default_stack_frames") {
- if (is_posix && !(is_mac || is_ios)) {
- if (using_sanitizer || enable_profiling || is_debug ||
- current_cpu == "arm64") {
- # Explicitly ask for frame pointers, otherwise:
- # * Stacks may be missing for sanitizer and profiling builds.
- # * Debug tcmalloc can crash (crbug.com/636489).
- # * Stacks may be missing for arm64 crash dumps (crbug.com/391706).
+ if (is_posix) {
+ if (enable_frame_pointers) {
cflags = [ "-fno-omit-frame-pointer" ]
- } else if (is_android) {
+ } else {
cflags = [ "-fomit-frame-pointer" ]
}
}
+  # On Windows, the flag that enables frame pointers, "/Oy-", must always come
+  # after the optimization flag [e.g. "/O2"]. The optimization flag is set by
+  # one of the "optimize" configs, see rest of this file. The order in which
+  # cflags are applied is well-defined by the GN spec, but there is no way to
+  # ensure that cflags set by "default_stack_frames" are applied after those
+  # set by an "optimize" config. Similarly, there is no way to propagate state
+  # from this config into the "optimize" config. We therefore always apply
+  # "/Oy-" as part of the common_optimize_on_cflags definition, even though
+  # this may not be correct.
}
# Default "optimization on" config.
@@ -1654,6 +1651,21 @@ config("symbols") {
}
asmflags = cflags
ldflags = []
+
+ # TODO(thakis): Figure out if there's a way to make this go for 32-bit,
+ # currently we get "warning:
+ # obj/native_client/src/trusted/service_runtime/sel_asm/nacl_switch_32.o:
+ # DWARF info may be corrupt; offsets in a range list entry are in different
+ # sections" there. Maybe just a bug in nacl_switch_32.S.
+ # TODO(thakis): Figure out if there's a way to make this go for official
+ # builds, currently get
+ # "third_party/binutils/Linux_x64/Release/bin/ld.gold: warning:
+ # /tmp/lto-llvm-0b5201.o: corrupt debug info in .debug_info"
+ if (!is_mac && !is_ios && !is_nacl && target_cpu != "x86" &&
+ (use_gold || use_lld) && !allow_posix_link_time_opt &&
+ !is_official_build) {
+ ldflags += [ "-Wl,--gdb-index" ]
+ }
}
}
@@ -1675,9 +1687,8 @@ config("minimal_symbols") {
} else {
cflags = [ "-g1" ]
}
- if (use_debug_fission) {
- cflags += [ "-gsplit-dwarf" ]
- }
+
+ # Note: -gsplit-dwarf implicitly turns on -g2 with clang, so don't pass it.
asmflags = cflags
ldflags = []
}
diff --git a/chromium/build/config/compiler/compiler.gni b/chromium/build/config/compiler/compiler.gni
index 787990a0a62..e4f463a76d6 100644
--- a/chromium/build/config/compiler/compiler.gni
+++ b/chromium/build/config/compiler/compiler.gni
@@ -3,6 +3,7 @@
# found in the LICENSE file.
import("//build/config/android/config.gni")
+import("//build/config/arm.gni")
import("//build/config/chrome_build.gni")
import("//build/config/chromecast_build.gni")
import("//build/config/compiler/pgo/pgo.gni")
@@ -15,7 +16,9 @@ declare_args() {
# How many symbols to include in the build. This affects the performance of
# the build since the symbols are large and dealing with them is slow.
# 2 means regular build with symbols.
- # 1 means minimal symbols, usually enough for backtraces only.
+ # 1 means minimal symbols, usually enough for backtraces only. Symbols with
+ # internal linkage (static functions or those in anonymous namespaces) may not
+ # appear when using this level.
# 0 means no symbols.
# -1 means auto-set according to debug/release and platform.
symbol_level = -1
@@ -58,9 +61,59 @@ declare_args() {
#
# See crbug.com/669854.
linkrepro_root_dir = ""
+
+ # Whether or not we should use position independent code.
+ use_pic = true
+}
+
+# Determine the default setting for enable_frame_pointers, based on the platform
+# and build arguments.
+if (is_mac || is_ios) {
+ _default_enable_frame_pointers = true
+} else if (is_win) {
+ # 64-bit Windows ABI doesn't support frame pointers.
+ if (target_cpu == "x64") {
+ _default_enable_frame_pointers = false
+ } else {
+ _default_enable_frame_pointers = true
+ }
+} else if (current_cpu == "arm64") {
+ # Ensure that stacks from arm64 crash dumps are usable (crbug.com/391706).
+ _default_enable_frame_pointers = true
+} else if (is_chromeos && current_cpu == "arm" && arm_use_thumb) {
+ # TODO(711784): Building ARM Thumb without frame pointers can lead to code
+ # in ChromeOS which triggers some ARM A12/A17 errata.
+ _default_enable_frame_pointers = true
+} else {
+ # Explicitly ask for frame pointers, otherwise:
+ # * Stacks may be missing for sanitizer and profiling builds.
+ # * Debug tcmalloc can crash (crbug.com/636489).
+ _default_enable_frame_pointers =
+ using_sanitizer || enable_profiling || is_debug
}
declare_args() {
+ # True if frame pointers should be generated, false otherwise.
+ enable_frame_pointers = _default_enable_frame_pointers
+}
+
+# In general assume that if we have frame pointers then we can use them to
+# unwind the stack. However, this requires that they are enabled by default for
+# most translation units, that they are emitted correctly, and that the
+# compiler or platform provides a way to access them.
+can_unwind_with_frame_pointers = enable_frame_pointers
+if (current_cpu == "arm" && arm_use_thumb) {
+ # We cannot currently unwind ARM Thumb frame pointers correctly.
+ can_unwind_with_frame_pointers = false
+} else if (is_win) {
+ # Windows 32-bit does provide frame pointers, but the compiler does not
+ # provide intrinsics to access them, so we don't use them.
+ can_unwind_with_frame_pointers = false
+}
+
+assert(!can_unwind_with_frame_pointers || enable_frame_pointers)
+
+declare_args() {
# Whether or not the official builds should be built with full WPO. Enabled by
# default for the PGO and the x64 builds.
if (chrome_pgo_phase > 0) {
diff --git a/chromium/build/config/features.gni b/chromium/build/config/features.gni
index 564b45a8b82..441bd21026a 100644
--- a/chromium/build/config/features.gni
+++ b/chromium/build/config/features.gni
@@ -67,22 +67,7 @@ declare_args() {
use_gconf = is_linux && !is_chromeos && !is_chromecast
use_gio = is_linux && !is_chromeos && !is_chromecast
-
- # Whether or not to use external popup menu.
- use_external_popup_menu = is_android || is_mac
-
- # Enable WebVR support by default on Android
- # Still requires command line flag to access API
- # TODO(bshe): Enable for other architecture too. Currently we only support arm
- # and arm64.
- enable_webvr = is_android && (current_cpu == "arm" || current_cpu == "arm64")
}
-
-# Additional dependent variables -----------------------------------------------
-
-# Chrome OS: whether to also build the upcoming version of
-# ChromeVox, which can then be enabled via a command-line switch.
-enable_chromevox_next = false
#
# =============================================
# PLEASE DO NOT ADD MORE FLAGS TO THIS FILE
diff --git a/chromium/build/config/freetype/BUILD.gn b/chromium/build/config/freetype/BUILD.gn
new file mode 100644
index 00000000000..85d81cac144
--- /dev/null
+++ b/chromium/build/config/freetype/BUILD.gn
@@ -0,0 +1,15 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/features.gni")
+
+group("freetype") {
+ if (is_chromecast || is_android || is_win) {
+ public_deps = [
+ "//third_party/freetype",
+ ]
+ } else if (is_linux) {
+ public_configs = [ "//build/linux:freetype_from_pkgconfig" ]
+ }
+}
diff --git a/chromium/build/config/freetype/OWNERS b/chromium/build/config/freetype/OWNERS
new file mode 100644
index 00000000000..3277f87312e
--- /dev/null
+++ b/chromium/build/config/freetype/OWNERS
@@ -0,0 +1,2 @@
+bungeman@chromium.org
+drott@chromium.org
diff --git a/chromium/build/config/ios/rules.gni b/chromium/build/config/ios/rules.gni
index 489e06f9481..2cafb96c34f 100644
--- a/chromium/build/config/ios/rules.gni
+++ b/chromium/build/config/ios/rules.gni
@@ -1146,13 +1146,16 @@ template("ios_framework_bundle") {
":$_link_target_name($default_toolchain)",
]
- if (!defined(public_configs)) {
- public_configs = []
- }
- public_configs += [ ":$_framework_public_config" ]
if (_has_public_headers) {
+ if (!defined(public_configs)) {
+ public_configs = []
+ }
public_configs += [ ":$_framework_headers_config" ]
}
+ if (!defined(all_dependent_configs)) {
+ all_dependent_configs = []
+ }
+ all_dependent_configs += [ ":$_framework_public_config" ]
}
if (defined(invoker.bundle_deps)) {
@@ -1166,6 +1169,12 @@ template("ios_framework_bundle") {
_compile_headers_map_target = _target_name + "_compile_headers_map"
action(_compile_headers_map_target) {
visibility = [ ":$_framework_headers_target" ]
+ forward_variables_from(invoker,
+ [
+ "deps",
+ "public_deps",
+ "testonly",
+ ])
script = "//build/config/ios/write_framework_hmap.py"
outputs = [
_header_map_filename,
@@ -1211,6 +1220,7 @@ template("ios_framework_bundle") {
}
group(_framework_headers_target) {
+ forward_variables_from(invoker, [ "testonly" ])
deps = [
":$_compile_headers_map_target",
":$_copy_public_headers_target",
@@ -1297,13 +1307,16 @@ template("ios_framework_bundle") {
}
public_deps += [ ":$_target_name" ]
- if (!defined(public_configs)) {
- public_configs = []
- }
- public_configs += [ ":$_framework_public_config" ]
if (_has_public_headers) {
+ if (!defined(public_configs)) {
+ public_configs = []
+ }
public_configs += [ ":$_framework_headers_config" ]
}
+ if (!defined(all_dependent_configs)) {
+ all_dependent_configs = []
+ }
+ all_dependent_configs += [ ":$_framework_public_config" ]
}
bundle_data(_target_name + "+bundle") {
diff --git a/chromium/build/config/linux/gtk/gtk.gni b/chromium/build/config/linux/gtk/gtk.gni
index b3fd973d512..0521a8cd48c 100644
--- a/chromium/build/config/linux/gtk/gtk.gni
+++ b/chromium/build/config/linux/gtk/gtk.gni
@@ -7,6 +7,9 @@
# //build/config/linux/gtk and it will switch for you.
declare_args() {
- # Whether to compile agains GTKv3 instead of GTKv2.
- use_gtk3 = false
+ # Whether to compile against GTKv3 instead of GTKv2.
+ # TODO(thomasanderson): Currently, the 32-bit clusterfuzz schroot does not
+  # have libgtk-3-0 installed. Set use_gtk3 = true on all architectures when
+ # the package is added (crbug.com/699669).
+ use_gtk3 = target_cpu == "x64"
}
diff --git a/chromium/build/config/linux/gtk2/BUILD.gn b/chromium/build/config/linux/gtk2/BUILD.gn
index 58af02a9b19..010d592df4a 100644
--- a/chromium/build/config/linux/gtk2/BUILD.gn
+++ b/chromium/build/config/linux/gtk2/BUILD.gn
@@ -26,7 +26,6 @@ pkg_config("gtk2_internal_config") {
# parts that explicitly need GTK2 are whitelisted on this target.
group("gtk2") {
visibility = [
- "//gpu/gles2_conform_support:gles2_conform_test_windowless",
"//build/config/linux/gtk",
"//chrome/browser/ui/libgtkui:*",
]
diff --git a/chromium/build/config/posix/BUILD.gn b/chromium/build/config/posix/BUILD.gn
index d7e917af441..11cad83d075 100644
--- a/chromium/build/config/posix/BUILD.gn
+++ b/chromium/build/config/posix/BUILD.gn
@@ -13,19 +13,6 @@ group("posix") {
visibility = [ "//:optimize_gn_gen" ]
}
-# This is included by reference in the //build/config/compiler config that
-# is applied to all Posix targets. It is here to separate out the logic that is
-# Posix-only. Note that this is in addition to an OS-specific variant of this
-# config.
-config("compiler") {
- if ((allow_posix_link_time_opt || is_cfi) && !is_nacl) {
- arflags = [
- "--plugin",
- rebase_path("$clang_base_path/lib/LLVMgold.so", root_build_dir),
- ]
- }
-}
-
# This is included by reference in the //build/config/compiler:runtime_library
# config that is applied to all targets. It is here to separate out the logic
# that is Posix-only. Please see that target for advice on what should go in
diff --git a/chromium/build/config/sanitizers/BUILD.gn b/chromium/build/config/sanitizers/BUILD.gn
index 845d992d6ed..f0343310392 100644
--- a/chromium/build/config/sanitizers/BUILD.gn
+++ b/chromium/build/config/sanitizers/BUILD.gn
@@ -200,7 +200,10 @@ config("default_sanitizer_ldflags") {
ldflags = []
if (is_asan) {
ldflags += [ "-fsanitize=address" ]
- if (!is_mac) {
+ if (is_mac) {
+ # https://crbug.com/708707
+ ldflags += [ "-fno-sanitize-address-use-after-scope" ]
+ } else {
ldflags += [ "-fsanitize-address-use-after-scope" ]
}
}
@@ -314,6 +317,9 @@ config("asan_flags") {
cflags += [ "-fsanitize=address" ]
if (!is_mac && !is_win) {
cflags += [ "-fsanitize-address-use-after-scope" ]
+ } else if (!is_win) {
+ # https://crbug.com/708707
+ cflags += [ "-fno-sanitize-address-use-after-scope" ]
}
if (!asan_globals) {
cflags += [
@@ -385,6 +391,10 @@ config("cfi_flags") {
]
}
+ if (use_cfi_icall) {
+ cflags += [ "-fsanitize=cfi-icall" ]
+ }
+
if (use_cfi_diag) {
cflags += [
"-fno-sanitize-trap=cfi",
diff --git a/chromium/build/config/sanitizers/sanitizers.gni b/chromium/build/config/sanitizers/sanitizers.gni
index 2a91c3de95c..6feef783ad9 100644
--- a/chromium/build/config/sanitizers/sanitizers.gni
+++ b/chromium/build/config/sanitizers/sanitizers.gni
@@ -62,6 +62,11 @@ declare_args() {
# https://crbug.com/626794
use_cfi_cast = false
+ # Enable checks for indirect function calls via a function pointer.
+ # TODO(pcc): remove this when we're ready to add these checks by default.
+ # https://crbug.com/701919
+ use_cfi_icall = false
+
# By default, Control Flow Integrity will crash the program if it detects a
# violation. Set this to true to print detailed diagnostics instead.
use_cfi_diag = false
diff --git a/chromium/build/config/sysroot.gni b/chromium/build/config/sysroot.gni
index 2bcd6caae1e..aa43c2cd0bd 100644
--- a/chromium/build/config/sysroot.gni
+++ b/chromium/build/config/sysroot.gni
@@ -12,23 +12,13 @@ declare_args() {
# the target toolchain.
target_sysroot = ""
- # The absolute path to directory containing sysroots for linux 32 and 64bit
- target_sysroot_dir = ""
+  # The absolute path to the directory containing Linux sysroot images.
+ target_sysroot_dir = "//build/linux"
use_sysroot = true
-
- # TODO(tonikitoo): Remove this arg when wheezy is finally dropped and we
- # can just use debian/jessie everywhere by default. crbug.com/564904.
- use_jessie_sysroot = false
}
-if (is_linux && target_sysroot_dir != "") {
- if (current_cpu == "x64") {
- sysroot = target_sysroot_dir + "/wheezy-x64"
- } else if (current_cpu == "x86") {
- sysroot = target_sysroot_dir + "/wheezy-ia32"
- }
-} else if (current_os == target_os && current_cpu == target_cpu &&
+if (current_os == target_os && current_cpu == target_cpu &&
target_sysroot != "") {
sysroot = target_sysroot
} else if (is_android) {
@@ -46,46 +36,26 @@ if (is_linux && target_sysroot_dir != "") {
} else if (current_cpu == "mips64el") {
sysroot = "$android_ndk_root/$mips64_android_sysroot_subdir"
} else {
- sysroot = ""
+ assert(false, "No android sysroot for cpu: $target_cpu")
}
} else if (is_linux && use_sysroot) {
# By default build against a sysroot image downloaded from Cloud Storage
# during gclient runhooks.
- if (is_chromeos) {
- # Regular ChromeOS builds use custom sysroots, but desktop ChromeOS (where
- # we're building linux desktop binaries but using the ChromeOS UI and
- # functionality where possible) needs to run on the buliders, which are
- # running precise. The precise build has a different set of dependencies
- # from the wheezy build, so we cannot use the wheezy sysroot.
- sysroot = "//build/linux/ubuntu_precise_amd64-sysroot"
+ if (current_cpu == "x64") {
+ sysroot = "$target_sysroot_dir/debian_jessie_amd64-sysroot"
+ } else if (current_cpu == "x86") {
+ sysroot = "$target_sysroot_dir/debian_jessie_i386-sysroot"
+ } else if (current_cpu == "mipsel") {
+ sysroot = "$target_sysroot_dir/debian_jessie_mips-sysroot"
+ } else if (current_cpu == "arm") {
+ sysroot = "$target_sysroot_dir/debian_jessie_arm-sysroot"
+ } else if (current_cpu == "arm64") {
+ sysroot = "$target_sysroot_dir/debian_jessie_arm64-sysroot"
} else {
- if (current_cpu == "x64") {
- sysroot = "//build/linux/debian_wheezy_amd64-sysroot"
-
- if (use_jessie_sysroot) {
- sysroot = "//build/linux/debian_jessie_amd64-sysroot"
- }
- } else if (current_cpu == "x86") {
- sysroot = "//build/linux/debian_wheezy_i386-sysroot"
- } else if (current_cpu == "mipsel") {
- sysroot = "//build/linux/debian_wheezy_mips-sysroot"
- } else if (current_cpu == "arm") {
- sysroot = "//build/linux/debian_wheezy_arm-sysroot"
- } else if (current_cpu == "arm64") {
- sysroot = "//build/linux/debian_jessie_arm64-sysroot"
- } else {
- # Any other builds don't use a sysroot.
- sysroot = ""
- }
+ assert(false, "No linux sysroot for cpu: $target_cpu")
}
if (sysroot != "") {
- # Our sysroot images only contains gcc 4.6 headers, but chromium requires
- # gcc 4.9. Clang is able to detect and work with the 4.6 headers while
- # gcc is not. This check can be removed if we ever update to a more modern
- # sysroot.
- assert(is_clang, "sysroot images require clang (try use_sysroot=false)")
-
_script_arch = current_cpu
if (_script_arch == "x86") {
_script_arch = "i386"
diff --git a/chromium/build/config/ui.gni b/chromium/build/config/ui.gni
index 926c0d06b88..aed71b47c66 100644
--- a/chromium/build/config/ui.gni
+++ b/chromium/build/config/ui.gni
@@ -35,12 +35,6 @@ declare_args() {
# Whether we should use glib, a low level C utility library.
use_glib = is_linux
-
- # Indicates if Wayland display server support is enabled.
- enable_wayland_server = is_chromeos
-
- # Enable experimental vulkan backend.
- enable_vulkan = false
}
declare_args() {
@@ -53,10 +47,6 @@ declare_args() {
#
# These variables depend on other variables and can't be set externally.
-# Use GPU accelerated cross process image transport by default on linux builds
-# with the Aura window manager.
-ui_compositor_image_transport = use_aura && is_linux
-
# Indicates if the UI toolkit depends on X11.
use_x11 = is_linux && !use_ozone
diff --git a/chromium/build/config/win/BUILD.gn b/chromium/build/config/win/BUILD.gn
index 5e1f7fcc4d8..215631366da 100644
--- a/chromium/build/config/win/BUILD.gn
+++ b/chromium/build/config/win/BUILD.gn
@@ -123,7 +123,7 @@ config("compiler") {
}
config("vs_code_analysis") {
- if (use_vs_code_analysis) {
+ if (use_vs_code_analysis && !is_clang) {
# When use_vs_code_analysis is specified add the /analyze switch to enable
# static analysis. Specifying /analyze:WX- says that /analyze warnings
# should not be treated as errors.
@@ -252,6 +252,15 @@ config("common_linker_setup") {
ldflags += [ "/DYNAMICBASE" ]
}
+ if (win_linker_timing) {
+ ldflags += [
+ "/time",
+ "/verbose:incr",
+ ]
+ }
+}
+
+config("cfi_linker") {
# Control Flow Guard (CFG)
# https://msdn.microsoft.com/en-us/library/windows/desktop/mt637065.aspx
# /DYNAMICBASE (ASLR) is turned off in debug builds, therefore CFG can’t be
@@ -259,14 +268,7 @@ config("common_linker_setup") {
# TODO(thakis): Turn this on with lld once supported, https://crbug.com/693709
if (!is_debug && !use_lld) {
# Turn on CFG in msvc linker, regardless of compiler used.
- ldflags += [ "/guard:cf" ]
- }
-
- if (win_linker_timing) {
- ldflags += [
- "/time",
- "/verbose:incr",
- ]
+ ldflags = [ "/guard:cf" ]
}
}
diff --git a/chromium/build/config/win/msvs_dependencies.isolate b/chromium/build/config/win/msvs_dependencies.isolate
deleted file mode 100644
index d33aec4950b..00000000000
--- a/chromium/build/config/win/msvs_dependencies.isolate
+++ /dev/null
@@ -1,179 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
- 'conditions': [
- # Copy the VS runtime DLLs into the isolate so that they
- # don't have to be preinstalled on the target machine.
- #
- # VS2013 runtimes
- ['OS=="win" and msvs_version==2013 and component=="shared_library" and CONFIGURATION_NAME=="Debug"', {
- 'variables': {
- 'files': [
- '<(PRODUCT_DIR)/x64/msvcp120d.dll',
- '<(PRODUCT_DIR)/x64/msvcr120d.dll',
- ],
- },
- }],
- ['OS=="win" and msvs_version==2013 and component=="shared_library" and CONFIGURATION_NAME=="Release"', {
- 'variables': {
- 'files': [
- '<(PRODUCT_DIR)/x64/msvcp120.dll',
- '<(PRODUCT_DIR)/x64/msvcr120.dll',
- ],
- },
- }],
- ['OS=="win" and msvs_version==2013 and component=="shared_library" and (CONFIGURATION_NAME=="Debug" or CONFIGURATION_NAME=="Debug_x64")', {
- 'variables': {
- 'files': [
- '<(PRODUCT_DIR)/msvcp120d.dll',
- '<(PRODUCT_DIR)/msvcr120d.dll',
- ],
- },
- }],
- ['OS=="win" and msvs_version==2013 and component=="shared_library" and (CONFIGURATION_NAME=="Release" or CONFIGURATION_NAME=="Release_x64")', {
- 'variables': {
- 'files': [
- '<(PRODUCT_DIR)/msvcp120.dll',
- '<(PRODUCT_DIR)/msvcr120.dll',
- ],
- },
- }],
- # VS2015 runtimes
- ['OS=="win" and msvs_version==2015 and component=="shared_library" and CONFIGURATION_NAME=="Debug"', {
- 'variables': {
- 'files': [
- '<(PRODUCT_DIR)/x64/msvcp140d.dll',
- '<(PRODUCT_DIR)/x64/vccorlib140d.dll',
- '<(PRODUCT_DIR)/x64/vcruntime140d.dll',
- '<(PRODUCT_DIR)/x64/ucrtbased.dll',
- ],
- },
- }],
- ['OS=="win" and msvs_version==2015 and component=="shared_library" and CONFIGURATION_NAME=="Release"', {
- 'variables': {
- 'files': [
- '<(PRODUCT_DIR)/x64/msvcp140.dll',
- '<(PRODUCT_DIR)/x64/vccorlib140.dll',
- '<(PRODUCT_DIR)/x64/vcruntime140.dll',
- '<(PRODUCT_DIR)/x64/ucrtbase.dll',
- ],
- },
- }],
- ['OS=="win" and msvs_version==2015 and component=="shared_library" and (CONFIGURATION_NAME=="Debug" or CONFIGURATION_NAME=="Debug_x64")', {
- 'variables': {
- 'files': [
- '<(PRODUCT_DIR)/msvcp140d.dll',
- '<(PRODUCT_DIR)/vccorlib140d.dll',
- '<(PRODUCT_DIR)/vcruntime140d.dll',
- '<(PRODUCT_DIR)/ucrtbased.dll',
- ],
- },
- }],
- ['OS=="win" and msvs_version==2015 and component=="shared_library" and (CONFIGURATION_NAME=="Release" or CONFIGURATION_NAME=="Release_x64")', {
- 'variables': {
- 'files': [
- '<(PRODUCT_DIR)/msvcp140.dll',
- '<(PRODUCT_DIR)/vccorlib140.dll',
- '<(PRODUCT_DIR)/vcruntime140.dll',
- '<(PRODUCT_DIR)/ucrtbase.dll',
- ],
- },
- }],
- ['OS=="win" and msvs_version==2015 and component=="shared_library" and target_arch=="ia32"', {
- # 32-bit builds have an x64 directory which also needs Windows 10
- # Universal C Runtime binaries copied over.
- 'variables': {
- 'files': [
- '<(PRODUCT_DIR)/x64/api-ms-win-core-console-l1-1-0.dll',
- '<(PRODUCT_DIR)/x64/api-ms-win-core-datetime-l1-1-0.dll',
- '<(PRODUCT_DIR)/x64/api-ms-win-core-debug-l1-1-0.dll',
- '<(PRODUCT_DIR)/x64/api-ms-win-core-errorhandling-l1-1-0.dll',
- '<(PRODUCT_DIR)/x64/api-ms-win-core-file-l1-1-0.dll',
- '<(PRODUCT_DIR)/x64/api-ms-win-core-file-l1-2-0.dll',
- '<(PRODUCT_DIR)/x64/api-ms-win-core-file-l2-1-0.dll',
- '<(PRODUCT_DIR)/x64/api-ms-win-core-handle-l1-1-0.dll',
- '<(PRODUCT_DIR)/x64/api-ms-win-core-heap-l1-1-0.dll',
- '<(PRODUCT_DIR)/x64/api-ms-win-core-interlocked-l1-1-0.dll',
- '<(PRODUCT_DIR)/x64/api-ms-win-core-libraryloader-l1-1-0.dll',
- '<(PRODUCT_DIR)/x64/api-ms-win-core-localization-l1-2-0.dll',
- '<(PRODUCT_DIR)/x64/api-ms-win-core-memory-l1-1-0.dll',
- '<(PRODUCT_DIR)/x64/api-ms-win-core-namedpipe-l1-1-0.dll',
- '<(PRODUCT_DIR)/x64/api-ms-win-core-processenvironment-l1-1-0.dll',
- '<(PRODUCT_DIR)/x64/api-ms-win-core-processthreads-l1-1-0.dll',
- '<(PRODUCT_DIR)/x64/api-ms-win-core-processthreads-l1-1-1.dll',
- '<(PRODUCT_DIR)/x64/api-ms-win-core-profile-l1-1-0.dll',
- '<(PRODUCT_DIR)/x64/api-ms-win-core-rtlsupport-l1-1-0.dll',
- '<(PRODUCT_DIR)/x64/api-ms-win-core-string-l1-1-0.dll',
- '<(PRODUCT_DIR)/x64/api-ms-win-core-synch-l1-1-0.dll',
- '<(PRODUCT_DIR)/x64/api-ms-win-core-synch-l1-2-0.dll',
- '<(PRODUCT_DIR)/x64/api-ms-win-core-sysinfo-l1-1-0.dll',
- '<(PRODUCT_DIR)/x64/api-ms-win-core-timezone-l1-1-0.dll',
- '<(PRODUCT_DIR)/x64/api-ms-win-core-util-l1-1-0.dll',
- '<(PRODUCT_DIR)/x64/api-ms-win-crt-conio-l1-1-0.dll',
- '<(PRODUCT_DIR)/x64/api-ms-win-crt-convert-l1-1-0.dll',
- '<(PRODUCT_DIR)/x64/api-ms-win-crt-environment-l1-1-0.dll',
- '<(PRODUCT_DIR)/x64/api-ms-win-crt-filesystem-l1-1-0.dll',
- '<(PRODUCT_DIR)/x64/api-ms-win-crt-heap-l1-1-0.dll',
- '<(PRODUCT_DIR)/x64/api-ms-win-crt-locale-l1-1-0.dll',
- '<(PRODUCT_DIR)/x64/api-ms-win-crt-math-l1-1-0.dll',
- '<(PRODUCT_DIR)/x64/api-ms-win-crt-multibyte-l1-1-0.dll',
- '<(PRODUCT_DIR)/x64/api-ms-win-crt-private-l1-1-0.dll',
- '<(PRODUCT_DIR)/x64/api-ms-win-crt-process-l1-1-0.dll',
- '<(PRODUCT_DIR)/x64/api-ms-win-crt-runtime-l1-1-0.dll',
- '<(PRODUCT_DIR)/x64/api-ms-win-crt-stdio-l1-1-0.dll',
- '<(PRODUCT_DIR)/x64/api-ms-win-crt-string-l1-1-0.dll',
- '<(PRODUCT_DIR)/x64/api-ms-win-crt-time-l1-1-0.dll',
- '<(PRODUCT_DIR)/x64/api-ms-win-crt-utility-l1-1-0.dll',
- ],
- },
- }],
- ['OS=="win" and msvs_version==2015 and component=="shared_library"', {
- # Windows 10 Universal C Runtime binaries.
- 'variables': {
- 'files': [
- '<(PRODUCT_DIR)/api-ms-win-core-console-l1-1-0.dll',
- '<(PRODUCT_DIR)/api-ms-win-core-datetime-l1-1-0.dll',
- '<(PRODUCT_DIR)/api-ms-win-core-debug-l1-1-0.dll',
- '<(PRODUCT_DIR)/api-ms-win-core-errorhandling-l1-1-0.dll',
- '<(PRODUCT_DIR)/api-ms-win-core-file-l1-1-0.dll',
- '<(PRODUCT_DIR)/api-ms-win-core-file-l1-2-0.dll',
- '<(PRODUCT_DIR)/api-ms-win-core-file-l2-1-0.dll',
- '<(PRODUCT_DIR)/api-ms-win-core-handle-l1-1-0.dll',
- '<(PRODUCT_DIR)/api-ms-win-core-heap-l1-1-0.dll',
- '<(PRODUCT_DIR)/api-ms-win-core-interlocked-l1-1-0.dll',
- '<(PRODUCT_DIR)/api-ms-win-core-libraryloader-l1-1-0.dll',
- '<(PRODUCT_DIR)/api-ms-win-core-localization-l1-2-0.dll',
- '<(PRODUCT_DIR)/api-ms-win-core-memory-l1-1-0.dll',
- '<(PRODUCT_DIR)/api-ms-win-core-namedpipe-l1-1-0.dll',
- '<(PRODUCT_DIR)/api-ms-win-core-processenvironment-l1-1-0.dll',
- '<(PRODUCT_DIR)/api-ms-win-core-processthreads-l1-1-0.dll',
- '<(PRODUCT_DIR)/api-ms-win-core-processthreads-l1-1-1.dll',
- '<(PRODUCT_DIR)/api-ms-win-core-profile-l1-1-0.dll',
- '<(PRODUCT_DIR)/api-ms-win-core-rtlsupport-l1-1-0.dll',
- '<(PRODUCT_DIR)/api-ms-win-core-string-l1-1-0.dll',
- '<(PRODUCT_DIR)/api-ms-win-core-synch-l1-1-0.dll',
- '<(PRODUCT_DIR)/api-ms-win-core-synch-l1-2-0.dll',
- '<(PRODUCT_DIR)/api-ms-win-core-sysinfo-l1-1-0.dll',
- '<(PRODUCT_DIR)/api-ms-win-core-timezone-l1-1-0.dll',
- '<(PRODUCT_DIR)/api-ms-win-core-util-l1-1-0.dll',
- '<(PRODUCT_DIR)/api-ms-win-crt-conio-l1-1-0.dll',
- '<(PRODUCT_DIR)/api-ms-win-crt-convert-l1-1-0.dll',
- '<(PRODUCT_DIR)/api-ms-win-crt-environment-l1-1-0.dll',
- '<(PRODUCT_DIR)/api-ms-win-crt-filesystem-l1-1-0.dll',
- '<(PRODUCT_DIR)/api-ms-win-crt-heap-l1-1-0.dll',
- '<(PRODUCT_DIR)/api-ms-win-crt-locale-l1-1-0.dll',
- '<(PRODUCT_DIR)/api-ms-win-crt-math-l1-1-0.dll',
- '<(PRODUCT_DIR)/api-ms-win-crt-multibyte-l1-1-0.dll',
- '<(PRODUCT_DIR)/api-ms-win-crt-private-l1-1-0.dll',
- '<(PRODUCT_DIR)/api-ms-win-crt-process-l1-1-0.dll',
- '<(PRODUCT_DIR)/api-ms-win-crt-runtime-l1-1-0.dll',
- '<(PRODUCT_DIR)/api-ms-win-crt-stdio-l1-1-0.dll',
- '<(PRODUCT_DIR)/api-ms-win-crt-string-l1-1-0.dll',
- '<(PRODUCT_DIR)/api-ms-win-crt-time-l1-1-0.dll',
- '<(PRODUCT_DIR)/api-ms-win-crt-utility-l1-1-0.dll',
- ],
- },
- }],
- ],
-}
diff --git a/chromium/build/dotfile_settings.gni b/chromium/build/dotfile_settings.gni
index 62ca3c70b95..eec0020d5b6 100644
--- a/chromium/build/dotfile_settings.gni
+++ b/chromium/build/dotfile_settings.gni
@@ -7,7 +7,6 @@
build_dotfile_settings = {
exec_script_whitelist = [
- "//build/config/BUILD.gn",
"//build/config/android/config.gni",
"//build/config/android/internal_rules.gni",
"//build/config/android/rules.gni",
diff --git a/chromium/build/experimental/install-build-deps.py b/chromium/build/experimental/install-build-deps.py
index 1687b94b103..04a26ab9d8f 100755
--- a/chromium/build/experimental/install-build-deps.py
+++ b/chromium/build/experimental/install-build-deps.py
@@ -13,7 +13,6 @@ import sys
SUPPORTED_UBUNTU_VERSIONS = (
- {'number': '12.04', 'codename': 'precise'},
{'number': '14.04', 'codename': 'trusty'},
{'number': '14.10', 'codename': 'utopic'},
{'number': '15.04', 'codename': 'vivid'},
@@ -300,9 +299,7 @@ def compute_dynamic_package_lists():
lsb_codename = lsb_release_short_codename()
# Find the proper version of libstdc++6-4.x-dbg.
- if lsb_codename == 'precise':
- _packages_dbg += ('libstdc++6-4.6-dbg',)
- elif lsb_codename == 'trusty':
+ if lsb_codename == 'trusty':
_packages_dbg += ('libstdc++6-4.8-dbg',)
else:
_packages_dbg += ('libstdc++6-4.9-dbg',)
diff --git a/chromium/build/fix_gn_headers.py b/chromium/build/fix_gn_headers.py
new file mode 100755
index 00000000000..aa98c73fb09
--- /dev/null
+++ b/chromium/build/fix_gn_headers.py
@@ -0,0 +1,154 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Fix header files missing in GN.
+
+This script takes the missing header files from check_gn_headers.py, and
+tries to fix them by adding them to the GN files.
+Manual cleaning up is likely required afterwards.
+"""
+
+import argparse
+import os
+import re
+import subprocess
+import sys
+
+
+def GitGrep(pattern):
+ p = subprocess.Popen(
+ ['git', 'grep', '-En', pattern, '--', '*.gn', '*.gni'],
+ stdout=subprocess.PIPE)
+ out, _ = p.communicate()
+ return out, p.returncode
+
+
+def ValidMatches(basename, cc, grep_lines):
+ """Filter out 'git grep' matches with header files already."""
+ matches = []
+ for line in grep_lines:
+ gnfile, linenr, contents = line.split(':')
+ linenr = int(linenr)
+ new = re.sub(cc, basename, contents)
+ lines = open(gnfile).read().splitlines()
+ assert contents in lines[linenr - 1]
+ # Skip if it's already there. It could be before or after the match.
+ if lines[linenr] == new:
+ continue
+ if lines[linenr - 2] == new:
+ continue
+ print ' ', gnfile, linenr, new
+ matches.append((gnfile, linenr, new))
+ return matches
+
+
+def AddHeadersNextToCC(headers, skip_ambiguous=True):
+ """Add header files next to the corresponding .cc files in GN files.
+
+ When skip_ambiguous is True, skip if multiple .cc files are found.
+ Returns unhandled headers.
+
+ Manual cleaning up is likely required, especially if not skip_ambiguous.
+ """
+ edits = {}
+ unhandled = []
+ for filename in headers:
+ filename = filename.strip()
+ if not (filename.endswith('.h') or filename.endswith('.hh')):
+ continue
+ basename = os.path.basename(filename)
+ print filename
+ cc = r'\b' + os.path.splitext(basename)[0] + r'\.(cc|cpp|mm)\b'
+ out, returncode = GitGrep('(/|")' + cc + '"')
+ if returncode != 0 or not out:
+ unhandled.append(filename)
+ continue
+
+ matches = ValidMatches(basename, cc, out.splitlines())
+
+ if len(matches) == 0:
+ continue
+ if len(matches) > 1:
+ print '\n[WARNING] Ambiguous matching for', filename
+ for i in enumerate(matches, 1):
+ print '%d: %s' % (i[0], i[1])
+ print
+ if skip_ambiguous:
+ continue
+
+ picked = raw_input('Pick the matches ("2,3" for multiple): ')
+ try:
+ matches = [matches[int(i) - 1] for i in picked.split(',')]
+ except (ValueError, IndexError):
+ continue
+
+ for match in matches:
+ gnfile, linenr, new = match
+ print ' ', gnfile, linenr, new
+ edits.setdefault(gnfile, {})[linenr] = new
+
+ for gnfile in edits:
+ lines = open(gnfile).read().splitlines()
+ for l in sorted(edits[gnfile].keys(), reverse=True):
+ lines.insert(l, edits[gnfile][l])
+ open(gnfile, 'w').write('\n'.join(lines) + '\n')
+
+ return unhandled
+
+
+def AddHeadersToSources(headers, skip_ambiguous=True):
+ """Add header files to the sources list in the first GN file.
+
+ The target GN file is the first one up the parent directories.
+ This usually does the wrong thing for _test files if the test and the main
+ target are in the same .gn file.
+ When skip_ambiguous is True, skip if multiple sources arrays are found.
+
+ "git cl format" afterwards is required. Manually cleaning up duplicated items
+ is likely required.
+ """
+ for filename in headers:
+ filename = filename.strip()
+ print filename
+ dirname = os.path.dirname(filename)
+ while not os.path.exists(os.path.join(dirname, 'BUILD.gn')):
+ dirname = os.path.dirname(dirname)
+ rel = filename[len(dirname) + 1:]
+ gnfile = os.path.join(dirname, 'BUILD.gn')
+
+ lines = open(gnfile).read().splitlines()
+ matched = [i for i, l in enumerate(lines) if ' sources = [' in l]
+ if skip_ambiguous and len(matched) > 1:
+ print '[WARNING] Multiple sources in', gnfile
+ continue
+
+ if len(matched) < 1:
+ continue
+ print ' ', gnfile, rel
+ index = matched[0]
+ lines.insert(index + 1, '"%s",' % rel)
+ open(gnfile, 'w').write('\n'.join(lines) + '\n')
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('input_file',
+ help="missing headers, output of check_gn_headers.py")
+ parser.add_argument('--prefix',
+ help="only handle path name with this prefix")
+
+ args, _extras = parser.parse_known_args()
+
+ headers = open(args.input_file).readlines()
+
+ if args.prefix:
+ headers = [i for i in headers if i.startswith(args.prefix)]
+
+ unhandled = AddHeadersNextToCC(headers)
+ AddHeadersToSources(unhandled)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
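
The new fix_gn_headers.py above expects its input file to contain one
missing-header path per line (its docstring points to check_gn_headers.py as
the producer) and edits the nearest BUILD.gn files in place. A hypothetical
driver sketch (the input filename and the --prefix value are illustrative):

    # Hypothetical driver for fix_gn_headers.py; assumes missing_headers.txt
    # already holds check_gn_headers.py output, one header path per line.
    import subprocess

    subprocess.check_call(
        ['python', 'chromium/build/fix_gn_headers.py',
         'missing_headers.txt', '--prefix', 'base/'])
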
diff --git a/chromium/build/get_landmines.py b/chromium/build/get_landmines.py
index c8d501bc218..05e7b1a5e10 100755
--- a/chromium/build/get_landmines.py
+++ b/chromium/build/get_landmines.py
@@ -44,7 +44,7 @@ def print_landmines():
if platform() in ('win', 'mac'):
print ('Improper dependency for create_nmf.py broke in r240802, '
'fixed in r240860.')
- if (platform() == 'win' and gyp_msvs_version().startswith('2015')):
+ if platform() == 'win':
print 'Switch to VS2015 Update 3, 14393 SDK'
print 'Need to clobber everything due to an IDL change in r154579 (blink)'
print 'Need to clobber everything due to gen file moves in r175513 (Blink)'
@@ -72,7 +72,8 @@ def print_landmines():
print 'Clobber to remove libsystem.dylib. See crbug.com/620075'
if platform() == 'mac':
print 'Clobber to get past mojo gen build error (crbug.com/679607)'
-
+ if platform() == 'win':
+ print 'Clobber Windows to fix strange PCH-not-rebuilt errors.'
def main():
print_landmines()
diff --git a/chromium/build/install-build-deps-android.sh b/chromium/build/install-build-deps-android.sh
index 6ae6e908d56..06f79aee998 100755
--- a/chromium/build/install-build-deps-android.sh
+++ b/chromium/build/install-build-deps-android.sh
@@ -1,4 +1,4 @@
-#!/bin/bash -e
+#!/bin/bash
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
@@ -8,10 +8,6 @@
# items requiring sudo privileges.
# See https://www.chromium.org/developers/how-tos/android-build-instructions
-# This script installs the sun-java6 packages (bin, jre and jdk). Sun requires
-# a license agreement, so upon installation it will prompt the user. To get
-# past the curses-based dialog press TAB <ret> TAB <ret> to agree.
-
args="$@"
if ! uname -m | egrep -q "i686|x86_64"; then
@@ -19,85 +15,64 @@ if ! uname -m | egrep -q "i686|x86_64"; then
exit
fi
-lsb_release=$(lsb_release --codename --short)
+# Exit if any commands fail.
+set -e
-case $lsb_release in
- xenial|yakkety)
- java_alternative="java-1.8.0-openjdk-amd64"
- java_pkgs="openjdk-8-jre openjdk-8-jdk"
- ;;
- *)
- java_alternative="java-1.7.0-openjdk-amd64"
- java_pkgs="openjdk-7-jre openjdk-7-jdk"
- ;;
-esac
+lsb_release=$(lsb_release --codename --short)
# Install first the default Linux build deps.
"$(dirname "${BASH_SOURCE[0]}")/install-build-deps.sh" \
--no-syms --lib32 --no-arm --no-chromeos-fonts --no-nacl --no-prompt "${args}"
-# The temporary directory used to store output of update-java-alternatives
-TEMPDIR=$(mktemp -d)
-cleanup() {
- local status=${?}
- trap - EXIT
- rm -rf "${TEMPDIR}"
- exit ${status}
-}
-trap cleanup EXIT
-
# Fix deps
sudo apt-get -f install
-# Install deps
-# This step differs depending on what Ubuntu release we are running
-# on since the package names are different, and Sun's Java must
-# be installed manually on late-model versions.
-
# common
sudo apt-get -y install lib32z1 lighttpd python-pexpect xvfb x11-utils
# Some binaries in the Android SDK require 32-bit libraries on the host.
# See https://developer.android.com/sdk/installing/index.html?pkg=tools
-if [[ $lsb_release == "precise" ]]; then
- sudo apt-get -y install ia32-libs
-else
- sudo apt-get -y install libncurses5:i386 libstdc++6:i386 zlib1g:i386
-fi
+sudo apt-get -y install libncurses5:i386 libstdc++6:i386 zlib1g:i386
+# Required by //components/cronet/tools/generate_javadoc.py
+# TODO(375324): Stop requiring ANT.
sudo apt-get -y install ant
# Required for apk-patch-size-estimator
sudo apt-get -y install bsdiff
-# Install openjdk and openjre stuff
-sudo apt-get -y install $java_pkgs
-
-# Switch version of Java to openjdk 7.
-# Some Java plugins (e.g. for firefox, mozilla) are not required to build, and
-# thus are treated only as warnings. Any errors in updating java alternatives
-# which are not '*-javaplugin.so' will cause errors and stop the script from
-# completing successfully.
-if ! sudo update-java-alternatives -s $java_alternative \
- >& "${TEMPDIR}"/update-java-alternatives.out
-then
- # Check that there are the expected javaplugin.so errors for the update
- if grep 'javaplugin.so' "${TEMPDIR}"/update-java-alternatives.out >& \
- /dev/null
- then
- # Print as warnings all the javaplugin.so errors
- echo 'WARNING: java-6-sun has no alternatives for the following plugins:'
- grep 'javaplugin.so' "${TEMPDIR}"/update-java-alternatives.out
- fi
- # Check if there are any errors that are not javaplugin.so
- if grep -v 'javaplugin.so' "${TEMPDIR}"/update-java-alternatives.out \
- >& /dev/null
- then
- # If there are non-javaplugin.so errors, treat as errors and exit
- echo 'ERRORS: Failed to update alternatives for java-6-sun:'
- grep -v 'javaplugin.so' "${TEMPDIR}"/update-java-alternatives.out
- exit 1
- fi
+# Do our own error handling for java.
+set +e
+
+function IsJava8() {
+ # Arg is either "java" or "javac"
+ $1 -version 2>&1 | grep -q '1\.8'
+}
+
+if ! (IsJava8 java && IsJava8 javac); then
+ sudo apt-get -y install openjdk-8-jre openjdk-8-jdk
+fi
+
+# There can be several reasons why java8 is not default despite being installed.
+# Just show an error and exit.
+if ! (IsJava8 java && IsJava8 javac); then
+ echo
+ echo "Automatic java installation failed."
+ echo '`java -version` reports:'
+ java -version
+ echo
+ echo '`javac -version` reports:'
+ javac -version
+ echo
+ echo "Please ensure that JDK 8 is installed and resolves first in your PATH."
+ echo -n '`which java` reports: '
+ which java
+ echo -n '`which javac` reports: '
+ which javac
+ echo
+ echo "You might also try running:"
+ echo " sudo update-java-alternatives -s java-1.8.0-openjdk-amd64"
+ exit 1
fi
echo "install-build-deps-android.sh complete."
diff --git a/chromium/build/install-build-deps.sh b/chromium/build/install-build-deps.sh
index 7f19b7f975d..0618b2a0df5 100755
--- a/chromium/build/install-build-deps.sh
+++ b/chromium/build/install-build-deps.sh
@@ -111,12 +111,14 @@ if ! which lsb_release > /dev/null; then
fi
lsb_release=$(lsb_release --codename --short)
-supported_releases="(precise|trusty|utopic|vivid|wily|xenial|yakkety|jessie)"
+supported_releases="(trusty|xenial|yakkety|jessie)"
if [ 0 -eq "${do_unsupported-0}" ] && [ 0 -eq "${do_quick_check-0}" ] ; then
if [[ ! $lsb_release =~ $supported_releases ]]; then
- echo "ERROR: Only Ubuntu 12.04 (precise), 14.04 (trusty), " \
- "14.10 (utopic), 15.04 (vivid), 15.10 (wily), 16.04 (xenial), " \
- "16.10 (yakkety) and Debian 8 (jessie) are currently supported" >&2
+ echo -e "ERROR: The only supported distros are\n" \
+ "\tUbuntu 14.04 (trusty)\n" \
+ "\tUbuntu 16.04 (xenial)\n" \
+ "\tUbuntu 16.10 (yakkety)\n" \
+ "\tDebian 8 (jessie)" >&2
exit 1
fi
@@ -282,7 +284,6 @@ dbg_list="\
libxdamage1-dbg
libxdmcp6-dbg
libxext6-dbg
- libxfixes3-dbg
libxi6-dbg
libxinerama1-dbg
libxrandr2-dbg
@@ -291,10 +292,12 @@ dbg_list="\
zlib1g-dbg
"
+if [[ ! $lsb_release =~ "yakkety" ]]; then
+ dbg_list="${dbg_list} libxfixes3-dbg"
+fi
+
# Find the proper version of libstdc++6-4.x-dbg.
-if [ "x$lsb_release" = "xprecise" ]; then
- dbg_list="${dbg_list} libstdc++6-4.6-dbg"
-elif [ "x$lsb_release" = "xtrusty" ]; then
+if [ "x$lsb_release" = "xtrusty" ]; then
dbg_list="${dbg_list} libstdc++6-4.8-dbg"
else
dbg_list="${dbg_list} libstdc++6-4.9-dbg"
@@ -314,7 +317,7 @@ EOF
EM_ARCHIVE_KEY_FINGER="084C6C6F39159EDB67969AA87DE089671804772E"
GPP_ARM_PACKAGE="g++-arm-linux-gnueabihf"
case $lsb_release in
- "jessie")
+ jessie)
eval $(apt-config shell APT_SOURCESDIR 'Dir::Etc::sourceparts/d')
CROSSTOOLS_LIST="${APT_SOURCESDIR}/crosstools.list"
arm_list="libc6-dev:armhf
@@ -337,12 +340,7 @@ case $lsb_release in
fi
fi
;;
- "precise")
- arm_list="libc6-dev-armhf-cross
- linux-libc-dev-armhf-cross
- ${GPP_ARM_PACKAGE}"
- ;;
- "*")
+ *)
arm_list="binutils-aarch64-linux-gnu
libc6-dev-armhf-cross
linux-libc-dev-armhf-cross
@@ -356,7 +354,7 @@ case $lsb_release in
arm_list+=" g++-4.8-multilib-arm-linux-gnueabihf
gcc-4.8-multilib-arm-linux-gnueabihf"
;;
- wily|xenial|yakkety)
+ xenial|yakkety)
arm_list+=" g++-5-multilib-arm-linux-gnueabihf
gcc-5-multilib-arm-linux-gnueabihf
gcc-arm-linux-gnueabihf"
@@ -582,9 +580,7 @@ if [ 1 -eq "${do_quick_check-0}" ] ; then
fi
if test "$do_inst_lib32" = "1" || test "$do_inst_nacl" = "1"; then
- if [[ ! $lsb_release =~ (precise) ]]; then
- sudo dpkg --add-architecture i386
- fi
+ sudo dpkg --add-architecture i386
if [[ $lsb_release = "jessie" ]]; then
sudo dpkg --add-architecture armhf
fi
diff --git a/chromium/build/linux/BUILD.gn b/chromium/build/linux/BUILD.gn
index 410f832a466..e36b69433ff 100644
--- a/chromium/build/linux/BUILD.gn
+++ b/chromium/build/linux/BUILD.gn
@@ -49,20 +49,9 @@ group("fontconfig") {
}
if (!is_chromecast) {
- pkg_config("freetype2_config") {
+ pkg_config("freetype_from_pkgconfig") {
visibility = [ ":freetype2" ]
packages = [ "freetype2" ]
}
}
-group("freetype2") {
- if (is_chromecast) {
- # Chromecast platform doesn't provide freetype, so use Chromium's.
- # The version in freetype-android is unmodified from freetype2 upstream.
- public_deps = [
- "//third_party/freetype-android:freetype",
- ]
- } else {
- public_configs = [ ":freetype2_config" ]
- }
-}
diff --git a/chromium/build/linux/install-chromeos-fonts.py b/chromium/build/linux/install-chromeos-fonts.py
index b912084790d..54d0ede235b 100755
--- a/chromium/build/linux/install-chromeos-fonts.py
+++ b/chromium/build/linux/install-chromeos-fonts.py
@@ -16,14 +16,14 @@ URL_TEMPLATE = ('https://commondatastorage.googleapis.com/chromeos-localmirror/'
'distfiles/%(name)s-%(version)s.tar.bz2')
# Taken from the media-fonts/<name> ebuilds in chromiumos-overlay.
+# noto-cjk used to be here, but is removed because fc-cache takes too long
+# regenerating the fontconfig cache (See crbug.com/697954.)
+# TODO(jshin): Add it back when the above issue can be avoided.
SOURCES = [
{
'name': 'notofonts',
'version': '20161129'
}, {
- 'name': 'noto-cjk',
- 'version': '20150910'
- }, {
'name': 'robotofonts',
'version': '2.132'
}
diff --git a/chromium/build/linux/sysroot_scripts/debian-archive-wheezy-stable.gpg b/chromium/build/linux/sysroot_scripts/debian-archive-wheezy-stable.gpg
deleted file mode 100644
index d587901e440..00000000000
--- a/chromium/build/linux/sysroot_scripts/debian-archive-wheezy-stable.gpg
+++ /dev/null
Binary files differ
diff --git a/chromium/build/linux/sysroot_scripts/install-sysroot.py b/chromium/build/linux/sysroot_scripts/install-sysroot.py
index d79c12bbbbc..90b70689d54 100755
--- a/chromium/build/linux/sysroot_scripts/install-sysroot.py
+++ b/chromium/build/linux/sysroot_scripts/install-sysroot.py
@@ -6,16 +6,17 @@
"""Install Debian sysroots for building chromium.
"""
-# The sysroot is needed to ensure that binaries will run on Debian Wheezy,
-# the oldest supported linux distribution. For ARM64 linux, we have Debian
-# Jessie sysroot as Jessie is the first version with ARM64 support. This script
-# can be run manually but is more often run as part of gclient hooks. When run
-# from hooks this script is a no-op on non-linux platforms.
-
-# The sysroot image could be constructed from scratch based on the current
-# state or Debian Wheezy/Jessie but for consistency we currently use a
-# pre-built root image. The image will normally need to be rebuilt every time
-# chrome's build dependencies are changed.
+# The sysroot is needed to ensure that binaries that get built will run on
+# the oldest stable version of Debian that we currently support.
+# This script can be run manually but is more often run as part of gclient
+# hooks. When run from hooks this script is a no-op on non-linux platforms.
+
+# The sysroot image could be constructed from scratch based on the current state
+# of the Debian archive but for consistency we use a pre-built root image (we
+# don't want upstream changes to Debian to affect the chromium build until we
+# choose to pull them in). The images will normally need to be rebuilt every
+# time chrome's build dependencies are changed but should also be updated
+# periodically to include upstream security fixes from Debian.
import hashlib
import json
@@ -121,12 +122,6 @@ def InstallDefaultSysroots(host_arch):
if host_arch == 'amd64':
InstallDefaultSysrootForArch('i386')
- # Desktop Chromium OS builds require the precise sysroot.
- # TODO(thomasanderson): only download this when the GN arg target_os
- # == 'chromeos', when the functionality to perform the check becomes
- # available.
- InstallSysroot('Precise', 'amd64')
-
# If we can detect a non-standard target_arch such as ARM or MIPS,
# then install the sysroot too. Don't attempt to install arm64
# since this is currently and android-only architecture.
@@ -134,14 +129,6 @@ def InstallDefaultSysroots(host_arch):
if target_arch and target_arch not in (host_arch, 'i386'):
InstallDefaultSysrootForArch(target_arch)
- # Desktop Linux ozone builds require libxkbcommon* which is not
- # available in Wheezy.
- # TODO(thomasanderson): Remove this once the Jessie sysroot is used
- # by default.
- gyp_defines = gyp_chromium.GetGypVars(gyp_chromium.GetSupplementalFiles())
- if gyp_defines.get('use_ozone') == '1':
- InstallSysroot('Jessie', 'amd64')
-
def main(args):
parser = optparse.OptionParser('usage: %prog [OPTIONS]', description=__doc__)
@@ -150,6 +137,9 @@ def main(args):
' Installs default sysroot images.')
parser.add_option('--arch', type='choice', choices=VALID_ARCHS,
help='Sysroot architecture: %s' % ', '.join(VALID_ARCHS))
+ parser.add_option('--all', action='store_true',
+ help='Install all sysroot images (useful when updating the'
+ ' images)')
options, _ = parser.parse_args(args)
if options.running_as_hook and not sys.platform.startswith('linux'):
return 0
@@ -160,27 +150,23 @@ def main(args):
if host_arch in ['ppc','s390']:
return 0
InstallDefaultSysroots(host_arch)
- else:
- if not options.arch:
- print 'You much specify either --arch or --running-as-hook'
- return 1
+ elif options.arch:
InstallDefaultSysrootForArch(options.arch)
+ elif options.all:
+ for arch in VALID_ARCHS:
+ InstallDefaultSysrootForArch(arch)
+ else:
+    print 'You must specify either --arch, --all or --running-as-hook'
+ return 1
return 0
+
def InstallDefaultSysrootForArch(target_arch):
- if target_arch == 'amd64':
- InstallSysroot('Wheezy', 'amd64')
- elif target_arch == 'arm':
- InstallSysroot('Wheezy', 'arm')
- elif target_arch == 'arm64':
- InstallSysroot('Jessie', 'arm64')
- elif target_arch == 'i386':
- InstallSysroot('Wheezy', 'i386')
- elif target_arch == 'mips':
- InstallSysroot('Wheezy', 'mips')
- else:
+ if target_arch not in VALID_ARCHS:
raise Error('Unknown architecture: %s' % target_arch)
+ InstallSysroot('Jessie', target_arch)
+
def InstallSysroot(target_platform, target_arch):
# The sysroot directory should match the one specified in build/common.gypi.
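
With the change above, InstallDefaultSysrootForArch simply validates the
architecture and installs the Jessie sysroot for it, and main() gains an
--all mode for refreshing every image. The three ways the script can now be
driven, sketched with subprocess (paths are relative to the Chromium checkout
and 'arm' is assumed to be in VALID_ARCHS):

    # Illustrative invocations of install-sysroot.py after this change.
    import subprocess

    script = 'chromium/build/linux/sysroot_scripts/install-sysroot.py'
    subprocess.check_call(['python', script, '--running-as-hook'])  # gclient hook
    subprocess.check_call(['python', script, '--arch', 'arm'])      # one architecture
    subprocess.check_call(['python', script, '--all'])              # every VALID_ARCHS entry
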
diff --git a/chromium/build/linux/sysroot_scripts/packagelist.precise.amd64 b/chromium/build/linux/sysroot_scripts/packagelist.precise.amd64
deleted file mode 100644
index 8e6fc7023c5..00000000000
--- a/chromium/build/linux/sysroot_scripts/packagelist.precise.amd64
+++ /dev/null
@@ -1,179 +0,0 @@
-main/a/alsa-lib/libasound2_1.0.25-1ubuntu10.2_amd64.deb
-main/a/alsa-lib/libasound2-dev_1.0.25-1ubuntu10.2_amd64.deb
-main/a/atk1.0/libatk1.0-0_2.4.0-0ubuntu1_amd64.deb
-main/a/atk1.0/libatk1.0-dev_2.4.0-0ubuntu1_amd64.deb
-main/a/avahi/libavahi-client3_0.6.30-5ubuntu2.2_amd64.deb
-main/a/avahi/libavahi-common3_0.6.30-5ubuntu2.2_amd64.deb
-main/b/bluez/libbluetooth3_4.98-2ubuntu7.2_amd64.deb
-main/b/bluez/libbluetooth-dev_4.98-2ubuntu7.2_amd64.deb
-main/b/brltty/libbrlapi0.5_4.3-1ubuntu5_amd64.deb
-main/b/brltty/libbrlapi-dev_4.3-1ubuntu5_amd64.deb
-main/c/cairo/libcairo2_1.10.2-6.1ubuntu3_amd64.deb
-main/c/cairo/libcairo2-dev_1.10.2-6.1ubuntu3_amd64.deb
-main/c/cairo/libcairo-gobject2_1.10.2-6.1ubuntu3_amd64.deb
-main/c/cairo/libcairo-script-interpreter2_1.10.2-6.1ubuntu3_amd64.deb
-main/c/cups/libcups2_1.5.3-0ubuntu8.7_amd64.deb
-main/c/cups/libcups2-dev_1.5.3-0ubuntu8.7_amd64.deb
-main/d/dbus-glib/libdbus-glib-1-2_0.98-1ubuntu1.1_amd64.deb
-main/d/dbus/libdbus-1-3_1.4.18-1ubuntu1.8_amd64.deb
-main/d/dbus/libdbus-1-dev_1.4.18-1ubuntu1.8_amd64.deb
-main/e/e2fsprogs/comerr-dev_2.1-1.42-1ubuntu2.3_amd64.deb
-main/e/e2fsprogs/libcomerr2_1.42-1ubuntu2.3_amd64.deb
-main/e/eglibc/libc6_2.15-0ubuntu10.15_amd64.deb
-main/e/eglibc/libc6-dev_2.15-0ubuntu10.15_amd64.deb
-main/e/elfutils/libelf1_0.152-1ubuntu3.1_amd64.deb
-main/e/elfutils/libelf-dev_0.152-1ubuntu3.1_amd64.deb
-main/e/expat/libexpat1_2.0.1-7.2ubuntu1.4_amd64.deb
-main/e/expat/libexpat1-dev_2.0.1-7.2ubuntu1.4_amd64.deb
-main/f/fontconfig/libfontconfig1_2.8.0-3ubuntu9.2_amd64.deb
-main/f/fontconfig/libfontconfig1-dev_2.8.0-3ubuntu9.2_amd64.deb
-main/f/freetype/libfreetype6_2.4.8-1ubuntu2.3_amd64.deb
-main/f/freetype/libfreetype6-dev_2.4.8-1ubuntu2.3_amd64.deb
-main/g/gcc-4.6/gcc-4.6_4.6.3-1ubuntu5_amd64.deb
-main/g/gcc-4.6/libgcc1_4.6.3-1ubuntu5_amd64.deb
-main/g/gcc-4.6/libgomp1_4.6.3-1ubuntu5_amd64.deb
-main/g/gcc-4.6/libquadmath0_4.6.3-1ubuntu5_amd64.deb
-main/g/gcc-4.6/libstdc++6_4.6.3-1ubuntu5_amd64.deb
-main/g/gcc-4.6/libstdc++6-4.6-dev_4.6.3-1ubuntu5_amd64.deb
-main/g/gconf/libgconf-2-4_3.2.5-0ubuntu2_amd64.deb
-main/g/gconf/libgconf2-4_3.2.5-0ubuntu2_amd64.deb
-main/g/gconf/libgconf2-dev_3.2.5-0ubuntu2_amd64.deb
-main/g/gdk-pixbuf/libgdk-pixbuf2.0-0_2.26.1-1ubuntu1.5_amd64.deb
-main/g/gdk-pixbuf/libgdk-pixbuf2.0-dev_2.26.1-1ubuntu1.5_amd64.deb
-main/g/glib2.0/libglib2.0-0_2.32.4-0ubuntu1_amd64.deb
-main/g/glib2.0/libglib2.0-dev_2.32.4-0ubuntu1_amd64.deb
-main/g/gnutls26/libgnutls26_2.12.14-5ubuntu3.13_amd64.deb
-main/g/gnutls26/libgnutls-dev_2.12.14-5ubuntu3.13_amd64.deb
-main/g/gnutls26/libgnutls-openssl27_2.12.14-5ubuntu3.13_amd64.deb
-main/g/gnutls26/libgnutlsxx27_2.12.14-5ubuntu3.13_amd64.deb
-main/g/gtk+2.0/libgtk2.0-0_2.24.10-0ubuntu6.3_amd64.deb
-main/g/gtk+2.0/libgtk2.0-dev_2.24.10-0ubuntu6.3_amd64.deb
-main/g/gtk+3.0/libgtk-3-0_3.4.2-0ubuntu0.9_amd64.deb
-main/g/gtk+3.0/libgtk-3-dev_3.4.2-0ubuntu0.9_amd64.deb
-main/k/keyutils/libkeyutils1_1.5.2-2_amd64.deb
-main/k/krb5/krb5-multidev_1.10+dfsg~beta1-2ubuntu0.7_amd64.deb
-main/k/krb5/libgssapi-krb5-2_1.10+dfsg~beta1-2ubuntu0.7_amd64.deb
-main/k/krb5/libgssrpc4_1.10+dfsg~beta1-2ubuntu0.7_amd64.deb
-main/k/krb5/libk5crypto3_1.10+dfsg~beta1-2ubuntu0.7_amd64.deb
-main/k/krb5/libkadm5clnt-mit8_1.10+dfsg~beta1-2ubuntu0.7_amd64.deb
-main/k/krb5/libkadm5srv-mit8_1.10+dfsg~beta1-2ubuntu0.7_amd64.deb
-main/k/krb5/libkdb5-6_1.10+dfsg~beta1-2ubuntu0.7_amd64.deb
-main/k/krb5/libkrb5-3_1.10+dfsg~beta1-2ubuntu0.7_amd64.deb
-main/k/krb5/libkrb5-dev_1.10+dfsg~beta1-2ubuntu0.7_amd64.deb
-main/k/krb5/libkrb5support0_1.10+dfsg~beta1-2ubuntu0.7_amd64.deb
-main/libc/libcap2/libcap2_2.22-1ubuntu3_amd64.deb
-main/libc/libcap2/libcap-dev_2.22-1ubuntu3_amd64.deb
-main/libd/libdrm/libdrm2_2.4.52-1~precise2_amd64.deb
-main/libd/libdrm/libdrm-dev_2.4.52-1~precise2_amd64.deb
-main/libd/libdrm/libdrm-intel1_2.4.52-1~precise2_amd64.deb
-main/libd/libdrm/libdrm-nouveau1a_2.4.52-1~precise2_amd64.deb
-main/libd/libdrm/libdrm-nouveau2_2.4.52-1~precise2_amd64.deb
-main/libd/libdrm/libdrm-radeon1_2.4.52-1~precise2_amd64.deb
-main/libd/libdrm/libkms1_2.4.46-1ubuntu0.0.0.1_amd64.deb
-main/libf/libffi/libffi6_3.0.11~rc1-5_amd64.deb
-main/libf/libffi/libffi-dev_3.0.11~rc1-5_amd64.deb
-main/libg/libgcrypt11/libgcrypt11_1.5.0-3ubuntu0.6_amd64.deb
-main/libg/libgcrypt11/libgcrypt11-dev_1.5.0-3ubuntu0.6_amd64.deb
-main/libg/libgnome-keyring/libgnome-keyring0_3.2.2-2_amd64.deb
-main/libg/libgnome-keyring/libgnome-keyring-dev_3.2.2-2_amd64.deb
-main/libg/libgpg-error/libgpg-error0_1.10-2ubuntu1_amd64.deb
-main/libg/libgpg-error/libgpg-error-dev_1.10-2ubuntu1_amd64.deb
-main/libn/libnss-db/libnss-db_2.2.3pre1-3.2ubuntu3_amd64.deb
-main/libp/libp11/libp11-2_0.2.8-2_amd64.deb
-main/libp/libpng/libpng12-0_1.2.46-3ubuntu4.2_amd64.deb
-main/libp/libpng/libpng12-dev_1.2.46-3ubuntu4.2_amd64.deb
-main/libp/libpthread-stubs/libpthread-stubs0-dev_0.3-3_amd64.deb
-main/libs/libselinux/libselinux1_2.1.0-4.1ubuntu1_amd64.deb
-main/libt/libtasn1-3/libtasn1-3_2.10-1ubuntu1.5_amd64.deb
-main/libx/libx11/libx11-6_1.4.99.1-0ubuntu2.3_amd64.deb
-main/libx/libx11/libx11-dev_1.4.99.1-0ubuntu2.3_amd64.deb
-main/libx/libx11/libx11-xcb1_1.4.99.1-0ubuntu2.3_amd64.deb
-main/libx/libx11/libx11-xcb-dev_1.4.99.1-0ubuntu2.3_amd64.deb
-main/libx/libxau/libxau6_1.0.6-4_amd64.deb
-main/libx/libxau/libxau-dev_1.0.6-4_amd64.deb
-main/libx/libxcb/libxcb1_1.8.1-1ubuntu0.2_amd64.deb
-main/libx/libxcb/libxcb1-dev_1.8.1-1ubuntu0.2_amd64.deb
-main/libx/libxcb/libxcb-glx0_1.8.1-1ubuntu0.2_amd64.deb
-main/libx/libxcb/libxcb-render0_1.8.1-1ubuntu0.2_amd64.deb
-main/libx/libxcb/libxcb-render0-dev_1.8.1-1ubuntu0.2_amd64.deb
-main/libx/libxcb/libxcb-shm0_1.8.1-1ubuntu0.2_amd64.deb
-main/libx/libxcb/libxcb-shm0-dev_1.8.1-1ubuntu0.2_amd64.deb
-main/libx/libxcomposite/libxcomposite1_0.4.3-2build1_amd64.deb
-main/libx/libxcomposite/libxcomposite-dev_0.4.3-2build1_amd64.deb
-main/libx/libxcursor/libxcursor1_1.1.12-1ubuntu0.1_amd64.deb
-main/libx/libxcursor/libxcursor-dev_1.1.12-1ubuntu0.1_amd64.deb
-main/libx/libxdamage/libxdamage1_1.1.3-2build1_amd64.deb
-main/libx/libxdamage/libxdamage-dev_1.1.3-2build1_amd64.deb
-main/libx/libxdmcp/libxdmcp6_1.1.0-4_amd64.deb
-main/libx/libxdmcp/libxdmcp-dev_1.1.0-4_amd64.deb
-main/libx/libxext/libxext6_1.3.0-3ubuntu0.2_amd64.deb
-main/libx/libxext/libxext-dev_1.3.0-3ubuntu0.2_amd64.deb
-main/libx/libxfixes/libxfixes3_5.0-4ubuntu4.4_amd64.deb
-main/libx/libxfixes/libxfixes-dev_5.0-4ubuntu4.4_amd64.deb
-main/libx/libxi/libxi6_1.7.1.901-1ubuntu1~precise3_amd64.deb
-main/libx/libxi/libxi-dev_1.7.1.901-1ubuntu1~precise3_amd64.deb
-main/libx/libxinerama/libxinerama1_1.1.1-3ubuntu0.1_amd64.deb
-main/libx/libxinerama/libxinerama-dev_1.1.1-3ubuntu0.1_amd64.deb
-main/libx/libxrandr/libxrandr2_1.3.2-2ubuntu0.3_amd64.deb
-main/libx/libxrandr/libxrandr-dev_1.3.2-2ubuntu0.3_amd64.deb
-main/libx/libxrender/libxrender1_0.9.6-2ubuntu0.2_amd64.deb
-main/libx/libxrender/libxrender-dev_0.9.6-2ubuntu0.2_amd64.deb
-main/libx/libxss/libxss1_1.2.1-2_amd64.deb
-main/libx/libxss/libxss-dev_1.2.1-2_amd64.deb
-main/libx/libxt/libxt6_1.1.1-2ubuntu0.1_amd64.deb
-main/libx/libxt/libxt-dev_1.1.1-2ubuntu0.1_amd64.deb
-main/libx/libxtst/libxtst6_1.2.0-4ubuntu0.1_amd64.deb
-main/libx/libxtst/libxtst-dev_1.2.0-4ubuntu0.1_amd64.deb
-main/libx/libxxf86vm/libxxf86vm1_1.1.1-2ubuntu0.1_amd64.deb
-main/l/linux/linux-libc-dev_3.2.0-123.166_amd64.deb
-main/m/mesa/libegl1-mesa_8.0.4-0ubuntu0.7_amd64.deb
-main/m/mesa/libegl1-mesa-dev_8.0.4-0ubuntu0.7_amd64.deb
-main/m/mesa/libegl1-mesa-drivers_8.0.4-0ubuntu0.7_amd64.deb
-main/m/mesa/libgbm1_8.0.4-0ubuntu0.7_amd64.deb
-main/m/mesa/libgbm-dev_8.0.4-0ubuntu0.7_amd64.deb
-main/m/mesa/libgl1-mesa-dev_8.0.4-0ubuntu0.7_amd64.deb
-main/m/mesa/libgl1-mesa-glx_8.0.4-0ubuntu0.7_amd64.deb
-main/m/mesa/libglapi-mesa_8.0.4-0ubuntu0.7_amd64.deb
-main/m/mesa/mesa-common-dev_8.0.4-0ubuntu0.7_amd64.deb
-main/n/nspr/libnspr4_4.12-0ubuntu0.12.04.1_amd64.deb
-main/n/nspr/libnspr4-dev_4.12-0ubuntu0.12.04.1_amd64.deb
-main/n/nss/libnss3_3.26.2-0ubuntu0.12.04.1_amd64.deb
-main/n/nss/libnss3-dev_3.26.2-0ubuntu0.12.04.1_amd64.deb
-main/o/openssl/libssl1.0.0_1.0.1-4ubuntu5.39_amd64.deb
-main/o/openssl/libssl-dev_1.0.1-4ubuntu5.39_amd64.deb
-main/o/orbit2/liborbit2_2.14.19-0.1ubuntu1_amd64.deb
-main/p/p11-kit/libp11-kit0_0.12-2ubuntu1_amd64.deb
-main/p/pam/libpam0g_1.1.3-7ubuntu2.3_amd64.deb
-main/p/pam/libpam0g-dev_1.1.3-7ubuntu2.3_amd64.deb
-main/p/pango1.0/libpango1.0-0_1.30.0-0ubuntu3.1_amd64.deb
-main/p/pango1.0/libpango1.0-dev_1.30.0-0ubuntu3.1_amd64.deb
-main/p/pciutils/libpci3_3.1.8-2ubuntu6_amd64.deb
-main/p/pciutils/libpci-dev_3.1.8-2ubuntu6_amd64.deb
-main/p/pcre3/libpcre3_8.12-4ubuntu0.2_amd64.deb
-main/p/pcre3/libpcre3-dev_8.12-4ubuntu0.2_amd64.deb
-main/p/pcre3/libpcrecpp0_8.12-4ubuntu0.2_amd64.deb
-main/p/pixman/libpixman-1-0_0.30.2-1ubuntu0.0.0.0.3_amd64.deb
-main/p/pixman/libpixman-1-dev_0.30.2-1ubuntu0.0.0.0.3_amd64.deb
-main/p/pulseaudio/libpulse0_1.1-0ubuntu15.4_amd64.deb
-main/p/pulseaudio/libpulse-dev_1.1-0ubuntu15.4_amd64.deb
-main/p/pulseaudio/libpulse-mainloop-glib0_1.1-0ubuntu15.4_amd64.deb
-main/s/speech-dispatcher/libspeechd2_0.7.1-6ubuntu3_amd64.deb
-main/s/speech-dispatcher/libspeechd-dev_0.7.1-6ubuntu3_amd64.deb
-main/s/speech-dispatcher/speech-dispatcher_0.7.1-6ubuntu3_amd64.deb
-main/w/wayland/libwayland0_0.85.0-1ubuntu2_amd64.deb
-main/w/wayland/libwayland-dev_0.85.0-1ubuntu2_amd64.deb
-main/x/x11proto-composite/x11proto-composite-dev_0.4.2-2_all.deb
-main/x/x11proto-core/x11proto-core-dev_7.0.22-1ubuntu0.2_all.deb
-main/x/x11proto-damage/x11proto-damage-dev_1.2.1-2_all.deb
-main/x/x11proto-fixes/x11proto-fixes-dev_5.0-2ubuntu1_all.deb
-main/x/x11proto-input/x11proto-input-dev_2.3-1~precise2_all.deb
-main/x/x11proto-kb/x11proto-kb-dev_1.0.5-2_all.deb
-main/x/x11proto-randr/x11proto-randr-dev_1.4.0+git20120101.is.really.1.4.0-0ubuntu1~precise2_all.deb
-main/x/x11proto-record/x11proto-record-dev_1.14.1-2_all.deb
-main/x/x11proto-render/x11proto-render-dev_0.11.1-2_all.deb
-main/x/x11proto-scrnsaver/x11proto-scrnsaver-dev_1.2.1-2_all.deb
-main/x/x11proto-xext/x11proto-xext-dev_7.3.0-1~precise2_all.deb
-main/z/zlib/zlib1g_1.2.3.4.dfsg-3ubuntu4_amd64.deb
-main/z/zlib/zlib1g-dev_1.2.3.4.dfsg-3ubuntu4_amd64.deb
-universe/libx/libxkbcommon/libxkbcommon0_0.1.0~1-0ubuntu1_amd64.deb
-universe/libx/libxkbcommon/libxkbcommon-dev_0.1.0~1-0ubuntu1_amd64.deb
diff --git a/chromium/build/linux/sysroot_scripts/packagelist.wheezy.amd64 b/chromium/build/linux/sysroot_scripts/packagelist.wheezy.amd64
deleted file mode 100644
index 60073a9ad1d..00000000000
--- a/chromium/build/linux/sysroot_scripts/packagelist.wheezy.amd64
+++ /dev/null
@@ -1,180 +0,0 @@
-main/a/alsa-lib/libasound2_1.0.25-4_amd64.deb
-main/a/alsa-lib/libasound2-dev_1.0.25-4_amd64.deb
-main/a/atk1.0/libatk1.0-0_2.4.0-2_amd64.deb
-main/a/atk1.0/libatk1.0-dev_2.4.0-2_amd64.deb
-main/a/attr/libattr1_2.4.46-8_amd64.deb
-main/a/avahi/libavahi-client3_0.6.31-2_amd64.deb
-main/a/avahi/libavahi-common3_0.6.31-2_amd64.deb
-main/b/bluez/libbluetooth3_4.99-2_amd64.deb
-main/b/bluez/libbluetooth-dev_4.99-2_amd64.deb
-main/b/brltty/libbrlapi0.5_4.4-10+deb7u1_amd64.deb
-main/b/brltty/libbrlapi-dev_4.4-10+deb7u1_amd64.deb
-main/c/cairo/libcairo2_1.12.2-3_amd64.deb
-main/c/cairo/libcairo2-dev_1.12.2-3_amd64.deb
-main/c/cairo/libcairo-gobject2_1.12.2-3_amd64.deb
-main/c/cairo/libcairo-script-interpreter2_1.12.2-3_amd64.deb
-main/c/cups/libcups2_1.5.3-5+deb7u6_amd64.deb
-main/c/cups/libcups2-dev_1.5.3-5+deb7u6_amd64.deb
-main/d/dbus-glib/libdbus-glib-1-2_0.100.2-1_amd64.deb
-main/d/dbus/libdbus-1-3_1.6.8-1+deb7u6_amd64.deb
-main/d/dbus/libdbus-1-dev_1.6.8-1+deb7u6_amd64.deb
-main/e/e2fsprogs/comerr-dev_2.1-1.42.5-1.1+deb7u1_amd64.deb
-main/e/e2fsprogs/libcomerr2_1.42.5-1.1+deb7u1_amd64.deb
-main/e/eglibc/libc6_2.13-38+deb7u10_amd64.deb
-main/e/eglibc/libc6-dev_2.13-38+deb7u10_amd64.deb
-main/e/elfutils/libelf1_0.152-1+wheezy1_amd64.deb
-main/e/elfutils/libelf-dev_0.152-1+wheezy1_amd64.deb
-main/e/expat/libexpat1_2.1.0-1+deb7u2_amd64.deb
-main/e/expat/libexpat1-dev_2.1.0-1+deb7u2_amd64.deb
-main/f/fontconfig/libfontconfig1_2.9.0-7.1_amd64.deb
-main/f/fontconfig/libfontconfig1-dev_2.9.0-7.1_amd64.deb
-main/f/freetype/libfreetype6_2.4.9-1.1+deb7u3_amd64.deb
-main/f/freetype/libfreetype6-dev_2.4.9-1.1+deb7u3_amd64.deb
-main/g/gcc-4.6/gcc-4.6_4.6.3-14_amd64.deb
-main/g/gcc-4.6/libstdc++6-4.6-dev_4.6.3-14_amd64.deb
-main/g/gcc-4.7/libgcc1_4.7.2-5_amd64.deb
-main/g/gcc-4.7/libgomp1_4.7.2-5_amd64.deb
-main/g/gcc-4.7/libquadmath0_4.7.2-5_amd64.deb
-main/g/gcc-4.7/libstdc++6_4.7.2-5_amd64.deb
-main/g/gconf/libgconf-2-4_3.2.5-1+build1_amd64.deb
-main/g/gconf/libgconf2-4_3.2.5-1+build1_amd64.deb
-main/g/gconf/libgconf2-dev_3.2.5-1+build1_amd64.deb
-main/g/gdk-pixbuf/libgdk-pixbuf2.0-0_2.26.1-1+deb7u3_amd64.deb
-main/g/gdk-pixbuf/libgdk-pixbuf2.0-dev_2.26.1-1+deb7u3_amd64.deb
-main/g/glib2.0/libglib2.0-0_2.33.12+really2.32.4-5_amd64.deb
-main/g/glib2.0/libglib2.0-dev_2.33.12+really2.32.4-5_amd64.deb
-main/g/gnutls26/libgnutls26_2.12.20-8+deb7u5_amd64.deb
-main/g/gnutls26/libgnutls-dev_2.12.20-8+deb7u5_amd64.deb
-main/g/gnutls26/libgnutls-openssl27_2.12.20-8+deb7u5_amd64.deb
-main/g/gnutls26/libgnutlsxx27_2.12.20-8+deb7u5_amd64.deb
-main/g/gtk+2.0/libgtk2.0-0_2.24.10-2_amd64.deb
-main/g/gtk+2.0/libgtk2.0-dev_2.24.10-2_amd64.deb
-main/g/gtk+3.0/libgtk-3-0_3.4.2-7+deb7u1_amd64.deb
-main/g/gtk+3.0/libgtk-3-dev_3.4.2-7+deb7u1_amd64.deb
-main/k/keyutils/libkeyutils1_1.5.5-3+deb7u1_amd64.deb
-main/k/krb5/krb5-multidev_1.10.1+dfsg-5+deb7u7_amd64.deb
-main/k/krb5/libgssapi-krb5-2_1.10.1+dfsg-5+deb7u7_amd64.deb
-main/k/krb5/libgssrpc4_1.10.1+dfsg-5+deb7u7_amd64.deb
-main/k/krb5/libk5crypto3_1.10.1+dfsg-5+deb7u7_amd64.deb
-main/k/krb5/libkadm5clnt-mit8_1.10.1+dfsg-5+deb7u7_amd64.deb
-main/k/krb5/libkadm5srv-mit8_1.10.1+dfsg-5+deb7u7_amd64.deb
-main/k/krb5/libkdb5-6_1.10.1+dfsg-5+deb7u7_amd64.deb
-main/k/krb5/libkrb5-3_1.10.1+dfsg-5+deb7u7_amd64.deb
-main/k/krb5/libkrb5-dev_1.10.1+dfsg-5+deb7u7_amd64.deb
-main/k/krb5/libkrb5support0_1.10.1+dfsg-5+deb7u7_amd64.deb
-main/libc/libcap2/libcap2_2.22-1.2_amd64.deb
-main/libc/libcap2/libcap-dev_2.22-1.2_amd64.deb
-main/libd/libdrm/libdrm2_2.4.40-1~deb7u2_amd64.deb
-main/libd/libdrm/libdrm-dev_2.4.40-1~deb7u2_amd64.deb
-main/libd/libdrm/libdrm-intel1_2.4.40-1~deb7u2_amd64.deb
-main/libd/libdrm/libdrm-nouveau1a_2.4.40-1~deb7u2_amd64.deb
-main/libd/libdrm/libdrm-radeon1_2.4.40-1~deb7u2_amd64.deb
-main/libd/libdrm/libkms1_2.4.40-1~deb7u2_amd64.deb
-main/libf/libffi/libffi5_3.0.10-3_amd64.deb
-main/libf/libffi/libffi-dev_3.0.10-3_amd64.deb
-main/libg/libgcrypt11/libgcrypt11_1.5.0-5+deb7u4_amd64.deb
-main/libg/libgcrypt11/libgcrypt11-dev_1.5.0-5+deb7u4_amd64.deb
-main/libg/libgnome-keyring/libgnome-keyring0_3.4.1-1_amd64.deb
-main/libg/libgnome-keyring/libgnome-keyring-dev_3.4.1-1_amd64.deb
-main/libg/libgpg-error/libgpg-error0_1.10-3.1_amd64.deb
-main/libg/libgpg-error/libgpg-error-dev_1.10-3.1_amd64.deb
-main/libn/libnss-db/libnss-db_2.2.3pre1-4_amd64.deb
-main/libp/libp11/libp11-2_0.2.8-2_amd64.deb
-main/libp/libpng/libpng12-0_1.2.49-1+deb7u2_amd64.deb
-main/libp/libpng/libpng12-dev_1.2.49-1+deb7u2_amd64.deb
-main/libp/libpthread-stubs/libpthread-stubs0-dev_0.3-3_amd64.deb
-main/libs/libselinux/libselinux1_2.1.9-5_amd64.deb
-main/libt/libtasn1-3/libtasn1-3_2.13-2+deb7u2_amd64.deb
-main/libx/libx11/libx11-6_1.5.0-1+deb7u2_amd64.deb
-main/libx/libx11/libx11-dev_1.5.0-1+deb7u2_amd64.deb
-main/libx/libx11/libx11-xcb1_1.5.0-1+deb7u2_amd64.deb
-main/libx/libx11/libx11-xcb-dev_1.5.0-1+deb7u2_amd64.deb
-main/libx/libxau/libxau6_1.0.7-1_amd64.deb
-main/libx/libxau/libxau-dev_1.0.7-1_amd64.deb
-main/libx/libxcb/libxcb1_1.8.1-2+deb7u1_amd64.deb
-main/libx/libxcb/libxcb1-dev_1.8.1-2+deb7u1_amd64.deb
-main/libx/libxcb/libxcb-glx0_1.8.1-2+deb7u1_amd64.deb
-main/libx/libxcb/libxcb-render0_1.8.1-2+deb7u1_amd64.deb
-main/libx/libxcb/libxcb-render0-dev_1.8.1-2+deb7u1_amd64.deb
-main/libx/libxcb/libxcb-shm0_1.8.1-2+deb7u1_amd64.deb
-main/libx/libxcb/libxcb-shm0-dev_1.8.1-2+deb7u1_amd64.deb
-main/libx/libxcomposite/libxcomposite1_0.4.3-2_amd64.deb
-main/libx/libxcomposite/libxcomposite-dev_0.4.3-2_amd64.deb
-main/libx/libxcursor/libxcursor1_1.1.13-1+deb7u1_amd64.deb
-main/libx/libxcursor/libxcursor-dev_1.1.13-1+deb7u1_amd64.deb
-main/libx/libxdamage/libxdamage1_1.1.3-2_amd64.deb
-main/libx/libxdamage/libxdamage-dev_1.1.3-2_amd64.deb
-main/libx/libxdmcp/libxdmcp6_1.1.1-1_amd64.deb
-main/libx/libxdmcp/libxdmcp-dev_1.1.1-1_amd64.deb
-main/libx/libxext/libxext6_1.3.1-2+deb7u1_amd64.deb
-main/libx/libxext/libxext-dev_1.3.1-2+deb7u1_amd64.deb
-main/libx/libxfixes/libxfixes3_5.0-4+deb7u1_amd64.deb
-main/libx/libxfixes/libxfixes-dev_5.0-4+deb7u1_amd64.deb
-main/libx/libxi/libxi6_1.6.1-1+deb7u1_amd64.deb
-main/libx/libxi/libxi-dev_1.6.1-1+deb7u1_amd64.deb
-main/libx/libxinerama/libxinerama1_1.1.2-1+deb7u1_amd64.deb
-main/libx/libxinerama/libxinerama-dev_1.1.2-1+deb7u1_amd64.deb
-main/libx/libxrandr/libxrandr2_1.3.2-2+deb7u1_amd64.deb
-main/libx/libxrandr/libxrandr-dev_1.3.2-2+deb7u1_amd64.deb
-main/libx/libxrender/libxrender1_0.9.7-1+deb7u2_amd64.deb
-main/libx/libxrender/libxrender-dev_0.9.7-1+deb7u2_amd64.deb
-main/libx/libxss/libxss1_1.2.2-1_amd64.deb
-main/libx/libxss/libxss-dev_1.2.2-1_amd64.deb
-main/libx/libxt/libxt6_1.1.3-1+deb7u1_amd64.deb
-main/libx/libxt/libxt-dev_1.1.3-1+deb7u1_amd64.deb
-main/libx/libxtst/libxtst6_1.2.1-1+deb7u1_amd64.deb
-main/libx/libxtst/libxtst-dev_1.2.1-1+deb7u1_amd64.deb
-main/libx/libxxf86vm/libxxf86vm1_1.1.2-1+deb7u1_amd64.deb
-main/l/linux/linux-libc-dev_3.2.78-1_amd64.deb
-main/m/mesa/libegl1-mesa_8.0.5-4+deb7u2_amd64.deb
-main/m/mesa/libegl1-mesa-dev_8.0.5-4+deb7u2_amd64.deb
-main/m/mesa/libegl1-mesa-drivers_8.0.5-4+deb7u2_amd64.deb
-main/m/mesa/libgbm1_8.0.5-4+deb7u2_amd64.deb
-main/m/mesa/libgbm-dev_8.0.5-4+deb7u2_amd64.deb
-main/m/mesa/libgl1-mesa-dev_8.0.5-4+deb7u2_amd64.deb
-main/m/mesa/libgl1-mesa-glx_8.0.5-4+deb7u2_amd64.deb
-main/m/mesa/libglapi-mesa_8.0.5-4+deb7u2_amd64.deb
-main/m/mesa/mesa-common-dev_8.0.5-4+deb7u2_amd64.deb
-main/n/nspr/libnspr4_4.9.2-1+deb7u3_amd64.deb
-main/n/nspr/libnspr4-dev_4.9.2-1+deb7u3_amd64.deb
-main/n/nss/libnss3_3.14.5-1+deb7u5_amd64.deb
-main/n/nss/libnss3-dev_3.14.5-1+deb7u5_amd64.deb
-main/o/openssl/libssl1.0.0_1.0.1e-2+deb7u20_amd64.deb
-main/o/openssl/libssl-dev_1.0.1e-2+deb7u20_amd64.deb
-main/o/orbit2/liborbit2_2.14.19-0.1_amd64.deb
-main/p/p11-kit/libp11-kit0_0.12-3_amd64.deb
-main/p/pam/libpam0g_1.1.3-7.1_amd64.deb
-main/p/pam/libpam0g-dev_1.1.3-7.1_amd64.deb
-main/p/pango1.0/libpango1.0-0_1.30.0-1_amd64.deb
-main/p/pango1.0/libpango1.0-dev_1.30.0-1_amd64.deb
-main/p/pciutils/libpci3_3.1.9-6_amd64.deb
-main/p/pciutils/libpci-dev_3.1.9-6_amd64.deb
-main/p/pcre3/libpcre3_8.30-5_amd64.deb
-main/p/pcre3/libpcre3-dev_8.30-5_amd64.deb
-main/p/pcre3/libpcrecpp0_8.30-5_amd64.deb
-main/p/pixman/libpixman-1-0_0.26.0-4+deb7u2_amd64.deb
-main/p/pixman/libpixman-1-dev_0.26.0-4+deb7u2_amd64.deb
-main/p/pulseaudio/libpulse0_2.0-6.1_amd64.deb
-main/p/pulseaudio/libpulse-dev_2.0-6.1_amd64.deb
-main/p/pulseaudio/libpulse-mainloop-glib0_2.0-6.1_amd64.deb
-main/s/speech-dispatcher/libspeechd2_0.7.1-6.2_amd64.deb
-main/s/speech-dispatcher/libspeechd-dev_0.7.1-6.2_amd64.deb
-main/s/speech-dispatcher/speech-dispatcher_0.7.1-6.2_amd64.deb
-main/u/udev/libudev0_175-7.2_amd64.deb
-main/u/udev/libudev-dev_175-7.2_amd64.deb
-main/w/wayland/libwayland0_0.85.0-2_amd64.deb
-main/w/wayland/libwayland-dev_0.85.0-2_amd64.deb
-main/x/x11proto-composite/x11proto-composite-dev_0.4.2-2_all.deb
-main/x/x11proto-core/x11proto-core-dev_7.0.23-1_all.deb
-main/x/x11proto-damage/x11proto-damage-dev_1.2.1-2_all.deb
-main/x/x11proto-fixes/x11proto-fixes-dev_5.0-2_all.deb
-main/x/x11proto-input/x11proto-input-dev_2.2-1_all.deb
-main/x/x11proto-kb/x11proto-kb-dev_1.0.6-2_all.deb
-main/x/x11proto-randr/x11proto-randr-dev_1.3.2-2_all.deb
-main/x/x11proto-record/x11proto-record-dev_1.14.2-1_all.deb
-main/x/x11proto-render/x11proto-render-dev_0.11.1-2_all.deb
-main/x/x11proto-scrnsaver/x11proto-scrnsaver-dev_1.2.2-1_all.deb
-main/x/x11proto-xext/x11proto-xext-dev_7.2.1-1_all.deb
-main/x/x11proto-xinerama/x11proto-xinerama-dev_1.2.1-2_all.deb
-main/z/zlib/zlib1g_1.2.7.dfsg-13_amd64.deb
-main/z/zlib/zlib1g-dev_1.2.7.dfsg-13_amd64.deb
diff --git a/chromium/build/linux/sysroot_scripts/packagelist.wheezy.arm b/chromium/build/linux/sysroot_scripts/packagelist.wheezy.arm
deleted file mode 100644
index 18f54a122b1..00000000000
--- a/chromium/build/linux/sysroot_scripts/packagelist.wheezy.arm
+++ /dev/null
@@ -1,179 +0,0 @@
-main/a/alsa-lib/libasound2_1.0.25-4_armhf.deb
-main/a/alsa-lib/libasound2-dev_1.0.25-4_armhf.deb
-main/a/atk1.0/libatk1.0-0_2.4.0-2_armhf.deb
-main/a/atk1.0/libatk1.0-dev_2.4.0-2_armhf.deb
-main/a/attr/libattr1_2.4.46-8_armhf.deb
-main/a/avahi/libavahi-client3_0.6.31-2_armhf.deb
-main/a/avahi/libavahi-common3_0.6.31-2_armhf.deb
-main/b/bluez/libbluetooth3_4.99-2_armhf.deb
-main/b/bluez/libbluetooth-dev_4.99-2_armhf.deb
-main/b/brltty/libbrlapi0.5_4.4-10+deb7u1_armhf.deb
-main/b/brltty/libbrlapi-dev_4.4-10+deb7u1_armhf.deb
-main/c/cairo/libcairo2_1.12.2-3_armhf.deb
-main/c/cairo/libcairo2-dev_1.12.2-3_armhf.deb
-main/c/cairo/libcairo-gobject2_1.12.2-3_armhf.deb
-main/c/cairo/libcairo-script-interpreter2_1.12.2-3_armhf.deb
-main/c/cups/libcups2_1.5.3-5+deb7u6_armhf.deb
-main/c/cups/libcups2-dev_1.5.3-5+deb7u6_armhf.deb
-main/d/dbus-glib/libdbus-glib-1-2_0.100.2-1_armhf.deb
-main/d/dbus/libdbus-1-3_1.6.8-1+deb7u6_armhf.deb
-main/d/dbus/libdbus-1-dev_1.6.8-1+deb7u6_armhf.deb
-main/e/e2fsprogs/comerr-dev_2.1-1.42.5-1.1+deb7u1_armhf.deb
-main/e/e2fsprogs/libcomerr2_1.42.5-1.1+deb7u1_armhf.deb
-main/e/eglibc/libc6_2.13-38+deb7u10_armhf.deb
-main/e/eglibc/libc6-dev_2.13-38+deb7u10_armhf.deb
-main/e/elfutils/libelf1_0.152-1+wheezy1_armhf.deb
-main/e/elfutils/libelf-dev_0.152-1+wheezy1_armhf.deb
-main/e/expat/libexpat1_2.1.0-1+deb7u2_armhf.deb
-main/e/expat/libexpat1-dev_2.1.0-1+deb7u2_armhf.deb
-main/f/fontconfig/libfontconfig1_2.9.0-7.1_armhf.deb
-main/f/fontconfig/libfontconfig1-dev_2.9.0-7.1_armhf.deb
-main/f/freetype/libfreetype6_2.4.9-1.1+deb7u3_armhf.deb
-main/f/freetype/libfreetype6-dev_2.4.9-1.1+deb7u3_armhf.deb
-main/g/gcc-4.6/gcc-4.6_4.6.3-14_armhf.deb
-main/g/gcc-4.6/libstdc++6-4.6-dev_4.6.3-14_armhf.deb
-main/g/gcc-4.7/libgcc1_4.7.2-5_armhf.deb
-main/g/gcc-4.7/libgomp1_4.7.2-5_armhf.deb
-main/g/gcc-4.7/libstdc++6_4.7.2-5_armhf.deb
-main/g/gconf/libgconf2-4_3.2.5-1+build1_armhf.deb
-main/g/gconf/libgconf-2-4_3.2.5-1+build1_armhf.deb
-main/g/gconf/libgconf2-dev_3.2.5-1+build1_armhf.deb
-main/g/gdk-pixbuf/libgdk-pixbuf2.0-0_2.26.1-1+deb7u3_armhf.deb
-main/g/gdk-pixbuf/libgdk-pixbuf2.0-dev_2.26.1-1+deb7u3_armhf.deb
-main/g/glib2.0/libglib2.0-0_2.33.12+really2.32.4-5_armhf.deb
-main/g/glib2.0/libglib2.0-dev_2.33.12+really2.32.4-5_armhf.deb
-main/g/gnutls26/libgnutls26_2.12.20-8+deb7u5_armhf.deb
-main/g/gnutls26/libgnutls-dev_2.12.20-8+deb7u5_armhf.deb
-main/g/gnutls26/libgnutls-openssl27_2.12.20-8+deb7u5_armhf.deb
-main/g/gnutls26/libgnutlsxx27_2.12.20-8+deb7u5_armhf.deb
-main/g/gtk+2.0/libgtk2.0-0_2.24.10-2_armhf.deb
-main/g/gtk+2.0/libgtk2.0-dev_2.24.10-2_armhf.deb
-main/g/gtk+3.0/libgtk-3-0_3.4.2-7+deb7u1_armhf.deb
-main/g/gtk+3.0/libgtk-3-dev_3.4.2-7+deb7u1_armhf.deb
-main/k/keyutils/libkeyutils1_1.5.5-3+deb7u1_armhf.deb
-main/k/krb5/krb5-multidev_1.10.1+dfsg-5+deb7u7_armhf.deb
-main/k/krb5/libgssapi-krb5-2_1.10.1+dfsg-5+deb7u7_armhf.deb
-main/k/krb5/libgssrpc4_1.10.1+dfsg-5+deb7u7_armhf.deb
-main/k/krb5/libk5crypto3_1.10.1+dfsg-5+deb7u7_armhf.deb
-main/k/krb5/libkadm5clnt-mit8_1.10.1+dfsg-5+deb7u7_armhf.deb
-main/k/krb5/libkadm5srv-mit8_1.10.1+dfsg-5+deb7u7_armhf.deb
-main/k/krb5/libkdb5-6_1.10.1+dfsg-5+deb7u7_armhf.deb
-main/k/krb5/libkrb5-3_1.10.1+dfsg-5+deb7u7_armhf.deb
-main/k/krb5/libkrb5-dev_1.10.1+dfsg-5+deb7u7_armhf.deb
-main/k/krb5/libkrb5support0_1.10.1+dfsg-5+deb7u7_armhf.deb
-main/libc/libcap2/libcap2_2.22-1.2_armhf.deb
-main/libc/libcap2/libcap-dev_2.22-1.2_armhf.deb
-main/libd/libdrm/libdrm2_2.4.40-1~deb7u2_armhf.deb
-main/libd/libdrm/libdrm-dev_2.4.40-1~deb7u2_armhf.deb
-main/libd/libdrm/libdrm-nouveau1a_2.4.40-1~deb7u2_armhf.deb
-main/libd/libdrm/libdrm-omap1_2.4.40-1~deb7u2_armhf.deb
-main/libd/libdrm/libdrm-radeon1_2.4.40-1~deb7u2_armhf.deb
-main/libd/libdrm/libkms1_2.4.40-1~deb7u2_armhf.deb
-main/libf/libffi/libffi5_3.0.10-3+b1_armhf.deb
-main/libf/libffi/libffi-dev_3.0.10-3+b1_armhf.deb
-main/libg/libgcrypt11/libgcrypt11_1.5.0-5+deb7u4_armhf.deb
-main/libg/libgcrypt11/libgcrypt11-dev_1.5.0-5+deb7u4_armhf.deb
-main/libg/libgnome-keyring/libgnome-keyring0_3.4.1-1_armhf.deb
-main/libg/libgnome-keyring/libgnome-keyring-dev_3.4.1-1_armhf.deb
-main/libg/libgpg-error/libgpg-error0_1.10-3.1_armhf.deb
-main/libg/libgpg-error/libgpg-error-dev_1.10-3.1_armhf.deb
-main/libn/libnss-db/libnss-db_2.2.3pre1-4_armhf.deb
-main/libp/libp11/libp11-2_0.2.8-2_armhf.deb
-main/libp/libpng/libpng12-0_1.2.49-1+deb7u2_armhf.deb
-main/libp/libpng/libpng12-dev_1.2.49-1+deb7u2_armhf.deb
-main/libp/libpthread-stubs/libpthread-stubs0-dev_0.3-3+b1_armhf.deb
-main/libs/libselinux/libselinux1_2.1.9-5_armhf.deb
-main/libt/libtasn1-3/libtasn1-3_2.13-2+deb7u2_armhf.deb
-main/libx/libx11/libx11-6_1.5.0-1+deb7u2_armhf.deb
-main/libx/libx11/libx11-dev_1.5.0-1+deb7u2_armhf.deb
-main/libx/libx11/libx11-xcb1_1.5.0-1+deb7u2_armhf.deb
-main/libx/libx11/libx11-xcb-dev_1.5.0-1+deb7u2_armhf.deb
-main/libx/libxau/libxau6_1.0.7-1_armhf.deb
-main/libx/libxau/libxau-dev_1.0.7-1_armhf.deb
-main/libx/libxcb/libxcb1_1.8.1-2+deb7u1_armhf.deb
-main/libx/libxcb/libxcb1-dev_1.8.1-2+deb7u1_armhf.deb
-main/libx/libxcb/libxcb-glx0_1.8.1-2+deb7u1_armhf.deb
-main/libx/libxcb/libxcb-render0_1.8.1-2+deb7u1_armhf.deb
-main/libx/libxcb/libxcb-render0-dev_1.8.1-2+deb7u1_armhf.deb
-main/libx/libxcb/libxcb-shm0_1.8.1-2+deb7u1_armhf.deb
-main/libx/libxcb/libxcb-shm0-dev_1.8.1-2+deb7u1_armhf.deb
-main/libx/libxcomposite/libxcomposite1_0.4.3-2+b1_armhf.deb
-main/libx/libxcomposite/libxcomposite-dev_0.4.3-2+b1_armhf.deb
-main/libx/libxcursor/libxcursor1_1.1.13-1+deb7u1_armhf.deb
-main/libx/libxcursor/libxcursor-dev_1.1.13-1+deb7u1_armhf.deb
-main/libx/libxdamage/libxdamage1_1.1.3-2+b1_armhf.deb
-main/libx/libxdamage/libxdamage-dev_1.1.3-2+b1_armhf.deb
-main/libx/libxdmcp/libxdmcp6_1.1.1-1_armhf.deb
-main/libx/libxdmcp/libxdmcp-dev_1.1.1-1_armhf.deb
-main/libx/libxext/libxext6_1.3.1-2+deb7u1_armhf.deb
-main/libx/libxext/libxext-dev_1.3.1-2+deb7u1_armhf.deb
-main/libx/libxfixes/libxfixes3_5.0-4+deb7u1_armhf.deb
-main/libx/libxfixes/libxfixes-dev_5.0-4+deb7u1_armhf.deb
-main/libx/libxi/libxi6_1.6.1-1+deb7u1_armhf.deb
-main/libx/libxi/libxi-dev_1.6.1-1+deb7u1_armhf.deb
-main/libx/libxinerama/libxinerama1_1.1.2-1+deb7u1_armhf.deb
-main/libx/libxinerama/libxinerama-dev_1.1.2-1+deb7u1_armhf.deb
-main/libx/libxrandr/libxrandr2_1.3.2-2+deb7u1_armhf.deb
-main/libx/libxrandr/libxrandr-dev_1.3.2-2+deb7u1_armhf.deb
-main/libx/libxrender/libxrender1_0.9.7-1+deb7u2_armhf.deb
-main/libx/libxrender/libxrender-dev_0.9.7-1+deb7u2_armhf.deb
-main/libx/libxss/libxss1_1.2.2-1_armhf.deb
-main/libx/libxss/libxss-dev_1.2.2-1_armhf.deb
-main/libx/libxt/libxt6_1.1.3-1+deb7u1_armhf.deb
-main/libx/libxt/libxt-dev_1.1.3-1+deb7u1_armhf.deb
-main/libx/libxtst/libxtst6_1.2.1-1+deb7u1_armhf.deb
-main/libx/libxtst/libxtst-dev_1.2.1-1+deb7u1_armhf.deb
-main/libx/libxxf86vm/libxxf86vm1_1.1.2-1+deb7u1_armhf.deb
-main/l/linux/linux-libc-dev_3.2.78-1_armhf.deb
-main/m/mesa/libegl1-mesa_8.0.5-4+deb7u2_armhf.deb
-main/m/mesa/libegl1-mesa-dev_8.0.5-4+deb7u2_armhf.deb
-main/m/mesa/libegl1-mesa-drivers_8.0.5-4+deb7u2_armhf.deb
-main/m/mesa/libgbm1_8.0.5-4+deb7u2_armhf.deb
-main/m/mesa/libgbm-dev_8.0.5-4+deb7u2_armhf.deb
-main/m/mesa/libgl1-mesa-dev_8.0.5-4+deb7u2_armhf.deb
-main/m/mesa/libgl1-mesa-glx_8.0.5-4+deb7u2_armhf.deb
-main/m/mesa/libglapi-mesa_8.0.5-4+deb7u2_armhf.deb
-main/m/mesa/mesa-common-dev_8.0.5-4+deb7u2_armhf.deb
-main/n/nspr/libnspr4_4.9.2-1+deb7u3_armhf.deb
-main/n/nspr/libnspr4-dev_4.9.2-1+deb7u3_armhf.deb
-main/n/nss/libnss3_3.14.5-1+deb7u5_armhf.deb
-main/n/nss/libnss3-dev_3.14.5-1+deb7u5_armhf.deb
-main/o/openssl/libssl1.0.0_1.0.1e-2+deb7u20_armhf.deb
-main/o/openssl/libssl-dev_1.0.1e-2+deb7u20_armhf.deb
-main/o/orbit2/liborbit2_2.14.19-0.1_armhf.deb
-main/p/p11-kit/libp11-kit0_0.12-3_armhf.deb
-main/p/pam/libpam0g_1.1.3-7.1_armhf.deb
-main/p/pam/libpam0g-dev_1.1.3-7.1_armhf.deb
-main/p/pango1.0/libpango1.0-0_1.30.0-1_armhf.deb
-main/p/pango1.0/libpango1.0-dev_1.30.0-1_armhf.deb
-main/p/pciutils/libpci3_3.1.9-6_armhf.deb
-main/p/pciutils/libpci-dev_3.1.9-6_armhf.deb
-main/p/pcre3/libpcre3_8.30-5_armhf.deb
-main/p/pcre3/libpcre3-dev_8.30-5_armhf.deb
-main/p/pcre3/libpcrecpp0_8.30-5_armhf.deb
-main/p/pixman/libpixman-1-0_0.26.0-4+deb7u2_armhf.deb
-main/p/pixman/libpixman-1-dev_0.26.0-4+deb7u2_armhf.deb
-main/p/pulseaudio/libpulse0_2.0-6.1_armhf.deb
-main/p/pulseaudio/libpulse-dev_2.0-6.1_armhf.deb
-main/p/pulseaudio/libpulse-mainloop-glib0_2.0-6.1_armhf.deb
-main/s/speech-dispatcher/libspeechd2_0.7.1-6.2_armhf.deb
-main/s/speech-dispatcher/libspeechd-dev_0.7.1-6.2_armhf.deb
-main/s/speech-dispatcher/speech-dispatcher_0.7.1-6.2_armhf.deb
-main/u/udev/libudev0_175-7.2_armhf.deb
-main/u/udev/libudev-dev_175-7.2_armhf.deb
-main/w/wayland/libwayland0_0.85.0-2_armhf.deb
-main/w/wayland/libwayland-dev_0.85.0-2_armhf.deb
-main/x/x11proto-composite/x11proto-composite-dev_0.4.2-2_all.deb
-main/x/x11proto-core/x11proto-core-dev_7.0.23-1_all.deb
-main/x/x11proto-damage/x11proto-damage-dev_1.2.1-2_all.deb
-main/x/x11proto-fixes/x11proto-fixes-dev_5.0-2_all.deb
-main/x/x11proto-input/x11proto-input-dev_2.2-1_all.deb
-main/x/x11proto-kb/x11proto-kb-dev_1.0.6-2_all.deb
-main/x/x11proto-randr/x11proto-randr-dev_1.3.2-2_all.deb
-main/x/x11proto-record/x11proto-record-dev_1.14.2-1_all.deb
-main/x/x11proto-render/x11proto-render-dev_0.11.1-2_all.deb
-main/x/x11proto-scrnsaver/x11proto-scrnsaver-dev_1.2.2-1_all.deb
-main/x/x11proto-xext/x11proto-xext-dev_7.2.1-1_all.deb
-main/x/x11proto-xinerama/x11proto-xinerama-dev_1.2.1-2_all.deb
-main/z/zlib/zlib1g_1.2.7.dfsg-13_armhf.deb
-main/z/zlib/zlib1g-dev_1.2.7.dfsg-13_armhf.deb
diff --git a/chromium/build/linux/sysroot_scripts/packagelist.wheezy.i386 b/chromium/build/linux/sysroot_scripts/packagelist.wheezy.i386
deleted file mode 100644
index 7672da8445d..00000000000
--- a/chromium/build/linux/sysroot_scripts/packagelist.wheezy.i386
+++ /dev/null
@@ -1,180 +0,0 @@
-main/a/alsa-lib/libasound2_1.0.25-4_i386.deb
-main/a/alsa-lib/libasound2-dev_1.0.25-4_i386.deb
-main/a/atk1.0/libatk1.0-0_2.4.0-2_i386.deb
-main/a/atk1.0/libatk1.0-dev_2.4.0-2_i386.deb
-main/a/attr/libattr1_2.4.46-8_i386.deb
-main/a/avahi/libavahi-client3_0.6.31-2_i386.deb
-main/a/avahi/libavahi-common3_0.6.31-2_i386.deb
-main/b/bluez/libbluetooth3_4.99-2_i386.deb
-main/b/bluez/libbluetooth-dev_4.99-2_i386.deb
-main/b/brltty/libbrlapi0.5_4.4-10+deb7u1_i386.deb
-main/b/brltty/libbrlapi-dev_4.4-10+deb7u1_i386.deb
-main/c/cairo/libcairo2_1.12.2-3_i386.deb
-main/c/cairo/libcairo2-dev_1.12.2-3_i386.deb
-main/c/cairo/libcairo-gobject2_1.12.2-3_i386.deb
-main/c/cairo/libcairo-script-interpreter2_1.12.2-3_i386.deb
-main/c/cups/libcups2_1.5.3-5+deb7u6_i386.deb
-main/c/cups/libcups2-dev_1.5.3-5+deb7u6_i386.deb
-main/d/dbus-glib/libdbus-glib-1-2_0.100.2-1_i386.deb
-main/d/dbus/libdbus-1-3_1.6.8-1+deb7u6_i386.deb
-main/d/dbus/libdbus-1-dev_1.6.8-1+deb7u6_i386.deb
-main/e/e2fsprogs/comerr-dev_2.1-1.42.5-1.1+deb7u1_i386.deb
-main/e/e2fsprogs/libcomerr2_1.42.5-1.1+deb7u1_i386.deb
-main/e/eglibc/libc6_2.13-38+deb7u10_i386.deb
-main/e/eglibc/libc6-dev_2.13-38+deb7u10_i386.deb
-main/e/elfutils/libelf1_0.152-1+wheezy1_i386.deb
-main/e/elfutils/libelf-dev_0.152-1+wheezy1_i386.deb
-main/e/expat/libexpat1_2.1.0-1+deb7u2_i386.deb
-main/e/expat/libexpat1-dev_2.1.0-1+deb7u2_i386.deb
-main/f/fontconfig/libfontconfig1_2.9.0-7.1_i386.deb
-main/f/fontconfig/libfontconfig1-dev_2.9.0-7.1_i386.deb
-main/f/freetype/libfreetype6_2.4.9-1.1+deb7u3_i386.deb
-main/f/freetype/libfreetype6-dev_2.4.9-1.1+deb7u3_i386.deb
-main/g/gcc-4.6/gcc-4.6_4.6.3-14_i386.deb
-main/g/gcc-4.6/libstdc++6-4.6-dev_4.6.3-14_i386.deb
-main/g/gcc-4.7/libgcc1_4.7.2-5_i386.deb
-main/g/gcc-4.7/libgomp1_4.7.2-5_i386.deb
-main/g/gcc-4.7/libquadmath0_4.7.2-5_i386.deb
-main/g/gcc-4.7/libstdc++6_4.7.2-5_i386.deb
-main/g/gconf/libgconf-2-4_3.2.5-1+build1_i386.deb
-main/g/gconf/libgconf2-4_3.2.5-1+build1_i386.deb
-main/g/gconf/libgconf2-dev_3.2.5-1+build1_i386.deb
-main/g/gdk-pixbuf/libgdk-pixbuf2.0-0_2.26.1-1+deb7u3_i386.deb
-main/g/gdk-pixbuf/libgdk-pixbuf2.0-dev_2.26.1-1+deb7u3_i386.deb
-main/g/glib2.0/libglib2.0-0_2.33.12+really2.32.4-5_i386.deb
-main/g/glib2.0/libglib2.0-dev_2.33.12+really2.32.4-5_i386.deb
-main/g/gnutls26/libgnutls26_2.12.20-8+deb7u5_i386.deb
-main/g/gnutls26/libgnutls-dev_2.12.20-8+deb7u5_i386.deb
-main/g/gnutls26/libgnutls-openssl27_2.12.20-8+deb7u5_i386.deb
-main/g/gnutls26/libgnutlsxx27_2.12.20-8+deb7u5_i386.deb
-main/g/gtk+2.0/libgtk2.0-0_2.24.10-2_i386.deb
-main/g/gtk+2.0/libgtk2.0-dev_2.24.10-2_i386.deb
-main/g/gtk+3.0/libgtk-3-0_3.4.2-7+deb7u1_i386.deb
-main/g/gtk+3.0/libgtk-3-dev_3.4.2-7+deb7u1_i386.deb
-main/k/keyutils/libkeyutils1_1.5.5-3+deb7u1_i386.deb
-main/k/krb5/krb5-multidev_1.10.1+dfsg-5+deb7u7_i386.deb
-main/k/krb5/libgssapi-krb5-2_1.10.1+dfsg-5+deb7u7_i386.deb
-main/k/krb5/libgssrpc4_1.10.1+dfsg-5+deb7u7_i386.deb
-main/k/krb5/libk5crypto3_1.10.1+dfsg-5+deb7u7_i386.deb
-main/k/krb5/libkadm5clnt-mit8_1.10.1+dfsg-5+deb7u7_i386.deb
-main/k/krb5/libkadm5srv-mit8_1.10.1+dfsg-5+deb7u7_i386.deb
-main/k/krb5/libkdb5-6_1.10.1+dfsg-5+deb7u7_i386.deb
-main/k/krb5/libkrb5-3_1.10.1+dfsg-5+deb7u7_i386.deb
-main/k/krb5/libkrb5-dev_1.10.1+dfsg-5+deb7u7_i386.deb
-main/k/krb5/libkrb5support0_1.10.1+dfsg-5+deb7u7_i386.deb
-main/libc/libcap2/libcap2_2.22-1.2_i386.deb
-main/libc/libcap2/libcap-dev_2.22-1.2_i386.deb
-main/libd/libdrm/libdrm2_2.4.40-1~deb7u2_i386.deb
-main/libd/libdrm/libdrm-dev_2.4.40-1~deb7u2_i386.deb
-main/libd/libdrm/libdrm-intel1_2.4.40-1~deb7u2_i386.deb
-main/libd/libdrm/libdrm-nouveau1a_2.4.40-1~deb7u2_i386.deb
-main/libd/libdrm/libdrm-radeon1_2.4.40-1~deb7u2_i386.deb
-main/libd/libdrm/libkms1_2.4.40-1~deb7u2_i386.deb
-main/libf/libffi/libffi5_3.0.10-3_i386.deb
-main/libf/libffi/libffi-dev_3.0.10-3_i386.deb
-main/libg/libgcrypt11/libgcrypt11_1.5.0-5+deb7u4_i386.deb
-main/libg/libgcrypt11/libgcrypt11-dev_1.5.0-5+deb7u4_i386.deb
-main/libg/libgnome-keyring/libgnome-keyring0_3.4.1-1_i386.deb
-main/libg/libgnome-keyring/libgnome-keyring-dev_3.4.1-1_i386.deb
-main/libg/libgpg-error/libgpg-error0_1.10-3.1_i386.deb
-main/libg/libgpg-error/libgpg-error-dev_1.10-3.1_i386.deb
-main/libn/libnss-db/libnss-db_2.2.3pre1-4_i386.deb
-main/libp/libp11/libp11-2_0.2.8-2_i386.deb
-main/libp/libpng/libpng12-0_1.2.49-1+deb7u2_i386.deb
-main/libp/libpng/libpng12-dev_1.2.49-1+deb7u2_i386.deb
-main/libp/libpthread-stubs/libpthread-stubs0-dev_0.3-3_i386.deb
-main/libs/libselinux/libselinux1_2.1.9-5_i386.deb
-main/libt/libtasn1-3/libtasn1-3_2.13-2+deb7u2_i386.deb
-main/libx/libx11/libx11-6_1.5.0-1+deb7u2_i386.deb
-main/libx/libx11/libx11-dev_1.5.0-1+deb7u2_i386.deb
-main/libx/libx11/libx11-xcb1_1.5.0-1+deb7u2_i386.deb
-main/libx/libx11/libx11-xcb-dev_1.5.0-1+deb7u2_i386.deb
-main/libx/libxau/libxau6_1.0.7-1_i386.deb
-main/libx/libxau/libxau-dev_1.0.7-1_i386.deb
-main/libx/libxcb/libxcb1_1.8.1-2+deb7u1_i386.deb
-main/libx/libxcb/libxcb1-dev_1.8.1-2+deb7u1_i386.deb
-main/libx/libxcb/libxcb-glx0_1.8.1-2+deb7u1_i386.deb
-main/libx/libxcb/libxcb-render0_1.8.1-2+deb7u1_i386.deb
-main/libx/libxcb/libxcb-render0-dev_1.8.1-2+deb7u1_i386.deb
-main/libx/libxcb/libxcb-shm0_1.8.1-2+deb7u1_i386.deb
-main/libx/libxcb/libxcb-shm0-dev_1.8.1-2+deb7u1_i386.deb
-main/libx/libxcomposite/libxcomposite1_0.4.3-2_i386.deb
-main/libx/libxcomposite/libxcomposite-dev_0.4.3-2_i386.deb
-main/libx/libxcursor/libxcursor1_1.1.13-1+deb7u1_i386.deb
-main/libx/libxcursor/libxcursor-dev_1.1.13-1+deb7u1_i386.deb
-main/libx/libxdamage/libxdamage1_1.1.3-2_i386.deb
-main/libx/libxdamage/libxdamage-dev_1.1.3-2_i386.deb
-main/libx/libxdmcp/libxdmcp6_1.1.1-1_i386.deb
-main/libx/libxdmcp/libxdmcp-dev_1.1.1-1_i386.deb
-main/libx/libxext/libxext6_1.3.1-2+deb7u1_i386.deb
-main/libx/libxext/libxext-dev_1.3.1-2+deb7u1_i386.deb
-main/libx/libxfixes/libxfixes3_5.0-4+deb7u1_i386.deb
-main/libx/libxfixes/libxfixes-dev_5.0-4+deb7u1_i386.deb
-main/libx/libxi/libxi6_1.6.1-1+deb7u1_i386.deb
-main/libx/libxi/libxi-dev_1.6.1-1+deb7u1_i386.deb
-main/libx/libxinerama/libxinerama1_1.1.2-1+deb7u1_i386.deb
-main/libx/libxinerama/libxinerama-dev_1.1.2-1+deb7u1_i386.deb
-main/libx/libxrandr/libxrandr2_1.3.2-2+deb7u1_i386.deb
-main/libx/libxrandr/libxrandr-dev_1.3.2-2+deb7u1_i386.deb
-main/libx/libxrender/libxrender1_0.9.7-1+deb7u2_i386.deb
-main/libx/libxrender/libxrender-dev_0.9.7-1+deb7u2_i386.deb
-main/libx/libxss/libxss1_1.2.2-1_i386.deb
-main/libx/libxss/libxss-dev_1.2.2-1_i386.deb
-main/libx/libxt/libxt6_1.1.3-1+deb7u1_i386.deb
-main/libx/libxt/libxt-dev_1.1.3-1+deb7u1_i386.deb
-main/libx/libxtst/libxtst6_1.2.1-1+deb7u1_i386.deb
-main/libx/libxtst/libxtst-dev_1.2.1-1+deb7u1_i386.deb
-main/libx/libxxf86vm/libxxf86vm1_1.1.2-1+deb7u1_i386.deb
-main/l/linux/linux-libc-dev_3.2.78-1_i386.deb
-main/m/mesa/libegl1-mesa_8.0.5-4+deb7u2_i386.deb
-main/m/mesa/libegl1-mesa-dev_8.0.5-4+deb7u2_i386.deb
-main/m/mesa/libegl1-mesa-drivers_8.0.5-4+deb7u2_i386.deb
-main/m/mesa/libgbm1_8.0.5-4+deb7u2_i386.deb
-main/m/mesa/libgbm-dev_8.0.5-4+deb7u2_i386.deb
-main/m/mesa/libgl1-mesa-dev_8.0.5-4+deb7u2_i386.deb
-main/m/mesa/libgl1-mesa-glx_8.0.5-4+deb7u2_i386.deb
-main/m/mesa/libglapi-mesa_8.0.5-4+deb7u2_i386.deb
-main/m/mesa/mesa-common-dev_8.0.5-4+deb7u2_i386.deb
-main/n/nspr/libnspr4_4.9.2-1+deb7u3_i386.deb
-main/n/nspr/libnspr4-dev_4.9.2-1+deb7u3_i386.deb
-main/n/nss/libnss3_3.14.5-1+deb7u5_i386.deb
-main/n/nss/libnss3-dev_3.14.5-1+deb7u5_i386.deb
-main/o/openssl/libssl1.0.0_1.0.1e-2+deb7u20_i386.deb
-main/o/openssl/libssl-dev_1.0.1e-2+deb7u20_i386.deb
-main/o/orbit2/liborbit2_2.14.19-0.1_i386.deb
-main/p/p11-kit/libp11-kit0_0.12-3_i386.deb
-main/p/pam/libpam0g_1.1.3-7.1_i386.deb
-main/p/pam/libpam0g-dev_1.1.3-7.1_i386.deb
-main/p/pango1.0/libpango1.0-0_1.30.0-1_i386.deb
-main/p/pango1.0/libpango1.0-dev_1.30.0-1_i386.deb
-main/p/pciutils/libpci3_3.1.9-6_i386.deb
-main/p/pciutils/libpci-dev_3.1.9-6_i386.deb
-main/p/pcre3/libpcre3_8.30-5_i386.deb
-main/p/pcre3/libpcre3-dev_8.30-5_i386.deb
-main/p/pcre3/libpcrecpp0_8.30-5_i386.deb
-main/p/pixman/libpixman-1-0_0.26.0-4+deb7u2_i386.deb
-main/p/pixman/libpixman-1-dev_0.26.0-4+deb7u2_i386.deb
-main/p/pulseaudio/libpulse0_2.0-6.1_i386.deb
-main/p/pulseaudio/libpulse-dev_2.0-6.1_i386.deb
-main/p/pulseaudio/libpulse-mainloop-glib0_2.0-6.1_i386.deb
-main/s/speech-dispatcher/libspeechd2_0.7.1-6.2_i386.deb
-main/s/speech-dispatcher/libspeechd-dev_0.7.1-6.2_i386.deb
-main/s/speech-dispatcher/speech-dispatcher_0.7.1-6.2_i386.deb
-main/u/udev/libudev0_175-7.2_i386.deb
-main/u/udev/libudev-dev_175-7.2_i386.deb
-main/w/wayland/libwayland0_0.85.0-2_i386.deb
-main/w/wayland/libwayland-dev_0.85.0-2_i386.deb
-main/x/x11proto-composite/x11proto-composite-dev_0.4.2-2_all.deb
-main/x/x11proto-core/x11proto-core-dev_7.0.23-1_all.deb
-main/x/x11proto-damage/x11proto-damage-dev_1.2.1-2_all.deb
-main/x/x11proto-fixes/x11proto-fixes-dev_5.0-2_all.deb
-main/x/x11proto-input/x11proto-input-dev_2.2-1_all.deb
-main/x/x11proto-kb/x11proto-kb-dev_1.0.6-2_all.deb
-main/x/x11proto-randr/x11proto-randr-dev_1.3.2-2_all.deb
-main/x/x11proto-record/x11proto-record-dev_1.14.2-1_all.deb
-main/x/x11proto-render/x11proto-render-dev_0.11.1-2_all.deb
-main/x/x11proto-scrnsaver/x11proto-scrnsaver-dev_1.2.2-1_all.deb
-main/x/x11proto-xext/x11proto-xext-dev_7.2.1-1_all.deb
-main/x/x11proto-xinerama/x11proto-xinerama-dev_1.2.1-2_all.deb
-main/z/zlib/zlib1g_1.2.7.dfsg-13_i386.deb
-main/z/zlib/zlib1g-dev_1.2.7.dfsg-13_i386.deb
diff --git a/chromium/build/linux/sysroot_scripts/packagelist.wheezy.mipsel b/chromium/build/linux/sysroot_scripts/packagelist.wheezy.mipsel
deleted file mode 100644
index 0179fe726f8..00000000000
--- a/chromium/build/linux/sysroot_scripts/packagelist.wheezy.mipsel
+++ /dev/null
@@ -1,178 +0,0 @@
-main/a/alsa-lib/libasound2_1.0.25-4_mipsel.deb
-main/a/alsa-lib/libasound2-dev_1.0.25-4_mipsel.deb
-main/a/atk1.0/libatk1.0-0_2.4.0-2_mipsel.deb
-main/a/atk1.0/libatk1.0-dev_2.4.0-2_mipsel.deb
-main/a/attr/libattr1_2.4.46-8_mipsel.deb
-main/a/avahi/libavahi-client3_0.6.31-2_mipsel.deb
-main/a/avahi/libavahi-common3_0.6.31-2_mipsel.deb
-main/b/bluez/libbluetooth3_4.99-2_mipsel.deb
-main/b/bluez/libbluetooth-dev_4.99-2_mipsel.deb
-main/b/brltty/libbrlapi0.5_4.4-10+deb7u1_mipsel.deb
-main/b/brltty/libbrlapi-dev_4.4-10+deb7u1_mipsel.deb
-main/c/cairo/libcairo2_1.12.2-3_mipsel.deb
-main/c/cairo/libcairo2-dev_1.12.2-3_mipsel.deb
-main/c/cairo/libcairo-gobject2_1.12.2-3_mipsel.deb
-main/c/cairo/libcairo-script-interpreter2_1.12.2-3_mipsel.deb
-main/c/cups/libcups2_1.5.3-5+deb7u6_mipsel.deb
-main/c/cups/libcups2-dev_1.5.3-5+deb7u6_mipsel.deb
-main/d/dbus-glib/libdbus-glib-1-2_0.100.2-1_mipsel.deb
-main/d/dbus/libdbus-1-3_1.6.8-1+deb7u6_mipsel.deb
-main/d/dbus/libdbus-1-dev_1.6.8-1+deb7u6_mipsel.deb
-main/e/e2fsprogs/comerr-dev_2.1-1.42.5-1.1+deb7u1_mipsel.deb
-main/e/e2fsprogs/libcomerr2_1.42.5-1.1+deb7u1_mipsel.deb
-main/e/eglibc/libc6_2.13-38+deb7u10_mipsel.deb
-main/e/eglibc/libc6-dev_2.13-38+deb7u10_mipsel.deb
-main/e/elfutils/libelf1_0.152-1+wheezy1_mipsel.deb
-main/e/elfutils/libelf-dev_0.152-1+wheezy1_mipsel.deb
-main/e/expat/libexpat1_2.1.0-1+deb7u2_mipsel.deb
-main/e/expat/libexpat1-dev_2.1.0-1+deb7u2_mipsel.deb
-main/f/fontconfig/libfontconfig1_2.9.0-7.1_mipsel.deb
-main/f/fontconfig/libfontconfig1-dev_2.9.0-7.1_mipsel.deb
-main/f/freetype/libfreetype6_2.4.9-1.1+deb7u3_mipsel.deb
-main/f/freetype/libfreetype6-dev_2.4.9-1.1+deb7u3_mipsel.deb
-main/g/gcc-4.6/gcc-4.6_4.6.3-14_mipsel.deb
-main/g/gcc-4.6/libstdc++6-4.6-dev_4.6.3-14_mipsel.deb
-main/g/gcc-4.7/libgcc1_4.7.2-5_mipsel.deb
-main/g/gcc-4.7/libgomp1_4.7.2-5_mipsel.deb
-main/g/gcc-4.7/libstdc++6_4.7.2-5_mipsel.deb
-main/g/gconf/libgconf2-4_3.2.5-1+build1_mipsel.deb
-main/g/gconf/libgconf-2-4_3.2.5-1+build1_mipsel.deb
-main/g/gconf/libgconf2-dev_3.2.5-1+build1_mipsel.deb
-main/g/gdk-pixbuf/libgdk-pixbuf2.0-0_2.26.1-1+deb7u3_mipsel.deb
-main/g/gdk-pixbuf/libgdk-pixbuf2.0-dev_2.26.1-1+deb7u3_mipsel.deb
-main/g/glib2.0/libglib2.0-0_2.33.12+really2.32.4-5_mipsel.deb
-main/g/glib2.0/libglib2.0-dev_2.33.12+really2.32.4-5_mipsel.deb
-main/g/gnutls26/libgnutls26_2.12.20-8+deb7u5_mipsel.deb
-main/g/gnutls26/libgnutls-dev_2.12.20-8+deb7u5_mipsel.deb
-main/g/gnutls26/libgnutls-openssl27_2.12.20-8+deb7u5_mipsel.deb
-main/g/gnutls26/libgnutlsxx27_2.12.20-8+deb7u5_mipsel.deb
-main/g/gtk+2.0/libgtk2.0-0_2.24.10-2_mipsel.deb
-main/g/gtk+2.0/libgtk2.0-dev_2.24.10-2_mipsel.deb
-main/g/gtk+3.0/libgtk-3-0_3.4.2-7+deb7u1_mipsel.deb
-main/g/gtk+3.0/libgtk-3-dev_3.4.2-7+deb7u1_mipsel.deb
-main/k/keyutils/libkeyutils1_1.5.5-3+deb7u1_mipsel.deb
-main/k/krb5/krb5-multidev_1.10.1+dfsg-5+deb7u7_mipsel.deb
-main/k/krb5/libgssapi-krb5-2_1.10.1+dfsg-5+deb7u7_mipsel.deb
-main/k/krb5/libgssrpc4_1.10.1+dfsg-5+deb7u7_mipsel.deb
-main/k/krb5/libk5crypto3_1.10.1+dfsg-5+deb7u7_mipsel.deb
-main/k/krb5/libkadm5clnt-mit8_1.10.1+dfsg-5+deb7u7_mipsel.deb
-main/k/krb5/libkadm5srv-mit8_1.10.1+dfsg-5+deb7u7_mipsel.deb
-main/k/krb5/libkdb5-6_1.10.1+dfsg-5+deb7u7_mipsel.deb
-main/k/krb5/libkrb5-3_1.10.1+dfsg-5+deb7u7_mipsel.deb
-main/k/krb5/libkrb5-dev_1.10.1+dfsg-5+deb7u7_mipsel.deb
-main/k/krb5/libkrb5support0_1.10.1+dfsg-5+deb7u7_mipsel.deb
-main/libc/libcap2/libcap2_2.22-1.2_mipsel.deb
-main/libc/libcap2/libcap-dev_2.22-1.2_mipsel.deb
-main/libd/libdrm/libdrm2_2.4.40-1~deb7u2_mipsel.deb
-main/libd/libdrm/libdrm-dev_2.4.40-1~deb7u2_mipsel.deb
-main/libd/libdrm/libdrm-nouveau1a_2.4.40-1~deb7u2_mipsel.deb
-main/libd/libdrm/libdrm-radeon1_2.4.40-1~deb7u2_mipsel.deb
-main/libd/libdrm/libkms1_2.4.40-1~deb7u2_mipsel.deb
-main/libf/libffi/libffi5_3.0.10-3_mipsel.deb
-main/libf/libffi/libffi-dev_3.0.10-3_mipsel.deb
-main/libg/libgcrypt11/libgcrypt11_1.5.0-5+deb7u4_mipsel.deb
-main/libg/libgcrypt11/libgcrypt11-dev_1.5.0-5+deb7u4_mipsel.deb
-main/libg/libgnome-keyring/libgnome-keyring0_3.4.1-1_mipsel.deb
-main/libg/libgnome-keyring/libgnome-keyring-dev_3.4.1-1_mipsel.deb
-main/libg/libgpg-error/libgpg-error0_1.10-3.1_mipsel.deb
-main/libg/libgpg-error/libgpg-error-dev_1.10-3.1_mipsel.deb
-main/libn/libnss-db/libnss-db_2.2.3pre1-4_mipsel.deb
-main/libp/libp11/libp11-2_0.2.8-2_mipsel.deb
-main/libp/libpng/libpng12-0_1.2.49-1+deb7u2_mipsel.deb
-main/libp/libpng/libpng12-dev_1.2.49-1+deb7u2_mipsel.deb
-main/libp/libpthread-stubs/libpthread-stubs0-dev_0.3-3_mipsel.deb
-main/libs/libselinux/libselinux1_2.1.9-5_mipsel.deb
-main/libt/libtasn1-3/libtasn1-3_2.13-2+deb7u2_mipsel.deb
-main/libx/libx11/libx11-6_1.5.0-1+deb7u2_mipsel.deb
-main/libx/libx11/libx11-dev_1.5.0-1+deb7u2_mipsel.deb
-main/libx/libx11/libx11-xcb1_1.5.0-1+deb7u2_mipsel.deb
-main/libx/libx11/libx11-xcb-dev_1.5.0-1+deb7u2_mipsel.deb
-main/libx/libxau/libxau6_1.0.7-1_mipsel.deb
-main/libx/libxau/libxau-dev_1.0.7-1_mipsel.deb
-main/libx/libxcb/libxcb1_1.8.1-2+deb7u1_mipsel.deb
-main/libx/libxcb/libxcb1-dev_1.8.1-2+deb7u1_mipsel.deb
-main/libx/libxcb/libxcb-glx0_1.8.1-2+deb7u1_mipsel.deb
-main/libx/libxcb/libxcb-render0_1.8.1-2+deb7u1_mipsel.deb
-main/libx/libxcb/libxcb-render0-dev_1.8.1-2+deb7u1_mipsel.deb
-main/libx/libxcb/libxcb-shm0_1.8.1-2+deb7u1_mipsel.deb
-main/libx/libxcb/libxcb-shm0-dev_1.8.1-2+deb7u1_mipsel.deb
-main/libx/libxcomposite/libxcomposite1_0.4.3-2_mipsel.deb
-main/libx/libxcomposite/libxcomposite-dev_0.4.3-2_mipsel.deb
-main/libx/libxcursor/libxcursor1_1.1.13-1+deb7u1_mipsel.deb
-main/libx/libxcursor/libxcursor-dev_1.1.13-1+deb7u1_mipsel.deb
-main/libx/libxdamage/libxdamage1_1.1.3-2_mipsel.deb
-main/libx/libxdamage/libxdamage-dev_1.1.3-2_mipsel.deb
-main/libx/libxdmcp/libxdmcp6_1.1.1-1_mipsel.deb
-main/libx/libxdmcp/libxdmcp-dev_1.1.1-1_mipsel.deb
-main/libx/libxext/libxext6_1.3.1-2+deb7u1_mipsel.deb
-main/libx/libxext/libxext-dev_1.3.1-2+deb7u1_mipsel.deb
-main/libx/libxfixes/libxfixes3_5.0-4+deb7u1_mipsel.deb
-main/libx/libxfixes/libxfixes-dev_5.0-4+deb7u1_mipsel.deb
-main/libx/libxi/libxi6_1.6.1-1+deb7u1_mipsel.deb
-main/libx/libxi/libxi-dev_1.6.1-1+deb7u1_mipsel.deb
-main/libx/libxinerama/libxinerama1_1.1.2-1+deb7u1_mipsel.deb
-main/libx/libxinerama/libxinerama-dev_1.1.2-1+deb7u1_mipsel.deb
-main/libx/libxrandr/libxrandr2_1.3.2-2+deb7u1_mipsel.deb
-main/libx/libxrandr/libxrandr-dev_1.3.2-2+deb7u1_mipsel.deb
-main/libx/libxrender/libxrender1_0.9.7-1+deb7u2_mipsel.deb
-main/libx/libxrender/libxrender-dev_0.9.7-1+deb7u2_mipsel.deb
-main/libx/libxss/libxss1_1.2.2-1_mipsel.deb
-main/libx/libxss/libxss-dev_1.2.2-1_mipsel.deb
-main/libx/libxt/libxt6_1.1.3-1+deb7u1_mipsel.deb
-main/libx/libxt/libxt-dev_1.1.3-1+deb7u1_mipsel.deb
-main/libx/libxtst/libxtst6_1.2.1-1+deb7u1_mipsel.deb
-main/libx/libxtst/libxtst-dev_1.2.1-1+deb7u1_mipsel.deb
-main/libx/libxxf86vm/libxxf86vm1_1.1.2-1+deb7u1_mipsel.deb
-main/l/linux/linux-libc-dev_3.2.78-1_mipsel.deb
-main/m/mesa/libegl1-mesa_8.0.5-4+deb7u2_mipsel.deb
-main/m/mesa/libegl1-mesa-dev_8.0.5-4+deb7u2_mipsel.deb
-main/m/mesa/libegl1-mesa-drivers_8.0.5-4+deb7u2_mipsel.deb
-main/m/mesa/libgbm1_8.0.5-4+deb7u2_mipsel.deb
-main/m/mesa/libgbm-dev_8.0.5-4+deb7u2_mipsel.deb
-main/m/mesa/libgl1-mesa-dev_8.0.5-4+deb7u2_mipsel.deb
-main/m/mesa/libgl1-mesa-glx_8.0.5-4+deb7u2_mipsel.deb
-main/m/mesa/libglapi-mesa_8.0.5-4+deb7u2_mipsel.deb
-main/m/mesa/mesa-common-dev_8.0.5-4+deb7u2_mipsel.deb
-main/n/nspr/libnspr4_4.9.2-1+deb7u3_mipsel.deb
-main/n/nspr/libnspr4-dev_4.9.2-1+deb7u3_mipsel.deb
-main/n/nss/libnss3_3.14.5-1+deb7u5_mipsel.deb
-main/n/nss/libnss3-dev_3.14.5-1+deb7u5_mipsel.deb
-main/o/openssl/libssl1.0.0_1.0.1e-2+deb7u20_mipsel.deb
-main/o/openssl/libssl-dev_1.0.1e-2+deb7u20_mipsel.deb
-main/o/orbit2/liborbit2_2.14.19-0.1_mipsel.deb
-main/p/p11-kit/libp11-kit0_0.12-3_mipsel.deb
-main/p/pam/libpam0g_1.1.3-7.1_mipsel.deb
-main/p/pam/libpam0g-dev_1.1.3-7.1_mipsel.deb
-main/p/pango1.0/libpango1.0-0_1.30.0-1_mipsel.deb
-main/p/pango1.0/libpango1.0-dev_1.30.0-1_mipsel.deb
-main/p/pciutils/libpci3_3.1.9-6_mipsel.deb
-main/p/pciutils/libpci-dev_3.1.9-6_mipsel.deb
-main/p/pcre3/libpcre3_8.30-5_mipsel.deb
-main/p/pcre3/libpcre3-dev_8.30-5_mipsel.deb
-main/p/pcre3/libpcrecpp0_8.30-5_mipsel.deb
-main/p/pixman/libpixman-1-0_0.26.0-4+deb7u2_mipsel.deb
-main/p/pixman/libpixman-1-dev_0.26.0-4+deb7u2_mipsel.deb
-main/p/pulseaudio/libpulse0_2.0-6.1_mipsel.deb
-main/p/pulseaudio/libpulse-dev_2.0-6.1_mipsel.deb
-main/p/pulseaudio/libpulse-mainloop-glib0_2.0-6.1_mipsel.deb
-main/s/speech-dispatcher/libspeechd2_0.7.1-6.2_mipsel.deb
-main/s/speech-dispatcher/libspeechd-dev_0.7.1-6.2_mipsel.deb
-main/s/speech-dispatcher/speech-dispatcher_0.7.1-6.2_mipsel.deb
-main/u/udev/libudev0_175-7.2_mipsel.deb
-main/u/udev/libudev-dev_175-7.2_mipsel.deb
-main/w/wayland/libwayland0_0.85.0-2_mipsel.deb
-main/w/wayland/libwayland-dev_0.85.0-2_mipsel.deb
-main/x/x11proto-composite/x11proto-composite-dev_0.4.2-2_all.deb
-main/x/x11proto-core/x11proto-core-dev_7.0.23-1_all.deb
-main/x/x11proto-damage/x11proto-damage-dev_1.2.1-2_all.deb
-main/x/x11proto-fixes/x11proto-fixes-dev_5.0-2_all.deb
-main/x/x11proto-input/x11proto-input-dev_2.2-1_all.deb
-main/x/x11proto-kb/x11proto-kb-dev_1.0.6-2_all.deb
-main/x/x11proto-randr/x11proto-randr-dev_1.3.2-2_all.deb
-main/x/x11proto-record/x11proto-record-dev_1.14.2-1_all.deb
-main/x/x11proto-render/x11proto-render-dev_0.11.1-2_all.deb
-main/x/x11proto-scrnsaver/x11proto-scrnsaver-dev_1.2.2-1_all.deb
-main/x/x11proto-xext/x11proto-xext-dev_7.2.1-1_all.deb
-main/x/x11proto-xinerama/x11proto-xinerama-dev_1.2.1-2_all.deb
-main/z/zlib/zlib1g_1.2.7.dfsg-13_mipsel.deb
-main/z/zlib/zlib1g-dev_1.2.7.dfsg-13_mipsel.deb
diff --git a/chromium/build/linux/sysroot_scripts/sysroot-creator-precise.sh b/chromium/build/linux/sysroot_scripts/sysroot-creator-precise.sh
deleted file mode 100755
index 8c536b4abfa..00000000000
--- a/chromium/build/linux/sysroot_scripts/sysroot-creator-precise.sh
+++ /dev/null
@@ -1,212 +0,0 @@
-#!/bin/bash
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
-
-DISTRO=ubuntu
-DIST=precise
-DIST_UPDATES=precise-updates
-REPO_EXTRA="universe"
-
-# This is where we get all the debian packages from.
-APT_REPO=http://archive.ubuntu.com/ubuntu
-APT_REPO_ARM=http://ports.ubuntu.com
-APT_REPO_ARM64=http://ports.ubuntu.com
-KEYRING_FILE=/usr/share/keyrings/ubuntu-archive-keyring.gpg
-
-HAS_ARCH_AMD64=1
-
-# Precise supports these architectures but they are not needed by chrome.
-# HAS_ARCH_I386=1
-# HAS_ARCH_ARM=1
-
-# Sysroot packages: these are the packages needed to build chrome.
-# NOTE: When DEBIAN_PACKAGES is modified, the packagelist files must be updated
-# by running this script in GeneratePackageList mode.
-DEBIAN_PACKAGES="\
- comerr-dev
- gcc-4.6
- krb5-multidev
- libasound2
- libasound2-dev
- libatk1.0-0
- libatk1.0-dev
- libavahi-client3
- libavahi-common3
- libbluetooth3
- libbluetooth-dev
- libbrlapi0.5
- libbrlapi-dev
- libc6
- libc6-dev
- libcairo2
- libcairo2-dev
- libcairo-gobject2
- libcairo-script-interpreter2
- libcap-dev
- libcap2
- libcomerr2
- libcups2
- libcups2-dev
- libdbus-1-3
- libdbus-1-dev
- libdbus-glib-1-2
- libdrm-dev
- libdrm-intel1
- libdrm-nouveau1a
- libdrm-nouveau2
- libdrm-radeon1
- libdrm2
- libegl1-mesa
- libegl1-mesa-dev
- libegl1-mesa-drivers
- libelf1
- libelf-dev
- libexpat1
- libexpat1-dev
- libffi6
- libffi-dev
- libfontconfig1
- libfontconfig1-dev
- libfreetype6
- libfreetype6-dev
- libgbm1
- libgbm-dev
- libgcc1
- libgconf-2-4
- libgconf2-4
- libgconf2-dev
- libgcrypt11
- libgcrypt11-dev
- libgdk-pixbuf2.0-0
- libgdk-pixbuf2.0-dev
- libgl1-mesa-dev
- libgl1-mesa-glx
- libglapi-mesa
- libglib2.0-0
- libglib2.0-dev
- libgnome-keyring0
- libgnome-keyring-dev
- libgnutls26
- libgnutls-dev
- libgnutls-openssl27
- libgnutlsxx27
- libgomp1
- libgpg-error0
- libgpg-error-dev
- libgssapi-krb5-2
- libgssrpc4
- libgtk-3-0
- libgtk-3-dev
- libgtk2.0-0
- libgtk2.0-dev
- libk5crypto3
- libkadm5clnt-mit8
- libkadm5srv-mit8
- libkdb5-6
- libkeyutils1
- libkms1
- libkrb5-3
- libkrb5-dev
- libkrb5support0
- libnspr4
- libnspr4-dev
- libnss3
- libnss3-dev
- libnss-db
- liborbit2
- libp11-2
- libp11-kit0
- libpam0g
- libpam0g-dev
- libpango1.0-0
- libpango1.0-dev
- libpci3
- libpci-dev
- libpcre3
- libpcre3-dev
- libpcrecpp0
- libpixman-1-0
- libpixman-1-dev
- libpng12-0
- libpng12-dev
- libpthread-stubs0-dev
- libpulse0
- libpulse-dev
- libpulse-mainloop-glib0
- libselinux1
- libspeechd2
- libspeechd-dev
- libssl1.0.0
- libssl-dev
- libstdc++6
- libstdc++6-4.6-dev
- libtasn1-3
- libwayland0
- libwayland-dev
- libx11-6
- libx11-dev
- libx11-xcb1
- libx11-xcb-dev
- libxau6
- libxau-dev
- libxcb1
- libxcb1-dev
- libxcb-glx0
- libxcb-render0
- libxcb-render0-dev
- libxcb-shm0
- libxcb-shm0-dev
- libxcomposite1
- libxcomposite-dev
- libxcursor1
- libxcursor-dev
- libxdamage1
- libxdamage-dev
- libxdmcp6
- libxdmcp-dev
- libxext6
- libxext-dev
- libxfixes3
- libxfixes-dev
- libxi6
- libxi-dev
- libxinerama1
- libxinerama-dev
- libxkbcommon0
- libxkbcommon-dev
- libxrandr2
- libxrandr-dev
- libxrender1
- libxrender-dev
- libxss1
- libxss-dev
- libxt6
- libxt-dev
- libxtst6
- libxtst-dev
- libxxf86vm1
- linux-libc-dev
- mesa-common-dev
- speech-dispatcher
- x11proto-composite-dev
- x11proto-core-dev
- x11proto-damage-dev
- x11proto-fixes-dev
- x11proto-input-dev
- x11proto-kb-dev
- x11proto-randr-dev
- x11proto-record-dev
- x11proto-render-dev
- x11proto-scrnsaver-dev
- x11proto-xext-dev
- zlib1g
- zlib1g-dev
-"
-
-DEBIAN_PACKAGES_X86="libquadmath0"
-DEBIAN_PACKAGES_ARM="libdrm-omap1"
-
-. "${SCRIPT_DIR}/sysroot-creator.sh"
diff --git a/chromium/build/linux/sysroot_scripts/sysroot-creator-wheezy.sh b/chromium/build/linux/sysroot_scripts/sysroot-creator-wheezy.sh
deleted file mode 100755
index 7f8c9cedf6b..00000000000
--- a/chromium/build/linux/sysroot_scripts/sysroot-creator-wheezy.sh
+++ /dev/null
@@ -1,208 +0,0 @@
-#!/bin/bash
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
-
-DISTRO=debian
-DIST=wheezy
-DIST_UPDATES=wheezy-updates
-
-APT_REPO=http://http.us.debian.org/debian
-KEYRING_FILE="${SCRIPT_DIR}/debian-archive-wheezy-stable.gpg"
-
-HAS_ARCH_AMD64=1
-HAS_ARCH_I386=1
-HAS_ARCH_ARM=1
-HAS_ARCH_MIPS=1
-
-# Sysroot packages: these are the packages needed to build chrome.
-# NOTE: When DEBIAN_PACKAGES is modified, the packagelist files must be updated
-# by running this script in GeneratePackageList mode.
-DEBIAN_PACKAGES="\
- comerr-dev
- gcc-4.6
- krb5-multidev
- libasound2
- libasound2-dev
- libatk1.0-0
- libatk1.0-dev
- libattr1
- libavahi-client3
- libavahi-common3
- libbluetooth3
- libbluetooth-dev
- libbrlapi0.5
- libbrlapi-dev
- libc6
- libc6-dev
- libcairo2
- libcairo2-dev
- libcairo-gobject2
- libcairo-script-interpreter2
- libcap-dev
- libcap2
- libcomerr2
- libcups2
- libcups2-dev
- libdbus-1-3
- libdbus-1-dev
- libdbus-glib-1-2
- libdrm2
- libdrm-dev
- libdrm-nouveau1a
- libdrm-radeon1
- libegl1-mesa
- libegl1-mesa-dev
- libegl1-mesa-drivers
- libelf1
- libelf-dev
- libexpat1
- libexpat1-dev
- libffi5
- libffi-dev
- libfontconfig1
- libfontconfig1-dev
- libfreetype6
- libfreetype6-dev
- libgbm1
- libgbm-dev
- libgcc1
- libgconf-2-4
- libgconf2-4
- libgconf2-dev
- libgcrypt11
- libgcrypt11-dev
- libgdk-pixbuf2.0-0
- libgdk-pixbuf2.0-dev
- libgl1-mesa-dev
- libgl1-mesa-glx
- libglapi-mesa
- libglib2.0-0
- libglib2.0-dev
- libgnome-keyring0
- libgnome-keyring-dev
- libgnutls26
- libgnutls-dev
- libgnutls-openssl27
- libgnutlsxx27
- libgomp1
- libgpg-error0
- libgpg-error-dev
- libgssapi-krb5-2
- libgssrpc4
- libgtk-3-0
- libgtk-3-dev
- libgtk2.0-0
- libgtk2.0-dev
- libk5crypto3
- libkadm5clnt-mit8
- libkadm5srv-mit8
- libkdb5-6
- libkeyutils1
- libkms1
- libkrb5-3
- libkrb5-dev
- libkrb5support0
- libnspr4
- libnspr4-dev
- libnss3
- libnss3-dev
- libnss-db
- liborbit2
- libp11-2
- libp11-kit0
- libpam0g
- libpam0g-dev
- libpango1.0-0
- libpango1.0-dev
- libpci3
- libpci-dev
- libpcre3
- libpcre3-dev
- libpcrecpp0
- libpixman-1-0
- libpixman-1-dev
- libpng12-0
- libpng12-dev
- libpthread-stubs0-dev
- libpulse0
- libpulse-dev
- libpulse-mainloop-glib0
- libselinux1
- libspeechd2
- libspeechd-dev
- libssl1.0.0
- libssl-dev
- libstdc++6
- libstdc++6-4.6-dev
- libtasn1-3
- libudev-dev
- libudev0
- libwayland0
- libwayland-dev
- libx11-6
- libx11-dev
- libx11-xcb1
- libx11-xcb-dev
- libxau6
- libxau-dev
- libxcb1
- libxcb1-dev
- libxcb-glx0
- libxcb-render0
- libxcb-render0-dev
- libxcb-shm0
- libxcb-shm0-dev
- libxcomposite1
- libxcomposite-dev
- libxcursor1
- libxcursor-dev
- libxdamage1
- libxdamage-dev
- libxdmcp6
- libxdmcp-dev
- libxext6
- libxext-dev
- libxfixes3
- libxfixes-dev
- libxi6
- libxi-dev
- libxinerama1
- libxinerama-dev
- libxrandr2
- libxrandr-dev
- libxrender1
- libxrender-dev
- libxss1
- libxss-dev
- libxt6
- libxt-dev
- libxtst6
- libxtst-dev
- libxxf86vm1
- linux-libc-dev
- mesa-common-dev
- speech-dispatcher
- x11proto-composite-dev
- x11proto-core-dev
- x11proto-damage-dev
- x11proto-fixes-dev
- x11proto-input-dev
- x11proto-kb-dev
- x11proto-randr-dev
- x11proto-record-dev
- x11proto-render-dev
- x11proto-scrnsaver-dev
- x11proto-xext-dev
- x11proto-xinerama-dev
- zlib1g
- zlib1g-dev
-"
-
-DEBIAN_PACKAGES_X86="libquadmath0 libdrm-intel1"
-DEBIAN_PACKAGES_ARM="libdrm-omap1"
-DEBIAN_PACKAGES_AMD64=""
-
-. "${SCRIPT_DIR}/sysroot-creator.sh"
diff --git a/chromium/build/linux/sysroot_scripts/sysroots.json b/chromium/build/linux/sysroot_scripts/sysroots.json
index 3b2333a4356..d4b2f77daad 100644
--- a/chromium/build/linux/sysroot_scripts/sysroots.json
+++ b/chromium/build/linux/sysroot_scripts/sysroots.json
@@ -29,40 +29,10 @@
"SysrootDir": "debian_jessie_mips-sysroot",
"Tarball": "debian_jessie_mips_sysroot.tgz"
},
- "precise_amd64": {
- "Revision": "d3d82f7c4e34a753953581a48e62ef577b334529",
- "Sha1Sum": "8fffb717217b2dc4e29b3a1305877bcd0552f55e",
- "SysrootDir": "ubuntu_precise_amd64-sysroot",
- "Tarball": "ubuntu_precise_amd64_sysroot.tgz"
- },
"trusty_arm": {
"Revision": "d3d82f7c4e34a753953581a48e62ef577b334529",
"Sha1Sum": "f78eb929410b94cdf48276db82a7e7adcafcc277",
"SysrootDir": "ubuntu_trusty_arm-sysroot",
"Tarball": "ubuntu_trusty_arm_sysroot.tgz"
- },
- "wheezy_amd64": {
- "Revision": "d3d82f7c4e34a753953581a48e62ef577b334529",
- "Sha1Sum": "f86a3d88e777ac6e52f61d97ac0c008b2b088429",
- "SysrootDir": "debian_wheezy_amd64-sysroot",
- "Tarball": "debian_wheezy_amd64_sysroot.tgz"
- },
- "wheezy_arm": {
- "Revision": "d3d82f7c4e34a753953581a48e62ef577b334529",
- "Sha1Sum": "e1939f3de5c814a9309ba4668b7d324fa0f90ba8",
- "SysrootDir": "debian_wheezy_arm-sysroot",
- "Tarball": "debian_wheezy_arm_sysroot.tgz"
- },
- "wheezy_i386": {
- "Revision": "d3d82f7c4e34a753953581a48e62ef577b334529",
- "Sha1Sum": "b28d8ea2eecb51c30c20e05cc399e428b4ad5af9",
- "SysrootDir": "debian_wheezy_i386-sysroot",
- "Tarball": "debian_wheezy_i386_sysroot.tgz"
- },
- "wheezy_mips": {
- "Revision": "d3d82f7c4e34a753953581a48e62ef577b334529",
- "Sha1Sum": "370fac62175c1ea41070cc6e115e4f086136cfee",
- "SysrootDir": "debian_wheezy_mips-sysroot",
- "Tarball": "debian_wheezy_mips_sysroot.tgz"
}
}
diff --git a/chromium/build/linux/unbundle/README b/chromium/build/linux/unbundle/README
index 47336a1a330..6e4f0a95c9f 100644
--- a/chromium/build/linux/unbundle/README
+++ b/chromium/build/linux/unbundle/README
@@ -1,6 +1,11 @@
-This directory contains files that make it possible to use system libraries.
+This directory contains files that make it possible for Linux
+distributions to build Chromium using system libraries and exclude the
+source code for Chromium's bundled copies of system libraries in a
+consistent manner. Nothing here is used in normal developer builds.
-For more info please read the following:
+
+For more info on the Linux distros' philosophy on bundling system
+libraries and why this exists, please read the following:
- https://fedoraproject.org/wiki/Packaging:No_Bundled_Libraries
- https://wiki.gentoo.org/wiki/Why_not_bundle_dependencies
@@ -14,10 +19,13 @@ Additional resources which might provide even more context:
- http://events.linuxfoundation.org/sites/events/files/slides/LinuxCon%202014%20Slides_0.pdf
- https://lwn.net/Articles/619158/
-This directory is provided in the source tree to follow above guidelines.
-It is a compromise solution which takes into account Chromium developers
-who want to avoid the perceived burden of more conditional code in build files,
-and expectations of Open Source community, where using system-provided
+
+This directory is provided in the source tree so one can follow the
+above guidelines without having to download additional tools and worry
+about having the right version of the tool. It is a compromise solution
+which takes into account Chromium developers who want to avoid the
+perceived burden of more conditional code in build files, and
+expectations of Open Source community, where using system-provided
libraries is the norm.
Usage:
@@ -28,15 +36,22 @@ Usage:
The script scans sources looking for third_party directories.
Everything that is not explicitly preserved is removed (except for
- gyp files), and the script fails if any directory passed on command
- line does not exist (to ensure list is kept up to date).
+ GYP/GN build files), and the script fails if any directory passed on
+ command line does not exist (to ensure list is kept up to date).
- This is intended to be used on sources extracted from a tarball,
- not a repository.
+ This is intended to be used on source code extracted from a tarball,
+ not on a git repository.
NOTE: by default this will not remove anything (for safety). Pass
- --do-remove flag to actually remove files.
+ the --do-remove flag to actually remove files.
2. replace_gn_files.py --system-libraries lib...
+ This swaps out a normal library GN build file that is intended for
+ use with a bundled library for a build file that is set up to use
+ the system library. While some build files have use_system_libfoo
+ build flags, using unbundled build files has the advantage that Linux
+ distros can build Chromium without having to specify many additional
+ build flags.
+
For example: replace_gn_files.py --system-libraries libxml
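
Taken together, the two steps described above give roughly the following flow for a distro build. This is only a sketch: the preserved directory, the library names, and the output directory are illustrative, and the path to remove_bundled_libraries.py is assumed from context; only the flags (--do-remove, --system-libraries) come from the README text above.

  # 1. Prune bundled third_party sources, keeping only the directories named here
  ./build/linux/unbundle/remove_bundled_libraries.py --do-remove third_party/skia
  # 2. Swap in GN files that resolve the listed libraries from the system
  ./build/linux/unbundle/replace_gn_files.py --system-libraries libxml libevent
  # 3. Regenerate the build as usual
  gn gen out/Release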
diff --git a/chromium/build/linux/unbundle/ffmpeg.gn b/chromium/build/linux/unbundle/ffmpeg.gn
index 9a4fbfd75a4..f4f0e263499 100644
--- a/chromium/build/linux/unbundle/ffmpeg.gn
+++ b/chromium/build/linux/unbundle/ffmpeg.gn
@@ -13,6 +13,10 @@ pkg_config("system_ffmpeg") {
]
}
+config("using_system_ffmpeg") {
+ defines = [ "USE_SYSTEM_FFMPEG=1" ]
+}
+
shim_headers("ffmpeg_shim") {
root_path = "."
headers = [
@@ -26,5 +30,8 @@ source_set("ffmpeg") {
deps = [
":ffmpeg_shim",
]
- public_configs = [ ":system_ffmpeg" ]
+ public_configs = [
+ ":system_ffmpeg",
+ ":using_system_ffmpeg",
+ ]
}
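
The new using_system_ffmpeg config is exported through public_configs, so targets depending on the shimmed ffmpeg target should pick up the USE_SYSTEM_FFMPEG=1 define automatically. A hedged way to confirm this after swapping the file in (the target label and output directory are assumptions, not taken from this patch):

  gn desc out/Release //third_party/ffmpeg defines
  # The list should include USE_SYSTEM_FFMPEG=1 once the unbundled ffmpeg.gn is in place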
diff --git a/chromium/build/linux/unbundle/libdrm.gn b/chromium/build/linux/unbundle/libdrm.gn
new file mode 100644
index 00000000000..22df98ae716
--- /dev/null
+++ b/chromium/build/linux/unbundle/libdrm.gn
@@ -0,0 +1,22 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+import("//build/shim_headers.gni")
+
+pkg_config("system_libdrm") {
+ packages = [ "libdrm" ]
+}
+
+shim_headers("libdrm_shim") {
+ root_path = "src/include"
+ headers = [ "drm.h" ]
+}
+
+source_set("libdrm") {
+ deps = [
+ ":libdrm_shim",
+ ]
+ public_configs = [ ":system_libdrm" ]
+}
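
The new libdrm.gn assumes a system libdrm that pkg-config can find; the pkg_config("system_libdrm") rule above supplies its flags to anything depending on the shimmed target. A quick sanity check from the shell (purely illustrative, not part of the build):

  # Confirm a system libdrm is visible to pkg-config before switching the build over
  pkg-config --exists libdrm && echo "libdrm $(pkg-config --modversion libdrm) found"
  pkg-config --cflags --libs libdrm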
diff --git a/chromium/build/linux/unbundle/libjpeg.gn b/chromium/build/linux/unbundle/libjpeg.gn
index 81ce3f97d7e..51c352c2c58 100644
--- a/chromium/build/linux/unbundle/libjpeg.gn
+++ b/chromium/build/linux/unbundle/libjpeg.gn
@@ -21,3 +21,6 @@ source_set("simd") {
source_set("simd_asm") {
}
+
+config("libjpeg_config") {
+}
diff --git a/chromium/build/linux/unbundle/replace_gn_files.py b/chromium/build/linux/unbundle/replace_gn_files.py
index 790eb18722b..7570a8bb683 100755
--- a/chromium/build/linux/unbundle/replace_gn_files.py
+++ b/chromium/build/linux/unbundle/replace_gn_files.py
@@ -21,6 +21,7 @@ REPLACEMENTS = {
'flac': 'third_party/flac/BUILD.gn',
'harfbuzz-ng': 'third_party/harfbuzz-ng/BUILD.gn',
'icu': 'third_party/icu/BUILD.gn',
+ 'libdrm': 'third_party/libdrm/BUILD.gn',
'libevent': 'base/third_party/libevent/BUILD.gn',
'libjpeg': 'build/secondary/third_party/libjpeg_turbo/BUILD.gn',
'libpng': 'third_party/libpng/BUILD.gn',
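
With 'libdrm' added to REPLACEMENTS, the system copy can be selected the same way as the other entries, using the flag documented in the unbundle README above; the invocation below is a sketch run from the source root:

  ./build/linux/unbundle/replace_gn_files.py --system-libraries libdrm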
diff --git a/chromium/build/mac/copy_asan_runtime_dylib.sh b/chromium/build/mac/copy_asan_runtime_dylib.sh
deleted file mode 100755
index 42814903dca..00000000000
--- a/chromium/build/mac/copy_asan_runtime_dylib.sh
+++ /dev/null
@@ -1,76 +0,0 @@
-#!/bin/bash
-
-# Copyright (c) 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# For app bundles built with ASan, copies the runtime lib
-# (libclang_rt.asan_osx_dynamic.dylib), on which their executables depend, from
-# the compiler installation path into the bundle and fixes the dylib's install
-# name in the binary to be relative to @executable_path.
-
-set -e
-
-BINARY="${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}"
-
-if [[ ! -f "$BINARY" ]]; then
- # This is neither an .app bundle nor a standalone executable.
- # Most certainly the script has been called for a data bundle.
- exit 0
-fi
-
-BINARY_DIR="$(dirname "${BINARY}")"
-
-# Find the link to the ASan runtime encoded in the binary.
-BUILTIN_DYLIB_PATH=$(otool -L "${BINARY}" | \
- sed -Ene 's/^[[:blank:]]+(.*libclang_rt\.asan_.*_dynamic\.dylib).*$/\1/p')
-
-if [[ "${BUILTIN_DYLIB_PATH}" == *asan_iossim_dynamic* ]]; then
- ASAN_DYLIB_NAME=libclang_rt.asan_iossim_dynamic.dylib
-elif [[ "${BUILTIN_DYLIB_PATH}" == *asan_osx_dynamic* ]]; then
- ASAN_DYLIB_NAME=libclang_rt.asan_osx_dynamic.dylib
-fi
-
-if [[ -z "${BUILTIN_DYLIB_PATH}" ]]; then
- echo "${BINARY} does not depend on the ASan runtime library!" >&2
- exit 1
-fi
-
-# TODO(glider): this doesn't work if we set CC and CXX to override the default
-# Clang.
-SRCROOT="${BUILT_PRODUCTS_DIR}/../.."
-CLANGVER=$(python ${SRCROOT}/tools/clang/scripts/update.py --print-clang-version)
-ASAN_DYLIB=${SRCROOT}/third_party/llvm-build/Release+Asserts/lib/clang/${CLANGVER}/lib/darwin/${ASAN_DYLIB_NAME}
-
-DYLIB_BASENAME=$(basename "${ASAN_DYLIB}")
-if [[ "${DYLIB_BASENAME}" != "${ASAN_DYLIB_NAME}" ]]; then
- echo "basename(${ASAN_DYLIB}) != ${ASAN_DYLIB_NAME}" >&2
- exit 1
-fi
-
-# Check whether the directory containing the executable binary is named
-# "MacOS". In this case we're building a full-fledged OSX app and will put
-# the runtime into appname.app/Contents/Libraries/. Otherwise this is probably
-# an iOS gtest app, and the ASan runtime is put next to the executable.
-UPPER_DIR=$(dirname "${BINARY_DIR}")
-if [ "${UPPER_DIR}" == "MacOS" ]; then
- LIBRARIES_DIR="${UPPER_DIR}/Libraries"
- mkdir -p "${LIBRARIES_DIR}"
- NEW_LC_ID_DYLIB="@executable_path/../Libraries/${ASAN_DYLIB_NAME}"
-else
- LIBRARIES_DIR="${BINARY_DIR}"
- NEW_LC_ID_DYLIB="@executable_path/${ASAN_DYLIB_NAME}"
-fi
-
-cp "${ASAN_DYLIB}" "${LIBRARIES_DIR}"
-
-# Make LC_ID_DYLIB of the runtime copy point to its location.
-install_name_tool \
- -id "${NEW_LC_ID_DYLIB}" \
- "${LIBRARIES_DIR}/${ASAN_DYLIB_NAME}"
-
-# Fix the rpath to the runtime library recorded in the binary.
-install_name_tool \
- -change "${BUILTIN_DYLIB_PATH}" \
- "${NEW_LC_ID_DYLIB}" \
- "${BINARY}"
diff --git a/chromium/build/mac/copy_framework_unversioned.sh b/chromium/build/mac/copy_framework_unversioned.sh
deleted file mode 100755
index 380cc908407..00000000000
--- a/chromium/build/mac/copy_framework_unversioned.sh
+++ /dev/null
@@ -1,118 +0,0 @@
-#!/bin/bash
-
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Copies a framework to its new home, "unversioning" it.
-#
-# Normally, frameworks are versioned bundles. The contents of a framework are
-# stored in a versioned directory within the bundle, and symbolic links
-# provide access to the actual code and resources. See
-# http://developer.apple.com/mac/library/documentation/MacOSX/Conceptual/BPFrameworks/Concepts/FrameworkAnatomy.html
-#
-# The symbolic links usually found in frameworks create problems. Symbolic
-# links are excluded from code signatures. That means that it's possible to
-# remove or retarget a symbolic link within a framework without affecting the
-# seal. In Chrome's case, the outer .app bundle contains a framework where
-# all application code and resources live. In order for the signature on the
-# .app to be meaningful, it encompasses the framework. Because framework
-# resources are accessed through the framework's symbolic links, this
-# arrangement results in a case where the resources can be altered without
-# affecting the .app signature's validity.
-#
-# Indirection through symbolic links also carries a runtime performance
-# penalty on open() operations, although open() typically completes so quickly
-# that this is not considered a major performance problem.
-#
-# To resolve these problems, the frameworks that ship within Chrome's .app
-# bundle are unversioned. Unversioning is simple: instead of using the
-# original outer .framework directory as the framework that ships within the
-# .app, the inner versioned directory is used. Instead of accessing bundled
-# resources through symbolic links, they are accessed directly. In normal
-# situations, the only hard-coded use of the versioned directory is by dyld,
-# when loading the framework's code, but this is handled through a normal
-# Mach-O load command, and it is easy to adjust the load command to point to
-# the unversioned framework code rather than the versioned counterpart.
-#
-# The resulting framework bundles aren't strictly conforming, but they work
-# as well as normal versioned framework bundles.
-#
-# An option to skip running install_name_tool is available. By passing -I as
-# the first argument to this script, install_name_tool will be skipped. This
-# is only suitable for copied frameworks that will not be linked against, or
-# when install_name_tool will be run on any linker output when something is
-# linked against the copied framework. This option exists to allow signed
-# frameworks to pass through without subjecting them to any modifications that
-# would break their signatures.
-
-set -e
-
-RUN_INSTALL_NAME_TOOL=1
-if [ $# -eq 3 ] && [ "${1}" = "-I" ] ; then
- shift
- RUN_INSTALL_NAME_TOOL=
-fi
-
-if [ $# -ne 2 ] ; then
- echo "usage: ${0} [-I] FRAMEWORK DESTINATION_DIR" >& 2
- exit 1
-fi
-
-# FRAMEWORK should be a path to a versioned framework bundle, ending in
-# .framework. DESTINATION_DIR is the directory that the unversioned framework
-# bundle will be copied to.
-
-FRAMEWORK="${1}"
-DESTINATION_DIR="${2}"
-
-FRAMEWORK_NAME="$(basename "${FRAMEWORK}")"
-if [ "${FRAMEWORK_NAME: -10}" != ".framework" ] ; then
- echo "${0}: ${FRAMEWORK_NAME} does not end in .framework" >& 2
- exit 1
-fi
-FRAMEWORK_NAME_NOEXT="${FRAMEWORK_NAME:0:$((${#FRAMEWORK_NAME} - 10))}"
-
-# Find the current version.
-VERSIONS="${FRAMEWORK}/Versions"
-CURRENT_VERSION_LINK="${VERSIONS}/Current"
-CURRENT_VERSION_ID="$(readlink "${VERSIONS}/Current")"
-CURRENT_VERSION="${VERSIONS}/${CURRENT_VERSION_ID}"
-
-# Make sure that the framework's structure makes sense as a versioned bundle.
-if [ ! -e "${CURRENT_VERSION}/${FRAMEWORK_NAME_NOEXT}" ] ; then
- echo "${0}: ${FRAMEWORK_NAME} does not contain a dylib" >& 2
- exit 1
-fi
-
-DESTINATION="${DESTINATION_DIR}/${FRAMEWORK_NAME}"
-
-# Copy the versioned directory within the versioned framework to its
-# destination location.
-mkdir -p "${DESTINATION_DIR}"
-rsync -acC --delete --exclude Headers --exclude PrivateHeaders \
- --include '*.so' "${CURRENT_VERSION}/" "${DESTINATION}"
-
-if [[ -n "${RUN_INSTALL_NAME_TOOL}" ]]; then
- # Adjust the Mach-O LC_ID_DYLIB load command in the framework. This does not
- # change the LC_LOAD_DYLIB load commands in anything that may have already
- # linked against the framework. Not all frameworks will actually need this
- # to be changed. Some frameworks may already be built with the proper
- # LC_ID_DYLIB for use as an unversioned framework. Xcode users can do this
- # by setting LD_DYLIB_INSTALL_NAME to
- # $(DYLIB_INSTALL_NAME_BASE:standardizepath)/$(WRAPPER_NAME)/$(PRODUCT_NAME)
- # If invoking ld via gcc or g++, pass the desired path to -Wl,-install_name
- # at link time.
- FRAMEWORK_DYLIB="${DESTINATION}/${FRAMEWORK_NAME_NOEXT}"
- LC_ID_DYLIB_OLD="$(otool -l "${FRAMEWORK_DYLIB}" |
- grep -A10 "^ *cmd LC_ID_DYLIB$" |
- grep -m1 "^ *name" |
- sed -Ee 's/^ *name (.*) \(offset [0-9]+\)$/\1/')"
- VERSION_PATH="/Versions/${CURRENT_VERSION_ID}/${FRAMEWORK_NAME_NOEXT}"
- LC_ID_DYLIB_NEW="$(echo "${LC_ID_DYLIB_OLD}" |
- sed -Ee "s%${VERSION_PATH}$%/${FRAMEWORK_NAME_NOEXT}%")"
-
- if [ "${LC_ID_DYLIB_NEW}" != "${LC_ID_DYLIB_OLD}" ] ; then
- install_name_tool -id "${LC_ID_DYLIB_NEW}" "${FRAMEWORK_DYLIB}"
- fi
-fi
diff --git a/chromium/build/mac/strip_from_xcode b/chromium/build/mac/strip_from_xcode
deleted file mode 100755
index c26b9fb492b..00000000000
--- a/chromium/build/mac/strip_from_xcode
+++ /dev/null
@@ -1,62 +0,0 @@
-#!/bin/bash
-
-# Copyright (c) 2008 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This is a handy wrapper script that figures out how to call the strip
-# utility (strip_save_dsym in this case), if it even needs to be called at all,
-# and then does it. This script should be called by a post-link phase in
-# targets that might generate Mach-O executables, dynamic libraries, or
-# loadable bundles.
-#
-# An example "Strip If Needed" build phase placed after "Link Binary With
-# Libraries" would do:
-# exec "${XCODEPROJ_DEPTH}/build/mac/strip_from_xcode"
-
-if [ "${CONFIGURATION}" != "Release" ] ; then
- # Only strip in release mode.
- exit 0
-fi
-
-declare -a FLAGS
-
-# MACH_O_TYPE is not set for a command-line tool, so check PRODUCT_TYPE too.
-# Weird.
-if [ "${MACH_O_TYPE}" = "mh_execute" ] || \
- [ "${PRODUCT_TYPE}" = "com.apple.product-type.tool" ] ; then
- # Strip everything (no special flags). No-op.
- true
-elif [ "${MACH_O_TYPE}" = "mh_dylib" ] || \
- [ "${MACH_O_TYPE}" = "mh_bundle" ]; then
- # Strip debugging symbols and local symbols
- FLAGS[${#FLAGS[@]}]=-S
- FLAGS[${#FLAGS[@]}]=-x
-elif [ "${MACH_O_TYPE}" = "staticlib" ] ; then
- # Don't strip static libraries.
- exit 0
-else
- # Warn, but don't treat this as an error.
- echo $0: warning: unrecognized MACH_O_TYPE ${MACH_O_TYPE}
- exit 0
-fi
-
-if [ -n "${STRIPFLAGS}" ] ; then
- # Pick up the standard STRIPFLAGS Xcode setting, used for "Additional Strip
- # Flags".
- for stripflag in "${STRIPFLAGS}" ; do
- FLAGS[${#FLAGS[@]}]="${stripflag}"
- done
-fi
-
-if [ -n "${CHROMIUM_STRIP_SAVE_FILE}" ] ; then
- # An Xcode project can communicate a file listing symbols to saved in this
- # environment variable by setting it as a build setting. This isn't a
- # standard Xcode setting. It's used in preference to STRIPFLAGS to
- # eliminate quoting ambiguity concerns.
- FLAGS[${#FLAGS[@]}]=-s
- FLAGS[${#FLAGS[@]}]="${CHROMIUM_STRIP_SAVE_FILE}"
-fi
-
-exec "$(dirname ${0})/strip_save_dsym" "${FLAGS[@]}" \
- "${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}"
diff --git a/chromium/build/mac/strip_save_dsym b/chromium/build/mac/strip_save_dsym
deleted file mode 100755
index c9cf2266375..00000000000
--- a/chromium/build/mac/strip_save_dsym
+++ /dev/null
@@ -1,335 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2011 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Usage: strip_save_dsym <whatever-arguments-you-would-pass-to-strip>
-#
-# strip_save_dsym is a wrapper around the standard strip utility. Given an
-# input Mach-O file, strip_save_dsym will save a copy of the file in a "fake"
-# .dSYM bundle for debugging, and then call strip to strip the Mach-O file.
-# Note that the .dSYM file is a "fake" in that it's not a self-contained
-# .dSYM bundle, it just contains a copy of the original (unstripped) Mach-O
-# file, and therefore contains references to object files on the filesystem.
-# The generated .dSYM bundle is therefore unsuitable for debugging in the
-# absence of these .o files.
-#
-# If a .dSYM already exists and has a newer timestamp than the Mach-O file,
-# this utility does nothing. That allows strip_save_dsym to be run on a file
-# that has already been stripped without trashing the .dSYM.
-#
-# Rationale: the "right" way to generate dSYM bundles, dsymutil, is incredibly
-# slow. On the other hand, doing a file copy (which is really all that
-# dsymutil does) is comparatively fast. Since we usually just want to strip
-# a release-mode executable but still be able to debug it, and we don't care
-# so much about generating a hermetic dSYM bundle, we'll prefer the file copy.
-# If a real dSYM is ever needed, it's still possible to create one by running
-# dsymutil and pointing it at the original Mach-O file inside the "fake"
-# bundle, provided that the object files are available.
-
-import errno
-import os
-import re
-import shutil
-import subprocess
-import sys
-import time
-
-# Returns a list of architectures contained in a Mach-O file. The file can be
-# a universal (fat) file, in which case there will be one list element for
-# each contained architecture, or it can be a thin single-architecture Mach-O
-# file, in which case the list will contain a single element identifying the
-# architecture. On error, returns an empty list. Determines the architecture
-# list by calling file.
-def macho_archs(macho):
- macho_types = ["executable",
- "dynamically linked shared library",
- "bundle"]
- macho_types_re = "Mach-O (?:64-bit )?(?:" + "|".join(macho_types) + ")"
-
- file_cmd = subprocess.Popen(["/usr/bin/file", "-b", "--", macho],
- stdout=subprocess.PIPE)
-
- archs = []
-
- type_line = file_cmd.stdout.readline()
- type_match = re.match("^%s (.*)$" % macho_types_re, type_line)
- if type_match:
- archs.append(type_match.group(1))
- return [type_match.group(1)]
- else:
- type_match = re.match("^Mach-O universal binary with (.*) architectures$",
- type_line)
- if type_match:
- for i in range(0, int(type_match.group(1))):
- arch_line = file_cmd.stdout.readline()
- arch_match = re.match(
- "^.* \(for architecture (.*)\):\t%s .*$" % macho_types_re,
- arch_line)
- if arch_match:
- archs.append(arch_match.group(1))
-
- if file_cmd.wait() != 0:
- archs = []
-
- if len(archs) == 0:
- print >> sys.stderr, "No architectures in %s" % macho
-
- return archs
-
-# Returns a dictionary mapping architectures contained in the file as returned
-# by macho_archs to the LC_UUID load command for that architecture.
-# Architectures with no LC_UUID load command are omitted from the dictionary.
-# Determines the UUID value by calling otool.
-def macho_uuids(macho):
- uuids = {}
-
- archs = macho_archs(macho)
- if len(archs) == 0:
- return uuids
-
- for arch in archs:
- if arch == "":
- continue
-
- otool_cmd = subprocess.Popen(["/usr/bin/otool", "-arch", arch, "-l", "-",
- macho],
- stdout=subprocess.PIPE)
- # state 0 is when nothing UUID-related has been seen yet. State 1 is
- # entered after a load command begins, but it may not be an LC_UUID load
- # command. States 2, 3, and 4 are intermediate states while reading an
- # LC_UUID command. State 5 is the terminal state for a successful LC_UUID
- # read. State 6 is the error state.
- state = 0
- uuid = ""
- for otool_line in otool_cmd.stdout:
- if state == 0:
- if re.match("^Load command .*$", otool_line):
- state = 1
- elif state == 1:
- if re.match("^ cmd LC_UUID$", otool_line):
- state = 2
- else:
- state = 0
- elif state == 2:
- if re.match("^ cmdsize 24$", otool_line):
- state = 3
- else:
- state = 6
- elif state == 3:
- # The UUID display format changed in the version of otool shipping
- # with the Xcode 3.2.2 prerelease. The new format is traditional:
- # uuid 4D7135B2-9C56-C5F5-5F49-A994258E0955
- # and with Xcode 3.2.6, then line is indented one more space:
- # uuid 4D7135B2-9C56-C5F5-5F49-A994258E0955
- # The old format, from cctools-750 and older's otool, breaks the UUID
- # up into a sequence of bytes:
- # uuid 0x4d 0x71 0x35 0xb2 0x9c 0x56 0xc5 0xf5
- # 0x5f 0x49 0xa9 0x94 0x25 0x8e 0x09 0x55
- new_uuid_match = re.match("^ {3,4}uuid (.{8}-.{4}-.{4}-.{4}-.{12})$",
- otool_line)
- if new_uuid_match:
- uuid = new_uuid_match.group(1)
-
- # Skip state 4, there is no second line to read.
- state = 5
- else:
- old_uuid_match = re.match("^ uuid 0x(..) 0x(..) 0x(..) 0x(..) "
- "0x(..) 0x(..) 0x(..) 0x(..)$",
- otool_line)
- if old_uuid_match:
- state = 4
- uuid = old_uuid_match.group(1) + old_uuid_match.group(2) + \
- old_uuid_match.group(3) + old_uuid_match.group(4) + "-" + \
- old_uuid_match.group(5) + old_uuid_match.group(6) + "-" + \
- old_uuid_match.group(7) + old_uuid_match.group(8) + "-"
- else:
- state = 6
- elif state == 4:
- old_uuid_match = re.match("^ 0x(..) 0x(..) 0x(..) 0x(..) "
- "0x(..) 0x(..) 0x(..) 0x(..)$",
- otool_line)
- if old_uuid_match:
- state = 5
- uuid += old_uuid_match.group(1) + old_uuid_match.group(2) + "-" + \
- old_uuid_match.group(3) + old_uuid_match.group(4) + \
- old_uuid_match.group(5) + old_uuid_match.group(6) + \
- old_uuid_match.group(7) + old_uuid_match.group(8)
- else:
- state = 6
-
- if otool_cmd.wait() != 0:
- state = 6
-
- if state == 5:
- uuids[arch] = uuid.upper()
-
- if len(uuids) == 0:
- print >> sys.stderr, "No UUIDs in %s" % macho
-
- return uuids
-
-# Given a path to a Mach-O file and possible information from the environment,
-# determines the desired path to the .dSYM.
-def dsym_path(macho):
- # If building a bundle, the .dSYM should be placed next to the bundle. Use
- # WRAPPER_NAME to make this determination. If called from xcodebuild,
- # WRAPPER_NAME will be set to the name of the bundle.
- dsym = ""
- if "WRAPPER_NAME" in os.environ:
- if "BUILT_PRODUCTS_DIR" in os.environ:
- dsym = os.path.join(os.environ["BUILT_PRODUCTS_DIR"],
- os.environ["WRAPPER_NAME"])
- else:
- dsym = os.environ["WRAPPER_NAME"]
- else:
- dsym = macho
-
- dsym += ".dSYM"
-
- return dsym
-
-# Creates a fake .dSYM bundle at dsym for macho, a Mach-O image with the
-# architectures and UUIDs specified by the uuids map.
-def make_fake_dsym(macho, dsym):
- uuids = macho_uuids(macho)
- if len(uuids) == 0:
- return False
-
- dwarf_dir = os.path.join(dsym, "Contents", "Resources", "DWARF")
- dwarf_file = os.path.join(dwarf_dir, os.path.basename(macho))
- try:
- os.makedirs(dwarf_dir)
- except OSError, (err, error_string):
- if err != errno.EEXIST:
- raise
- shutil.copyfile(macho, dwarf_file)
-
- # info_template is the same as what dsymutil would have written, with the
- # addition of the fake_dsym key.
- info_template = \
-'''<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
- <dict>
- <key>CFBundleDevelopmentRegion</key>
- <string>English</string>
- <key>CFBundleIdentifier</key>
- <string>com.apple.xcode.dsym.%(root_name)s</string>
- <key>CFBundleInfoDictionaryVersion</key>
- <string>6.0</string>
- <key>CFBundlePackageType</key>
- <string>dSYM</string>
- <key>CFBundleSignature</key>
- <string>????</string>
- <key>CFBundleShortVersionString</key>
- <string>1.0</string>
- <key>CFBundleVersion</key>
- <string>1</string>
- <key>dSYM_UUID</key>
- <dict>
-%(uuid_dict)s </dict>
- <key>fake_dsym</key>
- <true/>
- </dict>
-</plist>
-'''
-
- root_name = os.path.basename(dsym)[:-5] # whatever.dSYM without .dSYM
- uuid_dict = ""
- for arch in sorted(uuids):
- uuid_dict += "\t\t\t<key>" + arch + "</key>\n"\
- "\t\t\t<string>" + uuids[arch] + "</string>\n"
- info_dict = {
- "root_name": root_name,
- "uuid_dict": uuid_dict,
- }
- info_contents = info_template % info_dict
- info_file = os.path.join(dsym, "Contents", "Info.plist")
- info_fd = open(info_file, "w")
- info_fd.write(info_contents)
- info_fd.close()
-
- return True
-
-# For a Mach-O file, determines where the .dSYM bundle should be located. If
-# the bundle does not exist or has a modification time older than the Mach-O
-# file, calls make_fake_dsym to create a fake .dSYM bundle there, then strips
-# the Mach-O file and sets the modification time on the .dSYM bundle and Mach-O
-# file to be identical.
-def strip_and_make_fake_dsym(macho):
- dsym = dsym_path(macho)
- macho_stat = os.stat(macho)
- dsym_stat = None
- try:
- dsym_stat = os.stat(dsym)
- except OSError, (err, error_string):
- if err != errno.ENOENT:
- raise
-
- if dsym_stat is None or dsym_stat.st_mtime < macho_stat.st_mtime:
- # Make a .dSYM bundle
- if not make_fake_dsym(macho, dsym):
- return False
-
- # Strip the Mach-O file
- remove_dsym = True
- try:
- strip_cmdline = ['xcrun', 'strip'] + sys.argv[1:]
- strip_cmd = subprocess.Popen(strip_cmdline)
- if strip_cmd.wait() == 0:
- remove_dsym = False
- finally:
- if remove_dsym:
- shutil.rmtree(dsym)
-
- # Update modification time on the Mach-O file and .dSYM bundle
- now = time.time()
- os.utime(macho, (now, now))
- os.utime(dsym, (now, now))
-
- return True
-
-def main(argv=None):
- if argv is None:
- argv = sys.argv
-
- # This only supports operating on one file at a time. Look at the arguments
- # to strip to figure out what the source to be stripped is. Arguments are
- # processed in the same way that strip does, although to reduce complexity,
- # this doesn't do all of the same checking as strip. For example, strip
- # has no -Z switch and would treat -Z on the command line as an error. For
- # the purposes this is needed for, that's fine.
- macho = None
- process_switches = True
- ignore_argument = False
- for arg in argv[1:]:
- if ignore_argument:
- ignore_argument = False
- continue
- if process_switches:
- if arg == "-":
- process_switches = False
- # strip has these switches accept an argument:
- if arg in ["-s", "-R", "-d", "-o", "-arch"]:
- ignore_argument = True
- if arg[0] == "-":
- continue
- if macho is None:
- macho = arg
- else:
- print >> sys.stderr, "Too many things to strip"
- return 1
-
- if macho is None:
- print >> sys.stderr, "Nothing to strip"
- return 1
-
- if not strip_and_make_fake_dsym(macho):
- return 1
-
- return 0
-
-if __name__ == "__main__":
- sys.exit(main(sys.argv))
diff --git a/chromium/build/sample_arg_file.gn b/chromium/build/sample_arg_file.gn
new file mode 100644
index 00000000000..91e90456e82
--- /dev/null
+++ b/chromium/build/sample_arg_file.gn
@@ -0,0 +1,6 @@
+# Build arguments go here. Here are some of the most commonly set ones.
+# Run `gn args <out_dir> --list` for the full list.
+# is_component_build = true
+# is_debug = true
+# symbol_level = 2
+# use_goma = false
diff --git a/chromium/build/sanitizers/lsan_suppressions.cc b/chromium/build/sanitizers/lsan_suppressions.cc
index 27a2426ad72..4b75528ad79 100644
--- a/chromium/build/sanitizers/lsan_suppressions.cc
+++ b/chromium/build/sanitizers/lsan_suppressions.cc
@@ -14,83 +14,85 @@
// newlines. See http://dev.chromium.org/developers/testing/leaksanitizer
// for the instructions on writing suppressions.
char kLSanDefaultSuppressions[] =
-// Intentional leak used as sanity test for Valgrind/memcheck.
-"leak:base::ToolsSanityTest_MemoryLeak_Test::TestBody\n"
-
-// ================ Leaks in third-party code ================
-
-// False positives in libfontconfig. http://crbug.com/39050
-"leak:libfontconfig\n"
-// eglibc-2.19/string/strdup.c creates false positive leak errors because of the
-// same reason as crbug.com/39050. The leak error stack trace, when unwind on
-// malloc, includes a call to libfontconfig. But the default stack trace is too
-// short in leak sanitizer bot to make the libfontconfig suppression works.
-// http://crbug.com/605286
-"leak:__strdup\n"
-
-// Leaks in Nvidia's libGL.
-"leak:libGL.so\n"
-
-// TODO(eugenis): revisit NSS suppressions after the switch to BoringSSL
-// NSS leaks in CertDatabaseNSSTest tests. http://crbug.com/51988
-"leak:net::NSSCertDatabase::ImportFromPKCS12\n"
-"leak:net::NSSCertDatabase::ListCerts\n"
-"leak:net::NSSCertDatabase::DeleteCertAndKey\n"
-"leak:crypto::ScopedTestNSSDB::ScopedTestNSSDB\n"
-// Another leak due to not shutting down NSS properly. http://crbug.com/124445
-"leak:error_get_my_stack\n"
-// The NSS suppressions above will not fire when the fast stack unwinder is
-// used, because it can't unwind through NSS libraries. Apply blanket
-// suppressions for now.
-"leak:libnssutil3\n"
-"leak:libnspr4\n"
-"leak:libnss3\n"
-"leak:libplds4\n"
-"leak:libnssckbi\n"
-
-// XRandR has several one time leaks.
-"leak:libxrandr\n"
-
-// xrandr leak. http://crbug.com/119677
-"leak:XRRFindDisplay\n"
-
-// http://crbug.com/431213, http://crbug.com/416665
-"leak:gin/object_template_builder.h\n"
-
-// Leaks in swrast_dri.so. http://crbug.com/540042
-"leak:swrast_dri.so\n"
-
-// Leak in glibc's gconv caused by fopen(..., "r,ccs=UNICODE")
-"leak:__gconv_lookup_cache\n"
-
-// ================ Leaks in Chromium code ================
-// PLEASE DO NOT ADD SUPPRESSIONS FOR NEW LEAKS.
-// Instead, commits that introduce memory leaks should be reverted. Suppressing
-// the leak is acceptable in some cases when reverting is impossible, i.e. when
-// enabling leak detection for the first time for a test target with
-// pre-existing leaks.
-
-// Small test-only leak in ppapi_unittests. http://crbug.com/258113
-"leak:ppapi::proxy::PPP_Instance_Private_ProxyTest_PPPInstancePrivate_Test\n"
-
-// http://crbug.com/322671
-"leak:content::SpeechRecognitionBrowserTest::SetUpOnMainThread\n"
-
-// http://crbug.com/355641
-"leak:TrayAccessibilityTest\n"
-
-// http://crbug.com/354644
-"leak:CertificateViewerUITest::ShowModalCertificateViewer\n"
-
-// http://crbug.com/356306
-"leak:content::SetProcessTitleFromCommandLine\n"
-
-// http://crbug.com/601435
-"leak:mojo/edk/js/handle.h\n"
-
-// PLEASE READ ABOVE BEFORE ADDING NEW SUPPRESSIONS.
-
-// End of suppressions.
-; // Please keep this semicolon.
+ // Intentional leak used as sanity test for Valgrind/memcheck.
+ "leak:base::ToolsSanityTest_MemoryLeak_Test::TestBody\n"
+
+ // ================ Leaks in third-party code ================
+
+ // False positives in libfontconfig. http://crbug.com/39050
+ "leak:libfontconfig\n"
+    // eglibc-2.19/string/strdup.c creates false-positive leak reports for the
+    // same reason as crbug.com/39050. When the leak's stack trace is unwound
+    // on malloc it includes a call to libfontconfig, but the default stack
+    // trace on the LeakSanitizer bot is too short for the libfontconfig
+    // suppression to fire. http://crbug.com/605286
+ "leak:__strdup\n"
+
+ // Leaks in Nvidia's libGL.
+ "leak:libGL.so\n"
+
+ // TODO(eugenis): revisit NSS suppressions after the switch to BoringSSL
+ // NSS leaks in CertDatabaseNSSTest tests. http://crbug.com/51988
+ "leak:net::NSSCertDatabase::ImportFromPKCS12\n"
+ "leak:net::NSSCertDatabase::ListCerts\n"
+ "leak:net::NSSCertDatabase::DeleteCertAndKey\n"
+ "leak:crypto::ScopedTestNSSDB::ScopedTestNSSDB\n"
+ // Another leak due to not shutting down NSS properly.
+ // http://crbug.com/124445
+ "leak:error_get_my_stack\n"
+ // The NSS suppressions above will not fire when the fast stack unwinder is
+ // used, because it can't unwind through NSS libraries. Apply blanket
+ // suppressions for now.
+ "leak:libnssutil3\n"
+ "leak:libnspr4\n"
+ "leak:libnss3\n"
+ "leak:libplds4\n"
+ "leak:libnssckbi\n"
+
+ // XRandR has several one time leaks.
+ "leak:libxrandr\n"
+
+ // xrandr leak. http://crbug.com/119677
+ "leak:XRRFindDisplay\n"
+
+ // http://crbug.com/431213, http://crbug.com/416665
+ "leak:gin/object_template_builder.h\n"
+
+ // Leaks in swrast_dri.so. http://crbug.com/540042
+ "leak:swrast_dri.so\n"
+
+ // Leak in glibc's gconv caused by fopen(..., "r,ccs=UNICODE")
+ "leak:__gconv_lookup_cache\n"
+
+ // ================ Leaks in Chromium code ================
+ // PLEASE DO NOT ADD SUPPRESSIONS FOR NEW LEAKS.
+ // Instead, commits that introduce memory leaks should be reverted.
+ // Suppressing the leak is acceptable in some cases when reverting is
+ // impossible, i.e. when enabling leak detection for the first time for a
+ // test target with pre-existing leaks.
+
+ // Small test-only leak in ppapi_unittests. http://crbug.com/258113
+ "leak:ppapi::proxy::PPP_Instance_Private_ProxyTest_PPPInstancePrivate_"
+ "Test\n"
+
+ // http://crbug.com/322671
+ "leak:content::SpeechRecognitionBrowserTest::SetUpOnMainThread\n"
+
+ // http://crbug.com/355641
+ "leak:TrayAccessibilityTest\n"
+
+ // http://crbug.com/354644
+ "leak:CertificateViewerUITest::ShowModalCertificateViewer\n"
+
+ // http://crbug.com/356306
+ "leak:service_manager::SetProcessTitleFromCommandLine\n"
+
+ // http://crbug.com/601435
+ "leak:mojo/edk/js/handle.h\n"
+
+ // PLEASE READ ABOVE BEFORE ADDING NEW SUPPRESSIONS.
+
+ // End of suppressions.
+ ; // Please keep this semicolon.
#endif // LEAK_SANITIZER
diff --git a/chromium/build/sanitizers/sanitizer_options.cc b/chromium/build/sanitizers/sanitizer_options.cc
index 81958f214de..1f4250111ec 100644
--- a/chromium/build/sanitizers/sanitizer_options.cc
+++ b/chromium/build/sanitizers/sanitizer_options.cc
@@ -66,13 +66,15 @@ void _sanitizer_options_link_helper() { }
const char kAsanDefaultOptions[] =
"legacy_pthread_cond=1 malloc_context_size=5 "
"symbolize=1 check_printf=1 use_sigaltstack=1 detect_leaks=0 "
- "strip_path_prefix=/../../ fast_unwind_on_fatal=1";
+ "strip_path_prefix=/../../ fast_unwind_on_fatal=1 "
+ "allow_user_segv_handler=1 ";
#else
// Default AddressSanitizer options for buildbots and non-official builds.
-const char *kAsanDefaultOptions =
+const char* kAsanDefaultOptions =
"symbolize=1 check_printf=1 use_sigaltstack=1 "
"detect_leaks=0 strip_path_prefix=/../../ fast_unwind_on_fatal=1 "
- "detect_stack_use_after_return=1 ";
+ "detect_stack_use_after_return=1 "
+ "allow_user_segv_handler=1 ";
#endif // GOOGLE_CHROME_BUILD
#elif defined(OS_MACOSX)
diff --git a/chromium/build/sanitizers/tsan_suppressions.cc b/chromium/build/sanitizers/tsan_suppressions.cc
index 9523b3cc4f7..74b802aef56 100644
--- a/chromium/build/sanitizers/tsan_suppressions.cc
+++ b/chromium/build/sanitizers/tsan_suppressions.cc
@@ -29,11 +29,6 @@ char kTSanDefaultSuppressions[] =
// Races in libevent, http://crbug.com/23244.
"race:libevent/event.c\n"
-// http://crbug.com/46840.
-"race:base::HistogramSamples::IncreaseSum\n"
-"race:base::Histogram::Add\n"
-"race:base::HistogramSamples::Add\n"
-
// http://crbug.com/84094.
"race:sqlite3StatusSet\n"
"race:pcache1EnforceMaxPage\n"
@@ -125,12 +120,6 @@ char kTSanDefaultSuppressions[] =
// http://crbug.com/272095
"race:base::g_top_manager\n"
-// http://crbug.com/280466
-"race:content::WebRtcAudioCapturer::SetCapturerSource\n"
-
-// http://crbug.com/285242
-"race:media::PulseAudioOutputStream::SetVolume\n"
-
// http://crbug.com/308590
"race:CustomThreadWatcher::~CustomThreadWatcher\n"
@@ -177,22 +166,12 @@ char kTSanDefaultSuppressions[] =
"race:content::"
"RendererWebKitPlatformSupportImpl::~RendererWebKitPlatformSupportImpl\n"
-// http://crbug.com/345618
-"race:WebCore::AudioDestinationNode::render\n"
-
-// http://crbug.com/345624
-"race:media::DataSource::set_host\n"
-
// http://crbug.com/347534
"race:v8::internal::V8::TearDown\n"
// http://crbug.com/347538
"race:sctp_timer_start\n"
-// http://crbug.com/347548
-"race:cricket::WebRtcVideoMediaChannel::MaybeResetVieSendCodec\n"
-"race:cricket::WebRtcVideoMediaChannel::SetSendCodec\n"
-
// http://crbug.com/347553
"race:blink::WebString::reset\n"
@@ -235,10 +214,6 @@ char kTSanDefaultSuppressions[] =
// http://crbug.com/490856
"deadlock:content::TracingControllerImpl::SetEnabledOnFileThread\n"
-// http://crbug.com/417193
-// Suppressing both AudioContext.{cpp,h}.
-"race:modules/webaudio/AudioContext\n"
-
// https://code.google.com/p/skia/issues/detail?id=3294
"race:SkBaseMutex::acquire\n"
@@ -264,12 +239,16 @@ char kTSanDefaultSuppressions[] =
// http://crbug.com/633145
"race:third_party/libjpeg_turbo/simd/jsimd_x86_64.c\n"
-// http://crbug.com/638583
-"race:webrtc/modules/audio_processing/aec/aec_rdft.cc\n"
-
// http://crbug.com/587199
"race:base::TimerTest_OneShotTimer_CustomTaskRunner_Test::TestBody\n"
+// http://crbug.com/v8/6065
+"race:net::(anonymous namespace)::ProxyResolverV8TracingImpl::RequestImpl"
+"::~RequestImpl()\n"
+
+// http://crbug.com/691029
+"deadlock:libGLX.so*\n"
+
// End of suppressions.
; // Please keep this semicolon.
diff --git a/chromium/build/secondary/third_party/android_tools/BUILD.gn b/chromium/build/secondary/third_party/android_tools/BUILD.gn
index cbd58b2cf48..3161db12d48 100644
--- a/chromium/build/secondary/third_party/android_tools/BUILD.gn
+++ b/chromium/build/secondary/third_party/android_tools/BUILD.gn
@@ -50,9 +50,15 @@ android_aar_prebuilt("android_support_design_java") {
aar_path = "$lib_path/$_lib_name/$lib_version/$_lib_name-$lib_version.aar"
}
+android_aar_prebuilt("android_support_transition_java") {
+ deps = [
+ ":android_support_v7_appcompat_java",
+ ]
+ _lib_name = "transition"
+ aar_path = "$lib_path/$_lib_name/$lib_version/$_lib_name-$lib_version.aar"
+}
+
android_aar_prebuilt("android_support_multidex_java") {
- # TODO(jbudorick): remove requires_android after crbug.com/522043 is fixed.
- requires_android = false
aar_path = "$lib_path/multidex/1.0.1/multidex-1.0.1.aar"
}
@@ -149,6 +155,49 @@ android_aar_prebuilt("android_support_v7_recyclerview_java") {
aar_path = "$lib_path/$_lib_name/$lib_version/$_lib_name-$lib_version.aar"
}
+android_aar_prebuilt("android_support_v7_preference_java") {
+ deps = [
+ ":android_support_v7_appcompat_java",
+ ]
+ _lib_name = "preference-v7"
+ aar_path = "$lib_path/$_lib_name/$lib_version/$_lib_name-$lib_version.aar"
+}
+
+android_aar_prebuilt("android_support_v14_preference_java") {
+ deps = [
+ ":android_support_v7_preference_java",
+ ]
+ _lib_name = "preference-v14"
+ aar_path = "$lib_path/$_lib_name/$lib_version/$_lib_name-$lib_version.aar"
+}
+
+android_aar_prebuilt("android_support_v17_leanback_java") {
+ deps = [
+ ":android_support_v4_java",
+ ":android_support_v7_recyclerview_java",
+ ]
+ _lib_name = "leanback-v17"
+ aar_path = "$lib_path/$_lib_name/$lib_version/$_lib_name-$lib_version.aar"
+}
+
+android_aar_prebuilt("android_support_v17_preference_java") {
+ deps = [
+ ":android_support_v14_preference_java",
+ ":android_support_v17_leanback_java",
+ ":android_support_v4_java",
+ ":android_support_v7_appcompat_java",
+ ":android_support_v7_preference_java",
+ ":android_support_v7_recyclerview_java",
+ ]
+ _lib_name = "preference-leanback-v17"
+ aar_path = "$lib_path/$_lib_name/$lib_version/$_lib_name-$lib_version.aar"
+}
+
+android_library("android_support_chromium_java") {
+ testonly = true
+ java_files = [ "$android_sdk_root/extras/chromium/support/src/org/chromium/android/support/PackageManagerWrapper.java" ]
+}
+
# TODO(dgn): Remove this once no other target has a dependency on it
java_group("google_play_services_default_resources") {
deps = []
@@ -165,25 +214,12 @@ java_group("google_play_services_default_java") {
":google_play_services_cast_java",
":google_play_services_gcm_java",
":google_play_services_iid_java",
+ ":google_play_services_location_java",
":google_play_services_nearby_java",
":google_play_services_vision_java",
]
}
-android_aar_prebuilt("android_support_v17_leanback_java") {
- deps = [
- ":android_support_v7_recyclerview_java",
- ]
- _lib_name = "leanback-v17"
- aar_path = "$lib_path/$_lib_name/$lib_version/$_lib_name-$lib_version.aar"
-}
-
-# TODO(jbudorick): Remove this once net_java_test_support no longer needs it.
-android_java_prebuilt("legacy_http_javalib") {
- testonly = true
- jar_path = "$android_sdk/optional/org.apache.http.legacy.jar"
-}
-
# TODO(dgn): Use the POM files instead of hardcoding the dependencies.
gms_path = "$default_extras_android_sdk_root/extras/google/m2repository/com/google/android/gms"
gms_version = "10.2.0"
@@ -270,6 +306,16 @@ android_aar_prebuilt("google_play_services_gcm_java") {
ignore_manifest = true
}
+android_aar_prebuilt("google_play_services_location_java") {
+ deps = [
+ ":google_play_services_base_java",
+ ":google_play_services_basement_java",
+ ]
+ _lib_name = "play-services-location"
+ aar_path = "$gms_path/$_lib_name/$gms_version/$_lib_name-$gms_version.aar"
+ ignore_manifest = true
+}
+
android_aar_prebuilt("google_play_services_nearby_java") {
deps = [
":google_play_services_base_java",
diff --git a/chromium/build/secondary/third_party/crashpad/crashpad/handler/BUILD.gn b/chromium/build/secondary/third_party/crashpad/crashpad/handler/BUILD.gn
index d2ce8b91774..c4ca09c266e 100644
--- a/chromium/build/secondary/third_party/crashpad/crashpad/handler/BUILD.gn
+++ b/chromium/build/secondary/third_party/crashpad/crashpad/handler/BUILD.gn
@@ -14,6 +14,8 @@ static_library("handler_lib") {
"mac/exception_handler_server.h",
"prune_crash_reports_thread.cc",
"prune_crash_reports_thread.h",
+ "user_stream_data_source.cc",
+ "user_stream_data_source.h",
"win/crash_report_exception_handler.cc",
"win/crash_report_exception_handler.h",
]
diff --git a/chromium/build/secondary/third_party/crashpad/crashpad/minidump/BUILD.gn b/chromium/build/secondary/third_party/crashpad/crashpad/minidump/BUILD.gn
index d18fd71c30c..ebc8f3bb522 100644
--- a/chromium/build/secondary/third_party/crashpad/crashpad/minidump/BUILD.gn
+++ b/chromium/build/secondary/third_party/crashpad/crashpad/minidump/BUILD.gn
@@ -59,6 +59,8 @@ static_library("minidump") {
"minidump_thread_writer.h",
"minidump_unloaded_module_writer.cc",
"minidump_unloaded_module_writer.h",
+ "minidump_user_extension_stream_data_source.cc",
+ "minidump_user_extension_stream_data_source.h",
"minidump_user_stream_writer.cc",
"minidump_user_stream_writer.h",
"minidump_writable.cc",
diff --git a/chromium/build/secondary/third_party/crashpad/crashpad/util/BUILD.gn b/chromium/build/secondary/third_party/crashpad/crashpad/util/BUILD.gn
index 63033d7aee6..7695e18aeed 100644
--- a/chromium/build/secondary/third_party/crashpad/crashpad/util/BUILD.gn
+++ b/chromium/build/secondary/third_party/crashpad/crashpad/util/BUILD.gn
@@ -49,6 +49,8 @@ if (is_mac) {
static_library("util") {
sources = [
+ "file/delimited_file_reader.cc",
+ "file/delimited_file_reader.h",
"file/file_io.cc",
"file/file_io.h",
"file/file_io_posix.cc",
@@ -82,6 +84,9 @@ static_library("util") {
"misc/initialization_state_dcheck.h",
"misc/metrics.cc",
"misc/metrics.h",
+ "misc/paths.h",
+ "misc/paths_mac.cc",
+ "misc/paths_win.cc",
"misc/pdb_structures.cc",
"misc/pdb_structures.h",
"misc/random_string.cc",
@@ -120,6 +125,12 @@ static_library("util") {
"posix/drop_privileges.h",
"posix/process_info.h",
"posix/process_info_mac.cc",
+ "posix/scoped_dir.cc",
+ "posix/scoped_dir.h",
+ "posix/scoped_mmap.cc",
+ "posix/scoped_mmap.h",
+ "posix/signals.cc",
+ "posix/signals.h",
"posix/symbolic_constants_posix.cc",
"posix/symbolic_constants_posix.h",
"stdlib/aligned_allocator.cc",
diff --git a/chromium/build/toolchain/android/BUILD.gn b/chromium/build/toolchain/android/BUILD.gn
index ad7b76278aa..9b3b162ca6d 100644
--- a/chromium/build/toolchain/android/BUILD.gn
+++ b/chromium/build/toolchain/android/BUILD.gn
@@ -25,6 +25,9 @@ template("android_gcc_toolchain") {
toolchain_args = invoker.toolchain_args
toolchain_args.current_os = "android"
+ # Output linker map files for binary size analysis.
+ enable_linker_map = true
+
# Make our manually injected libs relative to the build dir.
_ndk_lib =
rebase_path(invoker.sysroot + "/" + invoker.lib_dir, root_build_dir)
diff --git a/chromium/build/toolchain/clang_static_analyzer_wrapper.py b/chromium/build/toolchain/clang_static_analyzer_wrapper.py
index cc13888f3c2..0ae62dabf7b 100755
--- a/chromium/build/toolchain/clang_static_analyzer_wrapper.py
+++ b/chromium/build/toolchain/clang_static_analyzer_wrapper.py
@@ -1,61 +1,77 @@
#!/usr/bin/env python
-# Copyright 2016 The Chromium Authors. All rights reserved.
+# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-"""Invokes the Clang static analysis command using arguments provided on the
-command line.
+"""Adds an analysis build step to invocations of the Clang C/C++ compiler.
+
+Usage: clang_static_analyzer_wrapper.py <compiler> [args...]
"""
import argparse
import fnmatch
+import itertools
import os
-import shutil
import sys
-import tempfile
-
import wrapper_utils
+# Flags used to enable analysis for Clang invocations.
+analyzer_enable_flags = [
+ '--analyze',
+]
+
+# Flags used to configure the analyzer's behavior.
+analyzer_option_flags = [
+ '-fdiagnostics-show-option',
+ '-analyzer-checker=cplusplus',
+ '-analyzer-opt-analyze-nested-blocks',
+ '-analyzer-eagerly-assume',
+ '-analyzer-output=text',
+ '-analyzer-config',
+ 'suppress-c++-stdlib=true',
+
+# List of checkers to execute.
+# The full list of checkers can be found at
+# https://clang-analyzer.llvm.org/available_checks.html.
+ '-analyzer-checker=core',
+ '-analyzer-checker=unix',
+ '-analyzer-checker=deadcode',
+]
+
+
+# Prepends every element of a list |args| with |token|.
+# e.g. ['-analyzer-foo', '-analyzer-bar'] => ['-Xanalyzer', '-analyzer-foo',
+# '-Xanalyzer', '-analyzer-bar']
+def interleave_args(args, token):
+ return list(sum(zip([token] * len(args), args), ()))
+
def main():
- parser = argparse.ArgumentParser(description=__doc__)
- parser.add_argument('--clang-cc-path',
- help='Path to the clang compiler.',
- metavar='PATH')
- parser.add_argument('--clang-cxx-path',
- help='Path to the clang++ compiler',
- metavar='PATH')
- parser.add_argument('--analyzer',
- help='Path to the language-specific Clang analysis tool.',
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--mode',
+ choices=['clang', 'cl'],
required=True,
- metavar='PATH')
- args, compile_args = parser.parse_known_args()
-
- # Check that only one of --clang-cc-path or --clang-cxx-path are set.
- assert ((args.clang_cc_path != None) != (args.clang_cxx_path != None))
-
- is_cxx = args.clang_cxx_path != None
- env = os.environ
- env['CCC_ANALYZER_FORCE_ANALYZE_DEBUG_CODE'] = '0'
- env['CCC_ANALYZER_OUTPUT_FORMAT'] = 'text'
- clang_path = args.clang_cxx_path or args.clang_cc_path
- if is_cxx:
- env['CCC_CXX'] = clang_path
- env['CLANG_CXX'] = clang_path
- else:
- env['CCC_CC'] = clang_path
- env['CLANG'] = clang_path
-
- # TODO(kmarshall): Place the summarized output in a useful directory.
- temp_dir = tempfile.mkdtemp()
- try:
- env['CCC_ANALYZER_HTML'] = temp_dir
- returncode, stderr = wrapper_utils.CaptureCommandStderr(
- wrapper_utils.CommandToRun([args.analyzer] + compile_args), env)
- sys.stderr.write(stderr)
- return returncode
- finally:
- shutil.rmtree(temp_dir)
-
-if __name__ == "__main__":
+ help='Specifies the compiler argument convention to use.')
+ parser.add_argument('args', nargs=argparse.REMAINDER)
+ parsed_args = parser.parse_args()
+
+ prefix = '-Xclang' if parsed_args.mode == 'cl' else '-Xanalyzer'
+ cmd = parsed_args.args + analyzer_enable_flags + \
+ interleave_args(analyzer_option_flags, prefix)
+ returncode, stderr = wrapper_utils.CaptureCommandStderr(
+ wrapper_utils.CommandToRun(cmd))
+ sys.stderr.write(stderr)
+ if returncode != 0:
+ sys.stderr.write(
+ """WARNING! The Clang static analyzer exited with error code %d.
+ Please share the error details in crbug.com/695243 if this looks like
+ a new regression.\n""" % (returncode))
+
+ returncode, stderr = wrapper_utils.CaptureCommandStderr(
+ wrapper_utils.CommandToRun(parsed_args.args))
+ sys.stderr.write(stderr)
+
+ return returncode
+
+if __name__ == '__main__':
sys.exit(main())
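
The rewritten wrapper drives the analyzer by prefixing every analyzer option with -Xanalyzer (or -Xclang under the cl driver convention) and then re-running the unmodified compile, so an analysis failure only prints a warning. A minimal sketch of the interleaving step, with illustrative flag values:

  def interleave_args(args, token):
    # zip() pairs |token| with each flag; sum(..., ()) flattens the pairs.
    return list(sum(zip([token] * len(args), args), ()))

  flags = ['-analyzer-output=text', '-analyzer-checker=core']
  print(interleave_args(flags, '-Xanalyzer'))
  # ['-Xanalyzer', '-analyzer-output=text', '-Xanalyzer', '-analyzer-checker=core']
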
diff --git a/chromium/build/toolchain/gcc_link_wrapper.py b/chromium/build/toolchain/gcc_link_wrapper.py
index c589fe330d4..0e256fab2fb 100755
--- a/chromium/build/toolchain/gcc_link_wrapper.py
+++ b/chromium/build/toolchain/gcc_link_wrapper.py
@@ -15,6 +15,8 @@ import os
import subprocess
import sys
+import wrapper_utils
+
# When running on a Windows host and using a toolchain whose tools are
# actually wrapper scripts (i.e. .bat files on Windows) rather than binary
@@ -37,9 +39,12 @@ def main():
help='The strip binary to run',
metavar='PATH')
parser.add_argument('--unstripped-file',
- required=True,
help='Executable file produced by linking command',
metavar='FILE')
+ parser.add_argument('--map-file',
+                      help=('Use -Wl,-Map to generate a map file. Will be '
+ 'gzipped if extension ends with .gz'),
+ metavar='FILE')
parser.add_argument('--output',
required=True,
help='Final output executable file',
@@ -51,7 +56,8 @@ def main():
# Work-around for gold being slow-by-default. http://crbug.com/632230
fast_env = dict(os.environ)
fast_env['LC_ALL'] = 'C'
- result = subprocess.call(CommandToRun(args.command), env=fast_env)
+ result = wrapper_utils.RunLinkWithOptionalMapFile(args.command, env=fast_env,
+ map_file=args.map_file)
if result != 0:
return result
diff --git a/chromium/build/toolchain/gcc_solink_wrapper.py b/chromium/build/toolchain/gcc_solink_wrapper.py
index 426f9d66332..7efc4906e13 100755
--- a/chromium/build/toolchain/gcc_solink_wrapper.py
+++ b/chromium/build/toolchain/gcc_solink_wrapper.py
@@ -78,6 +78,10 @@ def main():
required=True,
help='Output table-of-contents file',
metavar='FILE')
+ parser.add_argument('--map-file',
+                      help=('Use -Wl,-Map to generate a map file. Will be '
+ 'gzipped if extension ends with .gz'),
+ metavar='FILE')
parser.add_argument('--output',
required=True,
help='Final output shared object file',
@@ -99,8 +103,10 @@ def main():
whitelist_candidates, args.resource_whitelist)
# First, run the actual link.
- result = subprocess.call(
- wrapper_utils.CommandToRun(args.command), env=fast_env)
+ command = wrapper_utils.CommandToRun(args.command)
+ result = wrapper_utils.RunLinkWithOptionalMapFile(command, env=fast_env,
+ map_file=args.map_file)
+
if result != 0:
return result
diff --git a/chromium/build/toolchain/gcc_toolchain.gni b/chromium/build/toolchain/gcc_toolchain.gni
index b31980668d8..dfd187954cd 100644
--- a/chromium/build/toolchain/gcc_toolchain.gni
+++ b/chromium/build/toolchain/gcc_toolchain.gni
@@ -12,6 +12,13 @@ import("//build/toolchain/clang_static_analyzer.gni")
import("//build/toolchain/goma.gni")
import("//build/toolchain/toolchain.gni")
+# Path to the Clang static analysis wrapper script.
+# REVIEWERS: can you suggest a better location for this?
+# GN is really picky about dead stores of variables except at the global scope.
+analyzer_wrapper =
+ rebase_path("//build/toolchain/clang_static_analyzer_wrapper.py",
+ root_build_dir) + " --mode=clang"
+
# This template defines a toolchain for something that works like gcc
# (including clang).
#
@@ -126,22 +133,49 @@ template("gcc_toolchain") {
} else {
toolchain_cc_wrapper = cc_wrapper
}
+ assert(!(toolchain_cc_wrapper != "" && toolchain_uses_goma),
+ "Goma and cc_wrapper can't be used together.")
- # Compute the compiler prefix.
+ # When the invoker has explicitly overridden use_goma or cc_wrapper in the
+ # toolchain args, use those values, otherwise default to the global one.
+ # This works because the only reasonable override that toolchains might
+ # supply for these values are to force-disable them.
if (toolchain_uses_goma) {
- assert(toolchain_cc_wrapper == "",
- "Goma and cc_wrapper can't be used together.")
- compiler_prefix = "$goma_dir/gomacc "
- } else if (toolchain_cc_wrapper != "") {
- compiler_prefix = toolchain_cc_wrapper + " "
+ goma_path = "$goma_dir/gomacc"
+
+ # Use the static analysis script if static analysis is turned on
+ # AND the tool has not opted out by setting
+    # 'is_clang_analysis_supported' to false.
+ if (is_clang && use_clang_static_analyzer &&
+ (!defined(invoker.is_clang_analysis_supported) ||
+ invoker.is_clang_analysis_supported)) {
+ compiler_prefix = "${analyzer_wrapper} ${goma_path} "
+
+ # Create a distinct variable for "asm", since analysis runs pass
+ # a bunch of flags to clang/clang++ that are nonsensical on assembler
+ # runs.
+ asm = "${goma_path} ${invoker.cc}"
+ } else {
+ compiler_prefix = "${goma_path} "
+ }
} else {
- compiler_prefix = ""
+ if (is_clang && use_clang_static_analyzer &&
+ (!defined(invoker.is_clang_analysis_supported) ||
+ invoker.is_clang_analysis_supported)) {
+ compiler_prefix = "${analyzer_wrapper} "
+ asm = invoker.cc
+ } else {
+ compiler_prefix = "${toolchain_cc_wrapper} "
+ }
}
cc = compiler_prefix + invoker.cc
cxx = compiler_prefix + invoker.cxx
ar = invoker.ar
ld = invoker.ld
+ if (!defined(asm)) {
+ asm = cc
+ }
if (defined(invoker.readelf)) {
readelf = invoker.readelf
} else {
@@ -214,6 +248,9 @@ template("gcc_toolchain") {
extra_ldflags = ""
}
+ enable_linker_map =
+ defined(invoker.enable_linker_map) && invoker.enable_linker_map
+
# These library switches can apply to all tools below.
lib_switch = "-l"
lib_dir_switch = "-L"
@@ -260,7 +297,7 @@ template("gcc_toolchain") {
tool("asm") {
# For GCC we can just use the C compiler to compile assembly.
depfile = "{{output}}.d"
- command = "$cc -MMD -MF $depfile ${rebuild_string}{{defines}} {{include_dirs}} {{asmflags}} -c {{source}} -o {{output}}"
+ command = "$asm -MMD -MF $depfile ${rebuild_string}{{defines}} {{include_dirs}} {{asmflags}} -c {{source}} -o {{output}}"
depsformat = "gcc"
description = "ASM {{output}}"
outputs = [
@@ -319,18 +356,27 @@ template("gcc_toolchain") {
link_command = "$ld -shared {{ldflags}}${extra_ldflags} -o \"$unstripped_sofile\" -Wl,-soname=\"$soname\" @\"$rspfile\""
+ # Generate a map file to be used for binary size analysis.
+ # Map file adds ~10% to the link time on a z620.
+ # With target_os="android", libchrome.so.map.gz is ~20MB.
+ map_switch = ""
+ if (enable_linker_map && is_official_build) {
+ map_file = "$unstripped_sofile.map.gz"
+ map_switch = " --map-file \"$map_file\""
+ }
+
assert(defined(readelf), "to solink you must have a readelf")
assert(defined(nm), "to solink you must have an nm")
strip_switch = ""
if (defined(invoker.strip)) {
- strip_switch = "--strip=${invoker.strip}"
+ strip_switch = "--strip=${invoker.strip} "
}
# This needs a Python script to avoid using a complex shell command
# requiring sh control structures, pipelines, and POSIX utilities.
# The host might not have a POSIX shell and utilities (e.g. Windows).
solink_wrapper = rebase_path("//build/toolchain/gcc_solink_wrapper.py")
- command = "$python_path \"$solink_wrapper\" --readelf=\"$readelf\" --nm=\"$nm\" $strip_switch --sofile=\"$unstripped_sofile\" --tocfile=\"$tocfile\" --output=\"$sofile\"$whitelist_flag -- $link_command"
+ command = "$python_path \"$solink_wrapper\" --readelf=\"$readelf\" --nm=\"$nm\" $strip_switch--sofile=\"$unstripped_sofile\" --tocfile=\"$tocfile\"$map_switch --output=\"$sofile\"$whitelist_flag -- $link_command"
rspfile_content = "-Wl,--whole-archive {{inputs}} {{solibs}} -Wl,--no-whole-archive $solink_libs_section_prefix {{libs}} $solink_libs_section_postfix"
@@ -365,6 +411,9 @@ template("gcc_toolchain") {
if (sofile != unstripped_sofile) {
outputs += [ unstripped_sofile ]
}
+ if (defined(map_file)) {
+ outputs += [ map_file ]
+ }
link_output = sofile
depend_output = tocfile
}
@@ -433,12 +482,25 @@ template("gcc_toolchain") {
unstripped_outfile = "{{root_out_dir}}/exe.unstripped/$exename"
}
- command = "$ld {{ldflags}}${extra_ldflags} -o \"$unstripped_outfile\" -Wl,--start-group @\"$rspfile\" {{solibs}} -Wl,--end-group $libs_section_prefix {{libs}} $libs_section_postfix"
+ # Generate a map file to be used for binary size analysis.
+ # Map file adds ~10% to the link time on a z620.
+ # With target_os="android", libchrome.so.map.gz is ~20MB.
+ map_switch = ""
+ if (enable_linker_map && is_official_build) {
+ map_file = "$unstripped_outfile.map.gz"
+ map_switch = " --map-file \"$map_file\""
+ }
+
+ link_command = "$ld {{ldflags}}${extra_ldflags} -o \"$unstripped_outfile\" -Wl,--start-group @\"$rspfile\" {{solibs}} -Wl,--end-group $libs_section_prefix {{libs}} $libs_section_postfix"
+
+ strip_switch = ""
if (defined(invoker.strip)) {
- link_wrapper =
- rebase_path("//build/toolchain/gcc_link_wrapper.py", root_build_dir)
- command = "$python_path \"$link_wrapper\" --strip=\"${invoker.strip}\" --unstripped-file=\"$unstripped_outfile\" --output=\"$outfile\" -- $command"
+ strip_switch = " --strip=\"${invoker.strip}\" --unstripped-file=\"$unstripped_outfile\""
}
+
+ link_wrapper =
+ rebase_path("//build/toolchain/gcc_link_wrapper.py", root_build_dir)
+ command = "$python_path \"$link_wrapper\" --output=\"$outfile\"$strip_switch$map_switch -- $link_command"
description = "LINK $outfile"
rspfile_content = "{{inputs}}"
outputs = [
@@ -450,6 +512,9 @@ template("gcc_toolchain") {
if (defined(invoker.link_outputs)) {
outputs += invoker.link_outputs
}
+ if (defined(map_file)) {
+ outputs += [ map_file ]
+ }
}
# These two are really entirely generic, but have to be repeated in
@@ -487,31 +552,16 @@ template("clang_toolchain") {
cc = "$prefix/clang"
cxx = "$prefix/clang++"
ld = cxx
-
- if (use_clang_static_analyzer) {
- # Static analysis isn't supported under GOMA. See crbug.com/687245
- # for progress on this issue.
- assert(!use_goma, "'use_clang_static_analyzer' cannot be used with GOMA.")
-
- # Call "ccc-analyzer" or "c++-analyzer" instead of directly calling Clang.
- # |wrapper_tool| sets the environment variables which are read by the
- # analyzer tools.
- analyzer_wrapper =
- rebase_path("//build/toolchain/clang_static_analyzer_wrapper.py",
- root_build_dir)
- cc = analyzer_wrapper + " --clang-cc-path=${cc} --analyzer=" +
- rebase_path("//third_party/scan-build/src/libexec/ccc-analyzer",
- root_build_dir)
- cxx = analyzer_wrapper + " --clang-cxx-path=${cxx} --analyzer=" +
- rebase_path("//third_party/scan-build/src/libexec/c++-analyzer",
- root_build_dir)
- }
-
readelf = "${toolprefix}readelf"
- ar = "${toolprefix}ar"
+ ar = "${prefix}/llvm-ar"
nm = "${toolprefix}nm"
- forward_variables_from(invoker, [ "strip" ])
+ forward_variables_from(invoker,
+ [
+ "strip",
+ "is_clang_analysis_supported",
+ "enable_linker_map",
+ ])
toolchain_args = {
if (defined(invoker.toolchain_args)) {
diff --git a/chromium/build/toolchain/linux/BUILD.gn b/chromium/build/toolchain/linux/BUILD.gn
index 86cd7dabc6e..3be5c36bca6 100644
--- a/chromium/build/toolchain/linux/BUILD.gn
+++ b/chromium/build/toolchain/linux/BUILD.gn
@@ -58,6 +58,9 @@ gcc_toolchain("arm") {
}
clang_toolchain("clang_x86") {
+ # Output linker map files for binary size analysis.
+ enable_linker_map = true
+
toolchain_args = {
current_cpu = "x86"
current_os = "linux"
@@ -89,6 +92,9 @@ gcc_toolchain("x86") {
ar = "ar"
ld = cxx
+ # Output linker map files for binary size analysis.
+ enable_linker_map = true
+
toolchain_args = {
current_cpu = "x86"
current_os = "linux"
@@ -97,6 +103,9 @@ gcc_toolchain("x86") {
}
clang_toolchain("clang_x64") {
+ # Output linker map files for binary size analysis.
+ enable_linker_map = true
+
toolchain_args = {
current_cpu = "x64"
current_os = "linux"
@@ -128,6 +137,9 @@ gcc_toolchain("x64") {
ar = "ar"
ld = cxx
+ # Output linker map files for binary size analysis.
+ enable_linker_map = true
+
toolchain_args = {
current_cpu = "x64"
current_os = "linux"
diff --git a/chromium/build/toolchain/nacl/BUILD.gn b/chromium/build/toolchain/nacl/BUILD.gn
index 89e642eb9cf..c5899873a5c 100644
--- a/chromium/build/toolchain/nacl/BUILD.gn
+++ b/chromium/build/toolchain/nacl/BUILD.gn
@@ -74,13 +74,16 @@ template("pnacl_toolchain") {
if (defined(invoker.strip)) {
strip = scriptprefix + toolprefix + invoker.strip + scriptsuffix
}
+ forward_variables_from(invoker,
+ [
+ "executable_extension",
+ "is_clang_analysis_supported",
+ ])
# Note this is not the usual "ld = cxx" because "ld" uses are
# never run via goma, so this needs scriptprefix.
ld = scriptprefix + toolprefix + "clang++" + scriptsuffix
- executable_extension = invoker.executable_extension
-
toolchain_args = {
is_clang = true
current_cpu = "pnacl"
@@ -104,6 +107,10 @@ pnacl_toolchain("newlib_pnacl") {
pnacl_toolchain("newlib_pnacl_nonsfi") {
executable_extension = ""
strip = "strip"
+
+ if (use_clang_static_analyzer) {
+ is_clang_analysis_supported = false
+ }
}
template("nacl_glibc_toolchain") {
diff --git a/chromium/build/toolchain/nacl_toolchain.gni b/chromium/build/toolchain/nacl_toolchain.gni
index eb6ffcce0c0..e51a26de02c 100644
--- a/chromium/build/toolchain/nacl_toolchain.gni
+++ b/chromium/build/toolchain/nacl_toolchain.gni
@@ -32,6 +32,7 @@ template("nacl_toolchain") {
"cc",
"cxx",
"deps",
+ "is_clang_analysis_supported",
"ld",
"link_outputs",
"nm",
diff --git a/chromium/build/toolchain/win/BUILD.gn b/chromium/build/toolchain/win/BUILD.gn
index dd1017fb3d9..296f829cbb3 100644
--- a/chromium/build/toolchain/win/BUILD.gn
+++ b/chromium/build/toolchain/win/BUILD.gn
@@ -6,6 +6,7 @@ import("//build/config/clang/clang.gni")
import("//build/config/compiler/compiler.gni")
import("//build/config/sanitizers/sanitizers.gni")
import("//build/config/win/visual_studio_version.gni")
+import("//build/toolchain/clang_static_analyzer.gni")
import("//build/toolchain/goma.gni")
import("//build/toolchain/toolchain.gni")
@@ -95,6 +96,14 @@ template("msvc_toolchain") {
cl = invoker.cl
}
+ if (toolchain_uses_clang && use_clang_static_analyzer) {
+ analyzer_prefix =
+ "$python_path " +
+ rebase_path("//build/toolchain/clang_static_analyzer_wrapper.py",
+ root_build_dir) + " --mode=cl"
+ cl = "${analyzer_prefix} ${cl}"
+ }
+
if (use_lld) {
if (host_os == "win") {
lld_link = "lld-link.exe"
diff --git a/chromium/build/toolchain/win/setup_toolchain.py b/chromium/build/toolchain/win/setup_toolchain.py
index ec60564070c..e8b08495745 100644
--- a/chromium/build/toolchain/win/setup_toolchain.py
+++ b/chromium/build/toolchain/win/setup_toolchain.py
@@ -127,6 +127,11 @@ def _LoadToolchainEnv(cpu, sdk_dir):
os.environ['GYP_MSVS_OVERRIDE_PATH'],
'VC/vcvarsall.bat'))
if not os.path.exists(script_path):
+ # vcvarsall.bat for VS 2017 fails if run after running vcvarsall.bat from
+ # VS 2013 or VS 2015. Fix this by clearing the vsinstalldir environment
+ # variable.
+ if 'VSINSTALLDIR' in os.environ:
+ del os.environ['VSINSTALLDIR']
other_path = os.path.normpath(os.path.join(
os.environ['GYP_MSVS_OVERRIDE_PATH'],
'VC/Auxiliary/Build/vcvarsall.bat'))
@@ -134,7 +139,10 @@ def _LoadToolchainEnv(cpu, sdk_dir):
raise Exception('%s is missing - make sure VC++ tools are installed.' %
script_path)
script_path = other_path
- args = [script_path, 'amd64_x86' if cpu == 'x86' else 'amd64']
+ # Chromium requires the 10.0.14393.0 SDK. Previous versions don't have all
+ # of the required declarations, and 10.0.15063.0 is buggy.
+ args = [script_path, 'amd64_x86' if cpu == 'x86' else 'amd64',
+ '10.0.14393.0']
variables = _LoadEnvFromBat(args)
return _ExtractImportantEnvironment(variables)
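
The pinned 10.0.14393.0 SDK is passed to vcvarsall.bat as an extra argument, and VSINSTALLDIR is cleared first so the VS 2017 script does not trip over state left behind by VS 2013/2015. A rough sketch of that pattern (not the script's actual helper; _LoadEnvFromBat does the real parsing), assuming a vcvarsall.bat path supplied by the caller:

  import os
  import subprocess

  def load_vs_env(vcvarsall_path, cpu):
    env = dict(os.environ)
    env.pop('VSINSTALLDIR', None)  # avoid the VS 2017-after-2015 failure noted above
    arch = 'amd64_x86' if cpu == 'x86' else 'amd64'
    # "&& set" makes cmd.exe print the environment that vcvarsall.bat produced.
    cmd = '"%s" %s 10.0.14393.0 && set' % (vcvarsall_path, arch)
    output = subprocess.check_output(cmd, shell=True, env=env,
                                     universal_newlines=True)
    return dict(line.split('=', 1) for line in output.splitlines() if '=' in line)
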
diff --git a/chromium/build/toolchain/win/tool_wrapper.py b/chromium/build/toolchain/win/tool_wrapper.py
index 4e69deafb5f..3a81368f346 100644
--- a/chromium/build/toolchain/win/tool_wrapper.py
+++ b/chromium/build/toolchain/win/tool_wrapper.py
@@ -315,16 +315,6 @@ class WinTool(object):
dirname = dirname[0] if dirname else None
return subprocess.call(args, shell=True, env=env, cwd=dirname)
- def ExecClCompile(self, project_dir, selected_files):
- """Executed by msvs-ninja projects when the 'ClCompile' target is used to
- build selected C/C++ files."""
- project_dir = os.path.relpath(project_dir, BASE_DIR)
- selected_files = selected_files.split(';')
- ninja_targets = [os.path.join(project_dir, filename) + '^^'
- for filename in selected_files]
- cmd = ['ninja.exe']
- cmd.extend(ninja_targets)
- return subprocess.call(cmd, shell=True, cwd=BASE_DIR)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
diff --git a/chromium/build/toolchain/wrapper_utils.py b/chromium/build/toolchain/wrapper_utils.py
index 467d85d9e2a..f76192e2063 100644
--- a/chromium/build/toolchain/wrapper_utils.py
+++ b/chromium/build/toolchain/wrapper_utils.py
@@ -4,16 +4,31 @@
"""Helper functions for gcc_toolchain.gni wrappers."""
+import gzip
import os
import re
import subprocess
import shlex
+import shutil
import sys
+import threading
_BAT_PREFIX = 'cmd /c call '
_WHITELIST_RE = re.compile('whitelisted_resource_(?P<resource_id>[0-9]+)')
+def _GzipThenDelete(src_path, dest_path):
+ # Results for Android map file with GCC on a z620:
+ # Uncompressed: 207MB
+ # gzip -9: 16.4MB, takes 8.7 seconds.
+ # gzip -1: 21.8MB, takes 2.0 seconds.
+ # Piping directly from the linker via -print-map (or via -Map with a fifo)
+ # adds a whopping 30-45 seconds!
+ with open(src_path, 'rb') as f_in, gzip.GzipFile(dest_path, 'wb', 1) as f_out:
+ shutil.copyfileobj(f_in, f_out)
+ os.unlink(src_path)
+
+
def CommandToRun(command):
"""Generates commands compatible with Windows.
@@ -36,6 +51,37 @@ def CommandToRun(command):
return command
+def RunLinkWithOptionalMapFile(command, env=None, map_file=None):
+ """Runs the given command, adding in -Wl,-Map when |map_file| is given.
+
+ Also takes care of gzipping when |map_file| ends with .gz.
+
+ Args:
+ command: List of arguments comprising the command.
+ env: Environment variables.
+ map_file: Path to output map_file.
+
+ Returns:
+ The exit code of running |command|.
+ """
+ tmp_map_path = None
+ if map_file and map_file.endswith('.gz'):
+ tmp_map_path = map_file + '.tmp'
+ command.append('-Wl,-Map,' + tmp_map_path)
+ elif map_file:
+ command.append('-Wl,-Map,' + map_file)
+
+ result = subprocess.call(command, env=env)
+
+ if tmp_map_path and result == 0:
+ threading.Thread(
+ target=lambda: _GzipThenDelete(tmp_map_path, map_file)).start()
+ elif tmp_map_path and os.path.exists(tmp_map_path):
+ os.unlink(tmp_map_path)
+
+ return result
+
+
def ResolveRspLinks(inputs):
"""Return a list of files contained in a response file.
diff --git a/chromium/build/util/LASTCHANGE b/chromium/build/util/LASTCHANGE
index e87f75f4976..a28f8c87360 100644
--- a/chromium/build/util/LASTCHANGE
+++ b/chromium/build/util/LASTCHANGE
@@ -1 +1 @@
-LASTCHANGE=c58c69e965fb9b338f7c9417371744b8f6147e83
+LASTCHANGE=b13af3d253c3274544f1246bf486f692a1997859
diff --git a/chromium/build/util/LASTCHANGE.blink b/chromium/build/util/LASTCHANGE.blink
index e87f75f4976..a28f8c87360 100644
--- a/chromium/build/util/LASTCHANGE.blink
+++ b/chromium/build/util/LASTCHANGE.blink
@@ -1 +1 @@
-LASTCHANGE=c58c69e965fb9b338f7c9417371744b8f6147e83
+LASTCHANGE=b13af3d253c3274544f1246bf486f692a1997859
diff --git a/chromium/build/util/lastchange.py b/chromium/build/util/lastchange.py
index c81e0bd52ec..df4eeabed1a 100755
--- a/chromium/build/util/lastchange.py
+++ b/chromium/build/util/lastchange.py
@@ -21,48 +21,6 @@ class VersionInfo(object):
self.revision = revision
-def FetchSVNRevision(directory, svn_url_regex):
- """
- Fetch the Subversion branch and revision for a given directory.
-
- Errors are swallowed.
-
- Returns:
- A VersionInfo object or None on error.
- """
- try:
- proc = subprocess.Popen(['svn', 'info'],
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- cwd=directory,
- shell=(sys.platform=='win32'))
- except OSError:
- # command is apparently either not installed or not executable.
- return None
- if not proc:
- return None
-
- attrs = {}
- for line in proc.stdout:
- line = line.strip()
- if not line:
- continue
- key, val = line.split(': ', 1)
- attrs[key] = val
-
- try:
- match = svn_url_regex.search(attrs['URL'])
- if match:
- url = match.group(2)
- else:
- url = ''
- revision = attrs['Revision']
- except KeyError:
- return None
-
- return VersionInfo(url, revision)
-
-
def RunGitCommand(directory, command):
"""
Launches git subcommand.
@@ -124,52 +82,9 @@ def FetchGitRevision(directory, hash_only):
return VersionInfo('git', '%s-%s' % (hsh, pos))
-def FetchGitSVNURLAndRevision(directory, svn_url_regex, go_deeper):
- """
- Fetch the Subversion URL and revision through Git.
-
- Errors are swallowed.
-
- Returns:
- A tuple containing the Subversion URL and revision.
- """
- git_args = ['log', '-1', '--format=%b']
- if go_deeper:
- git_args.append('--grep=git-svn-id')
- proc = RunGitCommand(directory, git_args)
- if proc:
- output = proc.communicate()[0].strip()
- if proc.returncode == 0 and output:
- # Extract the latest SVN revision and the SVN URL.
- # The target line is the last "git-svn-id: ..." line like this:
- # git-svn-id: svn://svn.chromium.org/chrome/trunk/src@85528 0039d316....
- match = _GIT_SVN_ID_REGEX.search(output)
- if match:
- revision = match.group(2)
- url_match = svn_url_regex.search(match.group(1))
- if url_match:
- url = url_match.group(2)
- else:
- url = ''
- return url, revision
- return None, None
-
-
-def FetchGitSVNRevision(directory, svn_url_regex, go_deeper):
- """
- Fetch the Git-SVN identifier for the local tree.
-
- Errors are swallowed.
- """
- url, revision = FetchGitSVNURLAndRevision(directory, svn_url_regex, go_deeper)
- if url and revision:
- return VersionInfo(url, revision)
- return None
-
-
-def FetchVersionInfo(default_lastchange, directory=None,
+def FetchVersionInfo(directory=None,
directory_regex_prior_to_src_url='chrome|blink|svn',
- go_deeper=False, hash_only=False):
+ hash_only=False):
"""
Returns the last change (in the form of a branch, revision tuple),
from some appropriate revision control system.
@@ -177,17 +92,12 @@ def FetchVersionInfo(default_lastchange, directory=None,
svn_url_regex = re.compile(
r'.*/(' + directory_regex_prior_to_src_url + r')(/.*)')
- version_info = (FetchSVNRevision(directory, svn_url_regex) or
- FetchGitSVNRevision(directory, svn_url_regex, go_deeper) or
- FetchGitRevision(directory, hash_only))
+ version_info = FetchGitRevision(directory, hash_only)
if not version_info:
- if default_lastchange and os.path.exists(default_lastchange):
- revision = open(default_lastchange, 'r').read().strip()
- version_info = VersionInfo(None, revision)
- else:
- version_info = VersionInfo(None, None)
+ version_info = VersionInfo(None, None)
return version_info
+
def GetHeaderGuard(path):
"""
Returns the header #define guard for the given file path.
@@ -203,6 +113,7 @@ def GetHeaderGuard(path):
guard = guard.upper()
return guard.replace('/', '_').replace('.', '_').replace('\\', '_') + '_'
+
def GetHeaderContents(path, define, version):
"""
Returns what the contents of the header file should be that indicate the given
@@ -225,6 +136,7 @@ def GetHeaderContents(path, define, version):
'version': version }
return header_contents
+
def WriteIfChanged(file_name, contents):
"""
Writes the specified contents to the specified file_name
@@ -246,8 +158,6 @@ def main(argv=None):
argv = sys.argv
parser = optparse.OptionParser(usage="lastchange.py [options]")
- parser.add_option("-d", "--default-lastchange", metavar="FILE",
- help="Default last change input FILE.")
parser.add_option("-m", "--version-macro",
help="Name of C #define when using --header. Defaults to " +
"LAST_CHANGE.",
@@ -263,9 +173,6 @@ def main(argv=None):
"file-output-related options.")
parser.add_option("-s", "--source-dir", metavar="DIR",
help="Use repository in the given directory.")
- parser.add_option("--git-svn-go-deeper", action='store_true',
- help="In a Git-SVN repo, dig down to the last committed " +
- "SVN change (historic behaviour).")
parser.add_option("--git-hash-only", action="store_true",
help="In a Git repo with commit positions, report only " +
"the hash of the latest commit with a position.")
@@ -287,9 +194,7 @@ def main(argv=None):
else:
src_dir = os.path.dirname(os.path.abspath(__file__))
- version_info = FetchVersionInfo(opts.default_lastchange,
- directory=src_dir,
- go_deeper=opts.git_svn_go_deeper,
+ version_info = FetchVersionInfo(directory=src_dir,
hash_only=opts.git_hash_only)
if version_info.revision == None:
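
With the SVN and git-svn paths removed, FetchVersionInfo reduces to FetchGitRevision, which shells out through RunGitCommand. The sketch below shows the general shape of such a call; the exact git arguments used by FetchGitRevision are not part of this hunk, so ['rev-parse', 'HEAD'] is only an illustrative stand-in, and src_dir is a placeholder for the repository to query:

  proc = RunGitCommand(src_dir, ['rev-parse', 'HEAD'])
  version_info = VersionInfo(None, None)
  if proc:
    output = proc.communicate()[0].strip()
    if proc.returncode == 0 and output:
      version_info = VersionInfo('git', output)
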
diff --git a/chromium/build/vs_toolchain.py b/chromium/build/vs_toolchain.py
index c5db1b249e4..90a562d3fb6 100755
--- a/chromium/build/vs_toolchain.py
+++ b/chromium/build/vs_toolchain.py
@@ -21,9 +21,6 @@ sys.path.insert(0, os.path.join(chrome_src, 'tools', 'gyp', 'pylib'))
json_data_file = os.path.join(script_dir, 'win_toolchain.json')
-import gyp
-
-
# Use MSVS2015 as the default toolchain.
CURRENT_DEFAULT_TOOLCHAIN_VERSION = '2015'
@@ -60,6 +57,12 @@ def SetEnvironmentAndGetRuntimeDllDirs():
os.environ['GYP_MSVS_OVERRIDE_PATH'] = toolchain
os.environ['GYP_MSVS_VERSION'] = version
+
+ # Limit the scope of the gyp import to only where it is used. This
+ # potentially lets build configs that never execute this block to drop
+ # their GYP checkout.
+ import gyp
+
# We need to make sure windows_sdk_path is set to the automated
# toolchain values in GYP_DEFINES, but don't want to override any
# otheroptions.express
@@ -68,6 +71,7 @@ def SetEnvironmentAndGetRuntimeDllDirs():
gyp_defines_dict['windows_sdk_path'] = win_sdk
os.environ['GYP_DEFINES'] = ' '.join('%s=%s' % (k, pipes.quote(str(v)))
for k, v in gyp_defines_dict.iteritems())
+
os.environ['WINDOWSSDKDIR'] = win_sdk
os.environ['WDK_DIR'] = wdk
# Include the VS runtime in the PATH in case it's not machine-installed.
@@ -133,7 +137,6 @@ def DetectVisualStudioPath():
# build/toolchain/win/setup_toolchain.py as well.
version_as_year = GetVisualStudioVersion()
year_to_version = {
- '2013': '12.0',
'2015': '14.0',
'2017': '15.0',
}
@@ -169,14 +172,11 @@ def _VersionNumber():
"""Gets the standard version number ('120', '140', etc.) based on
GYP_MSVS_VERSION."""
vs_version = GetVisualStudioVersion()
- if vs_version == '2013':
- return '120'
- elif vs_version == '2015':
+ if vs_version == '2015':
return '140'
- elif vs_version == '2017':
+ if vs_version == '2017':
return '150'
- else:
- raise ValueError('Unexpected GYP_MSVS_VERSION')
+ raise ValueError('Unexpected GYP_MSVS_VERSION')
def _CopyRuntimeImpl(target, source, verbose=True):
@@ -199,17 +199,7 @@ def _CopyRuntimeImpl(target, source, verbose=True):
os.chmod(target, stat.S_IWRITE)
-def _CopyRuntime2013(target_dir, source_dir, dll_pattern):
- """Copy both the msvcr and msvcp runtime DLLs, only if the target doesn't
- exist, but the target directory does exist."""
- for file_part in ('p', 'r'):
- dll = dll_pattern % file_part
- target = os.path.join(target_dir, dll)
- source = os.path.join(source_dir, dll)
- _CopyRuntimeImpl(target, source)
-
-
-def _CopyUCRTRuntime(target_dir, source_dir, dll_pattern, suffix):
+def _CopyUCRTRuntime(target_dir, source_dir, target_cpu, dll_pattern, suffix):
"""Copy both the msvcp and vccorlib runtime DLLs, only if the target doesn't
exist, but the target directory does exist."""
for file_part in ('msvcp', 'vccorlib', 'vcruntime'):
@@ -217,14 +207,16 @@ def _CopyUCRTRuntime(target_dir, source_dir, dll_pattern, suffix):
target = os.path.join(target_dir, dll)
source = os.path.join(source_dir, dll)
_CopyRuntimeImpl(target, source)
- # OS installs of Visual Studio (and all installs of Windows 10) put the
- # universal CRT files in c:\Windows\System32\downlevel - look for them there
- # to support DEPOT_TOOLS_WIN_TOOLCHAIN=0.
- if os.path.exists(os.path.join(source_dir, 'downlevel')):
- ucrt_src_glob = os.path.join(source_dir, 'downlevel', 'api-ms-win-*.dll')
- else:
- ucrt_src_glob = os.path.join(source_dir, 'api-ms-win-*.dll')
- ucrt_files = glob.glob(ucrt_src_glob)
+ # Copy the UCRT files needed by VS 2015 from the Windows SDK. This location
+ # includes the api-ms-win-crt-*.dll files that are not found in the Windows
+ # directory. These files are needed for component builds.
+ # If WINDOWSSDKDIR is not set use the default SDK path. This will be the case
+ # when DEPOT_TOOLS_WIN_TOOLCHAIN=0 and vcvarsall.bat has not been run.
+ win_sdk_dir = os.path.normpath(
+ os.environ.get('WINDOWSSDKDIR',
+ 'C:\\Program Files (x86)\\Windows Kits\\10'))
+ ucrt_dll_dirs = os.path.join(win_sdk_dir, r'Redist\ucrt\DLLs', target_cpu)
+ ucrt_files = glob.glob(os.path.join(ucrt_dll_dirs, 'api-ms-win-*.dll'))
assert len(ucrt_files) > 0
for ucrt_src_file in ucrt_files:
file_part = os.path.basename(ucrt_src_file)
@@ -236,13 +228,11 @@ def _CopyUCRTRuntime(target_dir, source_dir, dll_pattern, suffix):
def _CopyRuntime(target_dir, source_dir, target_cpu, debug):
"""Copy the VS runtime DLLs, only if the target doesn't exist, but the target
- directory does exist. Handles VS 2013, VS 2015, and VS 2017."""
+ directory does exist. Handles VS 2015 and VS 2017."""
suffix = "d.dll" if debug else ".dll"
- if GetVisualStudioVersion() == '2015' or GetVisualStudioVersion() == '2017':
- # VS 2017 RC uses the same CRT DLLs as VS 2015.
- _CopyUCRTRuntime(target_dir, source_dir, '%s140' + suffix, suffix)
- else:
- _CopyRuntime2013(target_dir, source_dir, 'msvc%s120' + suffix)
+ # VS 2017 uses the same CRT DLLs as VS 2015.
+ _CopyUCRTRuntime(target_dir, source_dir, target_cpu, '%s140' + suffix,
+ suffix)
# Copy the PGO runtime library to the release directories.
if not debug and os.environ.get('GYP_MSVS_OVERRIDE_PATH'):
@@ -314,37 +304,18 @@ def CopyDlls(target_dir, configuration, target_cpu):
if configuration == 'Debug':
_CopyRuntime(target_dir, runtime_dir, target_cpu, debug=True)
- _CopyDebugger(target_dir, target_cpu)
-
-
-def _CopyDebugger(target_dir, target_cpu):
- """Copy cdb.exe into the requested directory as needed.
-
- target_cpu is one of 'x86' or 'x64'.
-
- This is used for the GN build.
- """
- win_sdk_dir = SetEnvironmentAndGetSDKDir()
- if not win_sdk_dir:
- return
-
- debugger_files = (
- 'cdb.exe', 'dbgeng.dll', 'dbghelp.dll', 'dbgmodel.dll', 'dbgcore.dll')
-
- for debug_file in debugger_files:
- full_path = os.path.join(win_sdk_dir, 'Debuggers', target_cpu, debug_file)
- target_path = os.path.join(target_dir, debug_file)
- _CopyRuntimeImpl(target_path, full_path)
-
def _GetDesiredVsToolchainHashes():
"""Load a list of SHA1s corresponding to the toolchains that we want installed
to build with."""
- if GetVisualStudioVersion() == '2015':
+ env_version = GetVisualStudioVersion()
+ if env_version == '2015':
# Update 3 final with patches with 10.0.14393.0 SDK.
return ['d3cb0e37bdd120ad0ac4650b674b09e81be45616']
- else:
- return ['03a4e939cd325d6bc5216af41b92d02dda1366a6']
+ if env_version == '2017':
+ # VS 2017 RTM with 10.0.14393.0 SDK and dbghelp.dll fixes.
+ return ['4e8a360587a3c8ff3fa46aa9271e982bf3e948ec']
+ raise Exception('Unsupported VS version %s' % env_version)
def ShouldUpdateToolchain():
diff --git a/chromium/build/whitespace_file.txt b/chromium/build/whitespace_file.txt
index 363a265eb17..0319d781407 100644
--- a/chromium/build/whitespace_file.txt
+++ b/chromium/build/whitespace_file.txt
@@ -165,4 +165,6 @@ No, really, I couldn't eat another bit.
When I hunger I think of you, and a pastrami sandwich.
Do make a terrible mistake every once in a while.
I just made two.
-Mistakes are the best sometimes
+Mistakes are the best sometimes.
+\o/
+This is groovy.
diff --git a/chromium/build/win/BUILD.gn b/chromium/build/win/BUILD.gn
index 466e7ee84bf..b0aeee4c1df 100644
--- a/chromium/build/win/BUILD.gn
+++ b/chromium/build/win/BUILD.gn
@@ -14,3 +14,67 @@ windows_manifest("default_exe_manifest") {
]
type = "exe"
}
+
+if (is_win) {
+ action("copy_cdb_to_output") {
+ script = "//build/win/copy_cdb_to_output.py"
+ inputs = [
+ script,
+ ]
+ outputs = [
+ "$root_out_dir/cdb/cdb.exe",
+ "$root_out_dir/cdb/dbgeng.dll",
+ "$root_out_dir/cdb/dbghelp.dll",
+ "$root_out_dir/cdb/dbgmodel.dll",
+ "$root_out_dir/cdb/winext/ext.dll",
+ "$root_out_dir/cdb/winext/uext.dll",
+ "$root_out_dir/cdb/winxp/exts.dll",
+ "$root_out_dir/cdb/winxp/ntsdexts.dll",
+ "$root_out_dir/cdb/api-ms-win-core-console-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-core-datetime-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-core-debug-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-core-errorhandling-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-core-file-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-core-file-l1-2-0.dll",
+ "$root_out_dir/cdb/api-ms-win-core-file-l2-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-core-handle-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-core-heap-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-core-interlocked-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-core-libraryloader-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-core-localization-l1-2-0.dll",
+ "$root_out_dir/cdb/api-ms-win-core-memory-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-core-namedpipe-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-core-processenvironment-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-core-processthreads-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-core-processthreads-l1-1-1.dll",
+ "$root_out_dir/cdb/api-ms-win-core-profile-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-core-rtlsupport-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-core-string-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-core-synch-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-core-synch-l1-2-0.dll",
+ "$root_out_dir/cdb/api-ms-win-core-sysinfo-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-core-timezone-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-core-util-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-crt-conio-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-crt-convert-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-crt-environment-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-crt-filesystem-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-crt-heap-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-crt-locale-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-crt-math-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-crt-multibyte-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-crt-private-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-crt-process-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-crt-runtime-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-crt-stdio-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-crt-string-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-crt-time-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-crt-utility-l1-1-0.dll",
+ "$root_out_dir/cdb/ucrtbase.dll",
+ ]
+ args = [
+ rebase_path("$root_out_dir/cdb", root_out_dir),
+ current_cpu,
+ ]
+ }
+}
diff --git a/chromium/build/win/run_pgo_profiling_benchmarks.py b/chromium/build/win/run_pgo_profiling_benchmarks.py
index 13132cd96a7..23134a21f3a 100644
--- a/chromium/build/win/run_pgo_profiling_benchmarks.py
+++ b/chromium/build/win/run_pgo_profiling_benchmarks.py
@@ -44,11 +44,11 @@ _BENCHMARKS_TO_RUN = {
'dromaeo.jslibstyleprototype',
'dromaeo.jslibtraversejquery',
'dromaeo.jslibtraverseprototype',
- 'indexeddb_perf',
'media.tough_video_cases',
'octane',
'smoothness.top_25_smooth',
'speedometer',
+ 'storage.indexeddb_endure_tracing',
'sunspider',
}