Diffstat (limited to 'chromium/build')
-rw-r--r-- chromium/build/PRESUBMIT.py | 26
-rw-r--r-- chromium/build/README.md | 2
-rw-r--r-- chromium/build/android/AndroidManifest.xml | 2
-rwxr-xr-x chromium/build/android/apk_operations.py | 67
-rw-r--r-- chromium/build/android/bytecode/BUILD.gn | 1
-rw-r--r-- chromium/build/android/bytecode/java/org/chromium/bytecode/ByteCodeProcessor.java | 234
-rw-r--r-- chromium/build/android/bytecode/java/org/chromium/bytecode/ClassPathValidator.java | 153
-rwxr-xr-x chromium/build/android/diff_resource_sizes.py | 48
-rw-r--r-- chromium/build/android/docs/README.md | 11
-rw-r--r-- chromium/build/android/docs/life_of_a_resource.md | 233
-rw-r--r-- chromium/build/android/gradle/android.jinja | 2
-rwxr-xr-x chromium/build/android/gradle/generate_gradle.py | 2
-rw-r--r-- chromium/build/android/gradle/root.jinja | 2
-rwxr-xr-x chromium/build/android/gyp/apkbuilder.py | 18
-rwxr-xr-x chromium/build/android/gyp/bundletool.py | 2
-rwxr-xr-x chromium/build/android/gyp/bytecode_processor.py | 31
-rwxr-xr-x chromium/build/android/gyp/compile_resources.py | 142
-rwxr-xr-x chromium/build/android/gyp/create_app_bundle.py | 35
-rwxr-xr-x chromium/build/android/gyp/dex.py | 62
-rwxr-xr-x chromium/build/android/gyp/dexsplitter.py | 133
-rw-r--r-- chromium/build/android/gyp/dexsplitter.pydeps (renamed from chromium/build/android/gyp/generate_proguarded_module_jar.pydeps) | 4
-rwxr-xr-x chromium/build/android/gyp/generate_linker_version_script.py | 70
-rw-r--r-- chromium/build/android/gyp/generate_linker_version_script.pydeps | 7
-rwxr-xr-x chromium/build/android/gyp/generate_proguarded_module_jar.py | 159
-rwxr-xr-x chromium/build/android/gyp/jar.py | 8
-rwxr-xr-x chromium/build/android/gyp/javac.py | 52
-rwxr-xr-x chromium/build/android/gyp/lint.py | 20
-rwxr-xr-x chromium/build/android/gyp/proguard.py | 7
-rw-r--r-- chromium/build/android/gyp/util/build_utils.py | 81
-rw-r--r-- chromium/build/android/gyp/util/resource_utils.py | 24
-rwxr-xr-x chromium/build/android/gyp/write_build_config.py | 39
-rw-r--r-- chromium/build/android/incremental_install/java/org/chromium/incrementalinstall/ClassLoaderPatcher.java | 4
-rw-r--r-- chromium/build/android/lint/suppressions.xml | 15
-rw-r--r-- chromium/build/android/main_dex_classes.flags | 4
-rw-r--r-- chromium/build/android/pylib/base/base_test_result.py | 6
-rw-r--r-- chromium/build/android/pylib/base/environment.py | 6
-rw-r--r-- chromium/build/android/pylib/base/test_run.py | 15
-rw-r--r-- chromium/build/android/pylib/constants/host_paths.py | 2
-rw-r--r-- chromium/build/android/pylib/junit/junit_test_instance.py | 5
-rw-r--r-- chromium/build/android/pylib/local/device/local_device_environment.py | 11
-rw-r--r-- chromium/build/android/pylib/local/device/local_device_gtest_run.py | 14
-rw-r--r-- chromium/build/android/pylib/local/device/local_device_instrumentation_test_run.py | 11
-rw-r--r-- chromium/build/android/pylib/local/device/local_device_perf_test_run.py | 25
-rw-r--r-- chromium/build/android/pylib/local/device/local_device_test_run.py | 14
-rw-r--r-- chromium/build/android/pylib/local/machine/local_machine_junit_test_run.py | 23
-rw-r--r-- chromium/build/android/pylib/results/json_results.py | 33
-rwxr-xr-x chromium/build/android/pylib/results/json_results_test.py | 27
-rwxr-xr-x chromium/build/android/resource_sizes.py | 110
-rw-r--r-- chromium/build/android/resource_sizes.pydeps | 7
-rwxr-xr-x chromium/build/android/test_runner.py | 26
-rw-r--r-- chromium/build/android/test_wrapper/logdog_wrapper.pydeps | 1
-rw-r--r-- chromium/build/build_config.h | 2
-rwxr-xr-x chromium/build/chromeos/run_vm_test.py | 33
-rw-r--r-- chromium/build/compiled_action.gni | 6
-rw-r--r-- chromium/build/config/BUILD.gn | 17
-rw-r--r-- chromium/build/config/BUILDCONFIG.gn | 1
-rw-r--r-- chromium/build/config/OWNERS | 2
-rw-r--r-- chromium/build/config/android/BUILD.gn | 7
-rw-r--r-- chromium/build/config/android/config.gni | 5
-rw-r--r-- chromium/build/config/android/internal_rules.gni | 317
-rw-r--r-- chromium/build/config/android/linker_version_script.gni | 37
-rw-r--r-- chromium/build/config/android/rules.gni | 342
-rw-r--r-- chromium/build/config/chromeos/rules.gni | 4
-rw-r--r-- chromium/build/config/compiler/BUILD.gn | 127
-rw-r--r-- chromium/build/config/compiler/compiler.gni | 11
-rw-r--r-- chromium/build/config/coverage/BUILD.gn | 34
-rw-r--r-- chromium/build/config/coverage/coverage.gni | 16
-rw-r--r-- chromium/build/config/fuchsia/BUILD.gn | 5
-rw-r--r-- chromium/build/config/fuchsia/fidl_library.gni | 151
-rw-r--r-- chromium/build/config/fuchsia/testing_sandbox_policy | 1
-rw-r--r-- chromium/build/config/gcc/BUILD.gn | 11
-rw-r--r-- chromium/build/config/ios/ios_sdk_overrides.gni | 4
-rw-r--r-- chromium/build/config/jumbo.gni | 66
-rw-r--r-- chromium/build/config/linux/libdrm/BUILD.gn | 33
-rw-r--r-- chromium/build/config/mac/package_framework.py | 2
-rwxr-xr-x chromium/build/config/merge_for_jumbo.py | 3
-rw-r--r-- chromium/build/config/posix/BUILD.gn | 12
-rw-r--r-- chromium/build/config/sanitizers/BUILD.gn | 24
-rw-r--r-- chromium/build/config/sanitizers/sanitizers.gni | 15
-rw-r--r-- chromium/build/config/win/BUILD.gn | 41
-rw-r--r-- chromium/build/docs/writing_gn_templates.md | 33
-rwxr-xr-x chromium/build/download_nacl_toolchains.py | 5
-rw-r--r-- chromium/build/fuchsia/boot_data.py | 86
-rw-r--r-- chromium/build/fuchsia/common_args.py | 10
-rw-r--r-- chromium/build/fuchsia/device_target.py | 22
-rwxr-xr-x chromium/build/fuchsia/exe_runner.py | 7
-rw-r--r-- chromium/build/fuchsia/fidlgen_js/BUILD.gn | 63
-rw-r--r-- chromium/build/fuchsia/fidlgen_js/DEPS | 4
-rw-r--r-- chromium/build/fuchsia/fidlgen_js/fidl.py | 549
-rwxr-xr-x chromium/build/fuchsia/fidlgen_js/gen.py | 673
-rw-r--r-- chromium/build/fuchsia/fidlgen_js/runtime/fidl.mjs | 270
-rw-r--r-- chromium/build/fuchsia/fidlgen_js/runtime/zircon.cc | 438
-rw-r--r-- chromium/build/fuchsia/fidlgen_js/runtime/zircon.h | 58
-rw-r--r-- chromium/build/fuchsia/fidlgen_js/third_party/__init__.py | 0
-rw-r--r-- chromium/build/fuchsia/fidlgen_js/third_party/enum34/LICENSE | 32
-rw-r--r-- chromium/build/fuchsia/fidlgen_js/third_party/enum34/README.chromium | 15
-rw-r--r-- chromium/build/fuchsia/fidlgen_js/third_party/enum34/__init__.py | 837
-rw-r--r-- chromium/build/fuchsia/linux.sdk.sha1 | 2
-rw-r--r-- chromium/build/fuchsia/mac.sdk.sha1 | 2
-rw-r--r-- chromium/build/fuchsia/qemu_target.py | 44
-rw-r--r-- chromium/build/fuchsia/run_package.py | 59
-rwxr-xr-x chromium/build/fuchsia/test_runner.py | 12
-rw-r--r-- chromium/build/gn_helpers.py | 15
-rw-r--r-- chromium/build/gn_helpers_unittest.py | 9
-rw-r--r-- chromium/build/gyp_chromium.py | 38
-rw-r--r-- chromium/build/gyp_environment.py | 3
-rw-r--r-- chromium/build/gyp_helper.py | 68
-rw-r--r-- chromium/build/gypi_to_gn.py | 192
-rwxr-xr-x chromium/build/install-build-deps.sh | 2
-rw-r--r-- chromium/build/linux/chrome.safestack.map | 93
-rwxr-xr-x chromium/build/mac_toolchain.py | 11
-rw-r--r-- chromium/build/sanitizers/OWNERS | 8
-rw-r--r-- chromium/build/secondary/third_party/android_tools/AndroidManifest.xml.jinja2 | 14
-rw-r--r-- chromium/build/secondary/third_party/android_tools/BUILD.gn | 42
-rw-r--r-- chromium/build/toolchain/OWNERS | 6
-rwxr-xr-x chromium/build/toolchain/clang_code_coverage_wrapper.py | 83
-rwxr-xr-x chromium/build/toolchain/clang_static_analyzer_wrapper.py | 1
-rw-r--r-- chromium/build/toolchain/gcc_toolchain.gni | 64
-rw-r--r-- chromium/build/toolchain/mac/BUILD.gn | 24
-rw-r--r-- chromium/build/toolchain/nacl_toolchain.gni | 1
-rw-r--r-- chromium/build/toolchain/toolchain.gni | 2
-rw-r--r-- chromium/build/toolchain/win/BUILD.gn | 116
-rw-r--r-- chromium/build/toolchain/win/midl.gni | 3
-rwxr-xr-x chromium/build/toolchain/win/ml.py | 287
-rwxr-xr-x chromium/build/toolchain/win/rc/rc.py | 3
-rw-r--r-- chromium/build/toolchain/win/setup_toolchain.py | 3
-rw-r--r-- chromium/build/toolchain/win/tool_wrapper.py | 25
-rw-r--r-- chromium/build/util/LASTCHANGE | 2
-rw-r--r-- chromium/build/util/LASTCHANGE.committime | 2
-rwxr-xr-x chromium/build/vs_toolchain.py | 118
-rwxr-xr-x chromium/build/win/merge_pgc_files.py | 144
-rwxr-xr-x chromium/build/win/reorder-imports.py | 2
-rw-r--r-- chromium/build/win/run_pgo_profiling_benchmarks.py | 123
133 files changed, 6147 insertions(+), 1943 deletions(-)
diff --git a/chromium/build/PRESUBMIT.py b/chromium/build/PRESUBMIT.py
deleted file mode 100644
index b88632679a8..00000000000
--- a/chromium/build/PRESUBMIT.py
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright (c) 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Presubmit script for //build.
-
-See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
-for more details about the presubmit API built into depot_tools.
-"""
-
-def PostUploadHook(cl, change, output_api):
- """git cl upload will call this hook after the issue is created/modified.
-
- This hook modifies the CL description in order to run extra tests.
- """
-
- def affects_gn_checker(f):
- return 'check_gn_headers' in f.LocalPath()
- if not change.AffectedFiles(file_filter=affects_gn_checker):
- return []
- return output_api.EnsureCQIncludeTrybotsAreAdded(
- cl,
- [
- 'luci.chromium.try:linux_chromium_dbg_ng',
- ],
- 'Automatically added tests to run on CQ.')
diff --git a/chromium/build/README.md b/chromium/build/README.md
index 47e059db695..26bcc067025 100644
--- a/chromium/build/README.md
+++ b/chromium/build/README.md
@@ -27,4 +27,4 @@ Files referenced by `//.gn`:
* [Writing GN Templates](docs/writing_gn_templates.md)
* [Debugging Slow Builds](docs/debugging_slow_builds.md)
* [Mac Hermetic Toolchains](docs/mac_hermetic_toolchain.md)
-* [Android Build Documentation](android/docs)
+* [Android Build Documentation](android/docs/README.md)
diff --git a/chromium/build/android/AndroidManifest.xml b/chromium/build/android/AndroidManifest.xml
index 5439a5ab08a..fe21b80b4ba 100644
--- a/chromium/build/android/AndroidManifest.xml
+++ b/chromium/build/android/AndroidManifest.xml
@@ -15,6 +15,6 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="org.dummy">
- <uses-sdk android:minSdkVersion="16" android:targetSdkVersion="24" />
+ <uses-sdk android:minSdkVersion="19" android:targetSdkVersion="24" />
</manifest>
diff --git a/chromium/build/android/apk_operations.py b/chromium/build/android/apk_operations.py
index 5475c80b6e5..26c56e9a54c 100755
--- a/chromium/build/android/apk_operations.py
+++ b/chromium/build/android/apk_operations.py
@@ -306,8 +306,8 @@ def _DuHelper(device, path_spec, run_as=None):
run as root.
Returns:
- A dict of path->size in kb containing all paths in |path_spec| that exist on
- device. Paths that do not exist are silently ignored.
+ A dict of path->size in KiB containing all paths in |path_spec| that exist
+ on device. Paths that do not exist are silently ignored.
"""
# Example output for: du -s -k /data/data/org.chromium.chrome/{*,.*}
# 144 /data/data/org.chromium.chrome/cache
@@ -341,7 +341,7 @@ def _DuHelper(device, path_spec, run_as=None):
raise
-def _RunDiskUsage(devices, package_name, verbose):
+def _RunDiskUsage(devices, package_name):
# Measuring dex size is a bit complicated:
# https://source.android.com/devices/tech/dalvik/jit-compiler
#
@@ -391,13 +391,13 @@ def _RunDiskUsage(devices, package_name, verbose):
def disk_usage_helper(d):
package_output = '\n'.join(d.RunShellCommand(
['dumpsys', 'package', package_name], check_return=True))
- # Prints a message but does not return error when apk is not installed.
- if 'Unable to find package:' in package_output:
+ # Does not return error when apk is not installed.
+ if not package_output or 'Unable to find package:' in package_output:
return None
- # Ignore system apks.
- idx = package_output.find('Hidden system packages:')
- if idx != -1:
- package_output = package_output[:idx]
+
+ # Ignore system apks that have updates installed.
+ package_output = re.sub(r'Hidden system packages:.*?^\b', '',
+ package_output, flags=re.S | re.M)
try:
data_dir = re.search(r'dataDir=(.*)', package_output).group(1)
@@ -406,6 +406,10 @@ def _RunDiskUsage(devices, package_name, verbose):
package_output).group(1)
except AttributeError:
raise Exception('Error parsing dumpsys output: ' + package_output)
+
+ if code_path.startswith('/system'):
+ logging.warning('Measurement of system image apks can be inaccurate')
+
compilation_filters = set()
# Match "compilation_filter=value", where a line break can occur at any spot
# (refer to examples above).
@@ -459,10 +463,9 @@ def _RunDiskUsage(devices, package_name, verbose):
compilation_filter)
def print_sizes(desc, sizes):
- print '%s: %dkb' % (desc, sum(sizes.itervalues()))
- if verbose:
- for path, size in sorted(sizes.iteritems()):
- print ' %s: %skb' % (path, size)
+ print '%s: %d KiB' % (desc, sum(sizes.itervalues()))
+ for path, size in sorted(sizes.iteritems()):
+ print ' %s: %s KiB' % (path, size)
parallel_devices = device_utils.DeviceUtils.parallel(devices)
all_results = parallel_devices.pMap(disk_usage_helper).pGet(None)
@@ -485,7 +488,7 @@ def _RunDiskUsage(devices, package_name, verbose):
if show_warning:
logging.warning('For a more realistic odex size, run:')
logging.warning(' %s compile-dex [speed|speed-profile]', sys.argv[0])
- print 'Total: %skb (%.1fmb)' % (total, total / 1024.0)
+ print 'Total: %s KiB (%.1f MiB)' % (total, total / 1024.0)
class _LogcatProcessor(object):
@@ -906,16 +909,17 @@ class _Command(object):
args.__dict__.setdefault('apk_path', None)
args.__dict__.setdefault('incremental_json', None)
- if self.supports_incremental:
- incremental_apk_path = None
- if args.incremental_json and not args.non_incremental:
- with open(args.incremental_json) as f:
- install_dict = json.load(f)
- incremental_apk_path = os.path.join(
- args.output_directory, install_dict['apk_path'])
- if not os.path.exists(incremental_apk_path):
- incremental_apk_path = None
+ incremental_apk_path = None
+ if args.incremental_json and not (self.supports_incremental and
+ args.non_incremental):
+ with open(args.incremental_json) as f:
+ install_dict = json.load(f)
+ incremental_apk_path = os.path.join(args.output_directory,
+ install_dict['apk_path'])
+ if not os.path.exists(incremental_apk_path):
+ incremental_apk_path = None
+ if self.supports_incremental:
if args.incremental and args.non_incremental:
self._parser.error('Must use only one of --incremental and '
'--non-incremental')
@@ -931,13 +935,13 @@ class _Command(object):
self._parser.error('Both incremental and non-incremental apks exist. '
'Select using --incremental or --non-incremental')
- if ((self.needs_apk_path and not self.is_bundle) or args.apk_path
- or (self.supports_incremental and args.incremental_json)):
- if self.supports_incremental and incremental_apk_path:
+ if ((self.needs_apk_path and not self.is_bundle) or args.apk_path or
+ incremental_apk_path):
+ if args.apk_path:
+ self.apk_helper = apk_helper.ToHelper(args.apk_path)
+ elif incremental_apk_path:
self.install_dict = install_dict
self.apk_helper = apk_helper.ToHelper(incremental_apk_path)
- elif args.apk_path:
- self.apk_helper = apk_helper.ToHelper(args.apk_path)
else:
self._parser.error('Apk is not built.')
@@ -952,7 +956,9 @@ class _Command(object):
self.devices = []
if self.need_device_args:
# See https://crbug.com/887964 regarding bundle support in apk_helper.
- abis = self.apk_helper.GetAbis() if not self.is_bundle else None
+ abis = None
+ if not self.is_bundle and self.apk_helper is not None:
+ abis = self.apk_helper.GetAbis()
self.devices = device_utils.DeviceUtils.HealthyDevices(
device_arg=args.devices,
enable_device_files_cache=bool(args.output_directory),
@@ -1179,8 +1185,7 @@ class _DiskUsageCommand(_Command):
all_devices_by_default = True
def Run(self):
- _RunDiskUsage(self.devices, self.args.package_name,
- bool(self.args.verbose_count))
+ _RunDiskUsage(self.devices, self.args.package_name)
class _MemUsageCommand(_Command):
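For context on the `_DuHelper` docstring above: a minimal, hypothetical sketch of turning `du -s -k` output (size in KiB, whitespace, path, as in the example in the hunk) into the path-to-KiB dict the docstring describes. `parse_du_output` is an illustrative name, not the actual helper.

```python
def parse_du_output(du_output):
    """Parses `du -s -k` output into a dict of path -> size in KiB."""
    sizes = {}
    for line in du_output.splitlines():
        parts = line.split(None, 1)  # "<kib><whitespace><path>"
        if len(parts) == 2 and parts[0].isdigit():
            sizes[parts[1]] = int(parts[0])
        # Lines without a leading number (e.g. "du: can't open ...") are
        # silently skipped, matching the documented behavior for paths that
        # do not exist on the device.
    return sizes

assert parse_du_output('144\t/data/data/org.chromium.chrome/cache') == {
    '/data/data/org.chromium.chrome/cache': 144}
```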
diff --git a/chromium/build/android/bytecode/BUILD.gn b/chromium/build/android/bytecode/BUILD.gn
index e65f63c36d5..5aa1ae36d46 100644
--- a/chromium/build/android/bytecode/BUILD.gn
+++ b/chromium/build/android/bytecode/BUILD.gn
@@ -11,6 +11,7 @@ java_binary("java_bytecode_rewriter") {
java_files = [
"java/org/chromium/bytecode/AssertionEnablerClassAdapter.java",
"java/org/chromium/bytecode/ByteCodeProcessor.java",
+ "java/org/chromium/bytecode/ClassPathValidator.java",
"java/org/chromium/bytecode/CustomClassLoaderClassWriter.java",
"java/org/chromium/bytecode/CustomResourcesClassAdapter.java",
"java/org/chromium/bytecode/TypeUtils.java",
diff --git a/chromium/build/android/bytecode/java/org/chromium/bytecode/ByteCodeProcessor.java b/chromium/build/android/bytecode/java/org/chromium/bytecode/ByteCodeProcessor.java
index 8326a48bdd8..2b03b8ffe52 100644
--- a/chromium/build/android/bytecode/java/org/chromium/bytecode/ByteCodeProcessor.java
+++ b/chromium/build/android/bytecode/java/org/chromium/bytecode/ByteCodeProcessor.java
@@ -27,6 +27,14 @@ import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.util.ArrayList;
import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
import java.util.zip.CRC32;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
@@ -44,76 +52,114 @@ class ByteCodeProcessor {
private static final String CLASS_FILE_SUFFIX = ".class";
private static final String TEMPORARY_FILE_SUFFIX = ".temp";
private static final int BUFFER_SIZE = 16384;
+ private static boolean sVerbose;
+ private static boolean sIsPrebuilt;
+ private static boolean sShouldAssert;
+ private static boolean sShouldUseCustomResources;
+ private static boolean sShouldUseThreadAnnotations;
+ private static boolean sShouldCheckClassPath;
+ private static ClassLoader sDirectClassPathClassLoader;
+ private static ClassLoader sFullClassPathClassLoader;
+ private static Set<String> sFullClassPathJarPaths;
+ private static ClassPathValidator sValidator;
- private static void writeZipEntry(ZipOutputStream zipStream, String zipPath, byte[] data)
- throws IOException {
- ZipEntry entry = new ZipEntry(zipPath);
- entry.setMethod(ZipEntry.STORED);
- entry.setTime(0);
- entry.setSize(data.length);
- CRC32 crc = new CRC32();
- crc.update(data);
- entry.setCrc(crc.getValue());
- zipStream.putNextEntry(entry);
- zipStream.write(data);
- zipStream.closeEntry();
+ private static class EntryDataPair {
+ private final ZipEntry mEntry;
+ private final byte[] mData;
+
+ private EntryDataPair(ZipEntry mEntry, byte[] mData) {
+ this.mEntry = mEntry;
+ this.mData = mData;
+ }
+
+ private static EntryDataPair create(String zipPath, byte[] data) {
+ ZipEntry entry = new ZipEntry(zipPath);
+ entry.setMethod(ZipEntry.STORED);
+ entry.setTime(0);
+ entry.setSize(data.length);
+ CRC32 crc = new CRC32();
+ crc.update(data);
+ entry.setCrc(crc.getValue());
+ return new EntryDataPair(entry, data);
+ }
}
- private static void process(String inputJarPath, String outputJarPath, boolean shouldAssert,
- boolean shouldUseCustomResources, boolean shouldUseThreadAnnotations,
- ClassLoader classPathJarsClassLoader) {
+ private static EntryDataPair processEntry(ZipEntry entry, byte[] data)
+ throws ClassPathValidator.ClassNotLoadedException {
+ // Copy all non-.class files to the output jar.
+ if (entry.isDirectory() || !entry.getName().endsWith(CLASS_FILE_SUFFIX)) {
+ return new EntryDataPair(entry, data);
+ }
+
+ ClassReader reader = new ClassReader(data);
+
+ if (sShouldCheckClassPath) {
+ sValidator.validateClassPathsAndOutput(reader, sDirectClassPathClassLoader,
+ sFullClassPathClassLoader, sFullClassPathJarPaths, sIsPrebuilt, sVerbose);
+ }
+
+ ClassWriter writer;
+ if (sShouldUseCustomResources) {
+ // Use the COMPUTE_FRAMES flag to have asm figure out the stack map frames.
+ // This is necessary because GCMBaseIntentService in android_gcm_java contains
+ // incorrect stack map frames. This option slows down processing time by 2x.
+ writer = new CustomClassLoaderClassWriter(
+ sFullClassPathClassLoader, reader, COMPUTE_FRAMES);
+ } else {
+ writer = new ClassWriter(reader, 0);
+ }
+ ClassVisitor chain = writer;
+ /* DEBUGGING:
+ To see the bytecode for a specific class:
+ if (entry.getName().contains("YourClassName")) {
+ chain = new TraceClassVisitor(chain, new PrintWriter(System.out));
+ }
+ To see objectweb.asm code that will generate bytecode for a given class:
+ java -cp "third_party/ow2_asm/lib/asm-5.0.1.jar:third_party/ow2_asm/lib/"\
+ "asm-util-5.0.1.jar:out/Debug/lib.java/jar_containing_yourclass.jar" \
+ org.objectweb.asm.util.ASMifier org.package.YourClassName
+ */
+ if (sShouldUseThreadAnnotations) {
+ chain = new ThreadAssertionClassAdapter(chain);
+ }
+ if (sShouldAssert) {
+ chain = new AssertionEnablerClassAdapter(chain);
+ }
+ if (sShouldUseCustomResources) {
+ chain = new CustomResourcesClassAdapter(
+ chain, reader.getClassName(), reader.getSuperName(), sFullClassPathClassLoader);
+ }
+ reader.accept(chain, 0);
+ byte[] patchedByteCode = writer.toByteArray();
+ return EntryDataPair.create(entry.getName(), patchedByteCode);
+ }
+
+ private static void process(String inputJarPath, String outputJarPath)
+ throws ClassPathValidator.ClassNotLoadedException, ExecutionException,
+ InterruptedException {
String tempJarPath = outputJarPath + TEMPORARY_FILE_SUFFIX;
+ ExecutorService executorService =
+ Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
try (ZipInputStream inputStream = new ZipInputStream(
new BufferedInputStream(new FileInputStream(inputJarPath)));
ZipOutputStream tempStream = new ZipOutputStream(
new BufferedOutputStream(new FileOutputStream(tempJarPath)))) {
- ZipEntry entry;
-
- while ((entry = inputStream.getNextEntry()) != null) {
- // Copy all non-.class files to the output jar.
- if (entry.isDirectory() || !entry.getName().endsWith(CLASS_FILE_SUFFIX)) {
- tempStream.putNextEntry(entry);
- tempStream.write(readAllBytes(inputStream));
- tempStream.closeEntry();
- continue;
- }
-
- ClassReader reader = new ClassReader(readAllBytes(inputStream));
-
- ClassWriter writer;
- if (shouldUseCustomResources) {
- // Use the COMPUTE_FRAMES flag to have asm figure out the stack map frames.
- // This is necessary because GCMBaseIntentService in android_gcm_java contains
- // incorrect stack map frames. This option slows down processing time by 2x.
- writer = new CustomClassLoaderClassWriter(
- classPathJarsClassLoader, reader, COMPUTE_FRAMES);
- } else {
- writer = new ClassWriter(reader, 0);
- }
- ClassVisitor chain = writer;
- /* DEBUGGING:
- To see the bytecode for a specific class:
- if (entry.getName().contains("YourClassName")) {
- chain = new TraceClassVisitor(chain, new PrintWriter(System.out));
- }
- To see objectweb.asm code that will generate bytecode for a given class:
- java -cp "third_party/ow2_asm/lib/asm-5.0.1.jar:third_party/ow2_asm/lib/"\
- "asm-util-5.0.1.jar:out/Debug/lib.java/jar_containing_yourclass.jar" \
- org.objectweb.asm.util.ASMifier org.package.YourClassName
- */
- if (shouldUseThreadAnnotations) {
- chain = new ThreadAssertionClassAdapter(chain);
+ List<Future<EntryDataPair>> list = new ArrayList<>();
+ while (true) {
+ ZipEntry entry = inputStream.getNextEntry();
+ if (entry == null) {
+ break;
}
- if (shouldAssert) {
- chain = new AssertionEnablerClassAdapter(chain);
- }
- if (shouldUseCustomResources) {
- chain = new CustomResourcesClassAdapter(chain, reader.getClassName(),
- reader.getSuperName(), classPathJarsClassLoader);
- }
- reader.accept(chain, 0);
- byte[] patchedByteCode = writer.toByteArray();
- writeZipEntry(tempStream, entry.getName(), patchedByteCode);
+ byte[] data = readAllBytes(inputStream);
+ list.add(executorService.submit(() -> processEntry(entry, data)));
+ }
+ executorService.shutdown(); // This is essential in order to avoid waiting infinitely.
+ // Write the zip file entries in order to preserve determinism.
+ for (Future<EntryDataPair> futurePair : list) {
+ EntryDataPair pair = futurePair.get();
+ tempStream.putNextEntry(pair.mEntry);
+ tempStream.write(pair.mData);
+ tempStream.closeEntry();
}
} catch (IOException e) {
throw new RuntimeException(e);
@@ -125,6 +171,15 @@ class ByteCodeProcessor {
} catch (IOException ioException) {
throw new RuntimeException(ioException);
}
+
+ if (sValidator.getNumClassPathErrors() > 0) {
+ System.err.println("Missing " + sValidator.getNumClassPathErrors()
+ + " classes missing in direct classpath. To fix, add GN deps for:");
+ for (String s : sValidator.getClassPathMissingJars()) {
+ System.err.println(s);
+ }
+ System.exit(1);
+ }
}
private static byte[] readAllBytes(InputStream inputStream) throws IOException {
@@ -141,11 +196,12 @@ class ByteCodeProcessor {
* Loads a list of jars and returns a ClassLoader capable of loading all classes found in the
* given jars.
*/
- private static ClassLoader loadJars(ArrayList<String> paths) {
+ static ClassLoader loadJars(Collection<String> paths) {
URL[] jarUrls = new URL[paths.size()];
- for (int i = 0; i < paths.size(); ++i) {
+ int i = 0;
+ for (String path : paths) {
try {
- jarUrls[i] = new File(paths.get(i)).toURI().toURL();
+ jarUrls[i++] = new File(path).toURI().toURL();
} catch (MalformedURLException e) {
throw new RuntimeException(e);
}
@@ -153,23 +209,43 @@ class ByteCodeProcessor {
return new URLClassLoader(jarUrls);
}
- public static void main(String[] args) {
+ public static void main(String[] args) throws ClassPathValidator.ClassNotLoadedException,
+ ExecutionException, InterruptedException {
// Invoke this script using //build/android/gyp/bytecode_processor.py
- String inputJarPath = args[0];
- String outputJarPath = args[1];
- boolean shouldAssert = args[2].equals("--enable-assert");
- boolean shouldUseCustomResources = args[3].equals("--enable-custom-resources");
- boolean shouldUseThreadAnnotations = args[4].equals("--enable-thread-annotations");
+ int currIndex = 0;
+ String inputJarPath = args[currIndex++];
+ String outputJarPath = args[currIndex++];
+ sVerbose = args[currIndex++].equals("--verbose");
+ sIsPrebuilt = args[currIndex++].equals("--is-prebuilt");
+ sShouldAssert = args[currIndex++].equals("--enable-assert");
+ sShouldUseCustomResources = args[currIndex++].equals("--enable-custom-resources");
+ sShouldUseThreadAnnotations = args[currIndex++].equals("--enable-thread-annotations");
+ sShouldCheckClassPath = args[currIndex++].equals("--enable-check-class-path");
+ int sdkJarsLength = Integer.parseInt(args[currIndex++]);
+ List<String> sdkJarPaths =
+ Arrays.asList(Arrays.copyOfRange(args, currIndex, currIndex + sdkJarsLength));
+ currIndex += sdkJarsLength;
+
+ int directJarsLength = Integer.parseInt(args[currIndex++]);
+ ArrayList<String> directClassPathJarPaths = new ArrayList<>();
+ directClassPathJarPaths.add(inputJarPath);
+ directClassPathJarPaths.addAll(sdkJarPaths);
+ directClassPathJarPaths.addAll(
+ Arrays.asList(Arrays.copyOfRange(args, currIndex, currIndex + directJarsLength)));
+ currIndex += directJarsLength;
+ sDirectClassPathClassLoader = loadJars(directClassPathJarPaths);
// Load all jars that are on the classpath for the input jar for analyzing class hierarchy.
- ClassLoader classPathJarsClassLoader = null;
- if (shouldUseCustomResources) {
- ArrayList<String> classPathJarsPaths = new ArrayList<>();
- classPathJarsPaths.add(inputJarPath);
- classPathJarsPaths.addAll(Arrays.asList(Arrays.copyOfRange(args, 4, args.length)));
- classPathJarsClassLoader = loadJars(classPathJarsPaths);
- }
- process(inputJarPath, outputJarPath, shouldAssert, shouldUseCustomResources,
- shouldUseThreadAnnotations, classPathJarsClassLoader);
+ sFullClassPathJarPaths = new HashSet<>();
+ sFullClassPathJarPaths.clear();
+ sFullClassPathJarPaths.add(inputJarPath);
+ sFullClassPathJarPaths.addAll(sdkJarPaths);
+ sFullClassPathJarPaths.addAll(
+ Arrays.asList(Arrays.copyOfRange(args, currIndex, args.length)));
+ sFullClassPathClassLoader = loadJars(sFullClassPathJarPaths);
+ sFullClassPathJarPaths.removeAll(directClassPathJarPaths);
+
+ sValidator = new ClassPathValidator();
+ process(inputJarPath, outputJarPath);
}
}
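The rewritten `ByteCodeProcessor.process()` above parallelizes entry processing but collects the `Future`s in a list and writes them back in submission order, keeping the output jar deterministic. A sketch of the same pattern in Python, assuming a byte-transforming callback (names are illustrative, not part of the build):

```python
import concurrent.futures
import zipfile

def rewrite_zip(in_path, out_path, transform):
    """Applies |transform| to every entry of a zip on a thread pool."""
    with zipfile.ZipFile(in_path) as zin, \
            zipfile.ZipFile(out_path, 'w') as zout, \
            concurrent.futures.ThreadPoolExecutor() as pool:
        # Entries are read on the main thread and submitted one task each;
        # the futures list preserves submission order.
        futures = [(info.filename, pool.submit(transform, zin.read(info)))
                   for info in zin.infolist()]
        # Consume futures in submission order, not completion order, so the
        # output is byte-for-byte deterministic.
        for name, future in futures:
            zout.writestr(name, future.result())
```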
diff --git a/chromium/build/android/bytecode/java/org/chromium/bytecode/ClassPathValidator.java b/chromium/build/android/bytecode/java/org/chromium/bytecode/ClassPathValidator.java
new file mode 100644
index 00000000000..b73cde805c9
--- /dev/null
+++ b/chromium/build/android/bytecode/java/org/chromium/bytecode/ClassPathValidator.java
@@ -0,0 +1,153 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.bytecode;
+
+import org.objectweb.asm.ClassReader;
+
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Set;
+
+/**
+ * Checks classpaths (given as ClassLoaders) by reading the constant pool of the class file and
+ * attempting to load every referenced class. If there are some that are unable to be found, it
+ * stores a helpful error message if it knows where it might find them, and exits the program if it
+ * can't find the class with any given classpath.
+ */
+public class ClassPathValidator {
+ private final Set<String> mClassPathMissingJars = new HashSet<>();
+ private int mNumClassPathErrors;
+
+ static class ClassNotLoadedException extends ClassNotFoundException {
+ private final String mClassName;
+
+ ClassNotLoadedException(String className, Throwable ex) {
+ super("Couldn't load " + className, ex);
+ mClassName = className;
+ }
+
+ public String getClassName() {
+ return mClassName;
+ }
+ }
+
+ private static void printAndQuit(ClassNotLoadedException e, ClassReader classReader,
+ boolean verbose) throws ClassNotLoadedException {
+ System.err.println("Class \"" + e.getClassName()
+ + "\" not found on any classpath. Used by class \"" + classReader.getClassName()
+ + "\"");
+ if (verbose) {
+ throw e;
+ }
+ System.exit(1);
+ }
+
+ private static void validateClass(ClassLoader classLoader, String className)
+ throws ClassNotLoadedException {
+ if (className.startsWith("[")) {
+ // Dealing with an array type which isn't encoded nicely in the constant pool.
+ // For example, [[Lorg/chromium/Class$1;
+ className = className.substring(className.lastIndexOf('[') + 1);
+ if (className.charAt(0) == 'L' && className.endsWith(";")) {
+ className = className.substring(1, className.length() - 1);
+ } else {
+ // Bailing out if we have a non-class array type.
+ // This could be something like [B
+ return;
+ }
+ }
+ if (className.matches(".*\\bR(\\$\\w+)?$")) {
+ // Resources in R.java files are not expected to be valid at this stage in the build.
+ return;
+ }
+ if (className.matches("^libcore\\b.*")) {
+ // libcore exists on devices, but is not included in the Android sdk as it is a private
+ // API.
+ return;
+ }
+ try {
+ classLoader.loadClass(className.replace('/', '.'));
+ } catch (ClassNotFoundException e) {
+ throw new ClassNotLoadedException(className, e);
+ } catch (NoClassDefFoundError e) {
+ // We assume that this is caused by another class that cannot itself be loaded,
+ // so we skip this one and let that class fail with ClassNotFoundException.
+ }
+ }
+
+ /**
+ * Given a .class file, see if every class referenced in the main class' constant pool can be
+ * loaded by the given ClassLoader.
+ *
+ * @param classReader .class file interface for reading the constant pool.
+ * @param classLoader classpath you wish to validate.
+ * @throws ClassNotLoadedException thrown if it can't load a certain class.
+ */
+ private static void validateClassPath(ClassReader classReader, ClassLoader classLoader)
+ throws ClassNotLoadedException {
+ char[] charBuffer = new char[classReader.getMaxStringLength()];
+ // According to the Java spec, the constant pool is indexed from 1 to constant_pool_count -
+ // 1. See https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.4
+ for (int i = 1; i < classReader.getItemCount(); i++) {
+ int offset = classReader.getItem(i);
+ // Class entries correspond to 7 in the constant pool
+ // https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.4
+ if (offset > 0 && classReader.readByte(offset - 1) == 7) {
+ validateClass(classLoader, classReader.readUTF8(offset, charBuffer));
+ }
+ }
+ }
+
+ public void validateClassPathsAndOutput(ClassReader classReader,
+ ClassLoader directClassPathClassLoader, ClassLoader fullClassPathClassLoader,
+ Collection<String> jarsOnlyInFullClassPath, boolean isPrebuilt, boolean verbose)
+ throws ClassNotLoadedException {
+ if (isPrebuilt) {
+ // Prebuilts only need transitive dependencies checked, not direct dependencies.
+ try {
+ validateClassPath(classReader, fullClassPathClassLoader);
+ } catch (ClassNotLoadedException e) {
+ printAndQuit(e, classReader, verbose);
+ }
+ } else {
+ try {
+ validateClassPath(classReader, directClassPathClassLoader);
+ } catch (ClassNotLoadedException e) {
+ try {
+ validateClass(fullClassPathClassLoader, e.getClassName());
+ } catch (ClassNotLoadedException d) {
+ printAndQuit(d, classReader, verbose);
+ }
+ if (verbose) {
+ System.err.println("Class \"" + e.getClassName()
+ + "\" not found in direct dependencies,"
+ + " but found in indirect dependiences.");
+ }
+ // Iterating through all jars that are in the full classpath but not the direct
+ // classpath to find which one provides the class we are looking for.
+ for (String s : jarsOnlyInFullClassPath) {
+ try {
+ ClassLoader smallLoader =
+ ByteCodeProcessor.loadJars(Collections.singletonList(s));
+ validateClass(smallLoader, e.getClassName());
+ mClassPathMissingJars.add(s);
+ mNumClassPathErrors++;
+ break;
+ } catch (ClassNotLoadedException f) {
+ }
+ }
+ }
+ }
+ }
+
+ public int getNumClassPathErrors() {
+ return mNumClassPathErrors;
+ }
+
+ public Set<String> getClassPathMissingJars() {
+ return mClassPathMissingJars;
+ }
+}
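For readers unfamiliar with the constant-pool walk that `validateClassPath()` performs through ASM, here is a hedged standalone sketch in Python that extracts the same CONSTANT_Class (tag 7) entries by parsing the class file directly. `referenced_classes` is an illustrative name, and the parser only knows the tags defined in JVMS se7 section 4.4:

```python
import struct

# Payload sizes (bytes after the tag byte) for fixed-size constant pool
# entries. Tag 1 (CONSTANT_Utf8) is variable-length and handled separately.
_FIXED_SIZES = {3: 4, 4: 4, 5: 8, 6: 8, 7: 2, 8: 2, 9: 4, 10: 4,
                11: 4, 12: 4, 15: 3, 16: 2, 18: 4}

def referenced_classes(class_file_path):
    """Returns the class names referenced by a .class file's constant pool."""
    with open(class_file_path, 'rb') as f:
        data = f.read()
    assert data[:4] == b'\xca\xfe\xba\xbe', 'not a .class file'
    pool_count = struct.unpack_from('>H', data, 8)[0]
    utf8, class_name_indices = {}, []
    offset, index = 10, 1
    while index < pool_count:
        tag = data[offset]
        offset += 1
        if tag == 1:  # CONSTANT_Utf8: u2 length followed by that many bytes.
            length = struct.unpack_from('>H', data, offset)[0]
            # Modified UTF-8 decoded as plain UTF-8; close enough for a sketch.
            utf8[index] = data[offset + 2:offset + 2 + length].decode('utf-8')
            offset += 2 + length
        else:
            if tag == 7:  # CONSTANT_Class: u2 index of a Utf8 entry.
                class_name_indices.append(
                    struct.unpack_from('>H', data, offset)[0])
            offset += _FIXED_SIZES[tag]
        index += 2 if tag in (5, 6) else 1  # Long/Double occupy two slots.
    return [utf8[i] for i in class_name_indices]
```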
diff --git a/chromium/build/android/diff_resource_sizes.py b/chromium/build/android/diff_resource_sizes.py
index 96f64c84e67..4fcbec9f9d6 100755
--- a/chromium/build/android/diff_resource_sizes.py
+++ b/chromium/build/android/diff_resource_sizes.py
@@ -7,6 +7,7 @@
import argparse
import json
+import logging
import os
import subprocess
import sys
@@ -17,6 +18,9 @@ from pylib.utils import shared_preference_utils
with host_paths.SysPath(host_paths.BUILD_COMMON_PATH):
import perf_tests_results_helper # pylint: disable=import-error
+with host_paths.SysPath(host_paths.TRACING_PATH):
+ from tracing.value import convert_chart_json # pylint: disable=import-error
+
_ANDROID_DIR = os.path.dirname(os.path.abspath(__file__))
with host_paths.SysPath(os.path.join(_ANDROID_DIR, 'gyp', 'util')):
import build_utils # pylint: disable=import-error
@@ -30,7 +34,8 @@ _BASE_CHART = {
'charts': {},
}
-_RESULTS_FILENAME = 'results-chart.json'
+_CHARTJSON_FILENAME = 'results-chart.json'
+_HISTOGRAMS_FILENAME = 'perf_results.json'
def DiffResults(chartjson, base_results, diff_results):
@@ -99,7 +104,12 @@ def _CreateArgparser():
'APK.')
argparser.add_argument('--chartjson',
action='store_true',
- help='Sets output mode to chartjson.')
+ help='DEPRECATED. Use --output-format=chartjson '
+ 'instead.')
+ argparser.add_argument('--output-format',
+ choices=['chartjson', 'histograms'],
+ help='Output the results to a file in the given '
+ 'format instead of printing the results.')
argparser.add_argument('--include-intermediate-results',
action='store_true',
help='Include the results from the resource_sizes.py '
@@ -124,12 +134,16 @@ def _CreateArgparser():
def main():
args, unknown_args = _CreateArgparser().parse_known_args()
- chartjson = _BASE_CHART.copy() if args.chartjson else None
+ # TODO(bsheedy): Remove this once all uses of --chartjson are removed.
+ if args.chartjson:
+ args.output_format = 'chartjson'
+
+ chartjson = _BASE_CHART.copy() if args.output_format else None
with build_utils.TempDir() as base_dir, build_utils.TempDir() as diff_dir:
# Run resource_sizes.py on the two APKs
resource_sizes_path = os.path.join(_ANDROID_DIR, 'resource_sizes.py')
- shared_args = (['python', resource_sizes_path, '--chartjson']
+ shared_args = (['python', resource_sizes_path, '--output-format=chartjson']
+ unknown_args)
base_args = shared_args + ['--output-dir', base_dir, args.base_apk]
@@ -151,18 +165,34 @@ def main():
raise
# Combine the separate results
- base_file = os.path.join(base_dir, _RESULTS_FILENAME)
- diff_file = os.path.join(diff_dir, _RESULTS_FILENAME)
+ base_file = os.path.join(base_dir, _CHARTJSON_FILENAME)
+ diff_file = os.path.join(diff_dir, _CHARTJSON_FILENAME)
base_results = shared_preference_utils.ExtractSettingsFromJson(base_file)
diff_results = shared_preference_utils.ExtractSettingsFromJson(diff_file)
DiffResults(chartjson, base_results, diff_results)
if args.include_intermediate_results:
AddIntermediateResults(chartjson, base_results, diff_results)
- if args.chartjson:
- with open(os.path.join(os.path.abspath(args.output_dir),
- _RESULTS_FILENAME), 'w') as outfile:
+ if args.output_format:
+ chartjson_path = os.path.join(os.path.abspath(args.output_dir),
+ _CHARTJSON_FILENAME)
+ logging.critical('Dumping diff chartjson to %s', chartjson_path)
+ with open(chartjson_path, 'w') as outfile:
json.dump(chartjson, outfile)
+ if args.output_format == 'histograms':
+ histogram_result = convert_chart_json.ConvertChartJson(chartjson_path)
+ if histogram_result.returncode != 0:
+ logging.error('chartjson conversion failed with error: %s',
+ histogram_result.stdout)
+ return 1
+
+ histogram_path = os.path.join(os.path.abspath(args.output_dir),
+ 'perf_results.json')
+ logging.critical('Dumping diff histograms to %s', histogram_path)
+ with open(histogram_path, 'w') as json_file:
+ json_file.write(histogram_result.stdout)
+
+
if __name__ == '__main__':
sys.exit(main())
diff --git a/chromium/build/android/docs/README.md b/chromium/build/android/docs/README.md
new file mode 100644
index 00000000000..b6f0a6e9c24
--- /dev/null
+++ b/chromium/build/android/docs/README.md
@@ -0,0 +1,11 @@
+# Android Build Docs
+
+* [android_app_bundles.md](android_app_bundles.md)
+* [build_config.md](build_config.md)
+* [coverage.md](coverage.md)
+* [lint.md](lint.md)
+* [life_of_a_resource.md](life_of_a_resource.md)
+* [../incremental_install/README.md](../incremental_install/README.md)
+
+See also:
+* [//build/README.md](../../README.md)
diff --git a/chromium/build/android/docs/life_of_a_resource.md b/chromium/build/android/docs/life_of_a_resource.md
new file mode 100644
index 00000000000..f3a64194803
--- /dev/null
+++ b/chromium/build/android/docs/life_of_a_resource.md
@@ -0,0 +1,233 @@
+# Life of an Android Resource
+
+[TOC]
+
+## Overview
+
+This document describes how [Android Resources][android resources]
+are built in Chromium's build system. It does not mention native resources
+which are [processed differently][native resources].
+
+[android resources]: https://developer.android.com/guide/topics/resources/providing-resources
+[native resources]: https://www.chromium.org/developers/tools-we-use-in-chromium/grit/grit-users-guide
+
+The steps consume the following files as inputs:
+* AndroidManifest.xml
+ * Including AndroidManifest.xml files from libraries, which get merged
+ together
+* res/ directories
+
+The steps produce the following intermediate files:
+* R.srcjar (contains R.java files)
+* R.txt
+* .resources.zip
+
+The steps produce the following files within an .apk:
+* AndroidManifest.xml (a binary xml file)
+* resources.arsc (contains all values and configuration metadata)
+* res/** (drawables and layouts)
+* classes.dex (just a small portion of classes from generated R.java files)
+
+
+## The Build Steps
+
+Whenever you try to compile an apk or library target, resources go through the
+following steps:
+
+### 1. Constructs .build\_config files:
+
+Inputs:
+* GN target metadata
+* Other .build_config files
+
+Outputs:
+* Target-specific .build_config file
+
+write_build_config.py is run to record target metadata needed by future steps.
+For more details, see [build_config.md](build_config.md).
+
+
+### 2. Prepares resources:
+
+Inputs:
+* Target-specific build\_config file
+* Target-specific Resource dirs (res/ directories)
+* resources.zip files from dependencies (used to generate the R.txt/java files)
+
+Outputs:
+* Target-specific resources.zip (containing only resources in the
+ target-specific resource dirs, no dependent resources here).
+* Target-specific R.txt
+ * Contains a list of resources and their ids (including those of dependencies).
+* Target-specific R.java .srcjar
+ * See [What are R.java files and how are they generated](
+ #how-r_java-files-are-generated)
+
+prepare\_resources.py zips up the target-specific resource dirs and generates
+R.txt and R.java .srcjars. No optimizations, crunching, etc. are done on the
+resources.
+
+**The following steps apply only to apk targets (not library targets).**
+
+### 3. Finalizes apk resources:
+
+Inputs:
+* Target-specific build\_config file
+* Dependencies' resources.zip files
+
+Output:
+* Packaged resources zip (named foo.ap_) containing:
+ * AndroidManifest.xml (as binary xml)
+ * resources.arsc
+ * res/**
+* Final R.txt
+ * Contains a list of resources and their ids (including those of dependencies).
+* Final R.java .srcjar
+ * See [What are R.java files and how are they generated](
+ #how-r_java-files-are-generated)
+
+
+#### 3(a). Compiles resources:
+
+For each library / resources target your apk depends on, the following happens:
+* Use a regex (defined in the apk target) to remove select resources (optional).
+* Convert png images to webp to reduce binary size (optional).
+* Move drawables in mdpi to non-mdpi directory ([why?](http://crbug.com/289843))
+* Use `aapt2 compile` to compile xml resources to binary xml (references to
+ other resources will now use the id rather than the name for faster lookup at
+ runtime).
+* `aapt2 compile` adds headers/metadata to 9-patch images about which parts of
+ the image are stretchable vs static.
+* `aapt2 compile` outputs a zip with the compiled resources (one for each
+ dependency).
+
+
+#### 3(b). Links resources:
+
+After each dependency is compiled into an intermediate .zip, all those zips are
+linked by the aapt2 link command which does the following:
+* Use the order of dependencies supplied so that some resources clobber each
+ other.
+* Compile the AndroidManifest.xml to binary xml (references to resources are now
+ using ids rather than the string names)
+* Create a resources.arsc file that has the names and values of string
+ resources as well as the names and paths of non-string resources (i.e. layouts
+ and drawables).
+* Combine the compiled resources into one packaged resources apk (a zip file
+ with an .ap\_ extension) that has all the resources related files.
+
+
+#### 3(c). Optimizes resources:
+
+This step obfuscates / strips resource names from the resources.arsc so that
+they can be looked up only by their numeric ids (assigned in the compile
+resources step). Access to resources via `Resources.getIdentifier()` no longer
+works unless resources are [whitelisted](#adding-resources-to-the-whitelist).
+
+## App Bundles and Modules:
+
+Processing resources for bundles and modules is slightly different. Each module
+has its resources compiled and linked separately (i.e. it goes through the
+entire process for each module). The modules are then combined to form a
+bundle. Moreover, during "Finalizing the apk resources" step, bundle modules
+produce a `resources.proto` file instead of a `resources.arsc` file.
+
+Resources in a dynamic feature module may reference resources in the base
+module. During the link step for feature module resources, the linked resources
+of the base module are passed in. However, linking against resources currently
+works only with `resources.arsc` format. Thus, when building the base module,
+resources are compiled as both `resources.arsc` and `resources.proto`.
+
+## Debugging resource related errors when resource names are obfuscated
+
+An example message from a stacktrace could be something like this:
+```
+java.lang.IllegalStateException: Could not find CoordinatorLayout descendant
+view with id org.chromium.chrome:id/0_resource_name_obfuscated to anchor view
+android.view.ViewStub{be192d5 G.E...... ......I. 0,0-0,0 #7f0a02ad
+app:id/0_resource_name_obfuscated}
+```
+
+`0_resource_name_obfuscated` is the resource name for all resources that had
+their name obfuscated/stripped during the optimize resources step. To help with
+debugging, the `R.txt` file is archived. The `R.txt` file contains a mapping
+from resource ids to resource names and can be used to get the original resource
+name from the id. In the above message the id is `0x7f0a02ad`.
+
+For local builds, `R.txt` files are output in the `out/*/apks` directory.
+
+For official builds, Googlers can get archived `R.txt` files next to archived
+apks.
+
+### Adding resources to the whitelist
+
+If a resource is accessed via `getIdentifier()` it needs to be whitelisted in an
+aapt2 resources config file. The config file looks like this:
+
+```
+<resource type>/<resource name>#no_obfuscate
+```
+e.g.:
+```
+string/app_name#no_obfuscate
+id/toolbar#no_obfuscate
+```
+
+The aapt2 config file is passed to the ninja target through the
+`resources_config_path` variable. To add a resource to the whitelist, check
+where the config is for your target and add a new line for your resource. If
+none exist, create a new config file and pass its path in your target.
+
+### Webview resource ids
+
+The first byte of a resource id is the package id. For regular apks, this
+is `0x7f`. However, Webview is a shared library which gets loaded into other
+apks. The package id for webview resources is assigned dynamically at runtime.
+When webview is loaded it [rewrites all resources][ResourceRewriter.java] to
+have the correct package id. When deobfuscating webview resource ids, disregard
+the first byte of the id when looking it up in the `R.txt` file.
+
+Monochrome, when loaded as webview, rewrites the package ids of resources used
+by the webview portion to the correct value at runtime. When run as a regular
+apk, its resources keep the package id `0x7f`.
+
+[ResourceRewriter.java]: https://cs.chromium.org/chromium/src/out/android-Debug/gen/android_webview/glue/glue/generated_java/com/android/webview/chromium/ResourceRewriter.java
+
+## How R.java files are generated
+
+This is what a sample R.java file looks like:
+
+```
+package org.chromium.ui;
+
+public final class R {
+ public static final class attr {
+ public static final int buttonAlignment = 0x7f030038;
+ public static final int buttonColor = 0x7f03003e;
+ public static final int layout = 0x7f030094;
+ public static final int roundedfillColor = 0x7f0300bf;
+ public static final int secondaryButtonText = 0x7f0300c4;
+ public static final int stackedMargin = 0x7f0300d4;
+ }
+ public static final class id {
+ public static final int apart = 0x7f080021;
+ public static final int dropdown_body_footer_divider = 0x7f08003d;
+ public static final int dropdown_body_list = 0x7f08003e;
+ public static final int dropdown_footer = 0x7f08003f;
+ }
+ public static final class layout {
+ public static final int dropdown_item = 0x7f0a0022;
+ public static final int dropdown_window = 0x7f0a0023;
+ }
+}
+```
+
+R.java is a list of static classes, each with multiple static fields containing
+ids. These ids are used in java code to reference resources in the apk. The
+R.java file generated via the prepare resources step above has temporary ids
+which are not marked `final`. That R.java file is only used so that javac can
+compile the java code that references R.*.
+
+The R.java generated during the finalize apk resources step has
+permanent ids. These ids are marked as `final` (except webview resources that
+need to be [rewritten at runtime](#webview-resource-ids)).
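As a companion to the deobfuscation workflow the document above describes, a hypothetical helper that looks up an id such as `0x7f0a02ad` in an archived `R.txt` (plain-int lines have the form `int <type> <name> 0x<id>`). Passing `ignore_package_byte=True` covers the webview case where the package byte is rewritten at runtime:

```python
def resource_name(rtxt_path, resource_id, ignore_package_byte=False):
    """Returns '<type>/<name>' for a resource id, or None if not found."""
    mask = 0x00FFFFFF if ignore_package_byte else 0xFFFFFFFF
    with open(rtxt_path) as rtxt:
        for line in rtxt:
            fields = line.split()
            # Skip "int[] styleable ..." array entries, which have more
            # than four fields.
            if len(fields) == 4 and fields[0] == 'int':
                if int(fields[3], 16) & mask == resource_id & mask:
                    return '{}/{}'.format(fields[1], fields[2])
    return None
```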
diff --git a/chromium/build/android/gradle/android.jinja b/chromium/build/android/gradle/android.jinja
index 9ca39a07cfb..6826cea3c18 100644
--- a/chromium/build/android/gradle/android.jinja
+++ b/chromium/build/android/gradle/android.jinja
@@ -52,7 +52,7 @@ android {
defaultConfig {
vectorDrawables.useSupportLibrary = true
- minSdkVersion 16
+ minSdkVersion 19
targetSdkVersion {{ target_sdk_version }}
}
diff --git a/chromium/build/android/gradle/generate_gradle.py b/chromium/build/android/gradle/generate_gradle.py
index e741f50bacd..4bd307fb2f9 100755
--- a/chromium/build/android/gradle/generate_gradle.py
+++ b/chromium/build/android/gradle/generate_gradle.py
@@ -55,8 +55,6 @@ _DEFAULT_TARGETS = [
'//chrome/android:chrome_junit_tests',
'//chrome/android:chrome_public_apk',
'//chrome/android:chrome_public_test_apk',
- '//chrome/android:chrome_sync_shell_apk',
- '//chrome/android:chrome_sync_shell_test_apk',
'//content/public/android:content_junit_tests',
'//content/shell/android:content_shell_apk',
# Below must be included even with --all since they are libraries.
diff --git a/chromium/build/android/gradle/root.jinja b/chromium/build/android/gradle/root.jinja
index ff26840f00f..267070f41e6 100644
--- a/chromium/build/android/gradle/root.jinja
+++ b/chromium/build/android/gradle/root.jinja
@@ -10,7 +10,7 @@ buildscript {
}
dependencies {
{% if channel == 'canary' %}
- classpath "com.android.tools.build:gradle:3.3.0-alpha05"
+ classpath "com.android.tools.build:gradle:3.4.0-alpha03"
{% elif channel == 'beta' %}
classpath "com.android.tools.build:gradle:3.1.0-beta4"
{% else %}
diff --git a/chromium/build/android/gyp/apkbuilder.py b/chromium/build/android/gyp/apkbuilder.py
index ce61e7562c3..c1d62a873f2 100755
--- a/chromium/build/android/gyp/apkbuilder.py
+++ b/chromium/build/android/gyp/apkbuilder.py
@@ -57,6 +57,8 @@ def _ParseArgs(args):
help='Path to the *.apk.res.info file')
parser.add_argument('--dex-file',
help='Path to the classes.dex to use')
+ parser.add_argument('--uncompress-dex', action='store_true',
+ help='Store .dex files uncompressed in the APK')
parser.add_argument('--native-libs',
action='append',
help='GYP-list of native libraries to include. '
@@ -327,8 +329,9 @@ def main(args):
resource_infos = resource_apk.infolist()
# 1. AndroidManifest.xml
- assert resource_infos[0].filename == 'AndroidManifest.xml'
- copy_resource(resource_infos[0], out_dir=apk_manifest_dir)
+ copy_resource(
+ resource_apk.getinfo('AndroidManifest.xml'),
+ out_dir=apk_manifest_dir)
# 2. Assets
if options.write_asset_list:
@@ -344,10 +347,12 @@ def main(args):
with zipfile.ZipFile(options.dex_file, 'r') as dex_zip:
for dex in (d for d in dex_zip.namelist() if d.endswith('.dex')):
build_utils.AddToZipHermetic(out_apk, apk_dex_dir + dex,
- data=dex_zip.read(dex))
+ data=dex_zip.read(dex),
+ compress=not options.uncompress_dex)
elif options.dex_file:
build_utils.AddToZipHermetic(out_apk, apk_dex_dir + 'classes.dex',
- src_path=options.dex_file)
+ src_path=options.dex_file,
+ compress=not options.uncompress_dex)
# 4. Native libraries.
_AddNativeLibraries(out_apk,
@@ -376,8 +381,9 @@ def main(args):
build_utils.AddToZipHermetic(out_apk, apk_path, data='')
# 5. Resources
- for info in resource_infos[1:]:
- copy_resource(info)
+ for info in resource_infos:
+ if info.filename != 'AndroidManifest.xml':
+ copy_resource(info)
# 6. Java resources that should be accessible via
# Class.getResourceAsStream(), in particular parts of Emma jar.
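A sketch of the two properties the `build_utils.AddToZipHermetic` calls above rely on (this is not the actual Chromium helper): a fixed timestamp so output bytes do not depend on build time, plus a per-entry compression toggle like the one the new `--uncompress-dex` switch controls:

```python
import zipfile

def add_hermetic(zf, path, data, compress=True):
    """Writes |data| to an open ZipFile with a deterministic timestamp."""
    # 1980-01-01 is the earliest timestamp the zip format can represent.
    info = zipfile.ZipInfo(path, date_time=(1980, 1, 1, 0, 0, 0))
    info.compress_type = (zipfile.ZIP_DEFLATED if compress
                          else zipfile.ZIP_STORED)
    zf.writestr(info, data)
```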
diff --git a/chromium/build/android/gyp/bundletool.py b/chromium/build/android/gyp/bundletool.py
index cd803c18492..1f0b2bb772e 100755
--- a/chromium/build/android/gyp/bundletool.py
+++ b/chromium/build/android/gyp/bundletool.py
@@ -17,7 +17,7 @@ BUNDLETOOL_DIR = os.path.abspath(os.path.join(
__file__, '..', '..', '..', '..', 'third_party', 'android_build_tools',
'bundletool'))
-BUNDLETOOL_VERSION = '0.6.0'
+BUNDLETOOL_VERSION = '0.7.1'
BUNDLETOOL_JAR_PATH = os.path.join(
BUNDLETOOL_DIR, 'bundletool-all-%s.jar' % BUNDLETOOL_VERSION)
diff --git a/chromium/build/android/gyp/bytecode_processor.py b/chromium/build/android/gyp/bytecode_processor.py
index 00f395542fa..76775d3958a 100755
--- a/chromium/build/android/gyp/bytecode_processor.py
+++ b/chromium/build/android/gyp/bytecode_processor.py
@@ -7,6 +7,7 @@
import argparse
import os
+import subprocess
import sys
from util import build_utils
@@ -24,21 +25,41 @@ def main(argv):
help='Path to the java binary wrapper script.')
parser.add_argument('--input-jar', required=True)
parser.add_argument('--output-jar', required=True)
- parser.add_argument('--extra-classpath-jar', dest='extra_jars',
+ parser.add_argument('--direct-classpath-jars', required=True)
+ parser.add_argument('--sdk-classpath-jars', required=True)
+ parser.add_argument('--extra-classpath-jars', dest='extra_jars',
action='append', default=[],
help='Extra inputs, passed last to the binary script.')
+ parser.add_argument('-v', '--verbose', action='store_true')
+ _AddSwitch(parser, '--is-prebuilt')
_AddSwitch(parser, '--enable-custom-resources')
_AddSwitch(parser, '--enable-assert')
_AddSwitch(parser, '--enable-thread-annotations')
+ _AddSwitch(parser, '--enable-check-class-path')
args = parser.parse_args(argv)
+
+ sdk_jars = build_utils.ParseGnList(args.sdk_classpath_jars)
+ assert len(sdk_jars) > 0
+
+ direct_jars = build_utils.ParseGnList(args.direct_classpath_jars)
+ assert len(direct_jars) > 0
+
extra_classpath_jars = []
for a in args.extra_jars:
extra_classpath_jars.extend(build_utils.ParseGnList(a))
- cmd = [args.script, args.input_jar, args.output_jar, args.enable_assert,
- args.enable_custom_resources,
- args.enable_thread_annotations] + extra_classpath_jars
- build_utils.CheckOutput(cmd)
+ if args.verbose:
+ verbose = '--verbose'
+ else:
+ verbose = '--not-verbose'
+
+ cmd = ([
+ args.script, args.input_jar, args.output_jar, verbose, args.is_prebuilt,
+ args.enable_assert, args.enable_custom_resources,
+ args.enable_thread_annotations, args.enable_check_class_path,
+ str(len(sdk_jars))
+ ] + sdk_jars + [str(len(direct_jars))] + direct_jars + extra_classpath_jars)
+ subprocess.check_call(cmd)
if __name__ == '__main__':
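The command line built above passes the jar lists to the Java processor as length-prefixed groups (`str(len(sdk_jars))` followed by the jars themselves), which `ByteCodeProcessor.main()` consumes with `Integer.parseInt`. A sketch of both sides of that encoding, with illustrative names:

```python
def encode_groups(*groups):
    """Flattens jar-path groups into ['<count>', <paths...>, ...] form."""
    args = []
    for group in groups:
        args.append(str(len(group)))
        args.extend(group)
    return args

def decode_group(args, start):
    """Reads one length-prefixed group; returns (group, next_index)."""
    count = int(args[start])
    return args[start + 1:start + 1 + count], start + 1 + count

tail = encode_groups(['android.jar'], ['dep1.jar', 'dep2.jar'])
sdk_jars, idx = decode_group(tail, 0)
direct_jars, idx = decode_group(tail, idx)
assert sdk_jars == ['android.jar'] and direct_jars == ['dep1.jar', 'dep2.jar']
```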
diff --git a/chromium/build/android/gyp/compile_resources.py b/chromium/build/android/gyp/compile_resources.py
index 64a5cedf58b..f9c23fab2ad 100755
--- a/chromium/build/android/gyp/compile_resources.py
+++ b/chromium/build/android/gyp/compile_resources.py
@@ -27,6 +27,11 @@ from xml.etree import ElementTree
from util import build_utils
from util import resource_utils
+# Name of environment variable that can be used to force this script to
+# put temporary resource files into a specific directory, instead of
+# randomly-named temporary ones.
+_ENV_DEBUG_VARIABLE = 'ANDROID_DEBUG_TEMP_RESOURCES_DIR'
+
# Import jinja2 from third_party/jinja2
sys.path.insert(1, os.path.join(build_utils.DIR_SOURCE_ROOT, 'third_party'))
from jinja2 import Template # pylint: disable=F0401
@@ -136,6 +141,17 @@ def _ParseArgs(args):
action='store_true',
help='Whether to strip xml namespaces from processed '
'xml resources')
+ input_opts.add_argument(
+ '--resources-config-path', help='Path to aapt2 resources config file.')
+ input_opts.add_argument(
+ '--optimize-resources',
+ default=False,
+ action='store_true',
+ help='Whether to run the `aapt2 optimize` step on the resources.')
+ input_opts.add_argument(
+ '--unoptimized-resources-path',
+ help='Path to output the intermediate apk before running '
+ '`aapt2 optimize`.')
input_opts.add_argument(
'--check-resources-pkg-id', type=_PackageIdArgument,
@@ -301,7 +317,6 @@ def _CreateLinkApkArgs(options):
'--version-name', options.version_name,
'--auto-add-overlay',
'--no-version-vectors',
- '-o', options.apk_path,
]
for j in options.include_resources:
@@ -476,7 +491,11 @@ def _CompileDeps(aapt2_path, dep_subdirs, temp_dir):
partial_path = os.path.join(partials_dir, dirname + '.zip')
compile_command = (partial_compile_command +
['--dir', directory, '-o', partial_path])
- build_utils.CheckOutput(compile_command)
+ build_utils.CheckOutput(
+ compile_command,
+ stderr_filter=lambda output:
+ build_utils.FilterLines(
+ output, r'ignoring configuration .* for styleable'))
# Sorting the files in the partial ensures deterministic output from the
# aapt2 link step which uses order of files in the partial.
@@ -536,7 +555,15 @@ def _PackageApk(options, dep_subdirs, temp_dir, gen_dir, r_txt_path):
for directory in dep_subdirs:
renamed_paths.update(_MoveImagesToNonMdpiFolders(directory))
+ if options.optimize_resources:
+ if options.unoptimized_resources_path:
+ unoptimized_apk_path = options.unoptimized_resources_path
+ else:
+ unoptimized_apk_path = os.path.join(gen_dir, 'intermediate.ap_')
+ else:
+ unoptimized_apk_path = options.apk_path
link_command = _CreateLinkApkArgs(options)
+ link_command += ['-o', unoptimized_apk_path]
link_command += ['--output-text-symbols', r_txt_path]
# TODO(digit): Is this below actually required for R.txt generation?
link_command += ['--java', gen_dir]
@@ -552,10 +579,69 @@ def _PackageApk(options, dep_subdirs, temp_dir, gen_dir, r_txt_path):
# Also creates R.txt
build_utils.CheckOutput(
link_command, print_stdout=False, print_stderr=False)
+
+ if options.optimize_resources:
+ _OptimizeApk(options, temp_dir, unoptimized_apk_path, r_txt_path)
+
_CreateResourceInfoFile(
renamed_paths, options.apk_info_path, options.dependencies_res_zips)
+def _OptimizeApk(options, temp_dir, unoptimized_apk_path, r_txt_path):
+ """Optimize intermediate .ap_ file with aapt2.
+
+ Args:
+ options: The command-line options tuple. E.g. the generated apk
+ will be written to |options.apk_path|.
+ temp_dir: A temporary directory.
+    unoptimized_apk_path: Path of the apk to optimize.
+    r_txt_path: Path to the R.txt file of the unoptimized apk.
+ """
+  # Resources of type ID are references to UI elements/views. They are used by
+  # UI automation testing frameworks. They are kept in so that they don't break
+  # tests, even though they may not actually be used at runtime. See
+  # https://crbug.com/900993
+ id_resources = _ExtractIdResources(r_txt_path)
+ gen_config_path = os.path.join(temp_dir, 'aapt2.config')
+ if options.resources_config_path:
+ shutil.copyfile(options.resources_config_path, gen_config_path)
+ with open(gen_config_path, 'a+') as config:
+ for resource in id_resources:
+ config.write('{}#no_obfuscate\n'.format(resource))
+
+  # Optimize the resources.arsc file by obfuscating resource names and
+  # allowing access only via R.java constants.
+ optimize_command = [
+ options.aapt2_path,
+ 'optimize',
+ '--enable-resource-obfuscation',
+ '-o',
+ options.apk_path,
+ '--resources-config-path',
+ gen_config_path,
+ unoptimized_apk_path,
+ ]
+ build_utils.CheckOutput(
+ optimize_command, print_stdout=False, print_stderr=False)
+
+
+def _ExtractIdResources(rtxt_path):
+ """Extract resources of type ID from the R.txt file
+
+ Args:
+ rtxt_path: Path to R.txt file with all the resources
+ Returns:
+ List of id resources in the form of id/<resource_name>
+ """
+ id_resources = []
+ with open(rtxt_path) as rtxt:
+ for line in rtxt:
+ if ' id ' in line:
+ resource_name = line.split()[2]
+ id_resources.append('id/{}'.format(resource_name))
+ return id_resources
+
+
def _WriteFinalRTxtFile(options, aapt_r_txt_path):
"""Determine final R.txt and return its location.
@@ -581,8 +667,8 @@ def _WriteFinalRTxtFile(options, aapt_r_txt_path):
return r_txt_file
-def _OnStaleMd5(options):
- with resource_utils.BuildContext() as build:
+def _OnStaleMd5(options, debug_temp_resources_dir):
+ with resource_utils.BuildContext(debug_temp_resources_dir) as build:
dep_subdirs = resource_utils.ExtractDeps(options.dependencies_res_zips,
build.deps_dir)
@@ -634,34 +720,45 @@ def main(args):
# Order of these must match order specified in GN so that the correct one
# appears first in the depfile.
possible_output_paths = [
- options.apk_path,
- options.apk_path + '.info',
- options.r_text_out,
- options.srcjar_out,
- options.proguard_file,
- options.proguard_file_main_dex,
+ options.apk_path,
+ options.apk_path + '.info',
+ options.r_text_out,
+ options.srcjar_out,
+ options.proguard_file,
+ options.proguard_file_main_dex,
+ options.unoptimized_resources_path,
]
output_paths = [x for x in possible_output_paths if x]
# List python deps in input_strings rather than input_paths since their
# contents do not change what gets written to the depfile.
input_strings = options.extra_res_packages + [
- options.shared_resources,
- options.resource_blacklist_regex,
- options.resource_blacklist_exceptions,
- str(options.debuggable),
- str(options.png_to_webp),
- str(options.support_zh_hk),
- str(options.no_xml_namespaces),
+ options.shared_resources,
+ options.resource_blacklist_regex,
+ options.resource_blacklist_exceptions,
+ str(options.debuggable),
+ str(options.png_to_webp),
+ str(options.support_zh_hk),
+ str(options.no_xml_namespaces),
+ str(options.optimize_resources),
]
input_strings.extend(_CreateLinkApkArgs(options))
+ debug_temp_resources_dir = os.environ.get(_ENV_DEBUG_VARIABLE)
+ if debug_temp_resources_dir:
+ debug_temp_resources_dir = os.path.join(debug_temp_resources_dir,
+ os.path.basename(options.apk_path))
+ build_utils.DeleteDirectory(debug_temp_resources_dir)
+ build_utils.MakeDirectory(debug_temp_resources_dir)
+
+
possible_input_paths = [
- options.aapt_path,
- options.aapt2_path,
- options.android_manifest,
- options.shared_resources_whitelist,
+ options.aapt_path,
+ options.aapt2_path,
+ options.android_manifest,
+ options.shared_resources_whitelist,
+ options.resources_config_path,
]
possible_input_paths += options.include_resources
input_paths = [x for x in possible_input_paths if x]
@@ -672,11 +769,12 @@ def main(args):
input_paths.append(options.webp_binary)
build_utils.CallAndWriteDepfileIfStale(
- lambda: _OnStaleMd5(options),
+ lambda: _OnStaleMd5(options, debug_temp_resources_dir),
options,
input_paths=input_paths,
input_strings=input_strings,
output_paths=output_paths,
+ force=bool(debug_temp_resources_dir),
depfile_deps=options.dependencies_res_zips + options.extra_r_text_files,
add_pydeps=False)
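
To make the ID-resource handling above concrete, this is what _ExtractIdResources and the appended aapt2 config line produce for one sample R.txt entry (the resource name is invented):

    line = 'int id toolbar_button 0x7f0b0042'   # sample R.txt entry
    if ' id ' in line:
      resource = 'id/{}'.format(line.split()[2])       # 'id/toolbar_button'
      config_line = '{}#no_obfuscate'.format(resource)
      # config_line == 'id/toolbar_button#no_obfuscate', which tells
      # `aapt2 optimize` to keep this resource name un-obfuscated.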
diff --git a/chromium/build/android/gyp/create_app_bundle.py b/chromium/build/android/gyp/create_app_bundle.py
index d58bf3003c3..c787bbd40be 100755
--- a/chromium/build/android/gyp/create_app_bundle.py
+++ b/chromium/build/android/gyp/create_app_bundle.py
@@ -50,6 +50,10 @@ def _ParseArgs(args):
help='Output bundle zip archive.')
parser.add_argument('--module-zips', required=True,
help='GN-list of module zip archives.')
+ parser.add_argument(
+ '--rtxt-in-paths', action='append', help='GN-list of module R.txt files.')
+ parser.add_argument(
+ '--rtxt-out-path', help='Path to combined R.txt file for bundle.')
parser.add_argument('--uncompressed-assets', action='append',
help='GN-list of uncompressed assets.')
parser.add_argument('--uncompress-shared-libraries', action='append',
@@ -63,6 +67,7 @@ def _ParseArgs(args):
options = parser.parse_args(args)
options.module_zips = build_utils.ParseGnList(options.module_zips)
+  options.rtxt_in_paths = build_utils.ParseGnList(options.rtxt_in_paths)
if len(options.module_zips) == 0:
raise Exception('The module zip list cannot be empty.')
@@ -166,9 +171,11 @@ def _RewriteLanguageAssetPath(src_path):
This will rewrite paths that look like locales/<locale>.pak into
locales#<language>/<locale>.pak, where <language> is the language code
from the locale.
+
+ Returns new path.
"""
   if not src_path.startswith(_LOCALES_SUBDIR) or not src_path.endswith('.pak'):
     return src_path
locale = src_path[len(_LOCALES_SUBDIR):-4]
android_locale = resource_utils.CHROME_TO_ANDROID_LOCALE_MAP.get(
@@ -182,9 +189,15 @@ def _RewriteLanguageAssetPath(src_path):
android_language = android_locale
if android_language == _FALLBACK_LANGUAGE:
- return 'assets/locales/%s.pak' % locale
+ # Fallback language .pak files must be placed in a different directory
+ # to ensure they are always stored in the base module.
+ result_path = 'assets/fallback-locales/%s.pak' % locale
+ else:
+ # Other language .pak files go into a language-specific asset directory
+ # that bundletool will store in separate split APKs.
+ result_path = 'assets/locales#lang_%s/%s.pak' % (android_language, locale)
- return 'assets/locales#lang_%s/%s.pak' % (android_language, locale)
+ return result_path
def _SplitModuleForAssetTargeting(src_module_zip, tmp_dir, split_dimensions):
@@ -225,9 +238,11 @@ def _SplitModuleForAssetTargeting(src_module_zip, tmp_dir, split_dimensions):
if src_path in language_files:
dst_path = _RewriteLanguageAssetPath(src_path)
- build_utils.AddToZipHermetic(dst_zip, dst_path,
- data=src_zip.read(src_path),
- compress=is_compressed)
+ build_utils.AddToZipHermetic(
+ dst_zip,
+ dst_path,
+ data=src_zip.read(src_path),
+ compress=is_compressed)
return tmp_zip
@@ -285,6 +300,14 @@ def main(args):
shutil.move(tmp_bundle, options.out_bundle)
+ if options.rtxt_out_path:
+ with open(options.rtxt_out_path, 'w') as rtxt_out:
+ for rtxt_in_path in options.rtxt_in_paths:
+ with open(rtxt_in_path, 'r') as rtxt_in:
+ rtxt_out.write('-- Contents of {}\n'.format(
+ os.path.basename(rtxt_in_path)))
+ rtxt_out.write(rtxt_in.read())
+
if __name__ == '__main__':
main(sys.argv[1:])
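
The asset-path rewriting above yields mappings like the following (locale values illustrative; assumes 'en' is _FALLBACK_LANGUAGE and that these locales map to themselves in CHROME_TO_ANDROID_LOCALE_MAP):

    # 'locales/fr.pak'    -> 'assets/locales#lang_fr/fr.pak'     (language split)
    # 'locales/en-US.pak' -> 'assets/fallback-locales/en-US.pak' (base module)
    # 'resources.pak'     -> 'resources.pak' (not under locales/, unchanged)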
diff --git a/chromium/build/android/gyp/dex.py b/chromium/build/android/gyp/dex.py
index 7fba5db41ee..74cdb94e05e 100755
--- a/chromium/build/android/gyp/dex.py
+++ b/chromium/build/android/gyp/dex.py
@@ -171,40 +171,6 @@ def _EnvWithArtLibPath(binary_path):
return env
-def _FilterOutput(output, filter_strings):
- """Output filter from build_utils.CheckOutput.
-
- Args:
- output: Executable output as from build_utils.CheckOutput.
- filter_strings: List of RE strings that will filter (remove) matching
- lines from |output|.
-
- Returns:
- The filtered output, as a single string.
- """
- filters = [re.compile(f) for f in filter_strings]
- filtered_output = []
- for line in output.splitlines():
- if any(filter.search(line) for filter in filters):
- continue
- else:
- filtered_output.append(line)
- return '\n'.join(filtered_output)
-
-
-def _FilterProfmanStderr(output):
- return _FilterOutput(output, [
- r'Could not find (method_id|proto_id|name):',
- r'Could not create type list',
- ])
-
-
-def _FilterDexlayoutStderr(output):
- return _FilterOutput(output, [
- r'Can.t mmap dex file.*please zipalign',
- ])
-
-
def _CreateBinaryProfile(text_profile, input_dex, profman_path, temp_dir):
"""Create a binary profile for dexlayout.
@@ -226,8 +192,13 @@ def _CreateBinaryProfile(text_profile, input_dex, profman_path, temp_dir):
'--dex-location=' + input_dex,
'--create-profile-from=' + text_profile,
'--reference-profile-file=' + binary_profile]
- build_utils.CheckOutput(profman_cmd, env=_EnvWithArtLibPath(profman_path),
- stderr_filter=_FilterProfmanStderr)
+ build_utils.CheckOutput(
+ profman_cmd,
+ env=_EnvWithArtLibPath(profman_path),
+ stderr_filter=lambda output:
+ build_utils.FilterLines(output, '|'.join(
+ [r'Could not find (method_id|proto_id|name):',
+ r'Could not create type list'])))
return binary_profile
@@ -252,8 +223,12 @@ def _LayoutDex(binary_profile, input_dex, dexlayout_path, temp_dir):
'-p', binary_profile,
'-w', dexlayout_output_dir,
input_dex ]
- build_utils.CheckOutput(dexlayout_cmd, env=_EnvWithArtLibPath(dexlayout_path),
- stderr_filter=_FilterDexlayoutStderr)
+ build_utils.CheckOutput(
+ dexlayout_cmd,
+ env=_EnvWithArtLibPath(dexlayout_path),
+ stderr_filter=lambda output:
+ build_utils.FilterLines(output,
+ r'Can.t mmap dex file.*please zipalign'))
output_files = os.listdir(dexlayout_output_dir)
if not output_files:
raise Exception('dexlayout unexpectedly produced no output')
@@ -339,15 +314,18 @@ def main(args):
# by creating an empty JAR
with zipfile.ZipFile(options.dex_path, 'w') as outfile:
outfile.comment = 'empty'
- elif is_dex:
+ else:
# .dex files can't specify a name for D8. Instead, we output them to a
# temp directory then move them after the command has finished running
# (see _MoveTempDexFile). For other files, tmp_dex_dir is None.
with build_utils.TempDir() as tmp_dex_dir:
_RunD8(dex_cmd, paths, tmp_dex_dir)
- _MoveTempDexFile(tmp_dex_dir, options.dex_path)
- else:
- _RunD8(dex_cmd, paths, options.dex_path)
+ if is_dex:
+ _MoveTempDexFile(tmp_dex_dir, options.dex_path)
+ else:
+ # d8 supports outputting to a .zip, but does not have deterministic file
+ # ordering: https://issuetracker.google.com/issues/119945929
+ build_utils.ZipDir(options.dex_path, tmp_dex_dir)
if options.dexlayout_profile:
with build_utils.TempDir() as temp_dir:
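
The inlined stderr filters above rely on the new build_utils.FilterLines helper (added in the build_utils.py diff further down), which drops every line matching a single regular expression. A small self-contained illustration:

    output = 'kept line\nCould not create type list\nanother kept line'
    filtered = build_utils.FilterLines(
        output,
        r'Could not find (method_id|proto_id|name):|Could not create type list')
    assert filtered == 'kept line\nanother kept line'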
diff --git a/chromium/build/android/gyp/dexsplitter.py b/chromium/build/android/gyp/dexsplitter.py
new file mode 100755
index 00000000000..02b047c7de1
--- /dev/null
+++ b/chromium/build/android/gyp/dexsplitter.py
@@ -0,0 +1,133 @@
+#!/usr/bin/env python
+#
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import shutil
+import sys
+import zipfile
+
+from util import build_utils
+
+
+def _ParseOptions(args):
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--depfile', help='Path to the depfile to write to.')
+ parser.add_argument('--stamp', help='Path to stamp to mark when finished.')
+ parser.add_argument('--r8-path', help='Path to the r8.jar to use.')
+ parser.add_argument(
+ '--input-dex-zip', help='Path to dex files in zip being split.')
+ parser.add_argument(
+ '--proguard-mapping-file', help='Path to proguard mapping file.')
+ parser.add_argument(
+ '--feature-name',
+ action='append',
+ dest='feature_names',
+ help='The name of the feature module.')
+ parser.add_argument(
+ '--feature-jars',
+ action='append',
+    help='GN-list of paths to jars which comprise the corresponding feature.')
+ parser.add_argument(
+ '--dex-dest',
+ action='append',
+ dest='dex_dests',
+ help='Destination for dex file of the corresponding feature.')
+ options = parser.parse_args(args)
+
+ assert len(options.feature_names) == len(options.feature_jars) and len(
+ options.feature_names) == len(options.dex_dests)
+ options.features = {}
+ for i, name in enumerate(options.feature_names):
+ options.features[name] = build_utils.ParseGnList(options.feature_jars[i])
+
+ return options
+
+
+def _RunDexsplitter(options, output_dir):
+ cmd = [
+ 'java',
+ '-jar',
+ options.r8_path,
+ 'dexsplitter',
+ '--output',
+ output_dir,
+ '--proguard-map',
+ options.proguard_mapping_file,
+ ]
+
+ for base_jar in options.features['base']:
+ cmd += ['--base-jar', base_jar]
+
+ base_jars_lookup = set(options.features['base'])
+ for feature in options.features:
+ if feature == 'base':
+ continue
+ for feature_jar in options.features[feature]:
+ if feature_jar not in base_jars_lookup:
+ cmd += ['--feature-jar', feature_jar + ':' + feature]
+
+ with build_utils.TempDir() as temp_dir:
+ unzipped_files = build_utils.ExtractAll(options.input_dex_zip, temp_dir)
+ for file_name in unzipped_files:
+ cmd += ['--input', file_name]
+ build_utils.CheckOutput(cmd)
+
+
+def main(args):
+ args = build_utils.ExpandFileArgs(args)
+ options = _ParseOptions(args)
+
+ input_paths = []
+ for feature_jars in options.features.itervalues():
+ for feature_jar in feature_jars:
+ input_paths.append(feature_jar)
+
+ with build_utils.TempDir() as dexsplitter_output_dir:
+ curr_location_to_dest = []
+ if len(options.features) == 1:
+      # Don't run dexsplitter since it requires at least one non-base
+      # feature module.
+ curr_location_to_dest.append((options.input_dex_zip,
+ options.dex_dests[0]))
+ else:
+ _RunDexsplitter(options, dexsplitter_output_dir)
+
+ for i, dest in enumerate(options.dex_dests):
+ module_dex_file = os.path.join(dexsplitter_output_dir,
+ options.feature_names[i], 'classes.dex')
+ if os.path.exists(module_dex_file):
+ curr_location_to_dest.append((module_dex_file, dest))
+ else:
+ module_dex_file += '.zip'
+ assert os.path.exists(
+ module_dex_file), 'Dexsplitter tool output not found.'
+        curr_location_to_dest.append((module_dex_file, dest))
+
+ for curr_location, dest in curr_location_to_dest:
+ with build_utils.AtomicOutput(dest) as f:
+ if curr_location.endswith('.zip'):
+ if dest.endswith('.zip'):
+ shutil.move(curr_location, f.name)
+ else:
+ with zipfile.ZipFile(curr_location, 'r') as z:
+ namelist = z.namelist()
+          assert len(namelist) == 1, (
+              'Unzipping to a single dex file, but more than one file found '
+              'in ' + options.input_dex_zip)
+ z.extract(namelist[0], f.name)
+ else:
+ if dest.endswith('.zip'):
+ build_utils.ZipDir(
+ f.name, os.path.abspath(os.path.join(curr_location, os.pardir)))
+ else:
+ shutil.move(curr_location, f.name)
+
+ build_utils.Touch(options.stamp)
+ build_utils.WriteDepfile(options.depfile, options.stamp, inputs=input_paths)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
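
For a hypothetical bundle with features {'base': ['base.jar'], 'vr': ['vr.jar']} and mapping file 'app.mapping', _RunDexsplitter builds roughly the following command (paths invented; one --input flag is appended per extracted dex file):

    cmd = [
        'java', '-jar', '<r8-path>/r8.jar', 'dexsplitter',
        '--output', '<temp-dir>',
        '--proguard-map', 'app.mapping',
        '--base-jar', 'base.jar',
        '--feature-jar', 'vr.jar:vr',
        '--input', '<extracted>/classes.dex',
    ]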
diff --git a/chromium/build/android/gyp/generate_proguarded_module_jar.pydeps b/chromium/build/android/gyp/dexsplitter.pydeps
index 6d52b4ec543..5935d238853 100644
--- a/chromium/build/android/gyp/generate_proguarded_module_jar.pydeps
+++ b/chromium/build/android/gyp/dexsplitter.pydeps
@@ -1,7 +1,7 @@
# Generated by running:
-# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/generate_proguarded_module_jar.pydeps build/android/gyp/generate_proguarded_module_jar.py
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/dexsplitter.pydeps build/android/gyp/dexsplitter.py
../../gn_helpers.py
-generate_proguarded_module_jar.py
+dexsplitter.py
util/__init__.py
util/build_utils.py
util/md5_check.py
diff --git a/chromium/build/android/gyp/generate_linker_version_script.py b/chromium/build/android/gyp/generate_linker_version_script.py
new file mode 100755
index 00000000000..5ffff033c31
--- /dev/null
+++ b/chromium/build/android/gyp/generate_linker_version_script.py
@@ -0,0 +1,70 @@
+#!/usr/bin/env vpython
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Generate linker version scripts for Chrome on Android shared libraries."""
+
+import argparse
+import os
+
+from util import build_utils
+
+_SCRIPT_HEADER = """\
+# AUTO-GENERATED FILE. DO NOT MODIFY.
+#
+# See: %s
+
+{
+ global:
+""" % os.path.relpath(__file__, build_utils.DIR_SOURCE_ROOT)
+
+_SCRIPT_FOOTER = """\
+ local:
+ *;
+};
+"""
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument(
+ '--output',
+ required=True,
+ help='Path to output linker version script file.')
+ parser.add_argument(
+ '--export-java-symbols',
+ action='store_true',
+ help='Export Java_* JNI methods')
+ parser.add_argument(
+ '--export-symbol-whitelist-file',
+ help='Path to input file containing whitelist of extra '
+ 'symbols to export. One symbol per line.')
+ options = parser.parse_args()
+
+ # JNI_OnLoad is always exported.
+ symbol_list = ['JNI_OnLoad']
+
+ if options.export_java_symbols:
+ symbol_list.append('Java_*')
+
+ if options.export_symbol_whitelist_file:
+ with open(options.export_symbol_whitelist_file, 'rt') as f:
+ for line in f:
+ line = line.strip()
+ if not line or line[0] == '#':
+ continue
+ symbol_list.append(line)
+
+ script_content = [_SCRIPT_HEADER]
+ for symbol in symbol_list:
+ script_content.append(' %s;\n' % symbol)
+ script_content.append(_SCRIPT_FOOTER)
+
+ script = ''.join(script_content)
+
+ with build_utils.AtomicOutput(options.output) as f:
+ f.write(script)
+
+
+if __name__ == '__main__':
+ main()
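
For reference, running the script with --export-java-symbols and no whitelist file should emit a version script along these lines (derived from _SCRIPT_HEADER and _SCRIPT_FOOTER above):

    # AUTO-GENERATED FILE. DO NOT MODIFY.
    #
    # See: build/android/gyp/generate_linker_version_script.py

    {
      global:
        JNI_OnLoad;
        Java_*;
      local:
        *;
    };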
diff --git a/chromium/build/android/gyp/generate_linker_version_script.pydeps b/chromium/build/android/gyp/generate_linker_version_script.pydeps
new file mode 100644
index 00000000000..d1e3ad61819
--- /dev/null
+++ b/chromium/build/android/gyp/generate_linker_version_script.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/generate_linker_version_script.pydeps build/android/gyp/generate_linker_version_script.py
+../../gn_helpers.py
+generate_linker_version_script.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
diff --git a/chromium/build/android/gyp/generate_proguarded_module_jar.py b/chromium/build/android/gyp/generate_proguarded_module_jar.py
deleted file mode 100755
index 97b3027a51e..00000000000
--- a/chromium/build/android/gyp/generate_proguarded_module_jar.py
+++ /dev/null
@@ -1,159 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright (c) 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Extracts a bundle module's classes from jar created in the synchronized
-proguarding step and packages them into a new jar.
-
-Synchronized proguarding means that, when several app modules are combined into
-an app bundle, all un-optimized jars for all modules are grouped and sent to a
-single proguard command, which generates a single, common, intermediate
-optimized jar, and its mapping file.
-
-This script is used to extract, from this synchronized proguard jar, all the
-optimized classes corresponding to a single module, into a new .jar file. The
-latter will be compiled later into the module's dex file.
-
-For this, the script reads the module's un-obfuscated class names from the
-module's unoptimized jars. Then, it maps those to obfuscated class names using
-the proguard mapping file. Finally, it extracts the module's class files from
-the proguarded jar and zips them into a new module jar. """
-
-import argparse
-import os
-import sys
-import zipfile
-
-from util import build_utils
-
-MANIFEST = """Manifest-Version: 1.0
-Created-By: generate_proguarded_module_jar.py
-"""
-
-
-# TODO(tiborg): Share with merge_jar_info_files.py.
-def _FullJavaNameFromClassFilePath(path):
- if not path.endswith('.class'):
- return ''
- path = os.path.splitext(path)[0]
- parts = []
- while path:
- # Use split to be platform independent.
- head, tail = os.path.split(path)
- path = head
- parts.append(tail)
- parts.reverse() # Package comes first
- return '.'.join(parts)
-
-
-def main(args):
- args = build_utils.ExpandFileArgs(args)
- parser = argparse.ArgumentParser()
- build_utils.AddDepfileOption(parser)
- parser.add_argument(
- '--proguarded-jar',
- required=True,
- help='Path to input jar produced by synchronized proguarding')
- parser.add_argument(
- '--proguard-mapping',
- required=True,
- help='Path to input proguard mapping produced by synchronized '
- 'proguarding')
- parser.add_argument(
- '--module-input-jars',
- required=True,
- help='GN-list of input paths to un-optimized jar files for the current '
- 'module. The optimized versions of their .class files will go into '
- 'the output jar.')
- parser.add_argument(
- '--output-jar',
- required=True,
- help='Path to output jar file containing the module\'s optimized class '
- 'files')
- parser.add_argument(
- '--is-base-module',
- action='store_true',
- help='Inidcates to extract class files for a base module')
- options = parser.parse_args(args)
- options.module_input_jars = build_utils.ParseGnList(options.module_input_jars)
-
- # Read class names of the currently processed module.
- classes = set()
- for module_jar in options.module_input_jars:
- with zipfile.ZipFile(module_jar) as zip_info:
- for path in zip_info.namelist():
- fully_qualified_name = _FullJavaNameFromClassFilePath(path)
- if fully_qualified_name:
- classes.add(fully_qualified_name)
-
- # Parse the proguarding mapping to be able to map un-obfuscated to obfuscated
- # names.
- # Proguard mapping files have the following format:
- #
- # {un-obfuscated class name 1} -> {obfuscated class name 1}:
- # {un-obfuscated member name 1} -> {obfuscated member name 1}
- # ...
- # {un-obfuscated class name 2} -> {obfuscated class name 2}:
- # ...
- # ...
- obfuscation_map = {}
- with open(options.proguard_mapping, 'r') as proguard_mapping_file:
- for line in proguard_mapping_file:
- # Skip indented lines since they map member names and not class names.
- if line.startswith(' '):
- continue
- line = line.strip()
- # Skip empty lines.
- if not line:
- continue
- assert line.endswith(':')
- full, obfuscated = line.strip(':').split(' -> ')
- assert full
- assert obfuscated
- obfuscation_map[full] = obfuscated
-
- # Collect the obfuscated names of classes, which should go into the currently
- # processed module.
- obfuscated_module_classes = set(
- obfuscation_map[c] for c in classes if c in obfuscation_map)
-
- # Collect horizontally merged classes to later make sure that those only go
- # into the base module. Merging classes horizontally means that proguard took
- # two classes that don't inherit from each other and merged them into one.
- horiz_merged_classes = set()
- obfuscated_classes = sorted(obfuscation_map.values())
- prev_obfuscated_class = None
- for obfuscated_class in obfuscated_classes:
- if prev_obfuscated_class and obfuscated_class == prev_obfuscated_class:
- horiz_merged_classes.add(obfuscated_class)
- prev_obfuscated_class = obfuscated_class
-
- # Move horizontally merged classes into the base module.
- if options.is_base_module:
- obfuscated_module_classes |= horiz_merged_classes
- else:
- obfuscated_module_classes -= horiz_merged_classes
-
- # Extract module class files from proguarded jar and store them in a module
- # split jar.
- with zipfile.ZipFile(
- os.path.abspath(options.output_jar), 'w',
- zipfile.ZIP_DEFLATED) as output_jar:
- with zipfile.ZipFile(os.path.abspath(options.proguarded_jar),
- 'r') as proguarded_jar:
- for obfuscated_class in obfuscated_module_classes:
- class_path = obfuscated_class.replace('.', '/') + '.class'
- class_file_content = proguarded_jar.read(class_path)
- output_jar.writestr(class_path, class_file_content)
- output_jar.writestr('META-INF/MANIFEST.MF', MANIFEST)
-
- if options.depfile:
- build_utils.WriteDepfile(
- options.depfile, options.output_jar, options.module_input_jars +
- [options.proguard_mapping, options.proguarded_jar], add_pydeps=False)
-
-
-if __name__ == '__main__':
- main(sys.argv[1:])
diff --git a/chromium/build/android/gyp/jar.py b/chromium/build/android/gyp/jar.py
index 3e85c3b67b5..fcfb7a5f861 100755
--- a/chromium/build/android/gyp/jar.py
+++ b/chromium/build/android/gyp/jar.py
@@ -63,12 +63,12 @@ def Jar(class_files, classes_dir, jar_path, manifest_file=None,
def JarDirectory(classes_dir, jar_path, manifest_file=None, predicate=None,
provider_configurations=None, additional_files=None):
- all_classes = sorted(build_utils.FindInDirectory(classes_dir, '*.class'))
+ all_files = sorted(build_utils.FindInDirectory(classes_dir, '*'))
if predicate:
- all_classes = [
- f for f in all_classes if predicate(os.path.relpath(f, classes_dir))]
+ all_files = [
+ f for f in all_files if predicate(os.path.relpath(f, classes_dir))]
- Jar(all_classes, classes_dir, jar_path, manifest_file=manifest_file,
+ Jar(all_files, classes_dir, jar_path, manifest_file=manifest_file,
provider_configurations=provider_configurations,
additional_files=additional_files)
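
Since JarDirectory now collects all files rather than only *.class, the predicate is the remaining filter hook. A hypothetical use that excludes META-INF/ entries (paths invented):

    jar.JarDirectory(
        'out/classes', 'out/foo.jar',
        predicate=lambda path: not path.startswith('META-INF/'))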
diff --git a/chromium/build/android/gyp/javac.py b/chromium/build/android/gyp/javac.py
index 13eb7b1d031..5656abd75bc 100755
--- a/chromium/build/android/gyp/javac.py
+++ b/chromium/build/android/gyp/javac.py
@@ -259,6 +259,10 @@ def _OnStaleMd5(changes, options, javac_cmd, java_files, classpath_inputs,
# Don't bother enabling incremental compilation for non-chromium code.
incremental = options.incremental and options.chromium_code
+  # Compilation with Error Prone takes twice as long as pure javac, so GN
+  # rules run both in parallel, with Error Prone used only for checks.
+ save_outputs = not options.use_errorprone_path
+
with build_utils.TempDir() as temp_dir:
srcjars = options.java_srcjars
@@ -292,7 +296,11 @@ def _OnStaleMd5(changes, options, javac_cmd, java_files, classpath_inputs,
# (by not extracting them).
javac_cmd = _ConvertToJMakeArgs(javac_cmd, pdb_path)
- generated_java_dir = options.generated_dir
+ if save_outputs:
+ generated_java_dir = options.generated_dir
+ else:
+ generated_java_dir = os.path.join(temp_dir, 'gen')
+
# Incremental means not all files will be extracted, so don't bother
# clearing out stale generated files.
if not incremental:
@@ -375,27 +383,35 @@ def _OnStaleMd5(changes, options, javac_cmd, java_files, classpath_inputs,
os.unlink(pdb_path)
attempt_build()
- # Move any Annotation Processor-generated .java files into $out/gen
- # so that codesearch can find them.
- javac_generated_sources = []
- for src_path in build_utils.FindInDirectory(classes_dir, '*.java'):
- dst_path = os.path.join(
- generated_java_dir, os.path.relpath(src_path, classes_dir))
- build_utils.MakeDirectory(os.path.dirname(dst_path))
- shutil.move(src_path, dst_path)
- javac_generated_sources.append(dst_path)
-
- _CreateInfoFile(java_files, options, srcjar_files, javac_generated_sources)
+ if save_outputs:
+ # Move any Annotation Processor-generated .java files into $out/gen
+ # so that codesearch can find them.
+ javac_generated_sources = []
+ for src_path in build_utils.FindInDirectory(classes_dir, '*.java'):
+ dst_path = os.path.join(generated_java_dir,
+ os.path.relpath(src_path, classes_dir))
+ build_utils.MakeDirectory(os.path.dirname(dst_path))
+ shutil.move(src_path, dst_path)
+ javac_generated_sources.append(dst_path)
+
+ _CreateInfoFile(java_files, options, srcjar_files,
+ javac_generated_sources)
+ else:
+ build_utils.Touch(options.jar_path + '.info')
if options.incremental and (not java_files or not incremental):
# Make sure output exists.
build_utils.Touch(pdb_path)
- with build_utils.AtomicOutput(options.jar_path) as f:
- jar.JarDirectory(classes_dir,
- f.name,
- provider_configurations=options.provider_configurations,
- additional_files=options.additional_jar_files)
+ if options.incremental or save_outputs:
+ with build_utils.AtomicOutput(options.jar_path) as f:
+ jar.JarDirectory(
+ classes_dir,
+ f.name,
+ provider_configurations=options.provider_configurations,
+ additional_files=options.additional_jar_files)
+ else:
+ build_utils.Touch(options.jar_path)
def _ParseAndFlattenGnLists(gn_lists):
@@ -595,7 +611,7 @@ def main(argv):
output_paths = [
options.jar_path,
options.jar_path + '.info',
- ]
+ ]
if options.incremental:
output_paths.append(options.jar_path + '.pdb')
diff --git a/chromium/build/android/gyp/lint.py b/chromium/build/android/gyp/lint.py
index bc664ebe5ad..852f91efcd4 100755
--- a/chromium/build/android/gyp/lint.py
+++ b/chromium/build/android/gyp/lint.py
@@ -226,23 +226,13 @@ def _OnStaleMd5(lint_path, config_path, processed_config_path,
print 'File contents:'
with open(result_path) as f:
print f.read()
- if not can_fail_build:
+    if can_fail_build:
+      traceback.print_exc()
+      raise
+    else:
return
- if can_fail_build and not silent:
- traceback.print_exc()
-
- # There are actual lint issues
- try:
- num_issues = _ParseAndShowResultFile()
- except Exception: # pylint: disable=broad-except
- if not silent:
- print 'Lint created unparseable xml file...'
- print 'File contents:'
- with open(result_path) as f:
- print f.read()
- raise
-
_ProcessResultFile()
if num_issues == 0 and include_unexpected:
msg = 'Please refer to output above for unexpected lint failures.\n'
diff --git a/chromium/build/android/gyp/proguard.py b/chromium/build/android/gyp/proguard.py
index c90e5639f6d..378ca0f9f26 100755
--- a/chromium/build/android/gyp/proguard.py
+++ b/chromium/build/android/gyp/proguard.py
@@ -66,6 +66,8 @@ def _ParseOptions(args):
parser.add_option('--main-dex-rules-path', action='append',
help='Paths to main dex rules for multidex'
'- only works with R8.')
+ parser.add_option('--min-api', default='',
+ help='Minimum Android API level compatibility.')
parser.add_option('--verbose', '-v', action='store_true',
help='Print all proguard output')
@@ -73,6 +75,8 @@ def _ParseOptions(args):
assert not options.main_dex_rules_path or options.r8_path, \
"R8 must be enabled to pass main dex rules."
+ assert not options.min_api or options.r8_path, \
+ "R8 must be enabled to pass min api."
classpath = []
for arg in options.classpath:
@@ -126,6 +130,9 @@ def _CreateR8Command(options, map_output_path, output_dir):
classpath = [
p for p in set(options.classpath) if p not in options.input_paths
]
+
+ # TODO(smaier): Add back min-api once crbug.com/892644 is done.
+
for lib in classpath:
cmd += ['--lib', lib]
diff --git a/chromium/build/android/gyp/util/build_utils.py b/chromium/build/android/gyp/util/build_utils.py
index f1764b9c89d..fabe15f6880 100644
--- a/chromium/build/android/gyp/util/build_utils.py
+++ b/chromium/build/android/gyp/util/build_utils.py
@@ -177,6 +177,22 @@ class CalledProcessError(Exception):
return 'Command failed: {}\n{}'.format(copyable_command, self.output)
+def FilterLines(output, filter_string):
+ """Output filter from build_utils.CheckOutput.
+
+ Args:
+ output: Executable output as from build_utils.CheckOutput.
+ filter_string: An RE string that will filter (remove) matching
+ lines from |output|.
+
+ Returns:
+ The filtered output, as a single string.
+ """
+ re_filter = re.compile(filter_string)
+ return '\n'.join(
+ line for line in output.splitlines() if not re_filter.search(line))
+
+
# This can be used in most cases like subprocess.check_output(). The output,
# particularly when the command fails, better highlights the command's failure.
# If the command fails, raises a build_utils.CalledProcessError.
@@ -305,8 +321,18 @@ def AddToZipHermetic(zip_file, zip_path, src_path=None, data=None,
zip_file.writestr(zipinfo, os.readlink(src_path))
return
+ # zipfile.write() does
+ # external_attr = (os.stat(src_path)[0] & 0xFFFF) << 16L
+ # but we want to use _HERMETIC_FILE_ATTR, so manually set
+ # the few attr bits we care about.
+ if src_path:
+ st = os.stat(src_path)
+ for mode in (stat.S_IXUSR, stat.S_IXGRP, stat.S_IXOTH):
+ if st.st_mode & mode:
+ zipinfo.external_attr |= mode << 16L
+
if src_path:
- with file(src_path) as f:
+ with open(src_path, 'rb') as f:
data = f.read()
# zipfile will deflate even when it makes the file bigger. To avoid
@@ -322,15 +348,17 @@ def AddToZipHermetic(zip_file, zip_path, src_path=None, data=None,
zip_file.writestr(zipinfo, data, compress_type)
-def DoZip(inputs, output, base_dir=None, compress_fn=None):
+def DoZip(inputs, output, base_dir=None, compress_fn=None,
+ zip_prefix_path=None):
"""Creates a zip file from a list of files.
Args:
inputs: A list of paths to zip, or a list of (zip_path, fs_path) tuples.
- output: Destination .zip file.
+ output: Path, fileobj, or ZipFile instance to add files to.
base_dir: Prefix to strip from inputs.
compress_fn: Applied to each input to determine whether or not to compress.
By default, items will be |zipfile.ZIP_STORED|.
+ zip_prefix_path: Path prepended to file path in zip file.
"""
input_tuples = []
for tup in inputs:
@@ -340,13 +368,23 @@ def DoZip(inputs, output, base_dir=None, compress_fn=None):
# Sort by zip path to ensure stable zip ordering.
input_tuples.sort(key=lambda tup: tup[0])
- with zipfile.ZipFile(output, 'w') as outfile:
+
+ out_zip = output
+ if not isinstance(output, zipfile.ZipFile):
+ out_zip = zipfile.ZipFile(output, 'w')
+
+ try:
for zip_path, fs_path in input_tuples:
+ if zip_prefix_path:
+ zip_path = os.path.join(zip_prefix_path, zip_path)
compress = compress_fn(zip_path) if compress_fn else None
- AddToZipHermetic(outfile, zip_path, src_path=fs_path, compress=compress)
+ AddToZipHermetic(out_zip, zip_path, src_path=fs_path, compress=compress)
+ finally:
+ if output is not out_zip:
+ out_zip.close()
-def ZipDir(output, base_dir, compress_fn=None):
+def ZipDir(output, base_dir, compress_fn=None, zip_prefix_path=None):
"""Creates a zip file from a directory."""
inputs = []
for root, _, files in os.walk(base_dir):
@@ -354,7 +392,8 @@ def ZipDir(output, base_dir, compress_fn=None):
inputs.append(os.path.join(root, f))
with AtomicOutput(output) as f:
- DoZip(inputs, f, base_dir, compress_fn=compress_fn)
+ DoZip(inputs, f, base_dir, compress_fn=compress_fn,
+ zip_prefix_path=zip_prefix_path)
def MatchesGlob(path, filters):
@@ -362,23 +401,21 @@ def MatchesGlob(path, filters):
return filters and any(fnmatch.fnmatch(path, f) for f in filters)
-def MergeZips(output, input_zips, path_transform=None):
+def MergeZips(output, input_zips, path_transform=None, compress=None):
"""Combines all files from |input_zips| into |output|.
Args:
- output: Path or ZipFile instance to add files to.
+ output: Path, fileobj, or ZipFile instance to add files to.
input_zips: Iterable of paths to zip files to merge.
path_transform: Called for each entry path. Returns a new path, or None to
skip the file.
+ compress: Overrides compression setting from origin zip entries.
"""
path_transform = path_transform or (lambda p: p)
added_names = set()
- output_is_already_open = not isinstance(output, basestring)
- if output_is_already_open:
- assert isinstance(output, zipfile.ZipFile)
- out_zip = output
- else:
+ out_zip = output
+ if not isinstance(output, zipfile.ZipFile):
out_zip = zipfile.ZipFile(output, 'w')
try:
@@ -395,11 +432,18 @@ def MergeZips(output, input_zips, path_transform=None):
continue
already_added = dst_name in added_names
if not already_added:
- AddToZipHermetic(out_zip, dst_name, data=in_zip.read(info),
- compress=info.compress_type != zipfile.ZIP_STORED)
+ if compress is not None:
+ compress_entry = compress
+ else:
+ compress_entry = info.compress_type != zipfile.ZIP_STORED
+ AddToZipHermetic(
+ out_zip,
+ dst_name,
+ data=in_zip.read(info),
+ compress=compress_entry)
added_names.add(dst_name)
finally:
- if not output_is_already_open:
+ if output is not out_zip:
out_zip.close()
@@ -483,6 +527,7 @@ def AddDepfileOption(parser):
def WriteDepfile(depfile_path, first_gn_output, inputs=None, add_pydeps=True):
assert depfile_path != first_gn_output # http://crbug.com/646165
+ assert not isinstance(inputs, basestring) # Easy mistake to make
inputs = inputs or []
if add_pydeps:
inputs = _ComputePythonDependencies() + inputs
@@ -530,7 +575,7 @@ def ExpandFileArgs(args):
for k in lookup_path[1:]:
expansion = expansion[k]
- # This should match ParseGNList. The output is either a GN-formatted list
+ # This should match ParseGnList. The output is either a GN-formatted list
# or a literal (with no quotes).
if isinstance(expansion, list):
new_args[i] = arg[:match.start()] + gn_helpers.ToGNString(expansion)
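
A short sketch of the widened zip helpers above (file names invented): zip_prefix_path nests every entry under a common in-archive directory, and the new compress flag on MergeZips overrides the per-entry setting carried over from the source zips.

    build_utils.DoZip(['a.txt', 'b.txt'], 'out.zip', zip_prefix_path='assets')
    # -> archive entries 'assets/a.txt' and 'assets/b.txt'
    build_utils.MergeZips('merged.zip', ['one.zip', 'two.zip'], compress=True)
    # -> all merged entries stored deflated, regardless of origin compression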
diff --git a/chromium/build/android/gyp/util/resource_utils.py b/chromium/build/android/gyp/util/resource_utils.py
index fff9e99a820..11a924ec9c9 100644
--- a/chromium/build/android/gyp/util/resource_utils.py
+++ b/chromium/build/android/gyp/util/resource_utils.py
@@ -407,11 +407,22 @@ def ExtractDeps(dep_zips, deps_dir):
class _ResourceBuildContext(object):
- """A temporary directory for packaging and compiling Android resources."""
- def __init__(self):
+ """A temporary directory for packaging and compiling Android resources.
+
+ Args:
+ temp_dir: Optional root build directory path. If None, a temporary
+ directory will be created, and removed in Close().
+ """
+ def __init__(self, temp_dir=None):
"""Initialized the context."""
# The top-level temporary directory.
- self.temp_dir = tempfile.mkdtemp()
+ if temp_dir:
+ self.temp_dir = temp_dir
+ self.remove_on_exit = False
+ else:
+ self.temp_dir = tempfile.mkdtemp()
+ self.remove_on_exit = True
+
  # A location to store resources extracted from dependency zip files.
self.deps_dir = os.path.join(self.temp_dir, 'deps')
os.mkdir(self.deps_dir)
@@ -426,14 +437,15 @@ class _ResourceBuildContext(object):
def Close(self):
"""Close the context and destroy all temporary files."""
- shutil.rmtree(self.temp_dir)
+ if self.remove_on_exit:
+ shutil.rmtree(self.temp_dir)
@contextlib.contextmanager
-def BuildContext():
+def BuildContext(temp_dir=None):
"""Generator for a _ResourceBuildContext instance."""
try:
- context = _ResourceBuildContext()
+ context = _ResourceBuildContext(temp_dir)
yield context
finally:
context.Close()
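
With the optional temp_dir, intermediates can be kept around for inspection; compile_resources.py wires this up to ANDROID_DEBUG_TEMP_RESOURCES_DIR. A minimal sketch (path invented; the directory must already exist, as the caller creates it):

    with resource_utils.BuildContext('/tmp/res_debug/Foo.apk') as build:
      # build.temp_dir == '/tmp/res_debug/Foo.apk'; because a directory was
      # supplied, Close() leaves it in place instead of deleting it.
      print(build.deps_dir)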
diff --git a/chromium/build/android/gyp/write_build_config.py b/chromium/build/android/gyp/write_build_config.py
index f4d25bfe95b..2c1b6ff7f3c 100755
--- a/chromium/build/android/gyp/write_build_config.py
+++ b/chromium/build/android/gyp/write_build_config.py
@@ -348,6 +348,10 @@ dependencies for this APK.
The path of a zip archive containing the APK's resources compiled to the
protocol buffer format (instead of regular binary xml + resources.arsc).
+* `deps_info['module_rtxt_path']`:
+The path of the R.txt file generated when compiling the resources for the bundle
+module.
+
* `native['libraries']`
List of native libraries for the primary ABI to be embedded in this APK.
E.g. [ "libchrome.so" ] (i.e. this doesn't include any ABI sub-directory
@@ -702,7 +706,9 @@ def _DepsFromPaths(dep_paths, target_type, filter_root_targets=True):
include the .apk as a resource/asset, not to have the apk's classpath added.
"""
configs = [GetDepConfig(p) for p in dep_paths]
+ groups = DepsOfType('group', configs)
configs = _ResolveGroups(configs)
+ configs += groups
# Don't allow root targets to be considered as a dep.
if filter_root_targets:
configs = [c for c in configs if c['type'] not in _ROOT_TYPES]
@@ -710,6 +716,7 @@ def _DepsFromPaths(dep_paths, target_type, filter_root_targets=True):
# Don't allow java libraries to cross through assets/resources.
if target_type in _RESOURCE_TYPES:
configs = [c for c in configs if c['type'] in _RESOURCE_TYPES]
+
return Deps([c['path'] for c in configs])
@@ -871,6 +878,9 @@ def main(argv):
parser.add_option('--apk-proto-resources',
help='Path to resources compiled in protocol buffer format '
' for this apk.')
+ parser.add_option(
+ '--module-rtxt-path',
+ help='Path to R.txt file for resources in a bundle module.')
parser.add_option('--generate-markdown-format-doc', action='store_true',
help='Dump the Markdown .build_config format documentation '
@@ -884,7 +894,7 @@ def main(argv):
if options.generate_markdown_format_doc:
doc_lines = _ExtractMarkdownDocumentation(__doc__)
for line in doc_lines:
- print(line)
+ print(line)
return 0
if options.fail:
@@ -919,6 +929,10 @@ def main(argv):
if options.type != 'android_app_bundle_module':
raise Exception('--apk-proto-resources can only be used with '
'--type=android_app_bundle_module')
+ if options.module_rtxt_path:
+ if options.type != 'android_app_bundle_module':
+      raise Exception('--module-rtxt-path can only be used with '
+ '--type=android_app_bundle_module')
is_apk_or_module_target = options.type in ('android_apk',
'android_app_bundle_module')
@@ -960,6 +974,7 @@ def main(argv):
system_library_deps = deps.Direct('system_java_library')
direct_library_deps = deps.Direct('java_library')
+ group_deps = deps.All('group')
all_library_deps = deps.All('java_library')
all_resources_deps = deps.All('android_resources')
@@ -1017,7 +1032,8 @@ def main(argv):
# TODO(tiborg): Remove creation of JNI info for type group and java_library
# once we can generate the JNI registration based on APK / module targets as
# opposed to groups and libraries.
- if is_apk_or_module_target or options.type in ('group', 'java_library'):
+ if is_apk_or_module_target or options.type in (
+ 'group', 'java_library', 'junit_binary'):
config['jni'] = {}
all_java_sources = [c['java_sources_file'] for c in all_library_deps
if 'java_sources_file' in c]
@@ -1028,6 +1044,9 @@ def main(argv):
if options.apk_proto_resources:
deps_info['proto_resources_path'] = options.apk_proto_resources
+ if options.module_rtxt_path:
+ deps_info['module_rtxt_path'] = options.module_rtxt_path
+
if is_java_target:
deps_info['requires_android'] = bool(options.requires_android)
deps_info['supports_android'] = bool(options.supports_android)
@@ -1154,6 +1173,12 @@ def main(argv):
if is_apk_or_module_target:
deps_dex_files = [c['dex_path'] for c in all_library_deps]
+ if options.type == 'group':
+ if options.extra_classpath_jars:
+ # These are .jars to add to javac classpath but not to runtime classpath.
+ extra_jars = build_utils.ParseGnList(options.extra_classpath_jars)
+ deps_info['extra_classpath_jars'] = extra_jars
+
if is_java_target:
# The classpath used to compile this target when annotation processors are
# present.
@@ -1171,6 +1196,12 @@ def main(argv):
javac_full_classpath = [
c['unprocessed_jar_path'] for c in all_library_deps]
+ for dep in group_deps:
+ javac_classpath.extend(dep.get('extra_classpath_jars', []))
+ javac_full_classpath.extend(dep.get('extra_classpath_jars', []))
+ javac_interface_classpath.extend(dep.get('extra_classpath_jars', []))
+ javac_full_interface_classpath.extend(dep.get('extra_classpath_jars', []))
+
# Deps to add to the compile-time classpath (but not the runtime classpath).
# TODO(agrieve): Might be less confusing to fold these into bootclasspath.
javac_extra_jars = [c['unprocessed_jar_path']
@@ -1228,6 +1259,9 @@ def main(argv):
p for p in c.get('proguard_configs', []) if p not in all_configs)
extra_jars.extend(
p for p in c.get('extra_classpath_jars', []) if p not in extra_jars)
+ for c in group_deps:
+ extra_jars.extend(
+ p for p in c.get('extra_classpath_jars', []) if p not in extra_jars)
if options.type == 'android_app_bundle':
for c in deps.Direct('android_app_bundle_module'):
all_configs.extend(
@@ -1333,7 +1367,6 @@ def main(argv):
dex_config['path'] = options.final_dex_path
if is_java_target:
- config['javac']['bootclasspath'] = system_jars
config['javac']['classpath'] = javac_classpath
config['javac']['interface_classpath'] = javac_interface_classpath
# Direct() will be of type 'java_annotation_processor'.
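
The net effect of the 'group' handling above is that a group's extra_classpath_jars now reach all four javac classpath variants. Illustrative data flow (paths invented):

    group_deps = [{'extra_classpath_jars': ['obj/third_party/foo.jar']}]
    javac_classpath = ['obj/base/base.jar']
    for dep in group_deps:
      javac_classpath.extend(dep.get('extra_classpath_jars', []))
    # javac_classpath == ['obj/base/base.jar', 'obj/third_party/foo.jar']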
diff --git a/chromium/build/android/incremental_install/java/org/chromium/incrementalinstall/ClassLoaderPatcher.java b/chromium/build/android/incremental_install/java/org/chromium/incrementalinstall/ClassLoaderPatcher.java
index bb9c7159a40..10e438f6707 100644
--- a/chromium/build/android/incremental_install/java/org/chromium/incrementalinstall/ClassLoaderPatcher.java
+++ b/chromium/build/android/incremental_install/java/org/chromium/incrementalinstall/ClassLoaderPatcher.java
@@ -67,7 +67,9 @@ final class ClassLoaderPatcher {
dexDir = incrementalDexesDir;
}
- File[] dexFilesArr = dexDir.listFiles((File f) -> f.isFile()); // Ignore "oat" directory.
+ // Ignore "oat" directory.
+ // Also ignore files that sometimes show up (e.g. .jar.arm.flock).
+ File[] dexFilesArr = dexDir.listFiles(f -> f.getName().endsWith(".jar"));
if (dexFilesArr == null) {
throw new FileNotFoundException("Dex dir does not exist: " + dexDir);
}
diff --git a/chromium/build/android/lint/suppressions.xml b/chromium/build/android/lint/suppressions.xml
index c4b132f5ea3..3b4d64167df 100644
--- a/chromium/build/android/lint/suppressions.xml
+++ b/chromium/build/android/lint/suppressions.xml
@@ -119,6 +119,8 @@ Still reading?
<ignore regexp="content/public/android/java/res/drawable-xxxhdpi"/>
<ignore regexp="ui/android/java/res/drawable-xxhdpi"/>
<ignore regexp="ui/android/java/res/drawable-xxxhdpi"/>
+ <!-- This is intentional to reduce APK size. See: http://crrev/c/1352161 -->
+ <ignore regexp="chrome/android/java/res_autofill_assistant/drawable-*"/>
</issue>
<issue id="IconDipSize">
<ignore regexp="chromecast/internal"/>
@@ -181,10 +183,19 @@ Still reading?
</issue>
<!-- TODO(crbug.com/804453): Remove this after fixing. -->
<issue id="KeyboardInaccessibleWidget" severity="ignore"/>
- <issue id="LintError" severity="Error"/>
+ <issue id="LintError">
+ <!-- We no longer supply class files to lint. -->
+ <ignore regexp="No `.class` files were found in project"/>
+ </issue>
<issue id="LogConditional" severity="ignore"/>
<issue id="LongLogTag" severity="ignore"/>
<issue id="MissingApplicationIcon" severity="ignore"/>
+ <issue id="MissingDefaultResource">
+ <!-- Only used by ToolbarControlContainer guarded by tablet form-factor. -->
+ <ignore regexp="toolbar_background.9.png"/>
+ <!-- Only used by FirstRunFlowSequencer guarded by tablet form-factor. -->
+ <ignore regexp="window_background.xml"/>
+ </issue>
<issue id="MissingPermission" severity="ignore"/>
<!-- TODO(yolandyan) remove this once all tests are converted to junit4 -->
<issue id="MissingPrefix" severity="ignore"/>
@@ -254,6 +265,8 @@ Still reading?
<ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values/android_chrome_strings.xml"/>
</issue>
<issue id="PrivateApi" severity="ignore"/>
+ <!-- Chrome is a system app. -->
+ <issue id="ProtectedPermissions" severity="ignore"/>
<issue id="Recycle" severity="ignore"/>
<issue id="Registered" severity="ignore"/>
<issue id="ResourceAsColor" severity="ignore"/>
diff --git a/chromium/build/android/main_dex_classes.flags b/chromium/build/android/main_dex_classes.flags
index 9e4abfa9fe2..9163c5097f5 100644
--- a/chromium/build/android/main_dex_classes.flags
+++ b/chromium/build/android/main_dex_classes.flags
@@ -43,10 +43,6 @@
*;
}
-# Need test classes to be in the main dex because test listing does not
-# load secondary dex on Dalvik devices.
--keep @**.RunWith class * {}
-
# The following are based on $SDK_BUILD_TOOLS/mainDexClasses.rules
# Ours differ in that:
# 1. It omits -keeps for application / instrumentation / backupagents (these are
diff --git a/chromium/build/android/pylib/base/base_test_result.py b/chromium/build/android/pylib/base/base_test_result.py
index a5535322965..bb25a74186e 100644
--- a/chromium/build/android/pylib/base/base_test_result.py
+++ b/chromium/build/android/pylib/base/base_test_result.py
@@ -10,13 +10,13 @@ import threading
class ResultType(object):
"""Class enumerating test types."""
# The test passed.
- PASS = 'PASS'
+ PASS = 'SUCCESS'
# The test was intentionally skipped.
- SKIP = 'SKIP'
+ SKIP = 'SKIPPED'
# The test failed.
- FAIL = 'FAIL'
+ FAIL = 'FAILURE'
# The test caused the containing process to crash.
CRASH = 'CRASH'
diff --git a/chromium/build/android/pylib/base/environment.py b/chromium/build/android/pylib/base/environment.py
index e4549f5f834..744c392c1bc 100644
--- a/chromium/build/android/pylib/base/environment.py
+++ b/chromium/build/android/pylib/base/environment.py
@@ -25,6 +25,9 @@ class Environment(object):
"""
self._output_manager = output_manager
+ # Some subclasses have different teardown behavior on receiving SIGTERM.
+ self._received_sigterm = False
+
def SetUp(self):
raise NotImplementedError
@@ -41,3 +44,6 @@ class Environment(object):
@property
def output_manager(self):
return self._output_manager
+
+ def ReceivedSigterm(self):
+ self._received_sigterm = True
diff --git a/chromium/build/android/pylib/base/test_run.py b/chromium/build/android/pylib/base/test_run.py
index 59e595d926b..fc72d3a5476 100644
--- a/chromium/build/android/pylib/base/test_run.py
+++ b/chromium/build/android/pylib/base/test_run.py
@@ -18,17 +18,21 @@ class TestRun(object):
self._env = env
self._test_instance = test_instance
+ # Some subclasses have different teardown behavior on receiving SIGTERM.
+ self._received_sigterm = False
+
def TestPackage(self):
raise NotImplementedError
def SetUp(self):
raise NotImplementedError
- def RunTests(self):
- """Runs Tests and returns test results.
+ def RunTests(self, results):
+ """Runs Tests and populates |results|.
- Returns:
- Should return list of |base_test_result.TestRunResults| objects.
+ Args:
+ results: An array that should be populated with
+ |base_test_result.TestRunResults| objects.
"""
raise NotImplementedError
@@ -41,3 +45,6 @@ class TestRun(object):
def __exit__(self, exc_type, exc_val, exc_tb):
self.TearDown()
+
+ def ReceivedSigterm(self):
+ self._received_sigterm = True
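
The RunTests signature change inverts the data flow: callers own the results list and see whatever has accumulated even if the run dies part-way through. A sketch of the caller side (the reporting helper is hypothetical):

    results = []
    try:
      test_run.RunTests(results)
    finally:
      # On SIGTERM/timeout, |results| still holds the partially populated
      # TestRunResults objects appended during the run.
      report(results)  # hypothetical reporting step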
diff --git a/chromium/build/android/pylib/constants/host_paths.py b/chromium/build/android/pylib/constants/host_paths.py
index 9ebf8d5054e..b249d3c2919 100644
--- a/chromium/build/android/pylib/constants/host_paths.py
+++ b/chromium/build/android/pylib/constants/host_paths.py
@@ -24,6 +24,8 @@ DEVIL_PATH = os.path.join(
DIR_SOURCE_ROOT, 'third_party', 'catapult', 'devil')
PYMOCK_PATH = os.path.join(
DIR_SOURCE_ROOT, 'third_party', 'pymock')
+TRACING_PATH = os.path.join(
+ DIR_SOURCE_ROOT, 'third_party', 'catapult', 'tracing')
@contextlib.contextmanager
def SysPath(path, position=None):
diff --git a/chromium/build/android/pylib/junit/junit_test_instance.py b/chromium/build/android/pylib/junit/junit_test_instance.py
index 4dccac92cbd..f258cbd7bb2 100644
--- a/chromium/build/android/pylib/junit/junit_test_instance.py
+++ b/chromium/build/android/pylib/junit/junit_test_instance.py
@@ -14,6 +14,7 @@ class JunitTestInstance(test_instance.TestInstance):
self._android_manifest_path = args.android_manifest_path
self._coverage_dir = args.coverage_dir
self._debug_socket = args.debug_socket
+ self._jacoco = args.jacoco
self._package_filter = args.package_filter
self._package_name = args.package_name
self._resource_zips = args.resource_zips
@@ -43,6 +44,10 @@ class JunitTestInstance(test_instance.TestInstance):
return self._coverage_dir
@property
+ def jacoco(self):
+ return self._jacoco
+
+ @property
def debug_socket(self):
return self._debug_socket
diff --git a/chromium/build/android/pylib/local/device/local_device_environment.py b/chromium/build/android/pylib/local/device/local_device_environment.py
index 1403504e005..ba58686ac3f 100644
--- a/chromium/build/android/pylib/local/device/local_device_environment.py
+++ b/chromium/build/android/pylib/local/device/local_device_environment.py
@@ -133,7 +133,8 @@ class LocalDeviceEnvironment(environment.Environment):
device_arg = self._device_serials
self._devices = device_utils.DeviceUtils.HealthyDevices(
- self._blacklist, enable_device_files_cache=self._enable_device_cache,
+ self._blacklist, retries=5, enable_usb_resets=True,
+ enable_device_files_cache=self._enable_device_cache,
default_retries=self._max_tries - 1, device_arg=device_arg)
if self._logcat_output_file:
@@ -211,6 +212,14 @@ class LocalDeviceEnvironment(environment.Environment):
elif self.trace_output:
self.DisableTracing()
+ # By default, teardown will invoke ADB. When receiving SIGTERM due to a
+ # timeout, there's a high probability that ADB is non-responsive. In these
+ # cases, sending an ADB command will potentially take a long time to time
+ # out. Before this happens, the process will be hard-killed for not
+ # responding to SIGTERM fast enough.
+ if self._received_sigterm:
+ return
+
if not self._devices:
return
diff --git a/chromium/build/android/pylib/local/device/local_device_gtest_run.py b/chromium/build/android/pylib/local/device/local_device_gtest_run.py
index 47348834c99..32f1fe23c0c 100644
--- a/chromium/build/android/pylib/local/device/local_device_gtest_run.py
+++ b/chromium/build/android/pylib/local/device/local_device_gtest_run.py
@@ -317,7 +317,11 @@ class LocalDeviceGtestRun(local_device_test_run.LocalDeviceTestRun):
for h, d in host_device_tuples]
dev.PushChangedFiles(
host_device_tuples_substituted,
- delete_device_stale=True)
+ delete_device_stale=True,
+ # Some gtest suites, e.g. unit_tests, have data dependencies that
+ # can take longer than the default timeout to push. See
+ # crbug.com/791632 for context.
+ timeout=600)
if not host_device_tuples:
dev.RemovePath(device_root, force=True, recursive=True, rename=True)
dev.RunShellCommand(['mkdir', '-p', device_root], check_return=True)
@@ -599,6 +603,14 @@ class LocalDeviceGtestRun(local_device_test_run.LocalDeviceTestRun):
#override
def TearDown(self):
+ # By default, teardown will invoke ADB. When receiving SIGTERM due to a
+ # timeout, there's a high probability that ADB is non-responsive. In these
+ # cases, sending an ADB command will potentially take a long time to time
+ # out. Before this happens, the process will be hard-killed for not
+ # responding to SIGTERM fast enough.
+ if self._received_sigterm:
+ return
+
@local_device_environment.handle_shard_failures
@trace_event.traced
def individual_device_tear_down(dev):
diff --git a/chromium/build/android/pylib/local/device/local_device_instrumentation_test_run.py b/chromium/build/android/pylib/local/device/local_device_instrumentation_test_run.py
index f921fce1bd8..8857b88a536 100644
--- a/chromium/build/android/pylib/local/device/local_device_instrumentation_test_run.py
+++ b/chromium/build/android/pylib/local/device/local_device_instrumentation_test_run.py
@@ -298,13 +298,22 @@ class LocalDeviceInstrumentationTestRun(
individual_device_set_up,
self._test_instance.GetDataDependencies())
if self._test_instance.wait_for_java_debugger:
+ apk = self._test_instance.apk_under_test or self._test_instance.test_apk
logging.warning('*' * 80)
logging.warning('Waiting for debugger to attach to process: %s',
- self._test_instance.apk_under_test.GetPackageName())
+ apk.GetPackageName())
logging.warning('*' * 80)
#override
def TearDown(self):
+ # By default, teardown will invoke ADB. When receiving SIGTERM due to a
+ # timeout, there's a high probability that ADB is non-responsive. In these
+ # cases, sending an ADB command will potentially take a long time to time
+ # out. Before this happens, the process will be hard-killed for not
+ # responding to SIGTERM fast enough.
+ if self._received_sigterm:
+ return
+
@local_device_environment.handle_shard_failures_with(
self._env.BlacklistDevice)
@trace_event.traced
diff --git a/chromium/build/android/pylib/local/device/local_device_perf_test_run.py b/chromium/build/android/pylib/local/device/local_device_perf_test_run.py
index 656f233e5f4..bc828408a00 100644
--- a/chromium/build/android/pylib/local/device/local_device_perf_test_run.py
+++ b/chromium/build/android/pylib/local/device/local_device_perf_test_run.py
@@ -412,7 +412,7 @@ class LocalDevicePerfTestRun(local_device_test_run.LocalDeviceTestRun):
return sorted(devices)
#override
- def RunTests(self):
+ def RunTests(self, results):
def run_no_devices_tests():
if not self._no_device_tests:
return []
@@ -451,7 +451,12 @@ class LocalDevicePerfTestRun(local_device_test_run.LocalDeviceTestRun):
host_test_results, device_test_results = reraiser_thread.RunAsync(
[run_no_devices_tests, run_devices_tests])
- return host_test_results + device_test_results
+ # Ideally, results would be populated as early as possible, so that in the
+ # event of an exception or timeout, the caller will still have partially
+    # populated results. This could be done before dispatching tests, but we
+    # will hold off on that change unless it proves useful.
+ results.extend(host_test_results + device_test_results)
# override
def TestPackage(self):
@@ -480,12 +485,16 @@ class OutputJsonList(LocalDevicePerfTestRun):
pass
# override
- def RunTests(self):
+ def RunTests(self, results):
result_type = self._test_instance.OutputJsonList()
result = base_test_result.TestRunResults()
result.AddResult(
base_test_result.BaseTestResult('OutputJsonList', result_type))
- return [result]
+
+ # Ideally, results would be populated as early as possible, so that in the
+ # event of an exception or timeout, the caller will still have partially
+ # populated results.
+ results.append(result)
# override
def _CreateShards(self, _tests):
@@ -502,12 +511,16 @@ class PrintStep(LocalDevicePerfTestRun):
pass
# override
- def RunTests(self):
+ def RunTests(self, results):
result_type = self._test_instance.PrintTestOutput()
result = base_test_result.TestRunResults()
result.AddResult(
base_test_result.BaseTestResult('PrintStep', result_type))
- return [result]
+
+ # Ideally, results would be populated as early as possible, so that in the
+ # event of an exception or timeout, the caller will still have partially
+ # populated results.
+ results.append(result)
# override
def _CreateShards(self, _tests):
diff --git a/chromium/build/android/pylib/local/device/local_device_test_run.py b/chromium/build/android/pylib/local/device/local_device_test_run.py
index 00d715b3ade..26d6d07a1cd 100644
--- a/chromium/build/android/pylib/local/device/local_device_test_run.py
+++ b/chromium/build/android/pylib/local/device/local_device_test_run.py
@@ -54,7 +54,7 @@ class LocalDeviceTestRun(test_run.TestRun):
self._tools = {}
#override
- def RunTests(self):
+ def RunTests(self, results):
tests = self._GetTests()
exit_now = threading.Event()
@@ -113,9 +113,8 @@ class LocalDeviceTestRun(test_run.TestRun):
raise TestsTerminated()
try:
- with signal_handler.SignalHandler(signal.SIGTERM, stop_tests):
+ with signal_handler.AddSignalHandler(signal.SIGTERM, stop_tests):
tries = 0
- results = []
while tries < self._env.max_tries and tests:
logging.info('STARTING TRY #%d/%d', tries + 1, self._env.max_tries)
if tries > 0 and self._env.recover_devices:
@@ -144,6 +143,11 @@ class LocalDeviceTestRun(test_run.TestRun):
t, base_test_result.ResultType.NOTRUN)
for t in test_names if not t.endswith('*'))
+ # As soon as we know the names of the tests, we populate |results|.
+ # The tests in try_results will have their results updated by
+ # try_results.AddResult() as they are run.
+ results.append(try_results)
+
try:
if self._ShouldShard():
tc = test_collection.TestCollection(self._CreateShards(tests))
@@ -160,8 +164,6 @@ class LocalDeviceTestRun(test_run.TestRun):
base_test_result.ResultType.TIMEOUT,
log=_SIGTERM_TEST_LOG))
raise
- finally:
- results.append(try_results)
tries += 1
tests = self._GetTestsToRetry(tests, try_results)
@@ -174,8 +176,6 @@ class LocalDeviceTestRun(test_run.TestRun):
except TestsTerminated:
pass
- return results
-
def _GetTestsToRetry(self, tests, try_results):
def is_failure_result(test_result):
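
Note: the RunTests() signature change above turns the results list into a
caller-owned out-parameter. A hedged sketch of the resulting caller contract
(function name is illustrative):

    def run_and_report(test_run):
      raw_results = []  # Caller-owned; RunTests() appends to it in place.
      try:
        test_run.RunTests(raw_results)
      finally:
        # On an exception or timeout, everything appended before the
        # failure is still visible here and can be written to disk.
        for try_results in raw_results:
          print(try_results)
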
diff --git a/chromium/build/android/pylib/local/machine/local_machine_junit_test_run.py b/chromium/build/android/pylib/local/machine/local_machine_junit_test_run.py
index 147104b1f99..18c66789c28 100644
--- a/chromium/build/android/pylib/local/machine/local_machine_junit_test_run.py
+++ b/chromium/build/android/pylib/local/machine/local_machine_junit_test_run.py
@@ -12,6 +12,7 @@ from devil.utils import reraiser_thread
from pylib import constants
from pylib.base import base_test_result
from pylib.base import test_run
+from pylib.constants import host_paths
from pylib.results import json_results
from py_utils import tempfile_ext
@@ -29,7 +30,7 @@ class LocalMachineJunitTestRun(test_run.TestRun):
pass
#override
- def RunTests(self):
+ def RunTests(self, results):
with tempfile_ext.NamedTemporaryDirectory() as temp_dir:
json_file_path = os.path.join(temp_dir, 'results.json')
@@ -97,9 +98,20 @@ class LocalMachineJunitTestRun(test_run.TestRun):
os.makedirs(self._test_instance.coverage_dir)
elif not os.path.isdir(self._test_instance.coverage_dir):
raise Exception('--coverage-dir takes a directory, not file path.')
- jvm_args.append('-Demma.coverage.out.file=%s' % os.path.join(
- self._test_instance.coverage_dir,
- '%s.ec' % self._test_instance.suite))
+ if self._test_instance.jacoco:
+ jacoco_coverage_file = os.path.join(
+ self._test_instance.coverage_dir,
+ '%s.exec' % self._test_instance.suite)
+ jacoco_agent_path = os.path.join(host_paths.DIR_SOURCE_ROOT,
+ 'third_party', 'jacoco',
+ 'lib', 'jacocoagent.jar')
+ jacoco_args = '-javaagent:{}=destfile={},includes=org.chromium.*'
+ jvm_args.append(jacoco_args.format(jacoco_agent_path,
+ jacoco_coverage_file))
+ else:
+ jvm_args.append('-Demma.coverage.out.file=%s' % os.path.join(
+ self._test_instance.coverage_dir,
+ '%s.ec' % self._test_instance.suite))
if jvm_args:
command.extend(['--jvm-args', '"%s"' % ' '.join(jvm_args)])
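
Note: an illustrative rendering of the -javaagent flag the jacoco branch
above assembles (the format string is the one from this hunk; the paths are
made up):

    jacoco_args = '-javaagent:{}=destfile={},includes=org.chromium.*'
    print(jacoco_args.format('third_party/jacoco/lib/jacocoagent.jar',
                             'out/coverage/my_suite.exec'))
    # -javaagent:third_party/jacoco/lib/jacocoagent.jar=destfile=out/coverage/my_suite.exec,includes=org.chromium.*
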
@@ -119,8 +131,7 @@ class LocalMachineJunitTestRun(test_run.TestRun):
test_run_results = base_test_result.TestRunResults()
test_run_results.AddResults(results_list)
-
- return [test_run_results]
+ results.append(test_run_results)
#override
def TearDown(self):
diff --git a/chromium/build/android/pylib/results/json_results.py b/chromium/build/android/pylib/results/json_results.py
index 3f87b46d8c5..6a10ba4bc92 100644
--- a/chromium/build/android/pylib/results/json_results.py
+++ b/chromium/build/android/pylib/results/json_results.py
@@ -72,20 +72,6 @@ def GenerateResultsDict(test_run_results, global_tags=None):
# ],
# }
- def status_as_string(s):
- if s == base_test_result.ResultType.PASS:
- return 'SUCCESS'
- elif s == base_test_result.ResultType.SKIP:
- return 'SKIPPED'
- elif s == base_test_result.ResultType.FAIL:
- return 'FAILURE'
- elif s == base_test_result.ResultType.CRASH:
- return 'CRASH'
- elif s == base_test_result.ResultType.TIMEOUT:
- return 'TIMEOUT'
- elif s == base_test_result.ResultType.UNKNOWN:
- return 'UNKNOWN'
-
all_tests = set()
per_iteration_data = []
test_run_links = {}
@@ -103,10 +89,10 @@ def GenerateResultsDict(test_run_results, global_tags=None):
for r in results_iterable:
result_dict = {
- 'status': status_as_string(r.GetType()),
+ 'status': r.GetType(),
'elapsed_time_ms': r.GetDuration(),
'output_snippet': unicode(r.GetLog(), errors='replace'),
- 'losless_snippet': '',
+ 'losless_snippet': True,
'output_snippet_base64': '',
'links': r.GetLinks(),
}
@@ -152,18 +138,9 @@ def ParseResultsFromJson(json_results):
"""
def string_as_status(s):
- if s == 'SUCCESS':
- return base_test_result.ResultType.PASS
- elif s == 'SKIPPED':
- return base_test_result.ResultType.SKIP
- elif s == 'FAILURE':
- return base_test_result.ResultType.FAIL
- elif s == 'CRASH':
- return base_test_result.ResultType.CRASH
- elif s == 'TIMEOUT':
- return base_test_result.ResultType.TIMEOUT
- else:
- return base_test_result.ResultType.UNKNOWN
+ if s in base_test_result.ResultType.GetTypes():
+ return s
+ return base_test_result.ResultType.UNKNOWN
results_list = []
testsuite_runs = json_results['per_iteration_data']
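
Note: with statuses now serialized as the ResultType values themselves,
parsing reduces to a membership test. A standalone sketch; the concrete set
of status strings is assumed here for illustration:

    KNOWN_TYPES = frozenset(
        ['PASS', 'SKIP', 'FAIL', 'CRASH', 'TIMEOUT', 'UNKNOWN'])

    def string_as_status(s):
      # Unrecognized strings fall back to UNKNOWN, as in the patch above.
      return s if s in KNOWN_TYPES else 'UNKNOWN'
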
diff --git a/chromium/build/android/pylib/results/json_results_test.py b/chromium/build/android/pylib/results/json_results_test.py
index e8b983b56ed..68e71f57859 100755
--- a/chromium/build/android/pylib/results/json_results_test.py
+++ b/chromium/build/android/pylib/results/json_results_test.py
@@ -175,6 +175,33 @@ class JsonResultsTest(unittest.TestCase):
[raw_results], global_tags=global_tags)
self.assertEquals(['UNRELIABLE_RESULTS'], results_dict['global_tags'])
+ def testGenerateResultsDict_loslessSnippet(self):
+ result = base_test_result.BaseTestResult(
+ 'test.package.TestName', base_test_result.ResultType.FAIL)
+ log = 'blah-blah'
+ result.SetLog(log)
+
+ all_results = base_test_result.TestRunResults()
+ all_results.AddResult(result)
+
+ results_dict = json_results.GenerateResultsDict([all_results])
+ self.assertEquals(
+ ['test.package.TestName'],
+ results_dict['all_tests'])
+ self.assertEquals(1, len(results_dict['per_iteration_data']))
+
+ iteration_result = results_dict['per_iteration_data'][0]
+ self.assertTrue('test.package.TestName' in iteration_result)
+ self.assertEquals(1, len(iteration_result['test.package.TestName']))
+
+ test_iteration_result = iteration_result['test.package.TestName'][0]
+ self.assertTrue('losless_snippet' in test_iteration_result)
+ self.assertTrue(test_iteration_result['losless_snippet'])
+ self.assertTrue('output_snippet' in test_iteration_result)
+ self.assertEquals(log, test_iteration_result['output_snippet'])
+ self.assertTrue('output_snippet_base64' in test_iteration_result)
+ self.assertEquals('', test_iteration_result['output_snippet_base64'])
+
if __name__ == '__main__':
unittest.main(verbosity=2)
diff --git a/chromium/build/android/resource_sizes.py b/chromium/build/android/resource_sizes.py
index 67faab1d796..cc8681122bd 100755
--- a/chromium/build/android/resource_sizes.py
+++ b/chromium/build/android/resource_sizes.py
@@ -41,6 +41,9 @@ _APK_PATCH_SIZE_ESTIMATOR_PATH = os.path.join(
with host_paths.SysPath(host_paths.BUILD_COMMON_PATH):
import perf_tests_results_helper # pylint: disable=import-error
+with host_paths.SysPath(host_paths.TRACING_PATH):
+ from tracing.value import convert_chart_json # pylint: disable=import-error
+
with host_paths.SysPath(_BUILD_UTILS_PATH, 0):
from util import build_utils # pylint: disable=import-error
@@ -177,23 +180,8 @@ def _ParseManifestAttributes(apk_path):
# Dex decompression overhead varies by Android version.
m = re.search(r'android:minSdkVersion\(\w+\)=\(type \w+\)(\w+)\n', output)
sdk_version = int(m.group(1), 16)
- # Pre-L: Dalvik - .odex file is simply decompressed/optimized dex file (~1x).
- # L, M: ART - .odex file is compiled version of the dex file (~4x).
- # N: ART - Uses Dalvik-like JIT for normal apps (~1x), full compilation for
- # shared apps (~4x).
- # Actual multipliers calculated using "apk_operations.py disk-usage".
- # Will need to update multipliers once apk obfuscation is enabled.
- # E.g. with obfuscation, the 4.04 changes to 4.46.
- if sdk_version < 21:
- dex_multiplier = 1.16
- elif sdk_version < 24:
- dex_multiplier = 4.04
- elif 'Monochrome' in apk_path or 'WebView' in apk_path:
- dex_multiplier = 4.04 # compilation_filter=speed
- else:
- dex_multiplier = 1.17 # compilation_filter=speed-profile
- return dex_multiplier, skip_extract_lib
+ return sdk_version, skip_extract_lib
def CountStaticInitializers(so_path, tool_prefix):
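
Note: aapt dumps android:minSdkVersion as a hex literal, which is why the
parse above uses base 16. For example:

    int('0x15', 16)  # == 21, i.e. Lollipop
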
@@ -376,7 +364,31 @@ def GenerateApkAnalysis(apk_filename, tool_prefix, out_dir,
finally:
apk.close()
- dex_multiplier, skip_extract_lib = _ParseManifestAttributes(apk_filename)
+ sdk_version, skip_extract_lib = _ParseManifestAttributes(apk_filename)
+
+ # Pre-L: Dalvik - .odex file is simply decompressed/optimized dex file (~1x).
+ # L, M: ART - .odex file is compiled version of the dex file (~4x).
+ # N: ART - Uses Dalvik-like JIT for normal apps (~1x), full compilation for
+ # shared apps (~4x).
+ # Actual multipliers calculated using "apk_operations.py disk-usage".
+ # Will need to update multipliers once apk obfuscation is enabled.
+ # E.g. with obfuscation, the 4.04 changes to 4.46.
+ speed_profile_dex_multiplier = 1.17
+ is_shared_apk = sdk_version >= 24 and (
+ 'Monochrome' in apk_filename or 'WebView' in apk_filename)
+ if sdk_version < 21:
+ # JellyBean & KitKat
+ dex_multiplier = 1.16
+ elif sdk_version < 24:
+ # Lollipop & Marshmallow
+ dex_multiplier = 4.04
+ elif is_shared_apk:
+ # Oreo and above, compilation_filter=speed
+ dex_multiplier = 4.04
+ else:
+ # Oreo and above, compilation_filter=speed-profile
+ dex_multiplier = speed_profile_dex_multiplier
+
total_apk_size = os.path.getsize(apk_filename)
for member in apk_contents:
filename = member.filename
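
Note: a quick worked example of the multipliers above, using the values from
this patch: a 4 MiB uncompressed dex on an L/M device (21 <= sdk_version < 24)
is estimated at roughly four times its size once compiled by ART:

    uncompressed_dex_size = 4 * 1024 * 1024
    dex_multiplier = 4.04  # Lollipop & Marshmallow
    print(uncompressed_dex_size * dex_multiplier / 2.0 ** 20)  # ~16.16 MiB
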
@@ -417,31 +429,45 @@ def GenerateApkAnalysis(apk_filename, tool_prefix, out_dir,
unknown.AddZipInfo(member)
total_install_size = total_apk_size
+ total_install_size_android_go = total_apk_size
zip_overhead = total_apk_size
for group in file_groups:
actual_size = group.ComputeZippedSize()
install_size = group.ComputeInstallSize()
uncompressed_size = group.ComputeUncompressedSize()
-
- total_install_size += group.ComputeExtractedSize()
+ extracted_size = group.ComputeExtractedSize()
+ total_install_size += extracted_size
zip_overhead -= actual_size
yield ('Breakdown', group.name + ' size', actual_size, 'bytes')
- yield ('InstallBreakdown', group.name + ' size', install_size, 'bytes')
+ yield ('InstallBreakdown', group.name + ' size', int(install_size), 'bytes')
# Only a few metrics are compressed in the first place.
# To avoid over-reporting, track uncompressed size only for compressed
# entries.
if uncompressed_size != actual_size:
yield ('Uncompressed', group.name + ' size', uncompressed_size, 'bytes')
+ if group is java_code and is_shared_apk:
+ # Updates are compiled using quicken, but system image uses speed-profile.
+ extracted_size = uncompressed_size * speed_profile_dex_multiplier
+ total_install_size_android_go += extracted_size
+ yield ('InstallBreakdownGo', group.name + ' size',
+ actual_size + extracted_size, 'bytes')
+ else:
+ total_install_size_android_go += extracted_size
+
# Per-file zip overhead is caused by:
# * 30 byte entry header + len(file name)
# * 46 byte central directory entry + len(file name)
# * 0-3 bytes for zipalign.
yield ('Breakdown', 'Zip Overhead', zip_overhead, 'bytes')
yield ('InstallSize', 'APK size', total_apk_size, 'bytes')
- yield ('InstallSize', 'Estimated installed size', total_install_size, 'bytes')
+ yield ('InstallSize', 'Estimated installed size', int(total_install_size),
+ 'bytes')
+ if is_shared_apk:
+ yield ('InstallSize', 'Estimated installed size (Android Go)',
+ int(total_install_size_android_go), 'bytes')
transfer_size = _CalculateCompressedSize(apk_filename)
yield ('TransferSize', 'Transfer size (deflate)', transfer_size, 'bytes')
@@ -502,7 +528,13 @@ def GenerateApkAnalysis(apk_filename, tool_prefix, out_dir,
apk_filename, arsc.GetNumEntries(), num_arsc_translations, out_dir))
yield ('Specifics', 'normalized apk size', normalized_apk_size, 'bytes')
- yield ('Specifics', 'file count', len(apk_contents), 'zip entries')
+ # The "file count" metric cannot be grouped with any other metrics when the
+ # end result is going to be uploaded to the perf dashboard in the HistogramSet
+ # format due to mixed units (bytes vs. zip entries) causing malformed
+ # summaries to be generated.
+ # TODO(https://crbug.com/903970): Remove this workaround if unit mixing is
+ # ever supported.
+ yield ('FileCount', 'file count', len(apk_contents), 'zip entries')
if unknown_handler is not None:
for info in unknown.AllEntries():
@@ -692,7 +724,12 @@ def main():
help='Location of the build artifacts.')
argparser.add_argument('--chartjson',
action='store_true',
- help='Sets output mode to chartjson.')
+ help='DEPRECATED. Use --output-format=chartjson '
+ 'instead.')
+ argparser.add_argument('--output-format',
+ choices=['chartjson', 'histograms'],
+ help='Output the results to a file in the given '
+ 'format instead of printing the results.')
argparser.add_argument('--output-dir',
default='.',
help='Directory to save chartjson to.')
@@ -719,7 +756,11 @@ def main():
argparser.add_argument('apk', help='APK file path.')
args = argparser.parse_args()
- chartjson = _BASE_CHART.copy() if args.chartjson else None
+ # TODO(bsheedy): Remove this once uses of --chartjson have been removed.
+ if args.chartjson:
+ args.output_format = 'chartjson'
+
+ chartjson = _BASE_CHART.copy() if args.output_format else None
out_dir, tool_prefix = _ConfigOutDirAndToolsPrefix(args.out_dir)
if args.dump_sis and not out_dir:
argparser.error(
@@ -741,12 +782,31 @@ def main():
if args.estimate_patch_size:
_PrintPatchSizeEstimate(args.apk, args.reference_apk_builder,
args.reference_apk_bucket, chartjson=chartjson)
+
if chartjson:
results_path = os.path.join(args.output_dir, 'results-chart.json')
- logging.critical('Dumping json to %s', results_path)
+ logging.critical('Dumping chartjson to %s', results_path)
with open(results_path, 'w') as json_file:
json.dump(chartjson, json_file)
+ # We would ideally generate a histogram set directly instead of generating
+ # chartjson then converting. However, perf_tests_results_helper is in
+ # //build, which doesn't seem to have any precedent for depending on
+    # anything in Catapult. This can probably be fixed, but since this step
+    # is not performance-critical, converting is a good enough solution for
+    # the time being.
+ if args.output_format == 'histograms':
+ histogram_result = convert_chart_json.ConvertChartJson(results_path)
+ if histogram_result.returncode != 0:
+ logging.error('chartjson conversion failed with error: %s',
+ histogram_result.stdout)
+ return 1
+
+ histogram_path = os.path.join(args.output_dir, 'perf_results.json')
+ logging.critical('Dumping histograms to %s', histogram_path)
+ with open(histogram_path, 'w') as json_file:
+ json_file.write(histogram_result.stdout)
+
if __name__ == '__main__':
sys.exit(main())
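
Note: a hedged sketch of the added conversion step in isolation
(convert_chart_json lives in catapult's tracing.value, as imported above; the
returncode/stdout fields match how this patch uses the result object):

    from tracing.value import convert_chart_json  # needs catapult on sys.path

    histogram_result = convert_chart_json.ConvertChartJson('results-chart.json')
    if histogram_result.returncode == 0:
      with open('perf_results.json', 'w') as f:
        f.write(histogram_result.stdout)
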
diff --git a/chromium/build/android/resource_sizes.pydeps b/chromium/build/android/resource_sizes.pydeps
index 3b5878d53ba..42fddd0e149 100644
--- a/chromium/build/android/resource_sizes.pydeps
+++ b/chromium/build/android/resource_sizes.pydeps
@@ -24,6 +24,7 @@
../../third_party/catapult/devil/devil/android/sdk/dexdump.py
../../third_party/catapult/devil/devil/android/sdk/keyevent.py
../../third_party/catapult/devil/devil/android/sdk/version_codes.py
+../../third_party/catapult/devil/devil/base_error.py
../../third_party/catapult/devil/devil/constants/__init__.py
../../third_party/catapult/devil/devil/constants/exit_codes.py
../../third_party/catapult/devil/devil/devil_env.py
@@ -34,7 +35,13 @@
../../third_party/catapult/devil/devil/utils/reraiser_thread.py
../../third_party/catapult/devil/devil/utils/timeout_retry.py
../../third_party/catapult/devil/devil/utils/watchdog_timer.py
+../../third_party/catapult/third_party/vinn/vinn/__init__.py
+../../third_party/catapult/third_party/vinn/vinn/_vinn.py
../../third_party/catapult/third_party/zipfile/zipfile_2_7_13.py
+../../third_party/catapult/tracing/tracing/__init__.py
+../../third_party/catapult/tracing/tracing/value/__init__.py
+../../third_party/catapult/tracing/tracing/value/convert_chart_json.py
+../../third_party/catapult/tracing/tracing_project.py
../../third_party/depot_tools/breakpad.py
../../third_party/depot_tools/download_from_google_storage.py
../../third_party/depot_tools/subprocess2.py
diff --git a/chromium/build/android/test_runner.py b/chromium/build/android/test_runner.py
index 4f841537aaa..6b551159f9e 100755
--- a/chromium/build/android/test_runner.py
+++ b/chromium/build/android/test_runner.py
@@ -496,6 +496,9 @@ def AddJUnitTestOptions(parser):
parser = parser.add_argument_group('junit arguments')
parser.add_argument(
+ '--jacoco', action='store_true',
+ help='Generate jacoco report.')
+ parser.add_argument(
'--coverage-dir', type=os.path.realpath,
help='Directory to store coverage info.')
parser.add_argument(
@@ -742,6 +745,7 @@ def RunTestsInPlatformMode(args):
### Set up sigterm handler.
+ contexts_to_notify_on_sigterm = []
def unexpected_sigterm(_signum, _frame):
msg = [
'Received SIGTERM. Shutting down.',
@@ -755,6 +759,9 @@ def RunTestsInPlatformMode(args):
live_thread.name, live_thread.ident),
thread_stack])
+ for context in contexts_to_notify_on_sigterm:
+ context.ReceivedSigterm()
+
infra_error('\n'.join(msg))
signal.signal(signal.SIGTERM, unexpected_sigterm)
@@ -834,6 +841,9 @@ def RunTestsInPlatformMode(args):
test_run = test_run_factory.CreateTestRun(
args, env, test_instance, infra_error)
+ contexts_to_notify_on_sigterm.append(env)
+ contexts_to_notify_on_sigterm.append(test_run)
+
### Run.
with out_manager, json_finalizer():
with json_writer(), logcats_uploader, env, test_instance, test_run:
@@ -844,12 +854,18 @@ def RunTestsInPlatformMode(args):
lambda: collections.defaultdict(int))
iteration_count = 0
for _ in repetitions:
- raw_results = test_run.RunTests()
+ # raw_results will be populated with base_test_result.TestRunResults by
+ # test_run.RunTests(). It is immediately added to all_raw_results so
+ # that in the event of an exception, all_raw_results will already have
+ # the up-to-date results and those can be written to disk.
+ raw_results = []
+ all_raw_results.append(raw_results)
+
+ test_run.RunTests(raw_results)
if not raw_results:
+ all_raw_results.pop()
continue
- all_raw_results.append(raw_results)
-
iteration_results = base_test_result.TestRunResults()
for r in reversed(raw_results):
iteration_results.AddTestRunResults(r)
@@ -1003,6 +1019,10 @@ def main():
parser.error('--replace-system-package and --enable-concurrent-adb cannot '
'be used together')
+ if (getattr(args, 'jacoco', False) and
+ not getattr(args, 'coverage_dir', '')):
+ parser.error('--jacoco requires --coverage-dir')
+
if (hasattr(args, 'debug_socket') or
(hasattr(args, 'wait_for_java_debugger') and
args.wait_for_java_debugger)):
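
Note: a condensed sketch of the SIGTERM notification wiring this file now
sets up (names are from the patch; logging and the infra_error call are
elided):

    import signal

    contexts_to_notify_on_sigterm = []

    def unexpected_sigterm(_signum, _frame):
      # Give each registered context (env, test_run) a chance to record the
      # signal so that its TearDown() can skip hang-prone ADB work.
      for context in contexts_to_notify_on_sigterm:
        context.ReceivedSigterm()

    signal.signal(signal.SIGTERM, unexpected_sigterm)
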
diff --git a/chromium/build/android/test_wrapper/logdog_wrapper.pydeps b/chromium/build/android/test_wrapper/logdog_wrapper.pydeps
index cd57f2fd2f3..bb696587e3c 100644
--- a/chromium/build/android/test_wrapper/logdog_wrapper.pydeps
+++ b/chromium/build/android/test_wrapper/logdog_wrapper.pydeps
@@ -3,6 +3,7 @@
../../third_party/catapult/common/py_utils/py_utils/__init__.py
../../third_party/catapult/common/py_utils/py_utils/tempfile_ext.py
../../third_party/catapult/devil/devil/__init__.py
+../../third_party/catapult/devil/devil/base_error.py
../../third_party/catapult/devil/devil/utils/__init__.py
../../third_party/catapult/devil/devil/utils/reraiser_thread.py
../../third_party/catapult/devil/devil/utils/signal_handler.py
diff --git a/chromium/build/build_config.h b/chromium/build/build_config.h
index c7b02664c89..4d1ba77f2bc 100644
--- a/chromium/build/build_config.h
+++ b/chromium/build/build_config.h
@@ -140,7 +140,7 @@
#define ARCH_CPU_ARMEL 1
#define ARCH_CPU_32_BITS 1
#define ARCH_CPU_LITTLE_ENDIAN 1
-#elif defined(__aarch64__)
+#elif defined(__aarch64__) || defined(_M_ARM64)
#define ARCH_CPU_ARM_FAMILY 1
#define ARCH_CPU_ARM64 1
#define ARCH_CPU_64_BITS 1
diff --git a/chromium/build/chromeos/run_vm_test.py b/chromium/build/chromeos/run_vm_test.py
index f3894235ae2..390cc7090c5 100755
--- a/chromium/build/chromeos/run_vm_test.py
+++ b/chromium/build/chromeos/run_vm_test.py
@@ -155,10 +155,23 @@ class TastTest(RemoteTest):
return self._suite_name
def build_test_command(self):
+ if '--gtest_filter=%s' % self.suite_name in self._additional_args:
+ logging.info(
+ 'GTest filtering not supported for tast tests. The '
+ '--gtest_filter arg will be ignored.')
+ self._additional_args.remove('--gtest_filter=%s' % self.suite_name)
+ if any(arg.startswith('--gtest_repeat') for arg in self._additional_args):
+ logging.info(
+ '--gtest_repeat not supported for tast tests. The arg will be '
+ 'ignored.')
+ self._additional_args = [
+ arg for arg in self._additional_args if not arg.startswith(
+ '--gtest_repeat')]
+
if self._additional_args:
- raise TestFormatError(
- 'Tast tests should not have additional args: %s' % (
- self._additional_args))
+ logging.error(
+ 'Tast tests should not have additional args. These will be '
+ 'ignored: %s', self._additional_args)
self._vm_test_cmd += [
'--deploy',
@@ -244,6 +257,13 @@ class GTestTest(RemoteTest):
vpython_spec_path),
])
+      # Load the vivid kernel module before running capture_unittests.
+ # TODO(crbug.com/904730): Once we start loading vivid in init service,
+ # we can remove this code.
+ if self._test_exe == 'capture_unittests':
+ vm_test_script_contents.append(
+ 'echo "test0000" | sudo -S modprobe vivid n_devs=1 node_types=0x1')
+
test_invocation = (
'./%s --test-launcher-shard-index=%d '
'--test-launcher-total-shards=%d' % (
@@ -332,6 +352,13 @@ class BrowserSanityTest(RemoteTest):
'GTest filtering not supported for the sanity test. The '
'--gtest_filter arg will be ignored.')
self._additional_args.remove('--gtest_filter=%s' % SANITY_TEST_TARGET)
+ if any(arg.startswith('--gtest_repeat') for arg in self._additional_args):
+ logging.info(
+ '--gtest_repeat not supported for sanity test. The arg will be '
+ 'ignored.')
+ self._additional_args = [
+ arg for arg in self._additional_args if not arg.startswith(
+ '--gtest_repeat')]
if self._additional_args:
raise TestFormatError(
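
Note: the arg stripping used by both test classes above follows the same
shape; a standalone form (function and parameter names are illustrative):

    def strip_unsupported_gtest_args(additional_args, suite_name):
      gtest_filter = '--gtest_filter=%s' % suite_name
      if gtest_filter in additional_args:
        additional_args.remove(gtest_filter)
      return [a for a in additional_args
              if not a.startswith('--gtest_repeat')]
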
diff --git a/chromium/build/compiled_action.gni b/chromium/build/compiled_action.gni
index 02170af1bc5..7e25a0b6fc2 100644
--- a/chromium/build/compiled_action.gni
+++ b/chromium/build/compiled_action.gni
@@ -28,9 +28,9 @@
# of these change. If inputs is empty, the step will run only when the
# binary itself changes.
#
-# visibility
+# depfile
# deps
-# args (all optional)
+# visibility (all optional)
# Same meaning as action/action_foreach.
#
#
@@ -85,6 +85,7 @@ template("compiled_action") {
[
"data_deps",
"deps",
+ "depfile",
"inputs",
"outputs",
"testonly",
@@ -129,6 +130,7 @@ template("compiled_action_foreach") {
forward_variables_from(invoker,
[
"deps",
+ "depfile",
"inputs",
"outputs",
"sources",
diff --git a/chromium/build/config/BUILD.gn b/chromium/build/config/BUILD.gn
index b3cb81ca05a..da3b21cd354 100644
--- a/chromium/build/config/BUILD.gn
+++ b/chromium/build/config/BUILD.gn
@@ -32,8 +32,11 @@ declare_args() {
# Iterator debugging is generally useful for catching bugs. But it can
# introduce extra locking to check the state of an iterator against the state
# of the current object. For iterator- and thread-heavy code, this can
- # significantly slow execution.
- enable_iterator_debugging = true
+  # significantly slow execution - a slowdown of two orders of magnitude has
+  # been seen (crbug.com/903553), and iterator debugging also slows builds by
+  # making generation of snapshot_blob.bin take ~40-60 s longer. Therefore
+  # this defaults to off.
+ enable_iterator_debugging = false
}
# ==============================================
@@ -63,7 +66,7 @@ config("feature_flags") {
if (dcheck_always_on) {
defines += [ "DCHECK_ALWAYS_ON=1" ]
if (dcheck_is_configurable) {
- defines += [ "DCHECK_IS_CONFIGURABLE=1" ]
+ defines += [ "DCHECK_IS_CONFIGURABLE" ]
}
}
if (use_udev) {
@@ -349,11 +352,11 @@ config("executable_config") {
"//build/config/ios:ios_executable_flags",
]
} else if (is_linux || is_android || current_os == "aix") {
- configs += [ "//build/config/gcc:executable_ldconfig" ]
- if (is_android) {
- configs += [ "//build/config/android:executable_config" ]
- } else if (is_chromecast) {
+ configs += [ "//build/config/gcc:executable_config" ]
+ if (is_chromecast) {
configs += [ "//build/config/chromecast:executable_config" ]
+ } else if (is_fuchsia) {
+ configs += [ "//build/config/fuchsia:executable_config" ]
}
}
diff --git a/chromium/build/config/BUILDCONFIG.gn b/chromium/build/config/BUILDCONFIG.gn
index 387fdb2795a..f33cba178d7 100644
--- a/chromium/build/config/BUILDCONFIG.gn
+++ b/chromium/build/config/BUILDCONFIG.gn
@@ -203,6 +203,7 @@ if (host_toolchain == "") {
} else if (host_os == "win") {
# On Windows always use the target CPU for host builds for x86/x64. On the
# configurations we support this will always work and it saves build steps.
+ # Windows ARM64 targets require an x64 host for cross build.
if (target_cpu == "x86" || target_cpu == "x64") {
if (is_clang) {
host_toolchain = "//build/toolchain/win:win_clang_$target_cpu"
diff --git a/chromium/build/config/OWNERS b/chromium/build/config/OWNERS
index f1592d3b1ae..925b5051b72 100644
--- a/chromium/build/config/OWNERS
+++ b/chromium/build/config/OWNERS
@@ -1,5 +1,7 @@
dpranke@chromium.org
scottmg@chromium.org
+per-file *jumbo*=bratell@opera.com
+
per-file BUILDCONFIG.gn=dpranke@chromium.org
per-file BUILDCONFIG.gn=set noparent
diff --git a/chromium/build/config/android/BUILD.gn b/chromium/build/config/android/BUILD.gn
index 5ebde213fbe..7761687f1bf 100644
--- a/chromium/build/config/android/BUILD.gn
+++ b/chromium/build/config/android/BUILD.gn
@@ -125,6 +125,7 @@ config("runtime_library") {
root_build_dir)
}
cflags_cc += [
+ "-nostdinc++",
"-isystem" + libcxx_include_path,
"-isystem" + libcxxabi_include_path,
]
@@ -181,12 +182,6 @@ config("runtime_library") {
}
}
-config("executable_config") {
- cflags = [ "-fPIE" ]
- asmflags = [ "-fPIE" ]
- ldflags = [ "-pie" ]
-}
-
config("hide_all_but_jni_onload") {
ldflags = [ "-Wl,--version-script=" + rebase_path(
"//build/android/android_only_explicit_jni_exports.lst",
diff --git a/chromium/build/config/android/config.gni b/chromium/build/config/android/config.gni
index 6d3ac969777..e71d730dd29 100644
--- a/chromium/build/config/android/config.gni
+++ b/chromium/build/config/android/config.gni
@@ -215,8 +215,9 @@ if (is_android || is_chromeos) {
android_sdk_tools_bundle_aapt2 =
"//third_party/android_build_tools/aapt2/aapt2"
- # Path to r8.jar. If specified, will be used instead of ProGuard for optimization.
- experimental_r8_path = ""
+ # Use r8 for Java optimization rather than ProGuard.
+  # This will eventually be the default. https://crbug.com/872904
+ experimental_use_r8 = false
}
# We need a second declare_args block to make sure we are using the overridden
diff --git a/chromium/build/config/android/internal_rules.gni b/chromium/build/config/android/internal_rules.gni
index 69951610909..9948872a6f4 100644
--- a/chromium/build/config/android/internal_rules.gni
+++ b/chromium/build/config/android/internal_rules.gni
@@ -4,11 +4,11 @@
# Do not add any imports to non-//build directories here.
# Some projects (e.g. V8) do not have non-build directories DEPS'ed in.
-import("//build_overrides/build.gni")
import("//build/config/android/config.gni")
import("//build/config/dcheck_always_on.gni")
import("//build/config/python.gni")
import("//build/config/sanitizers/sanitizers.gni")
+import("//build_overrides/build.gni")
assert(is_android)
@@ -38,12 +38,10 @@ _java_target_whitelist = [
]
# Targets that match the whitelist but are not actually java targets.
-_java_target_blacklist = [
- "//chrome:packed_resources",
- "*:*_unpack_aar",
-]
+_java_target_blacklist = [ "*:*_unpack_aar" ]
_default_proguard_jar_path = "//third_party/proguard/lib/proguard.jar"
+_r8_path = "//third_party/r8/lib/r8.jar"
_dexlayout_path = "//third_party/android_build_tools/art/dexlayout"
_profman_path = "//third_party/android_build_tools/art/profman"
@@ -320,6 +318,11 @@ template("write_build_config") {
args += [ "--apk-proto-resources=$_rebased_proto_resources" ]
}
+ if (defined(invoker.module_rtxt_path)) {
+ _rebased_rtxt_path = rebase_path(invoker.module_rtxt_path, root_build_dir)
+ args += [ "--module-rtxt-path=$_rebased_rtxt_path" ]
+ }
+
if (defined(invoker.shared_libraries_runtime_deps_file)) {
# Don't list shared_libraries_runtime_deps_file as an input in order to
# avoid having to depend on the runtime_deps target. See comment in
@@ -831,6 +834,9 @@ if (enable_java_templates) {
if (!defined(deps)) {
deps = []
}
+ if (defined(invoker.srcjar_deps)) {
+ deps += invoker.srcjar_deps
+ }
if (defined(invoker.lint_suppressions_file)) {
lint_suppressions_file = invoker.lint_suppressions_file
@@ -838,7 +844,7 @@ if (enable_java_templates) {
lint_suppressions_file = "//build/android/lint/suppressions.xml"
}
- _lint_path = "$lint_android_sdk_root/tools/bin/lint"
+ _lint_path = "$lint_android_sdk_root/tools-lint/bin/lint"
_cache_dir = "$root_build_dir/android_lint_cache"
_result_path = "$target_gen_dir/$target_name/result.xml"
_config_path = "$target_gen_dir/$target_name/config.xml"
@@ -896,10 +902,7 @@ if (enable_java_templates) {
]
} else {
inputs += invoker.java_files
- inputs += [
- invoker.jar_path,
- invoker.build_config,
- ]
+ inputs += [ invoker.build_config ]
if (invoker.java_files != []) {
inputs += [ invoker.java_sources_file ]
_rebased_java_sources_file =
@@ -911,9 +914,6 @@ if (enable_java_templates) {
_rebased_build_config =
rebase_path(invoker.build_config, root_build_dir)
args += [
- "--jar-path",
- rebase_path(invoker.jar_path, root_build_dir),
- "--classpath=@FileArg($_rebased_build_config:javac:interface_classpath)",
"--srcjars=@FileArg($_rebased_build_config:gradle:bundled_srcjars)",
"--can-fail-build",
]
@@ -980,10 +980,10 @@ if (enable_java_templates) {
"--classpath",
"@FileArg($_rebased_build_config:android:sdk_jars)",
]
- if (experimental_r8_path != "") {
+ if (experimental_use_r8) {
args += [
"--r8-path",
- rebase_path(experimental_r8_path, root_build_dir),
+ rebase_path(_r8_path, root_build_dir),
]
}
if (defined(invoker.args)) {
@@ -1067,7 +1067,7 @@ if (enable_java_templates) {
_proguard_enabled =
defined(invoker.proguard_enabled) && invoker.proguard_enabled
- _proguarding_with_r8 = _proguard_enabled && experimental_r8_path != ""
+ _proguarding_with_r8 = _proguard_enabled && experimental_use_r8
assert(!(defined(invoker.input_jars) && _proguard_enabled),
"input_jars can't be specified when proguarding a dex.")
@@ -1131,6 +1131,13 @@ if (enable_java_templates) {
args += invoker.proguard_args
}
+ if (_proguarding_with_r8 && defined(invoker.min_sdk_version)) {
+ args += [
+ "--min-api",
+ "${invoker.min_sdk_version}",
+ ]
+ }
+
output_path = _proguard_output_path
}
}
@@ -1403,6 +1410,28 @@ if (enable_java_templates) {
_desugar = defined(invoker.supports_android) && invoker.supports_android
_emma_instrument = invoker.emma_instrument
+ _enable_bytecode_rewriter =
+ _enable_assert || _enable_custom_resources || _enable_thread_annotations
+ _is_prebuilt = defined(invoker.is_prebuilt) && invoker.is_prebuilt
+ _enable_bytecode_checks = !defined(invoker.enable_bytecode_checks) ||
+ invoker.enable_bytecode_checks
+
+ # Release builds don't have asserts enabled, so they often will not run the
+ # bytecode rewriter. We are okay with having release builds not run the
+ # bytecode checks at all, since the dependency errors can be caught in debug
+ # mode.
+ not_needed([
+ "_is_prebuilt",
+ "_enable_bytecode_checks",
+ ])
+ if (defined(invoker.enable_bytecode_rewriter)) {
+ not_needed([
+ "_enable_assert",
+ "_enable_custom_resources",
+ "_enable_thread_annotations",
+ ])
+ _enable_bytecode_rewriter = invoker.enable_bytecode_rewriter
+ }
_jar_excluded_patterns = []
if (defined(invoker.jar_excluded_patterns)) {
@@ -1420,8 +1449,7 @@ if (enable_java_templates) {
_deps = []
_previous_output_jar = _input_jar_path
- if (_enable_assert || _enable_custom_resources ||
- _enable_thread_annotations) {
+ if (_enable_bytecode_rewriter) {
_java_bytecode_rewriter_target = "${target_name}__bytecode_rewrite"
_java_bytecode_rewriter_input_jar = _previous_output_jar
_java_bytecode_rewriter_output_jar =
@@ -1451,6 +1479,9 @@ if (enable_java_templates) {
"--output-jar",
rebase_path(_java_bytecode_rewriter_output_jar, root_build_dir),
]
+ if (_is_prebuilt) {
+ args += [ "--is-prebuilt" ]
+ }
if (_enable_assert) {
args += [ "--enable-assert" ]
}
@@ -1460,10 +1491,15 @@ if (enable_java_templates) {
if (_enable_thread_annotations) {
args += [ "--enable-thread-annotations" ]
}
+ if (_enable_bytecode_checks) {
+ args += [ "--enable-check-class-path" ]
+ }
args += [
- "--extra-classpath-jar",
+ "--direct-classpath-jars",
+ "@FileArg($_rebased_build_config:javac:classpath)",
+ "--sdk-classpath-jars",
"@FileArg($_rebased_build_config:android:sdk_jars)",
- "--extra-classpath-jar",
+ "--extra-classpath-jars",
"@FileArg($_rebased_build_config:deps_info:javac_full_classpath)",
]
}
@@ -1940,14 +1976,15 @@ if (enable_java_templates) {
outputs = []
_android_aapt_path = android_default_aapt_path
+ _android_aapt2_path = android_sdk_tools_bundle_aapt2
if (_proto_format) {
- _android_aapt2_path = android_sdk_tools_bundle_aapt2
depfile = "$target_gen_dir/${invoker.target_name}_3.d"
}
inputs = [
invoker.build_config,
_android_aapt_path,
+ _android_aapt2_path,
]
_rebased_build_config = rebase_path(invoker.build_config, root_build_dir)
@@ -1958,19 +1995,13 @@ if (enable_java_templates) {
"--include-resources=@FileArg($_rebased_build_config:android:sdk_jars)",
"--aapt-path",
rebase_path(_android_aapt_path, root_build_dir),
+ "--aapt2-path",
+ rebase_path(_android_aapt2_path, root_build_dir),
"--dependencies-res-zips=@FileArg($_rebased_build_config:resources:dependency_zips)",
"--extra-res-packages=@FileArg($_rebased_build_config:resources:extra_package_names)",
"--extra-r-text-files=@FileArg($_rebased_build_config:resources:extra_r_text_files)",
]
- if (_proto_format) {
- inputs += [ _android_aapt2_path ]
- args += [
- "--aapt2-path",
- rebase_path(_android_aapt2_path, root_build_dir),
- ]
- }
-
inputs += [ invoker.android_manifest ]
args += [
"--android-manifest",
@@ -2007,6 +2038,25 @@ if (enable_java_templates) {
]
}
+ if (defined(invoker.optimize_resources) && invoker.optimize_resources) {
+ args += [ "--optimize-resources" ]
+ if (defined(invoker.resources_config_path)) {
+ inputs += [ invoker.resources_config_path ]
+ args += [
+ "--resources-config-path",
+ rebase_path(invoker.resources_config_path, root_build_dir),
+ ]
+ }
+
+ if (defined(invoker.unoptimized_resources_path)) {
+ args += [
+ "--unoptimized-resources-path",
+ rebase_path(invoker.unoptimized_resources_path, root_build_dir),
+ ]
+ outputs += [ invoker.unoptimized_resources_path ]
+ }
+ }
+
# Useful to have android:debuggable in the manifest even for Release
  # builds. Just omit it for official builds.
if (debuggable_apks) {
@@ -2294,6 +2344,9 @@ if (enable_java_templates) {
"--key-passwd",
invoker.keystore_password,
]
+ if (defined(invoker.uncompress_dex) && invoker.uncompress_dex) {
+ args += [ "--uncompress-dex" ]
+ }
if (defined(invoker.assets_build_config)) {
inputs += [ invoker.assets_build_config ]
_rebased_build_config =
@@ -2418,6 +2471,7 @@ if (enable_java_templates) {
"secondary_native_lib_placeholders",
"secondary_abi_native_libs_filearg",
"secondary_abi_loadable_modules",
+ "uncompress_dex",
"uncompress_shared_libraries",
"write_asset_list",
])
@@ -2456,6 +2510,13 @@ if (enable_java_templates) {
get_label_info(_incremental_compile_resources_target_name,
"target_gen_dir") + "/AndroidManifest.xml"
+ if (defined(invoker.unoptimized_resources_path)) {
+ _incremental_packaged_resources_path =
+ invoker.unoptimized_resources_path
+ } else {
+ _incremental_packaged_resources_path = invoker.packaged_resources_path
+ }
+
_rebased_build_config =
rebase_path(invoker.assets_build_config, root_build_dir)
@@ -2466,7 +2527,7 @@ if (enable_java_templates) {
inputs = [
_android_manifest,
invoker.assets_build_config,
- invoker.packaged_resources_path,
+ _incremental_packaged_resources_path,
]
outputs = [
# Output the non-compiled manifest for easy debugging (as opposed to
@@ -2481,7 +2542,7 @@ if (enable_java_templates) {
"--out-manifest",
rebase_path(_incremental_android_manifest, root_build_dir),
"--in-apk",
- rebase_path(invoker.packaged_resources_path, root_build_dir),
+ rebase_path(_incremental_packaged_resources_path, root_build_dir),
"--out-apk",
rebase_path(_incremental_compiled_resources_path, root_build_dir),
"--aapt-path",
@@ -2592,12 +2653,6 @@ if (enable_java_templates) {
_build_config = invoker.build_config
_chromium_code = invoker.chromium_code
- if (defined(invoker.enable_errorprone)) {
- _enable_errorprone = invoker.enable_errorprone
- } else {
- _enable_errorprone = use_errorprone_java_compiler && _chromium_code
- }
-
_provider_configurations = []
if (defined(invoker.provider_configurations)) {
_provider_configurations = invoker.provider_configurations
@@ -2648,7 +2703,6 @@ if (enable_java_templates) {
outputs = [
invoker.javac_jar_path,
- invoker.javac_jar_path + ".md5.stamp",
invoker.javac_jar_path + ".info",
]
inputs = invoker.java_files + _java_srcjars + [ _build_config ]
@@ -2690,7 +2744,7 @@ if (enable_java_templates) {
if (_chromium_code) {
args += [ "--chromium-code=1" ]
}
- if (_enable_errorprone) {
+ if (invoker.enable_errorprone) {
deps += [ "//third_party/errorprone:errorprone($default_toolchain)" ]
deps += [ "//tools/android/errorprone_plugin:errorprone_plugin_java($default_toolchain)" ]
_rebased_errorprone_processorpath = [
@@ -2713,7 +2767,8 @@ if (enable_java_templates) {
# Each element is of length two, [ path_to_file, path_to_put_in_jar ]
inputs += [ file_tuple[0] ]
args +=
- [ "--additional-jar-file=" + file_tuple[0] + ":" + file_tuple[1] ]
+ [ "--additional-jar-file=" +
+ rebase_path(file_tuple[0], root_build_dir) + ":" + file_tuple[1] ]
}
if (invoker.java_files != []) {
args += [ "@" + rebase_path(invoker.java_sources_file, root_build_dir) ]
@@ -2790,6 +2845,8 @@ if (enable_java_templates) {
# supports_android: Optional. True if target can run on Android.
# requires_android: Optional. True if target can only run on Android.
# java_files: Optional list of Java source file paths for this target.
+ # javac_args: Optional list of extra arguments to pass to javac.
+  #  errorprone_args: Optional list of extra arguments to pass to errorprone.
# srcjar_deps: Optional list of .srcjar targets (not file paths). The Java
# source files they contain will also be compiled for this target.
# java_sources_file: Optional path to a file which will be written with
@@ -2855,7 +2912,7 @@ if (enable_java_templates) {
# jar_included_patterns: Optional list of .class file patterns to include
# in the final .jar file. jar_excluded_patterns take precedence over this.
#
- # For 'android_apk' targets only:
+ # For 'android_apk' and 'android_app_bundle_module' targets only:
#
# apk_path: Path to the final APK file.
# android_manifest: Path to AndroidManifest.xml file for the APK.
@@ -2877,6 +2934,11 @@ if (enable_java_templates) {
# be stored in the APK.
# uncompress_shared_libraries: Optional. True to store native shared
# libraries uncompressed and page-aligned.
+  #   proto_resources_path: The path of a zip archive containing the APK's
+ # resources compiled to the protocol buffer format (instead of regular
+ # binary xml + resources.arsc).
+ # module_rtxt_path: The path of the R.txt file generated when compiling the
+ # resources for the bundle module.
#
# For 'java_binary' and 'junit_binary' targets only. Ignored by others:
#
@@ -3093,7 +3155,11 @@ if (enable_java_templates) {
])
}
if (type == "android_app_bundle_module") {
- forward_variables_from(invoker, [ "proto_resources_path" ])
+ forward_variables_from(invoker,
+ [
+ "proto_resources_path",
+ "module_rtxt_path",
+ ])
}
build_config = _build_config
is_prebuilt = _is_prebuilt
@@ -3154,55 +3220,86 @@ if (enable_java_templates) {
# TODO(agrieve): Enable lint for _has_sources rather than just _java_files.
_lint_enabled = _java_files != [] && _supports_android && _chromium_code &&
!disable_android_lint
+ if (defined(invoker.enable_errorprone)) {
+ _enable_errorprone = invoker.enable_errorprone
+ } else {
+ _enable_errorprone =
+ _java_files != [] && _chromium_code && use_errorprone_java_compiler
+ }
if (_has_sources) {
- _compile_java_target = "${_main_target_name}__compile_java"
- compile_java(_compile_java_target) {
- forward_variables_from(invoker,
- [
- "additional_jar_files",
- "apk_name",
- "enable_errorprone",
- "enable_incremental_javac_override",
- "processor_args_javac",
- "provider_configurations",
- "javac_args",
- ])
- main_target_name = _main_target_name
- build_config = _build_config
- java_files = _java_files
- if (_java_files != []) {
- java_sources_file = _java_sources_file
+ _type = invoker.type
+ template("compile_java_helper") {
+ compile_java(target_name) {
+ forward_variables_from(invoker, "*")
+ enable_errorprone = invoker.enable_errorprone
+ javac_jar_path = invoker.javac_jar_path
+
+ main_target_name = _main_target_name
+ build_config = _build_config
+ java_files = _java_files
+ if (_java_files != []) {
+ java_sources_file = _java_sources_file
+ }
+ srcjar_deps = _srcjar_deps
+ chromium_code = _chromium_code
+ requires_android = _requires_android
+ deps = _accumulated_deps + _accumulated_public_deps
+
+ # android_apk and junit_binary pass R.java srcjars via srcjar_deps.
+ if (_type == "java_library" && _requires_android) {
+ _rebased_build_config = rebase_path(_build_config, root_build_dir)
+ srcjar_filearg = "@FileArg($_rebased_build_config:deps_info:owned_resource_srcjars)"
+ }
}
- srcjar_deps = _srcjar_deps
- chromium_code = _chromium_code
- requires_android = _requires_android
- deps = _accumulated_deps + _accumulated_public_deps
+ }
+ _analysis_public_deps = []
+ _compile_java_target = "${_main_target_name}__compile_java"
+ _compile_java_forward_variables = [
+ "additional_jar_files",
+ "apk_name",
+ "enable_incremental_javac_override",
+ "processor_args_javac",
+ "provider_configurations",
+ "javac_args",
+ ]
+ compile_java_helper(_compile_java_target) {
+ forward_variables_from(invoker, _compile_java_forward_variables)
+ enable_errorprone = false
javac_jar_path = _javac_jar_path
-
- # android_apk and junit_binary pass R.java srcjars via srcjar_deps.
- if (invoker.type == "java_library" && _requires_android) {
- _rebased_build_config = rebase_path(_build_config, root_build_dir)
- srcjar_filearg = "@FileArg($_rebased_build_config:deps_info:owned_resource_srcjars)"
+ }
+ if (_enable_errorprone) {
+ _compile_java_errorprone_target =
+ "${_main_target_name}__compile_java_errorprone"
+ compile_java_helper(_compile_java_errorprone_target) {
+ forward_variables_from(invoker, _compile_java_forward_variables)
+ enable_errorprone = true
+ if (defined(invoker.errorprone_args)) {
+ if (!defined(javac_args)) {
+ javac_args = []
+ }
+ javac_args += invoker.errorprone_args
+ }
+ javac_jar_path = _javac_jar_path + ".errorprone.jar"
}
+ _analysis_public_deps += [ ":$_compile_java_errorprone_target" ]
}
- _accumulated_public_deps += [ ":$_compile_java_target" ]
-
if (defined(invoker.android_manifest_for_lint)) {
_android_manifest_for_lint = invoker.android_manifest_for_lint
assert(_android_manifest_for_lint != "") # Mark as used.
}
if (_lint_enabled) {
- android_lint("${_main_target_name}__lint") {
+ _android_lint_target = "${_main_target_name}__lint"
+ android_lint(_android_lint_target) {
if (invoker.type == "android_apk" ||
invoker.type == "android_app_bundle_module") {
forward_variables_from(invoker, [ "android_manifest" ])
} else if (defined(_android_manifest_for_lint)) {
android_manifest = _android_manifest_for_lint
}
+ srcjar_deps = _srcjar_deps
build_config = _build_config
requires_android = _requires_android
- jar_path = _javac_jar_path
deps = _accumulated_deps + _accumulated_public_deps
java_files = _java_files
if (_java_files != []) {
@@ -3212,16 +3309,20 @@ if (enable_java_templates) {
lint_suppressions_file = invoker.lint_suppressions_file
}
}
+ _analysis_public_deps += [ ":$_android_lint_target" ]
+ }
+ if (_analysis_public_deps != []) {
    # Use an intermediate group() as the data_deps target in order to
- # avoid lint artifacts showing up as runtime_deps (while still having lint
- # run in parallel to other targets).
+ # avoid errorprone or lint artifacts showing up as runtime_deps (while
+ # still having them run in parallel to other targets).
group("${_main_target_name}__analysis") {
- public_deps = [
- ":${_main_target_name}__lint",
- ]
+ public_deps = _analysis_public_deps
}
}
+
+ # Update this after lint so that lint does not depend on javac.
+ _accumulated_public_deps += [ ":$_compile_java_target" ]
} # _has_sources
if (defined(_final_jar_path)) {
@@ -3245,9 +3346,12 @@ if (enable_java_templates) {
process_java_prebuilt(_process_prebuilt_target_name) {
forward_variables_from(invoker,
[
+ "enable_bytecode_checks",
+ "enable_bytecode_rewriter",
"jar_excluded_patterns",
"jar_included_patterns",
])
+ is_prebuilt = _is_prebuilt
supports_android = _supports_android
enable_build_hooks = _enable_build_hooks
enable_build_hooks_android = _enable_build_hooks_android
@@ -3303,7 +3407,7 @@ if (enable_java_templates) {
# BuildConfig, NativeLibraries, etc.
input_jar = _unprocessed_jar_path
output_jar = _final_ijar_path
- if (_lint_enabled) {
+ if (_lint_enabled || _enable_errorprone) {
if (!defined(data_deps)) {
data_deps = []
}
@@ -3360,7 +3464,7 @@ if (enable_java_templates) {
"visibility",
])
public_deps = _accumulated_public_deps
- if (_lint_enabled) {
+ if (_lint_enabled || _enable_errorprone) {
if (!defined(data_deps)) {
data_deps = []
}
@@ -3423,7 +3527,7 @@ template("create_android_app_bundle_module") {
"--format=bundle-module",
"--output-apk",
rebase_path(invoker.module_zip_path, root_build_dir),
- "--dex-file=${invoker.dex_path_file_arg}",
+ "--dex-file=@FileArg($_rebased_build_config:final_dex:path)",
"--resource-apk=@FileArg(" +
"$_rebased_build_config:deps_info:proto_resources_path)",
"--assets=@FileArg($_rebased_build_config:assets)",
@@ -3445,40 +3549,51 @@ template("create_android_app_bundle_module") {
}
}
-# Extracts a bundle module's classes from jar created in the synchronized
-# proguarding step and packages them into a new jar.
+# Splits input dex file(s) based on given feature jars into separate dex files
+# for each feature.
#
# Variables:
-# proguarded_jar: Path to input jar produced by synchronized proguarding.
# proguard_mapping: Path to input proguard mapping produced by synchronized
-# proguarding
-# output_jar: Path to output jar file containing the module's optimized class
-# files.
-# build_config: Path to build config of module.
-# is_base_module: True if this is processing a base module.
-template("generate_proguarded_module_jar") {
- _rebased_build_config = rebase_path(invoker.build_config, root_build_dir)
-
+# proguarding.
+# input_dex_zip: Path to zipped dex files to split.
+#   all_modules: List of all modules. Each module must have
+# build_config, name, and build_config_target properties.
+template("dexsplitter") {
action_with_pydeps(target_name) {
forward_variables_from(invoker, [ "deps" ])
- script = "//build/android/gyp/generate_proguarded_module_jar.py"
+ script = "//build/android/gyp/dexsplitter.py"
+ inputs = [
+ invoker.input_dex_zip,
+ ]
+ _stamp = "${target_gen_dir}/${target_name}.stamp"
outputs = [
- invoker.output_jar,
+ _stamp,
]
+
depfile = "${target_gen_dir}/${target_name}.d"
args = [
+ "--stamp",
+ rebase_path(_stamp, root_build_dir),
"--depfile",
rebase_path(depfile, root_build_dir),
- "--proguarded-jar",
- rebase_path(invoker.proguarded_jar, root_build_dir),
- "--proguard-mapping",
+ "--r8-path",
+ rebase_path(_r8_path, root_build_dir),
+ "--input-dex-zip",
+ rebase_path(invoker.input_dex_zip, root_build_dir),
+ "--proguard-mapping-file",
rebase_path(invoker.proguard_mapping, root_build_dir),
- "--module-input-jars=@FileArg(${_rebased_build_config}:deps_info:java_runtime_classpath)",
- "--output-jar",
- rebase_path(invoker.output_jar, root_build_dir),
]
- if (defined(invoker.is_base_module) && invoker.is_base_module) {
- args += [ "--is-base-module" ]
+
+ foreach(_feature_module, invoker.all_modules) {
+ _rebased_module_build_config =
+ rebase_path(_feature_module.build_config, root_build_dir)
+ args += [
+ "--feature-name",
+ _feature_module.name,
+ "--feature-jars=@FileArg($_rebased_module_build_config:deps_info:java_runtime_classpath)",
+ "--dex-dest=@FileArg($_rebased_module_build_config:final_dex:path)",
+ ]
+ deps += [ _feature_module.build_config_target ]
}
}
}
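
Note: approximately the command line the dexsplitter template above
assembles (flags are those listed in the template; the concrete paths here
are hypothetical, and @FileArg values are expanded by the script itself):

    cmd = [
        'build/android/gyp/dexsplitter.py',
        '--stamp', 'gen/bundle/dexsplit.stamp',
        '--depfile', 'gen/bundle/dexsplit.d',
        '--r8-path', 'third_party/r8/lib/r8.jar',
        '--input-dex-zip', 'obj/bundle/classes.dex.zip',
        '--proguard-mapping-file', 'obj/bundle/mapping.txt',
        # Repeated once per feature module:
        '--feature-name', 'base',
        '--feature-jars=@FileArg(gen/base.build_config:deps_info:java_runtime_classpath)',
        '--dex-dest=@FileArg(gen/base.build_config:final_dex:path)',
    ]
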
diff --git a/chromium/build/config/android/linker_version_script.gni b/chromium/build/config/android/linker_version_script.gni
new file mode 100644
index 00000000000..6cb3b090137
--- /dev/null
+++ b/chromium/build/config/android/linker_version_script.gni
@@ -0,0 +1,37 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/python.gni")
+
+# Generate a custom linker version script that can later be used with
+# "-Wl,--version-script=<path>" ldflags.
+#
+# Variables:
+# export_java_symbols: Optional. If true, also export all Java_* symbols
+# exported for JNI.
+# export_symbol_whitelist_file: Optional. Path to an input file containing
+# a whitelist of exported symbols.
+# linker_script: Path to output linker version script.
+#
+template("generate_linker_version_script") {
+ action_with_pydeps(target_name) {
+ script = "//build/android/gyp/generate_linker_version_script.py"
+ outputs = [
+ invoker.linker_script,
+ ]
+ inputs = []
+ args = [ "--output=" + rebase_path(invoker.linker_script, root_build_dir) ]
+
+ if (defined(invoker.export_java_symbols) && invoker.export_java_symbols) {
+ args += [ "--export-java-symbols" ]
+ }
+
+ if (defined(invoker.export_symbol_whitelist_file)) {
+ inputs += [ invoker.export_symbol_whitelist_file ]
+ args +=
+ [ "--export-symbol-whitelist-file=" +
+ rebase_path(invoker.export_symbol_whitelist_file, root_build_dir) ]
+ }
+ }
+}
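
Note: a hypothetical direct invocation of the script this template wraps
(the flags are the ones the template passes; the paths are made up):

    import subprocess

    subprocess.check_call([
        'build/android/gyp/generate_linker_version_script.py',
        '--output=gen/libmonochrome.so.version_script',
        '--export-java-symbols',
    ])
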
diff --git a/chromium/build/config/android/rules.gni b/chromium/build/config/android/rules.gni
index d7e92f0ac6c..c1c7ae16cd8 100644
--- a/chromium/build/config/android/rules.gni
+++ b/chromium/build/config/android/rules.gni
@@ -249,6 +249,10 @@ if (enable_java_templates) {
rebase_path(_jni_generator_include, _jni_output_dir),
]
+ if (!is_java_debug) {
+ args += [ "--use_proxy_hash" ]
+ }
+
if (enable_profiling) {
args += [ "--enable_profiling" ]
}
@@ -386,25 +390,29 @@ if (enable_java_templates) {
# Declare a jni registration target.
#
- # This target generates a header file calling JNI registration functions
- # created by generate_jni and generate_jar_jni.
+ # This target generates a srcjar containing a copy of GEN_JNI.java, which has
+ # the native methods of all dependent java files. It can also create a .h file
+ # for use with manual JNI registration.
+ #
+ # The script does not scan any generated sources (those within .srcjars, or
+ # within root_build_dir). This could be fixed by adding deps & logic to scan
+ # .srcjars, but isn't currently needed.
#
# See base/android/jni_generator/jni_registration_generator.py for more info
# about the format of the header file.
#
# Variables
- # target: The Apk target to generate registrations for.
- # output: Path to the generated .h file.
- # exception_files: List of .java files that should be ignored when searching
- # for native methods. (optional)
+ # target: The Apk target to use for the java sources list.
+ # header_output: Path to the generated .h file (optional).
+ # sources_blacklist: List of .java files that should be skipped. (optional)
# namespace: Registration functions will be wrapped into this. (optional)
#
# Example
# generate_jni_registration("chrome_jni_registration") {
# target = ":chrome_public_apk"
- # output = "$root_gen_dir/chrome/browser/android/${target_name}.h"
- # exception_files = [
- # "//base/android/java/src/org/chromium/base/library_loader/Linker.java",
+ # header_output = "$target_gen_dir/$target_name.h"
+ # sources_blacklist = [
+ # "//path/to/Exception.java",
# ]
# }
template("generate_jni_registration") {
@@ -413,6 +421,7 @@ if (enable_java_templates) {
_build_config = get_label_info(invoker.target, "target_gen_dir") + "/" +
get_label_info(invoker.target, "name") + ".build_config"
_rebased_build_config = rebase_path(_build_config, root_build_dir)
+ _srcjar_output = "$target_gen_dir/$target_name.srcjar"
script = "//base/android/jni_generator/jni_registration_generator.py"
deps = [
@@ -422,23 +431,37 @@ if (enable_java_templates) {
_build_config,
]
outputs = [
- invoker.output,
+ _srcjar_output,
]
depfile = "$target_gen_dir/$target_name.d"
args = [
# This is a list of .sources files.
- "--sources_files=@FileArg($_rebased_build_config:jni:all_source)",
- "--output",
- rebase_path(invoker.output, root_build_dir),
+ "--sources-files=@FileArg($_rebased_build_config:jni:all_source)",
+ "--srcjar-path",
+ rebase_path(_srcjar_output, root_build_dir),
"--depfile",
rebase_path(depfile, root_build_dir),
]
- if (defined(invoker.exception_files)) {
- _rebase_exception_java_files =
- rebase_path(invoker.exception_files, root_build_dir)
- args += [ "--no_register_java=$_rebase_exception_java_files" ]
+
+ if (!is_java_debug) {
+ args += [ "--use_proxy_hash" ]
+ }
+
+ if (defined(invoker.header_output)) {
+ outputs += [ invoker.header_output ]
+ args += [
+ "--header-path",
+ rebase_path(invoker.header_output, root_build_dir),
+ ]
}
+
+ if (defined(invoker.sources_blacklist)) {
+ _rebase_sources_blacklist =
+ rebase_path(invoker.sources_blacklist, root_build_dir)
+ args += [ "--sources-blacklist=$_rebase_sources_blacklist" ]
+ }
+
if (defined(invoker.namespace)) {
args += [ "--namespace=${invoker.namespace}" ]
}
@@ -1019,7 +1042,11 @@ if (enable_java_templates) {
# }
# }
template("java_group") {
- forward_variables_from(invoker, [ "testonly" ])
+ forward_variables_from(invoker,
+ [
+ "testonly",
+ "input_jars_paths",
+ ])
write_build_config("${target_name}__build_config") {
type = "group"
build_config = "$target_gen_dir/${invoker.target_name}.build_config"
@@ -1262,6 +1289,12 @@ if (enable_java_templates) {
}
}
+ _jni_srcjar_target = "${target_name}__final_jni"
+ _outer_target_name = target_name
+ generate_jni_registration(_jni_srcjar_target) {
+ target = ":$_outer_target_name"
+ }
+
java_library_impl(_java_binary_target_name) {
forward_variables_from(invoker, "*", [ "deps" ])
type = "junit_binary"
@@ -1281,6 +1314,7 @@ if (enable_java_templates) {
srcjar_deps = []
}
srcjar_deps += [
+ ":$_jni_srcjar_target",
":$_prepare_resources_target",
# This dep is required for any targets that depend on //base:base_java.
@@ -1356,6 +1390,7 @@ if (enable_java_templates) {
# [ [ path_to_file, path_to_put_in_jar ] ]
#
# javac_args: Additional arguments to pass to javac.
+ # errorprone_args: Additional arguments to pass to errorprone.
#
# data_deps, testonly
#
@@ -1726,6 +1761,8 @@ if (enable_java_templates) {
jar_excluded_patterns = []
}
jar_excluded_patterns += [
+ "*J/N.class",
+ "*/GEN_JNI.class",
"*/R.class",
"*/R\$*.class",
"*/Manifest.class",
@@ -1862,6 +1899,13 @@ if (enable_java_templates) {
# generate_buildconfig_java: If defined and false, skip generating the
# BuildConfig java class describing the build configuration. The default
# is true for non-test APKs.
+ # generate_final_jni: If defined and false, skip generating the
+ # GEN_JNI srcjar.
+ # jni_registration_header: If specified, causes the
+ # ${target_name}__final_jni target to additionally output a
+ # header file to this path for use with manual JNI registration.
+ # jni_sources_blacklist: List of source paths to exclude from the
+ # final_jni step.
# firebase_app_id: The value for BuildConfig.FIREBASE_APP_ID (optional).
# Identifier is sent with crash reports to enable Java stack deobfuscation.
# aapt_locale_whitelist: If set, all locales not in this list will be
@@ -1884,6 +1928,11 @@ if (enable_java_templates) {
# uncompress_shared_libraries: True if shared libraries should be stored
# uncompressed in the APK. Must be unset or true if load_library_from_apk
# is set to true.
+ # uncompress_dex: Store final .dex files uncompressed in the apk.
+ # optimize_resources: True if resource names should be stripped from the
+ # resources.arsc file in the apk or module.
+ # resources_config_path: Path to the aapt2 optimize config file that tags
+ # resources with acceptable/non-acceptable optimizations.
template("android_apk_or_module") {
forward_variables_from(invoker, [ "testonly" ])
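A hedged usage sketch (not part of this change; the target name and all paths
are invented for illustration) showing how a caller might exercise the new
variables documented above:

  android_apk("foo_apk") {
    apk_name = "Foo"
    android_manifest = "AndroidManifest.xml"
    java_files = [ "java/src/org/chromium/foo/FooActivity.java" ]

    # Emit a header for manual JNI registration and skip one source file
    # when scanning for native methods.
    jni_registration_header = "$target_gen_dir/foo_jni_registration.h"
    jni_sources_blacklist = [ "//path/to/Exception.java" ]

    # Strip resource names per an aapt2 config, and keep .dex uncompressed.
    optimize_resources = true
    resources_config_path = "aapt2.config"
    uncompress_dex = true
  }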
@@ -1913,10 +1962,11 @@ if (enable_java_templates) {
if (defined(invoker.final_apk_path)) {
_final_apk_path = invoker.final_apk_path
- } else if (!_is_bundle_module) {
- _final_apk_path = "$root_build_dir/apks/${invoker.name}.apk"
} else {
- _final_apk_path = "$root_build_dir/bundle_modules/${invoker.name}"
+ _final_apk_path = "$root_build_dir/apks/${invoker.name}.apk"
+ }
+ if (!_is_bundle_module) {
+ _final_rtxt_path = "${_final_apk_path}.R.txt"
}
_final_apk_path_no_ext_list =
process_file_template([ _final_apk_path ],
@@ -2079,6 +2129,12 @@ if (enable_java_templates) {
_generate_buildconfig_java = invoker.generate_buildconfig_java
}
+ # JNI generation usually goes hand-in-hand with buildconfig generation.
+ _generate_final_jni = _generate_buildconfig_java
+ if (defined(invoker.generate_final_jni)) {
+ _generate_final_jni = invoker.generate_final_jni
+ }
+
_proguard_enabled =
defined(invoker.proguard_enabled) && invoker.proguard_enabled
if (_proguard_enabled) {
@@ -2123,7 +2179,16 @@ if (enable_java_templates) {
_android_sdk_dep = "//third_party/android_tools:android_sdk_java"
}
+ _optimize_resources =
+ defined(invoker.optimize_resources) && invoker.optimize_resources
+ if (_optimize_resources) {
+ _unoptimized_resources_path =
+ "$target_out_dir/$_template_name.unoptimized.ap_"
+ }
+
_compile_resources_target = "${_template_name}__compile_resources"
+ _compile_resources_rtxt_out =
+ "${target_gen_dir}/${_compile_resources_target}_R.txt"
compile_resources(_compile_resources_target) {
forward_variables_from(invoker,
[
@@ -2133,6 +2198,7 @@ if (enable_java_templates) {
"aapt_locale_whitelist",
"resource_blacklist_regex",
"resource_blacklist_exceptions",
+ "resources_config_path",
"png_to_webp",
"no_xml_namespaces",
])
@@ -2144,12 +2210,16 @@ if (enable_java_templates) {
post_process_script = invoker.post_process_package_resources_script
}
srcjar_path = "${target_gen_dir}/${target_name}.srcjar"
- r_text_out_path = "${target_gen_dir}/${target_name}_R.txt"
+ r_text_out_path = _compile_resources_rtxt_out
proguard_file = _generated_proguard_config
if (_enable_multidex) {
proguard_file_main_dex = _generated_proguard_main_dex_config
}
output = _packaged_resources_path
+ if (_optimize_resources) {
+ optimize_resources = true
+ unoptimized_resources_path = _unoptimized_resources_path
+ }
build_config = _build_config
deps = _deps + [
@@ -2182,6 +2252,28 @@ if (enable_java_templates) {
}
}
+ if (!_is_bundle_module) {
+ # Output the R.txt file to a more easily discoverable location for
+ # archiving. This is necessary when stripping resource names so that we
+ # have an archive of resource names to ids for shipped apks (for
+ # debugging purposes). We copy the file rather than change the location
+ # of the original because other targets rely on the location of the R.txt
+ # file.
+ _copy_rtxt_target = "${_template_name}__copy_rtxt"
+ copy(_copy_rtxt_target) {
+ deps = [
+ ":$_compile_resources_target",
+ ]
+ sources = [
+ _compile_resources_rtxt_out,
+ ]
+ outputs = [
+ _final_rtxt_path,
+ ]
+ }
+ _final_deps += [ ":$_copy_rtxt_target" ]
+ }
+
if (_is_base_module && _is_bundle_module) {
# Bundle modules have to reference resources from the base module.
# However, to compile the bundle module's resources we have to give it an
@@ -2259,7 +2351,7 @@ if (enable_java_templates) {
}
}
- java_cpp_template("${_template_name}__native_libraries_java") {
+ java_cpp_template("${_template_name}__native_libraries_srcjar") {
package_path = "org/chromium/base/library_loader"
sources = [
"//base/android/java/templates/NativeLibraries.template",
@@ -2298,7 +2390,7 @@ if (enable_java_templates) {
defines += [ "ENABLE_CHROMIUM_LINKER_TESTS" ]
}
}
- _srcjar_deps += [ ":${_template_name}__native_libraries_java" ]
+ _srcjar_deps += [ ":${_template_name}__native_libraries_srcjar" ]
}
_extra_native_libs = []
@@ -2315,7 +2407,7 @@ if (enable_java_templates) {
}
if (_generate_buildconfig_java) {
- generate_build_config_srcjar("${_template_name}__build_config_java") {
+ generate_build_config_srcjar("${_template_name}__build_config_srcjar") {
forward_variables_from(invoker, [ "firebase_app_id" ])
use_final_fields = true
build_config = _build_config
@@ -2328,7 +2420,20 @@ if (enable_java_templates) {
":$_build_config_target",
]
}
- _srcjar_deps += [ ":${_template_name}__build_config_java" ]
+ _srcjar_deps += [ ":${_template_name}__build_config_srcjar" ]
+ }
+
+ if (_generate_final_jni) {
+ generate_jni_registration("${_template_name}__final_jni") {
+ target = ":$_template_name"
+ if (defined(invoker.jni_registration_header)) {
+ header_output = invoker.jni_registration_header
+ }
+ if (defined(invoker.jni_sources_blacklist)) {
+ sources_blacklist = invoker.jni_sources_blacklist
+ }
+ }
+ _srcjar_deps += [ ":${_template_name}__final_jni" ]
}
_java_target = "${_template_name}__java"
@@ -2365,6 +2470,7 @@ if (enable_java_templates) {
if (_is_bundle_module) {
proto_resources_path = _packaged_resources_path
+ module_rtxt_path = _compile_resources_rtxt_out
}
if (!_is_bundle_module) {
@@ -2544,6 +2650,7 @@ if (enable_java_templates) {
"public_deps",
"shared_resources",
"write_asset_list",
+ "uncompress_dex",
])
packaged_resources_path = _packaged_resources_path
apk_path = _final_apk_path
@@ -2562,6 +2669,9 @@ if (enable_java_templates) {
if (_incremental_allowed) {
android_manifest = _android_manifest
base_path = _base_path
+ if (_optimize_resources) {
+ unoptimized_resources_path = _unoptimized_resources_path
+ }
}
# Incremental apk does not use native libs nor final dex.
@@ -2819,9 +2929,12 @@ if (enable_java_templates) {
"final_apk_path",
"firebase_app_id",
"generate_buildconfig_java",
+ "generate_final_jni",
"input_jars_paths",
"java_files",
"javac_args",
+ "jni_registration_header",
+ "jni_sources_blacklist",
"keystore_name",
"keystore_password",
"keystore_path",
@@ -2835,6 +2948,7 @@ if (enable_java_templates) {
"never_incremental",
"no_build_hooks",
"no_xml_namespaces",
+ "optimize_resources",
"png_to_webp",
"post_process_package_resources_script",
"product_version_resources_dep",
@@ -2843,6 +2957,7 @@ if (enable_java_templates) {
"proguard_jar_path",
"resource_blacklist_regex",
"resource_blacklist_exceptions",
+ "resources_config_path",
"secondary_abi_loadable_modules",
"secondary_abi_shared_libraries",
"secondary_native_lib_placeholders",
@@ -2853,6 +2968,7 @@ if (enable_java_templates) {
"support_zh_hk",
"testonly",
"uncompress_shared_libraries",
+ "uncompress_dex",
"use_chromium_linker",
"version_code",
"version_name",
@@ -2920,10 +3036,14 @@ if (enable_java_templates) {
"enable_chromium_linker_tests",
"enable_multidex",
"firebase_app_id",
+ "generate_buildconfig_java",
+ "generate_final_jni",
"input_jars_paths",
"is_base_module",
"java_files",
"javac_args",
+ "jni_registration_header",
+ "jni_sources_blacklist",
"load_library_from_apk",
"loadable_modules",
"min_sdk_version",
@@ -2932,6 +3052,7 @@ if (enable_java_templates) {
"native_lib_version_rule",
"negative_main_dex_globs",
"no_xml_namespaces",
+ "optimize_resources",
"png_to_webp",
"product_version_resources_dep",
"proguard_configs",
@@ -2939,6 +3060,7 @@ if (enable_java_templates) {
"proguard_jar_path",
"resource_blacklist_regex",
"resource_blacklist_exceptions",
+ "resources_config_path",
"secondary_abi_loadable_modules",
"secondary_abi_shared_libraries",
"secondary_native_lib_placeholders",
@@ -2977,27 +3099,8 @@ if (enable_java_templates) {
#
# This target creates an Android instrumentation test apk.
#
- # Variables
- # android_manifest: Path to AndroidManifest.xml.
- # data_deps: List of dependencies needed at runtime. These will be built but
- # won't change the generated .apk in any way (in fact they may be built
- # after the .apk is).
- # deps: List of dependencies. All Android java resources and libraries in the
- # "transitive closure" of these dependencies will be included in the apk.
- # Note: this "transitive closure" actually only includes such targets if
- # they are depended on through android_library or android_resources targets
- # (and so not through builtin targets like 'action', 'group', etc).
- # java_files: List of .java files to include in the apk.
- # srcjar_deps: List of srcjar dependencies. The .java files in the srcjars
- # will be added to java_files and be included in this apk.
- # apk_name: Name for final apk.
- # final_apk_path: Path to final built apk. Default is
- # $root_out_dir/apks/$apk_name.apk. Setting this will override apk_name.
- # shared_libraries: List shared_library targets to bundle. If these
- # libraries depend on other shared_library targets, those dependencies will
- # also be included in the apk (e.g. for is_component_build).
- # apk_under_test: The apk being tested.
- # javac_args: Additional arguments to pass to javac.
+ # Supports all variables of android_apk(), plus:
+ # apk_under_test: The apk being tested (optional).
#
# Example
# instrumentation_test_apk("foo_test_apk") {
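A hedged sketch of the one additional variable (all names invented, not the
doc's elided example): apk_under_test points the test apk at the apk it
instruments.

  instrumentation_test_apk("foo_test_apk") {
    apk_name = "FooTest"
    android_manifest = "java/AndroidManifest.xml"
    apk_under_test = ":foo_apk"
    deps = [ ":foo_test_java" ]
  }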
@@ -3112,6 +3215,7 @@ if (enable_java_templates) {
data = [
"$_final_apk_path.mapping",
]
+ data_deps += [ "//build/android/stacktrace:java_deobfuscate" ]
}
dist_ijar_path = _dist_ijar_path
@@ -3700,10 +3804,27 @@ if (enable_java_templates) {
# }
#
template("android_app_bundle") {
+ _bundle_base_path = "$root_build_dir/apks"
+ if (defined(invoker.bundle_base_path)) {
+ _bundle_base_path = invoker.bundle_base_path
+ }
+
+ _bundle_name = target_name
+ if (defined(invoker.bundle_name)) {
+ _bundle_name = invoker.bundle_name
+ }
+ _bundle_path = "$_bundle_base_path/${_bundle_name}.aab"
+ _rebased_bundle_path = rebase_path(_bundle_path, root_build_dir)
+
+ _base_target_name = get_label_info(invoker.base_module_target, "name")
+ _base_target_gen_dir =
+ get_label_info(invoker.base_module_target, "target_gen_dir")
_all_modules = [
{
name = "base"
module_target = invoker.base_module_target
+ build_config = "$_base_target_gen_dir/${_base_target_name}.build_config"
+ build_config_target = "${module_target}__build_config"
},
]
@@ -3718,8 +3839,15 @@ if (enable_java_templates) {
assert(
defined(_module.module_target),
"Missing 'module_target' field for extra module ${_module.name}.")
+ _module_target = _module.module_target
+ _module_target_name = get_label_info(_module_target, "name")
+ _module_target_gen_dir =
+ get_label_info(_module_target, "target_gen_dir")
+ _module.build_config =
+ "$_module_target_gen_dir/${_module_target_name}.build_config"
+ _module.build_config_target = "${_module_target}__build_config"
+ _all_modules += [ _module ]
}
- _all_modules += invoker.extra_modules
}
_proguard_enabled =
@@ -3732,8 +3860,10 @@ if (enable_java_templates) {
# Make build config, which is required for synchronized proguarding.
_module_targets = []
+ _module_build_config_targets = []
foreach(_module, _all_modules) {
_module_targets += [ _module.module_target ]
+ _module_build_config_targets += [ _module.build_config_target ]
}
_build_config = "$target_gen_dir/${target_name}.build_config"
_rebased_build_config = rebase_path(_build_config, root_build_dir)
@@ -3750,20 +3880,29 @@ if (enable_java_templates) {
}
if (_proguard_enabled) {
- # Proguard all modules together to keep binary size small while still
- # maintaining compatibility between modules.
- _proguard_output_jar_path =
- "${target_gen_dir}/${target_name}/${target_name}.proguard.jar"
- _sync_proguard_target = "${target_name}__sync_proguard"
- proguard(_sync_proguard_target) {
+ _proguard_mapping_path = "${_bundle_path}.mapping"
+ _unsplit_dex_zip =
+ "${target_gen_dir}/${target_name}/${target_name}__unsplit_dex.zip"
+ _unsplit_dex_target = "${target_name}__unsplit_dex"
+ dex(_unsplit_dex_target) {
+ enable_multidex = _enable_multidex
+ proguard_enabled = true
+ proguard_mapping_path = _proguard_mapping_path
forward_variables_from(invoker, [ "proguard_jar_path" ])
build_config = _build_config
- deps = _module_targets + [ ":$_build_config_target" ]
- output_path = _proguard_output_jar_path
- args = [
- "--proguard-configs=@FileArg($_rebased_build_config:deps_info:proguard_all_configs)",
- "--input-paths=@FileArg($_rebased_build_config:deps_info:java_runtime_classpath)",
+ deps = _module_targets + _module_build_config_targets +
+ [ ":$_build_config_target" ]
+ output = _unsplit_dex_zip
+ }
+
+ _dexsplitter_target = "${_unsplit_dex_target}__dexsplitter"
+ dexsplitter(_dexsplitter_target) {
+ input_dex_zip = _unsplit_dex_zip
+ proguard_mapping = _proguard_mapping_path
+ all_modules = _all_modules
+ deps = [
+ ":${_unsplit_dex_target}",
]
}
}
@@ -3773,65 +3912,13 @@ if (enable_java_templates) {
_all_module_build_configs = []
foreach(_module, _all_modules) {
_module_target = _module.module_target
- _module_target_name = get_label_info(_module_target, "name")
- _module_target_gen_dir = get_label_info(_module_target, "target_gen_dir")
- _module_build_config_target = "${_module_target}__build_config"
- _module_build_config =
- "$_module_target_gen_dir/${_module_target_name}.build_config"
+ _module_build_config = _module.build_config
+ _module_build_config_target = _module.build_config_target
if (_proguard_enabled) {
- # Extract optimized classes for each module and dex them.
-
- _module_final_dex_target = "${target_name}__${_module.name}__dex"
- _module_final_dex_target_dep = ":$_module_final_dex_target"
- if (_enable_multidex) {
- _module_final_dex_path =
- "$target_gen_dir/$target_name/${_module.name}/classes.dex.zip"
- } else {
- _module_final_dex_path =
- "$target_gen_dir/$target_name/${_module.name}/classes.dex"
- }
- _module_final_dex_path_file_arg =
- rebase_path(_module_final_dex_path, root_build_dir)
-
- _module_jar_path =
- "${target_gen_dir}/${target_name}/${_module.name}.optimized.jar"
- _generate_proguarded_module_jar_target =
- "${target_name}__${_module.name}__module_jar"
- generate_proguarded_module_jar(_generate_proguarded_module_jar_target) {
- proguarded_jar = _proguard_output_jar_path
- build_config = _module_build_config
- proguard_mapping = "${_proguard_output_jar_path}.mapping"
- output_jar = _module_jar_path
- is_base_module = _module.name == "base"
- deps = [
- ":${_sync_proguard_target}",
- "$_module_build_config_target",
- "${_module.module_target}",
- ]
- }
-
- dex(_module_final_dex_target) {
- deps = [
- ":${_generate_proguarded_module_jar_target}",
- ]
- input_jars = [ _module_jar_path ]
- output = _module_final_dex_path
-
- if (_enable_multidex && _module.name == "base") {
- enable_multidex = _enable_multidex
- extra_main_dex_proguard_config =
- "$_module_target_gen_dir/$_module_target_name/" +
- "$_module_target_name.resources.main-dex-proguard.txt"
- deps += [ "${_module_target}__compile_resources" ]
- }
- }
+ _dex_target_for_module = ":$_dexsplitter_target"
} else {
- _module_final_dex_target_dep = "${_module_target}__final_dex"
- _rebased_module_build_config =
- rebase_path(_module_build_config, root_build_dir)
- _module_final_dex_path_file_arg =
- "@FileArg($_rebased_module_build_config:final_dex:path)"
+ _dex_target_for_module = "${_module_target}__final_dex"
}
# Generate one module .zip file per bundle module.
@@ -3845,17 +3932,18 @@ if (enable_java_templates) {
create_android_app_bundle_module(_create_module_target) {
build_config = _module_build_config
module_zip_path = _module_zip_path
- dex_path_file_arg = _module_final_dex_path_file_arg
+
deps = [
- "$_module_build_config_target",
- "$_module_final_dex_target_dep",
- "$_module_target",
+ _dex_target_for_module,
+ _module_build_config_target,
+ _module_target,
]
}
_all_create_module_targets += [
":$_create_module_target",
_module_build_config_target,
+ "${_module_target}__compile_resources",
]
_all_module_zip_paths += [ _module_zip_path ]
_all_module_build_configs += [ _module_build_config ]
@@ -3864,19 +3952,6 @@ if (enable_java_templates) {
_all_rebased_module_zip_paths =
rebase_path(_all_module_zip_paths, root_build_dir)
- _bundle_name = target_name
- if (defined(invoker.bundle_name)) {
- _bundle_name = invoker.bundle_name
- }
-
- _bundle_base_path = "$root_build_dir/apks"
- if (defined(invoker.bundle_base_path)) {
- _bundle_base_path = invoker.bundle_base_path
- }
-
- _bundle_path = "$_bundle_base_path/$_bundle_name.aab"
- _rebased_bundle_path = rebase_path(_bundle_path, root_build_dir)
-
_sign_bundle = defined(invoker.sign_bundle) && invoker.sign_bundle
_split_dimensions = []
@@ -3914,6 +3989,7 @@ if (enable_java_templates) {
deps = _all_create_module_targets + [ ":$_build_config_target" ]
args = [
"--out-bundle=$_rebased_bundle_path",
+ "--rtxt-out-path=$_rebased_bundle_path.R.txt",
"--module-zips=$_all_rebased_module_zip_paths",
]
if (_sign_bundle) {
@@ -3936,6 +4012,8 @@ if (enable_java_templates) {
"$_rebased_build_config:uncompressed_assets)",
"--uncompress-shared-libraries=@FileArg(" +
"$_rebased_build_config:native:uncompress_shared_libraries)",
+ "--rtxt-in-paths=@FileArg(" +
+ "$_rebased_build_config:deps_info:module_rtxt_path)",
]
}
}
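A hedged sketch of a caller supplying the module descriptors consumed above
(labels invented): each extra module is a scope with "name" and
"module_target" fields, from which the template now derives the build_config
paths itself.

  android_app_bundle("foo_bundle") {
    base_module_target = "//foo/android:foo_base_module"
    extra_modules = [
      {
        name = "extra"
        module_target = "//foo/android:foo_extra_module"
      },
    ]
    proguard_enabled = !is_java_debug
  }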
diff --git a/chromium/build/config/chromeos/rules.gni b/chromium/build/config/chromeos/rules.gni
index ef0e5060237..5dc04aec711 100644
--- a/chromium/build/config/chromeos/rules.gni
+++ b/chromium/build/config/chromeos/rules.gni
@@ -33,7 +33,8 @@ template("generate_vm_runner_script") {
_cache_path_prefix =
"//build/cros_cache/chrome-sdk/tarballs/${cros_board}+${cros_sdk_version}"
_vm_image_path = "${_cache_path_prefix}+chromiumos_qemu_image.tar.xz/"
- _qemu_dir = "${_cache_path_prefix}+qemu/"
+ _qemu_dir = "${_cache_path_prefix}+app-emulation/"
+ _firmware_dir = "${_cache_path_prefix}+sys-firmware/"
forward_variables_from(invoker,
[
@@ -91,6 +92,7 @@ template("generate_vm_runner_script") {
# as data here so that changes to it will trigger analyze.
"//chromeos/CHROMEOS_LKGM",
"//third_party/chromite/",
+ _firmware_dir,
_vm_image_path,
_qemu_dir,
]
diff --git a/chromium/build/config/compiler/BUILD.gn b/chromium/build/config/compiler/BUILD.gn
index 148e9139714..87968b38e00 100644
--- a/chromium/build/config/compiler/BUILD.gn
+++ b/chromium/build/config/compiler/BUILD.gn
@@ -375,10 +375,9 @@ config("compiler") {
# Linux/Android/Fuchsia common flags setup.
# ---------------------------------
if (is_linux || is_android || is_fuchsia) {
- if (use_pic) {
- cflags += [ "-fPIC" ]
- ldflags += [ "-fPIC" ]
- }
+ asmflags += [ "-fPIC" ]
+ cflags += [ "-fPIC" ]
+ ldflags += [ "-fPIC" ]
if (!is_clang) {
# Use pipes for communicating between sub-processes. Faster.
@@ -394,7 +393,7 @@ config("compiler") {
# Compiler instrumentation can introduce dependencies in DSOs to symbols in
# the executable they are loaded into, so they are unresolved at link-time.
- if (!using_sanitizer && !is_safestack) {
+ if (!using_sanitizer) {
ldflags += [
"-Wl,-z,defs",
"-Wl,--as-needed",
@@ -521,9 +520,7 @@ config("compiler") {
}
# Makes builds independent of absolute file path.
- # Currently disabled for nacl since its toolchain lacks this flag (too old).
- # TODO(zforman): Once nacl's toolchain is updated, remove check.
- if (symbol_level != 0 && is_clang && !is_nacl && !is_mac && !is_ios &&
+ if (symbol_level != 0 && is_clang && !is_mac && !is_ios &&
strip_absolute_paths_from_debug_symbols) {
# If debug option is given, clang includes $cwd in debug info by default.
# For such build, this flag generates reproducible obj files even we use
@@ -538,8 +535,20 @@ config("compiler") {
]
if (is_win && use_lld) {
- # Absolutize source file path for PDB.
- ldflags += [ "/PDBSourcePath:" + rebase_path(root_build_dir) ]
+ if (symbol_level == 2 || (is_clang && using_sanitizer)) {
+ # Absolutize source file path for PDB. Pass the real build directory
+ # if the pdb contains source-level debug information.
+ ldflags += [ "/PDBSourcePath:" + rebase_path(root_build_dir) ]
+ } else {
+ # On Windows, (non-sanitizer) symbol_level 1 builds don't contain
+ # debug information in obj files; the linker just creates enough
+ # debug info at link time to produce symbolized stacks (without line
+ # numbers). In that case, there's no downside in using a fake fixed
+ # base directory for paths in the pdb. This makes the pdb output
+ # fully deterministic and independent of the build directory.
+ assert(symbol_level == 1 && !(is_clang && using_sanitizer))
+ ldflags += [ "/PDBSourcePath:o:\fake\prefix" ]
+ }
}
}
@@ -652,16 +661,22 @@ config("compiler") {
# linker jobs. This is still suboptimal to a potential dynamic
# resource allocation scheme, but should be good enough.
if (use_lld) {
- # Limit the size of the ThinLTO cache to the lesser of 10% of available disk
- # space, 10GB and 100000 files.
- cache_policy =
- "cache_size=10%:cache_size_bytes=10g:cache_size_files=100000"
ldflags += [
"-Wl,--thinlto-jobs=8",
- "-Wl,--thinlto-cache-dir=" +
- rebase_path("$root_out_dir/thinlto-cache", root_build_dir),
- "-Wl,--thinlto-cache-policy,$cache_policy",
]
+
+ # Disable caching on Chrome OS temporarily (crbug.com/889967)
+ if (!is_chromeos) {
+ # Limit the size of the ThinLTO cache to the lesser of 10% of
+ # available disk space, 10GB and 100000 files.
+ cache_policy =
+ "cache_size=10%:cache_size_bytes=10g:cache_size_files=100000"
+ ldflags += [
+ "-Wl,--thinlto-cache-dir=" +
+ rebase_path("$root_out_dir/thinlto-cache", root_build_dir),
+ "-Wl,--thinlto-cache-policy,$cache_policy",
+ ]
+ }
} else {
ldflags += [ "-Wl,-plugin-opt,jobs=8" ]
}
@@ -1419,14 +1434,6 @@ config("default_warnings") {
cflags += [ "-Wno-maybe-uninitialized" ]
cflags += [ "-Wno-deprecated-declarations" ]
- # GCC assumes 'this' is never nullptr and optimizes away code
- # like "if (this == nullptr) ...": [1]. However, some Chromium
- # code relies on these types of null pointer checks [2], so
- # disable this optimization.
- # [1] https://gcc.gnu.org/gcc-6/porting_to.html#this-cannot-be-null
- # [2] https://crbug.com/784492#c13
- cflags += [ "-fno-delete-null-pointer-checks" ]
-
# -Wcomment gives too many false positives in the case a
# backslash ended comment line is followed by a new line of
# comments
@@ -1455,10 +1462,6 @@ config("default_warnings") {
# this is worth fixing.
"-Wno-c++11-narrowing",
- # Warns on switches on enums that cover all enum values but
- # also contain a default: branch. Chrome is full of that.
- "-Wno-covered-switch-default",
-
# TODO(thakis): This used to be implied by -Wno-unused-function,
# which we no longer use. Check if it makes sense to remove
# this as well. http://crbug.com/316352
@@ -1471,41 +1474,31 @@ config("default_warnings") {
cflags += [
# TODO(thakis): https://crbug.com/604888
"-Wno-undefined-var-template",
- ]
- if (current_toolchain == host_toolchain || !use_xcode_clang ||
- xcode_version_int >= 930) {
- cflags += [
- # TODO(thakis): https://crbug.com/617318
- "-Wno-nonportable-include-path",
- # TODO(thakis): https://crbug.com/683349
- "-Wno-user-defined-warnings",
+ # TODO(hans): https://crbug.com/766891
+ "-Wno-null-pointer-arithmetic",
+ ]
- # TODO(hans): https://crbug.com/681136
- "-Wno-unused-lambda-capture",
- ]
- }
- if (current_toolchain == host_toolchain || !use_xcode_clang ||
- xcode_version_int >= 1000) {
- cflags += [
- # TODO(hans): https://crbug.com/766891
- "-Wno-null-pointer-arithmetic",
- ]
+ if (is_win) {
+ # TODO(thakis): https://crbug.com/617318
+ # Currently goma cannot handle case sensitivity on Windows well.
+ cflags += [ "-Wno-nonportable-include-path" ]
}
+
if (current_toolchain == host_toolchain || !use_xcode_clang) {
# Flags NaCl (Clang 3.7) and Xcode 9.2 (Clang clang-900.0.39.2) do not
# recognize.
cflags += [
- # TODO(thakis): https://crbug.com/753973
- "-Wno-enum-compare-switch",
-
# Ignore warnings about MSVC optimization pragmas.
# TODO(thakis): Only for no_chromium_code? http://crbug.com/505314
"-Wno-ignored-pragma-optimize",
-
- # TODO(hans): https://crbug.com/890307
- "-Wno-defaulted-function-deleted",
]
+ if (is_fuchsia) {
+ cflags += [
+ # TODO(hans): https://crbug.com/890307
+ "-Wno-defaulted-function-deleted",
+ ]
+ }
}
}
}
@@ -1772,7 +1765,7 @@ config("no_size_t_to_int_warning") {
# Code that currently generates warnings for this can include this
# config to disable them.
config("no_shorten_64_warnings") {
- if (current_cpu == "x64") {
+ if (current_cpu == "x64" || current_cpu == "arm64") {
if (is_clang) {
cflags = [ "-Wno-shorten-64-to-32" ]
} else {
@@ -2003,6 +1996,10 @@ config("no_optimize") {
} else {
cflags = [ "-Os" ] + common_optimize_on_cflags
}
+ } else if (is_fuchsia) {
+ # On Fuchsia, we optimize for size here to reduce the size of debug build
+ # packages so they can be run in a KVM. See crbug.com/910243 for details.
+ cflags = [ "-Og" ]
} else {
cflags = [ "-O0" ]
ldflags = []
@@ -2264,7 +2261,16 @@ config("symbols") {
# [1] crrev.com/a81d5ade0b043208e06ad71a38bcf9c348a1a52f
cflags += [ "-gdwarf-3" ]
}
- cflags += [ "-g2" ]
+
+ # The gcc-based nacl compilers don't support -fdebug-compilation-dir (see
+ # elsewhere in this file), so they can't have build-dir-independent output.
+ # Disable symbols for nacl object files to get deterministic,
+ # build-directory-independent output. pnacl and nacl-clang do support that
+ # flag, so we can use -g2 for pnacl and nacl-clang compiles.
+ # gcc nacl is is_nacl && !is_clang; pnacl and nacl-clang are is_nacl && is_clang.
+ if (!is_nacl || is_clang) {
+ cflags += [ "-g2" ]
+ }
}
if (use_debug_fission && !is_nacl && !is_android) {
# NOTE: Some Chrome OS builds globally set |use_debug_fission| to true,
@@ -2324,7 +2330,16 @@ config("minimal_symbols") {
# TODO(thakis): Remove this again once dump_syms is fixed.
cflags += [ "-gdwarf-3" ]
}
- cflags += [ "-g1" ]
+
+ # The gcc-based nacl compilers don't support -fdebug-compilation-dir (see
+ # elsewhere in this file), so they can't have build-dir-independent output.
+ # Disable symbols for nacl object files to get deterministic,
+ # build-directory-independent output. pnacl and nacl-clang do support that
+ # flag, so we can use -g1 for pnacl and nacl-clang compiles.
+ # gcc nacl is is_nacl && !is_clang; pnacl and nacl-clang are is_nacl && is_clang.
+ if (!is_nacl || is_clang) {
+ cflags += [ "-g1" ]
+ }
ldflags = []
if (is_android && is_clang) {
# Android defaults to symbol_level=1 builds in production builds
diff --git a/chromium/build/config/compiler/compiler.gni b/chromium/build/config/compiler/compiler.gni
index 1059a27dcdb..bfbafe5b59e 100644
--- a/chromium/build/config/compiler/compiler.gni
+++ b/chromium/build/config/compiler/compiler.gni
@@ -63,9 +63,6 @@ declare_args() {
# Windows build.
use_incremental_wpo = false
- # Whether or not we should use position independent code.
- use_pic = true
-
# Whether we're using a sample profile collected on an architecture different
# than the one we're compiling for.
#
@@ -193,12 +190,8 @@ declare_args() {
# If it wasn't manually set, set to an appropriate default.
assert(symbol_level >= -1 && symbol_level <= 2, "Invalid symbol_level")
if (symbol_level == -1) {
- if (is_android && use_order_profiling) {
- # With instrumentation enabled, debug info puts libchrome.so over 4gb, which
- # causes the linker to produce an invalid ELF. http://crbug.com/574476
- symbol_level = 0
- } else if (is_android && !is_component_build &&
- !(android_64bit_target_cpu && !build_apk_secondary_abi)) {
+ if (is_android && !is_component_build &&
+ !(android_64bit_target_cpu && !build_apk_secondary_abi)) {
# Reduce symbol level when it will cause invalid elf files to be created
# (due to file size). https://crbug.com/648948.
symbol_level = 1
diff --git a/chromium/build/config/coverage/BUILD.gn b/chromium/build/config/coverage/BUILD.gn
index 6fd4fd5fe25..ae5435bbc28 100644
--- a/chromium/build/config/coverage/BUILD.gn
+++ b/chromium/build/config/coverage/BUILD.gn
@@ -6,22 +6,9 @@ import("//build/config/coverage/coverage.gni")
config("default_coverage") {
if (use_clang_coverage) {
- cflags = [
- "-fprofile-instr-generate",
- "-fcoverage-mapping",
-
- # Following experimental flags removes unused header functions from the
- # coverage mapping data embedded in the test binaries, and the reduction
- # of binary size enables building Chrome's large unit test targets on
- # MacOS. Please refer to crbug.com/796290 for more details.
- "-mllvm",
- "-limited-coverage-experimental=true",
- ]
-
ldflags = []
if (!is_win) {
ldflags += [ "-fprofile-instr-generate" ]
- cflags += [ "-fno-use-cxa-atexit" ]
} else {
# Windows directly calls link.exe instead of the compiler driver when
# linking. Hence, pass the runtime libraries instead of
@@ -37,5 +24,26 @@ config("default_coverage") {
"use_clang_coverage=true not supported yet for this target_cpu")
}
}
+
+ # Coverage flags are only on by default when instrumenting all source files.
+ # Otherwise, coverage flags are dynamically passed to the compile command
+ # via the //build/toolchain/clang_code_coverage_wrapper.py script.
+ if (coverage_instrumentation_input_file == "") {
+ cflags = [
+ "-fprofile-instr-generate",
+ "-fcoverage-mapping",
+
+ # Following experimental flags removes unused header functions from the
+ # coverage mapping data embedded in the test binaries, and the reduction
+ # of binary size enables building Chrome's large unit test targets on
+ # MacOS. Please refer to crbug.com/796290 for more details.
+ "-mllvm",
+ "-limited-coverage-experimental=true",
+ ]
+
+ if (!is_win) {
+ cflags += [ "-fno-use-cxa-atexit" ]
+ }
+ }
}
}
diff --git a/chromium/build/config/coverage/coverage.gni b/chromium/build/config/coverage/coverage.gni
index 61716dd425a..12ab03fcd0e 100644
--- a/chromium/build/config/coverage/coverage.gni
+++ b/chromium/build/config/coverage/coverage.gni
@@ -4,10 +4,26 @@
import("//build/toolchain/toolchain.gni")
+# There are two ways to enable code coverage instrumentation:
+# 1. When |use_clang_coverage| is true and |coverage_instrumentation_input_file|
+# is empty, all source files are instrumented.
+# 2. When |use_clang_coverage| is true and |coverage_instrumentation_input_file|
+# is NOT empty and points to a text file on the file system, ONLY source
+# files specified in the input file are instrumented.
declare_args() {
# Enable Clang's Source-based Code Coverage.
use_clang_coverage = false
+
+ # The path to the coverage instrumentation input file should be a source root
+ # absolute path (e.g. //out/Release/coverage_instrumentation_input.txt), and
+ # the file consists of multiple lines where each line represents a path to a
+ # source file, and the paths must be relative to the root build directory.
+ # e.g. ../../base/task/post_task.cc for build directory 'out/Release'.
+ coverage_instrumentation_input_file = ""
}
assert(!use_clang_coverage || is_clang,
"Clang Source-based Code Coverage requires clang.")
+
+assert(coverage_instrumentation_input_file == "" || use_clang_coverage,
+ "Instrumenting a subset of source files requires enabling clang coverage.")
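A hedged sketch of mode 2 (selective instrumentation); the build directory
and source path are the ones used as examples in the comments above:

  # args.gn
  use_clang_coverage = true
  coverage_instrumentation_input_file =
      "//out/Release/coverage_instrumentation_input.txt"

  # out/Release/coverage_instrumentation_input.txt (one path per line,
  # relative to the root build directory):
  #   ../../base/task/post_task.cc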
diff --git a/chromium/build/config/fuchsia/BUILD.gn b/chromium/build/config/fuchsia/BUILD.gn
index 501cae4d6da..f002df84d7a 100644
--- a/chromium/build/config/fuchsia/BUILD.gn
+++ b/chromium/build/config/fuchsia/BUILD.gn
@@ -121,3 +121,8 @@ action("blobstore_extended_qcow2") {
rebase_path(blobstore_qcow_path),
]
}
+
+# Settings for executables.
+config("executable_config") {
+ ldflags = [ "-pie" ]
+}
diff --git a/chromium/build/config/fuchsia/fidl_library.gni b/chromium/build/config/fuchsia/fidl_library.gni
index 5ec90a49649..82433176d09 100644
--- a/chromium/build/config/fuchsia/fidl_library.gni
+++ b/chromium/build/config/fuchsia/fidl_library.gni
@@ -21,7 +21,11 @@ assert(is_fuchsia)
# files.
template("fidl_library") {
- forward_variables_from(invoker, [ "namespace", "languages" ])
+ forward_variables_from(invoker,
+ [
+ "languages",
+ "namespace",
+ ])
_library_basename = target_name
if (defined(invoker.library_name)) {
@@ -41,6 +45,17 @@ template("fidl_library") {
languages = [ "cpp" ]
}
+ _define_cpp_action = false
+ _define_js_action = false
+
+ foreach(language, languages) {
+ if (language == "cpp") {
+ _define_cpp_action = true
+ } else if (language == "js") {
+ _define_js_action = true
+ }
+ }
+
_response_file = "$target_gen_dir/$target_name.rsp"
_json_representation = "$target_gen_dir/${_library_name}.fidl.json"
_output_gen_dir = "$target_gen_dir/fidl"
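A hedged caller sketch (library name and source file invented) showing the
languages list consumed above; only the listed generators' actions are
defined:

  fidl_library("echo_fidl") {
    library_name = "echo"
    namespace = "example"
    languages = [
      "cpp",
      "js",
    ]
    sources = [ "echo.fidl" ]
  }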
@@ -135,71 +150,75 @@ template("fidl_library") {
]
}
- action("${target_name}_cpp_gen") {
- visibility = [ ":${invoker.target_name}" ]
- forward_variables_from(invoker, [ "testonly" ])
-
- deps = [
- ":${invoker.target_name}_compile",
- ]
-
- inputs = [
- # Depend on the SDK hash, to ensure rebuild if the SDK tools change.
- rebase_path("$fuchsia_sdk/.hash"),
- _json_representation,
- ]
-
- outputs = [
- "${_output_base}.h",
- "${_output_base}.cc",
- ]
-
- script = "//build/gn_run_binary.py"
- args = [
- rebase_path("//third_party/fuchsia-sdk/sdk/tools/fidlgen",
- root_build_dir),
- "-generators",
- "cpp",
- "-json",
- rebase_path(_json_representation),
- "-include-base",
- rebase_path(_output_gen_dir),
- "-output-base",
- rebase_path("${_output_base}"),
- ]
+ if (_define_cpp_action) {
+ action("${target_name}_cpp_gen") {
+ visibility = [ ":${invoker.target_name}" ]
+ forward_variables_from(invoker, [ "testonly" ])
+
+ deps = [
+ ":${invoker.target_name}_compile",
+ ]
+
+ inputs = [
+ # Depend on the SDK hash, to ensure rebuild if the SDK tools change.
+ rebase_path("$fuchsia_sdk/.hash"),
+ _json_representation,
+ ]
+
+ outputs = [
+ "${_output_base}.h",
+ "${_output_base}.cc",
+ ]
+
+ script = "//build/gn_run_binary.py"
+ args = [
+ rebase_path("//third_party/fuchsia-sdk/sdk/tools/fidlgen",
+ root_build_dir),
+ "-generators",
+ "cpp",
+ "-json",
+ rebase_path(_json_representation),
+ "-include-base",
+ rebase_path(_output_gen_dir),
+ "-output-base",
+ rebase_path("${_output_base}"),
+ ]
+ }
}
- _output_js_path = "$_output_gen_dir/${_library_path}/js/fidl.js"
- action("${target_name}_js_gen") {
- visibility = [ ":${invoker.target_name}" ]
- forward_variables_from(invoker, [ "testonly" ])
-
- deps = [
- ":${invoker.target_name}_compile",
- ]
-
- inputs = [
- # Depend on the SDK hash, to ensure rebuild if the SDK tools change.
- rebase_path("$fuchsia_sdk/.hash"),
- _json_representation,
- "//tools/fuchsia/fidlgen_js/fidl.py", # The schema helper file.
- ]
-
- outputs = [
- _output_js_path,
- ]
-
- script = "//tools/fuchsia/fidlgen_js/gen.py"
-
- args = [
- rebase_path(_json_representation),
- "--output",
- rebase_path("${_output_js_path}"),
- ]
-
- data = []
- foreach(o, outputs) {
- data += [ rebase_path(o) ]
+ if (_define_js_action) {
+ _output_js_path = "$_output_gen_dir/${_library_path}/js/fidl.js"
+ action("${target_name}_js_gen") {
+ visibility = [ ":${invoker.target_name}" ]
+ forward_variables_from(invoker, [ "testonly" ])
+
+ deps = [
+ ":${invoker.target_name}_compile",
+ ]
+
+ inputs = [
+ # Depend on the SDK hash, to ensure rebuild if the SDK tools change.
+ rebase_path("$fuchsia_sdk/.hash"),
+ _json_representation,
+ "//build/fuchsia/fidlgen_js/fidl.py", # The schema helper file.
+ ]
+
+ outputs = [
+ _output_js_path,
+ ]
+
+ script = "//build/fuchsia/fidlgen_js/gen.py"
+
+ args = [
+ rebase_path(_json_representation),
+ "--output",
+ rebase_path("${_output_js_path}"),
+ ]
+
+ data = []
+ foreach(o, outputs) {
+ data += [ rebase_path(o) ]
+ }
}
}
@@ -235,8 +254,8 @@ template("fidl_library") {
if (!defined(public_deps)) {
public_deps = []
}
- public_deps += [ "//third_party/fuchsia-sdk:fidl" ]
- public_deps += [ "//third_party/fuchsia-sdk:fidl_cpp" ]
+ public_deps += [ "//third_party/fuchsia-sdk/sdk:fidl" ]
+ public_deps += [ "//third_party/fuchsia-sdk/sdk:fidl_cpp" ]
public_configs = [ ":${invoker.target_name}_config" ]
}
diff --git a/chromium/build/config/fuchsia/testing_sandbox_policy b/chromium/build/config/fuchsia/testing_sandbox_policy
index b0ed870eddf..d295d05bbff 100644
--- a/chromium/build/config/fuchsia/testing_sandbox_policy
+++ b/chromium/build/config/fuchsia/testing_sandbox_policy
@@ -5,6 +5,7 @@
"services": [
"fuchsia.fonts.Provider",
"fuchsia.media.Audio",
+ "fuchsia.mediacodec.CodecFactory",
"fuchsia.net.LegacySocketProvider",
"fuchsia.netstack.Netstack",
"fuchsia.process.Launcher",
diff --git a/chromium/build/config/gcc/BUILD.gn b/chromium/build/config/gcc/BUILD.gn
index 365cbf90f09..257cdc7da6f 100644
--- a/chromium/build/config/gcc/BUILD.gn
+++ b/chromium/build/config/gcc/BUILD.gn
@@ -23,7 +23,7 @@ declare_args() {
# This config causes functions not to be automatically exported from shared
# libraries. By default, all symbols are exported but this means there are
# lots of exports that slow everything down. In general we explicitly mark
-# which functiosn we want to export from components.
+# which functions we want to export from components.
#
# Some third_party code assumes all functions are exported so this is separated
# into its own config so such libraries can remove this config to make symbols
@@ -53,7 +53,7 @@ config("symbol_visibility_default") {
#
# This is required for component builds since the build generates many shared
# libraries in the build directory that we expect to be automatically loaded.
-# It will be automatically applied in this case by :executable_ldconfig.
+# It will be automatically applied in this case by :executable_config.
#
# In non-component builds, certain test binaries may expect to load dynamic
# libraries from the current directory. As long as these aren't distributed,
@@ -85,8 +85,8 @@ config("rpath_for_built_shared_libraries") {
}
# Settings for executables.
-config("executable_ldconfig") {
- ldflags = []
+config("executable_config") {
+ ldflags = [ "-pie" ]
if (is_android) {
ldflags += [
"-Bdynamic",
@@ -98,9 +98,6 @@ config("executable_ldconfig") {
if (is_component_build) {
configs = [ ":rpath_for_built_shared_libraries" ]
}
- if (current_cpu == "mipsel" || current_cpu == "mips64el") {
- ldflags += [ "-pie" ]
- }
}
if (!is_android && current_os != "aix") {
diff --git a/chromium/build/config/ios/ios_sdk_overrides.gni b/chromium/build/config/ios/ios_sdk_overrides.gni
index 00105afd588..5699ebe06c2 100644
--- a/chromium/build/config/ios/ios_sdk_overrides.gni
+++ b/chromium/build/config/ios/ios_sdk_overrides.gni
@@ -7,11 +7,11 @@
declare_args() {
# Version of iOS that we're targeting.
- ios_deployment_target = "10.0"
+ ios_deployment_target = "11.0"
}
# Always assert that ios_deployment_target is used on non-iOS platforms to
# prevent unused args warnings.
if (!is_ios) {
- assert(ios_deployment_target == "10.0" || true)
+ assert(ios_deployment_target == "11.0" || true)
}
diff --git a/chromium/build/config/jumbo.gni b/chromium/build/config/jumbo.gni
index 2d4c1d5ac96..7834edf9439 100644
--- a/chromium/build/config/jumbo.gni
+++ b/chromium/build/config/jumbo.gni
@@ -10,8 +10,19 @@ declare_args() {
# compilation.
use_jumbo_build = false
- # A list of targets to exclude from jumbo builds, for optimal round trip time
- # when frequently changing a set of cpp files.
+ # A list of build targets to exclude from jumbo builds, for optimal
+ # round trip time when frequently changing a set of cpp files. The
+ # targets can be just the short name (in which case it matches any
+ # target with that name), a directory prefixed with the root
+ # specifier //, or a full build target label.
+ #
+ # Example:
+ # These would all exclude the "browser" target in a file
+ # content/browser/BUILD.gn, and potentially more.
+ #
+ # jumbo_build_excluded = [ "browser" ]
+ # jumbo_build_excluded = [ "//content/browser" ]
+ # jumbo_build_excluded = [ "//content/browser:browser" ]
jumbo_build_excluded = []
# How many files to group on average. Smaller numbers give more
@@ -67,15 +78,19 @@ template("internal_jumbo_target") {
if (defined(invoker.never_build_jumbo) && invoker.never_build_jumbo) {
use_jumbo_build_for_target = false
}
+
foreach(excluded_target, jumbo_build_excluded) {
- if (target_name == excluded_target) {
+ if (excluded_target == target_name ||
+ excluded_target == get_label_info(":" + target_name, "dir") ||
+ excluded_target ==
+ get_label_info(":" + target_name, "label_no_toolchain")) {
use_jumbo_build_for_target = false
}
}
excluded_sources = []
if (defined(invoker.jumbo_excluded_sources)) {
- excluded_sources += invoker.jumbo_excluded_sources
+ excluded_sources = invoker.jumbo_excluded_sources
}
if (defined(invoker.sources)) {
@@ -86,7 +101,7 @@ template("internal_jumbo_target") {
gen_target_dir = invoker.target_gen_dir
- assert(gen_target_dir != "") # Prevent "unused variable".
+ not_needed([ "gen_target_dir" ]) # Prevent "unused variable".
if (use_jumbo_build_for_target) {
jumbo_files = []
@@ -105,16 +120,15 @@ template("internal_jumbo_target") {
}
has_c_file = false
has_objective_c_file = false
- has_S_file = false
+ sources_in_jumbo_files = []
assert(merge_limit > 0)
foreach(source_file, invoker_sources) {
source_ext = get_path_info(source_file, "extension")
+ is_source_file = true
if (source_ext == "c") {
has_c_file = true
} else if (source_ext == "mm") {
has_objective_c_file = true
- } else if (source_ext == "S") {
- has_S_file = true
} else if (source_ext == "cc" || source_ext == "cpp") {
if (current_file_index == next_chunk_start) {
jumbo_files += [ "$gen_target_dir/" + target_name + "_jumbo_" +
@@ -123,6 +137,11 @@ template("internal_jumbo_target") {
next_chunk_start += merge_limit
}
current_file_index += 1
+ } else {
+ is_source_file = false
+ }
+ if (is_source_file) {
+ sources_in_jumbo_files += [ source_file ]
}
}
@@ -130,9 +149,12 @@ template("internal_jumbo_target") {
# Empty sources list or a sources list with only header files or
# at most one non-header file.
use_jumbo_build_for_target = false
- assert(current_file_index <= 1) # Prevent "unused variable"
- assert(next_chunk_start >= 0) # Prevent "unused variable"
- assert(next_chunk_number <= 2) # Prevent "unused variable"
+ not_needed([
+ "sources_in_jumbo_files",
+ "current_file_index",
+ "next_chunk_start",
+ "next_chunk_number",
+ ])
}
if (has_c_file) {
@@ -141,19 +163,17 @@ template("internal_jumbo_target") {
if (has_objective_c_file) {
jumbo_files += [ "$gen_target_dir/" + target_name + "_jumbo_mm.mm" ]
}
- if (has_S_file) {
- jumbo_files += [ "$gen_target_dir/" + target_name + "_jumbo_S.S" ]
- }
}
if (use_jumbo_build_for_target) {
merge_action_name = target_name + "__jumbo_merge"
+ sources_in_jumbo_files -= excluded_sources
# Create an action that calls a script that merges all the source files.
action(merge_action_name) {
script = "//build/config/merge_for_jumbo.py"
response_file_contents =
- rebase_path(invoker_sources - excluded_sources, root_build_dir)
+ rebase_path(sources_in_jumbo_files, root_build_dir)
outputs = jumbo_files
args = [ "--outputs" ] + rebase_path(outputs, root_build_dir) +
[ "--file-list={{response_file_name}}" ]
@@ -162,16 +182,14 @@ template("internal_jumbo_target") {
# If the list subtraction triggers a gn error,
# jumbo_excluded_sources lists a file that is not in sources.
sources_after_exclusion = invoker_sources - excluded_sources
- assert(sources_after_exclusion != [] || true) # Prevent "unused variable".
+ not_needed([ "sources_after_exclusion" ])
}
target_type = invoker.target_type
if (use_jumbo_build_for_target && target_type == "split_static_library") {
# Meaningless and also impossible if split_count > len(jumbo_files)
target_type = "static_library"
-
- # Prevent "unused variable" warning.
- assert(!defined(invoker.split_count) || invoker.split_count > 0)
+ not_needed(invoker, [ "split_count" ])
}
# Perform the actual operation, either on the original sources or
@@ -189,15 +207,7 @@ template("internal_jumbo_target") {
variables_to_not_forward += [ "sources" ]
assert(jumbo_files != [])
set_sources_assignment_filter([]) # Prefiltered.
- sources = jumbo_files + excluded_sources
-
- # Need to keep the headers in sources so that dependency checks
- # work.
- foreach(source_file, invoker_sources) {
- if (get_path_info(source_file, "extension") == "h") {
- sources += [ source_file ]
- }
- }
+ sources = invoker_sources - sources_in_jumbo_files + jumbo_files
# Change include_dirs to make sure that the jumbo file can find its
# #included files.
diff --git a/chromium/build/config/linux/libdrm/BUILD.gn b/chromium/build/config/linux/libdrm/BUILD.gn
new file mode 100644
index 00000000000..daebcfd3a43
--- /dev/null
+++ b/chromium/build/config/linux/libdrm/BUILD.gn
@@ -0,0 +1,33 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+import("//build/config/linux/pkg_config.gni")
+
+assert(is_linux)
+
+declare_args() {
+ # Controls whether the build should use the version of libdrm
+ # library shipped with the system. In release builds of Chrome OS we
+ # use the system version, but when building on dev workstations we
+ # bundle it because Ubuntu doesn't ship a usable version.
+ # Chromecast will use this as well.
+ use_system_libdrm = false
+}
+
+if (use_system_libdrm) {
+ pkg_config("libdrm_config") {
+ packages = [ "libdrm" ]
+ }
+ group("libdrm") {
+ public_configs = [ ":libdrm_config" ]
+ }
+} else {
+ group("libdrm") {
+ public_deps = [
+ "//third_party/libdrm",
+ ]
+ }
+ config("libdrm_exynos_include_config") {
+ include_dirs = [ "//third_party/libdrm/src/exynos" ]
+ }
+}
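A hedged sketch of a dependent target (names invented): consumers reference
the wrapper group, which resolves to either the system package or the bundled
copy depending on use_system_libdrm.

  source_set("drm_user") {
    sources = [ "drm_user.cc" ]
    deps = [ "//build/config/linux/libdrm" ]
  }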
diff --git a/chromium/build/config/mac/package_framework.py b/chromium/build/config/mac/package_framework.py
index f669528fe50..75604094e42 100644
--- a/chromium/build/config/mac/package_framework.py
+++ b/chromium/build/config/mac/package_framework.py
@@ -23,7 +23,7 @@ def Main():
# Foo.framework/Versions/Current symlink to it.
if args.version:
try:
- os.makedirs(os.path.join(args.framework, VERSIONS, args.version), 0744)
+ os.makedirs(os.path.join(args.framework, VERSIONS, args.version), 0755)
except OSError as e:
if e.errno != errno.EEXIST:
raise e
diff --git a/chromium/build/config/merge_for_jumbo.py b/chromium/build/config/merge_for_jumbo.py
index 2ee23d94374..491b547938d 100755
--- a/chromium/build/config/merge_for_jumbo.py
+++ b/chromium/build/config/merge_for_jumbo.py
@@ -119,7 +119,7 @@ def main():
written_output_set = set() # Just for double checking
written_input_set = set() # Just for double checking
- for language_ext in (".cc", ".c", ".mm", ".S"):
+ for language_ext in (".cc", ".c", ".mm",):
if language_ext == ".cc":
ext_pattern = (".cc", ".cpp")
else:
@@ -135,6 +135,7 @@ def main():
write_jumbo_files(inputs, outputs, written_input_set, written_output_set)
assert set(args.outputs) == written_output_set, "Did not fill all outputs"
+ assert set(all_inputs) == written_input_set, "Did not use all inputs"
if args.verbose:
print("Generated %s (%d files) based on %s" % (
str(args.outputs), len(written_input_set), args.file_list))
diff --git a/chromium/build/config/posix/BUILD.gn b/chromium/build/config/posix/BUILD.gn
index 43655a6eb87..97113b98d01 100644
--- a/chromium/build/config/posix/BUILD.gn
+++ b/chromium/build/config/posix/BUILD.gn
@@ -42,12 +42,6 @@ config("runtime_library") {
defines += [ "_LIBCXXABI_DISABLE_VISIBILITY_ANNOTATIONS" ]
}
}
- if (!is_clang) {
- # Gcc has a built-in abs() definition with default visibility.
- # If it was not disabled, it would conflict with libc++'s abs()
- # with hidden visibility.
- cflags += [ "-fno-builtin-abs" ]
- }
cflags_cc += [
"-nostdinc++",
"-isystem" + rebase_path("$libcxx_prefix/include", root_build_dir),
@@ -56,6 +50,7 @@ config("runtime_library") {
defines += [
"CR_LIBCXX_REVISION=$libcxx_svn_revision",
"CR_LIBCXXABI_REVISION=$libcxxabi_svn_revision",
+ "_LIBCPP_ENABLE_NODISCARD",
]
# Make sure we don't link against libc++ or libstdc++.
@@ -67,6 +62,11 @@ config("runtime_library") {
ldflags += [ "-nostdlib++" ]
}
} else {
+ # Gcc has a built-in abs() definition with default visibility.
+ # If it was not disabled, it would conflict with libc++'s abs()
+ # with hidden visibility.
+ cflags += [ "-fno-builtin-abs" ]
+
ldflags += [ "-nodefaultlibs" ]
# Unfortunately, there's no way to disable linking against just libc++
diff --git a/chromium/build/config/sanitizers/BUILD.gn b/chromium/build/config/sanitizers/BUILD.gn
index debc64cf409..9e495424fbc 100644
--- a/chromium/build/config/sanitizers/BUILD.gn
+++ b/chromium/build/config/sanitizers/BUILD.gn
@@ -197,14 +197,9 @@ config("default_sanitizer_ldflags") {
if (is_ubsan_vptr) {
ldflags += [ "-fsanitize=vptr" ]
}
- if (is_safestack) {
- ldflags += [ "-fsanitize=safe-stack" ]
- }
if (use_sanitizer_coverage) {
- # TODO(crbug.com/892167): Remove the is_mac check when new instrumentation
- # works on Mac.
- if (use_libfuzzer && !is_mac) {
+ if (use_libfuzzer) {
ldflags += [ "-fsanitize=fuzzer-no-link" ]
} else {
ldflags += [ "-fsanitize-coverage=$sanitizer_coverage_flags" ]
@@ -390,15 +385,19 @@ config("cfi_icall_generalize_pointers") {
}
}
+config("cfi_icall_disable") {
+ if (is_clang && is_cfi && use_cfi_icall) {
+ cflags = [ "-fno-sanitize=cfi-icall" ]
+ }
+}
+
config("coverage_flags") {
cflags = []
if (use_sanitizer_coverage) {
# Used by sandboxing code to allow coverage dump to be written on the disk.
defines = [ "SANITIZER_COVERAGE" ]
- # TODO(crbug.com/892167): Remove the is_mac check when new instrumentation
- # works on Mac.
- if (use_libfuzzer && !is_mac) {
+ if (use_libfuzzer) {
cflags += [ "-fsanitize=fuzzer-no-link" ]
} else {
cflags += [
@@ -438,12 +437,6 @@ config("msan_flags") {
}
}
-config("safestack_flags") {
- if (is_safestack) {
- cflags = [ "-fsanitize=safe-stack" ]
- }
-}
-
config("tsan_flags") {
if (is_tsan) {
assert(is_linux, "tsan only supported on linux x86_64")
@@ -559,7 +552,6 @@ all_sanitizer_configs = [
":cfi_flags",
":lsan_flags",
":msan_flags",
- ":safestack_flags",
":tsan_flags",
":ubsan_flags",
":ubsan_no_recover",
diff --git a/chromium/build/config/sanitizers/sanitizers.gni b/chromium/build/config/sanitizers/sanitizers.gni
index cceab89cc30..6d23e174f45 100644
--- a/chromium/build/config/sanitizers/sanitizers.gni
+++ b/chromium/build/config/sanitizers/sanitizers.gni
@@ -33,9 +33,6 @@ declare_args() {
# Compile for Undefined Behaviour Sanitizer's vptr checks.
is_ubsan_vptr = false
- # Compile with SafeStack shadow stack support.
- is_safestack = false
-
# Track where uninitialized memory originates from. From fastest to slowest:
# 0 - no tracking, 1 - track only the initial allocation site, 2 - track the
# chain of stores leading from allocation site to use site.
@@ -100,8 +97,7 @@ declare_args() {
# Value for -fsanitize-coverage flag. Setting this causes
# use_sanitizer_coverage to be enabled.
- # This flag is not used for libFuzzer (use_libfuzzer=true) unless we are on
- # Mac. Instead, we use:
+ # This flag is not used for libFuzzer (use_libfuzzer=true). Instead, we use:
# -fsanitize=fuzzer-no-link
# Default value when unset and use_fuzzing_engine=true:
# trace-pc-guard
@@ -159,9 +155,9 @@ if (use_fuzzing_engine && sanitizer_coverage_flags == "") {
sanitizer_coverage_flags = "trace-pc-guard,indirect-calls"
}
-# Whether we are linking against a debugging sanitizer runtime library. Among
-# other things, this changes the default symbol level and other settings in
-# order to prepare to create stack traces "live" using the sanitizer runtime.
+# Whether we are linking against a sanitizer runtime library. Among other
+# things, this changes the default symbol level and other settings in order to
+# prepare to create stack traces "live" using the sanitizer runtime.
using_sanitizer =
is_asan || is_lsan || is_tsan || is_msan || is_ubsan || is_ubsan_null ||
is_ubsan_vptr || is_ubsan_security || use_sanitizer_coverage || use_cfi_diag
@@ -172,9 +168,6 @@ assert(!using_sanitizer || is_clang,
assert(!is_cfi || is_clang,
"is_cfi requires setting is_clang = true in 'gn args'")
-assert(!is_safestack || is_clang,
- "is_safestack requires setting is_clang = true in 'gn args'")
-
prebuilt_instrumented_libraries_available =
is_msan && (msan_track_origins == 0 || msan_track_origins == 2)
diff --git a/chromium/build/config/win/BUILD.gn b/chromium/build/config/win/BUILD.gn
index db14f673153..f27f192e37a 100644
--- a/chromium/build/config/win/BUILD.gn
+++ b/chromium/build/config/win/BUILD.gn
@@ -2,6 +2,7 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+import("//buildtools/deps_revisions.gni")
import("//build/config/chrome_build.gni")
import("//build/config/c++/c++.gni")
import("//build/config/clang/clang.gni")
@@ -87,6 +88,14 @@ config("compiler") {
if (is_clang) {
cflags += [ "-fmsc-version=1911" ]
+ if (is_component_build) {
+ cflags += [
+ # Do not export inline member functions. This makes component builds
+ # faster. This is similar to -fvisibility-inlines-hidden.
+ "/Zc:dllexportInlines-",
+ ]
+ }
+
if (current_cpu == "x86") {
cflags += [ "-m32" ]
} else {
@@ -141,13 +150,8 @@ config("compiler") {
# release builds. These are implied by /PROFILE below, but /PROFILE is
# incompatible with /debug:fastlink.
# Release builds always want these optimizations, so enable them explicitly.
- # TODO(crbug.com/884545): Remove the checks for use_libfuzzer when
- # libFuzzer's issues with /OPT:REF are resolved upstream.
- if (!use_libfuzzer) {
- ldflags += [ "/OPT:REF" ]
- }
-
ldflags += [
+ "/OPT:REF",
"/OPT:ICF",
"/INCREMENTAL:NO",
"/FIXED:NO",
@@ -164,9 +168,7 @@ config("compiler") {
# PDB file by about 5%) but does not otherwise alter the output binary. It
# is enabled opportunistically for builds where it is not prohibited (not
# supported when incrementally linking, or using /debug:fastlink).
- # /PROFILE implies /OPT:REF. Don't use it with libFuzzer while /OPT:REF
- # can't be used with libFuzzer. See crbug.com/884545 for more details.
- if (!is_win_fastlink && !use_libfuzzer) {
+ if (!is_win_fastlink) {
ldflags += [ "/PROFILE" ]
}
}
@@ -256,6 +258,8 @@ config("runtime_library") {
defines += [ "_USING_V110_SDK71_" ]
}
+ # TODO(thomasanderson): Move this into a target in //build/config/c++ and
+ # deduplicate with //build/config/posix/BUILD.gn.
if (use_custom_libcxx) {
cflags_cc +=
[ "-I" + rebase_path("$libcxx_prefix/include", root_build_dir) ]
@@ -263,11 +267,17 @@ config("runtime_library") {
defines += [ "_LIBCPP_DISABLE_VISIBILITY_ANNOTATIONS" ]
}
- # Prevent libc++ from embedding linker flags to try to automatically link
- # against its runtime library. This is unnecessary with our build system,
- # and can also result in build failures if libc++'s name for a library does
- # not match ours.
- defines += [ "_LIBCPP_NO_AUTO_LINK" ]
+ defines += [
+ "CR_LIBCXX_REVISION=$libcxx_svn_revision",
+ "CR_LIBCXXABI_REVISION=$libcxxabi_svn_revision",
+ "_LIBCPP_ENABLE_NODISCARD",
+
+ # Prevent libc++ from embedding linker flags to try to automatically link
+ # against its runtime library. This is unnecessary with our build system,
+ # and can also result in build failures if libc++'s name for a library
+ # does not match ours.
+ "_LIBCPP_NO_AUTO_LINK",
+ ]
}
if (current_os == "winuwp") {
@@ -470,6 +480,9 @@ if (current_cpu == "x64") {
# The number after the comma is the minimum required OS version.
# 5.02 = Windows Server 2003.
subsystem_version_suffix = ",5.02"
+} else if (current_cpu == "arm64") {
+ # Windows ARM64 requires Windows 10.
+ subsystem_version_suffix = ",10.0"
} else {
# 5.01 = Windows XP.
subsystem_version_suffix = ",5.01"
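For reference, the `/SUBSYSTEM` minimum-version selection above is a plain three-way mapping; a hedged Python restatement (illustrative only — GN evaluates the real logic):
```python
# Minimal restatement of the GN branch above; illustrative only.
def subsystem_version_suffix(current_cpu):
    if current_cpu == 'x64':
        return ',5.02'  # 5.02 = Windows Server 2003, the x64 minimum.
    if current_cpu == 'arm64':
        return ',10.0'  # Windows ARM64 requires Windows 10.
    return ',5.01'      # 5.01 = Windows XP, for x86.

assert subsystem_version_suffix('arm64') == ',10.0'
```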
diff --git a/chromium/build/docs/writing_gn_templates.md b/chromium/build/docs/writing_gn_templates.md
index f75c88003d3..1d1e68bf202 100644
--- a/chromium/build/docs/writing_gn_templates.md
+++ b/chromium/build/docs/writing_gn_templates.md
@@ -58,6 +58,7 @@ gn analyze //out/Debug <(echo '{
* `grit()` does this.
### Outputs
+#### What to List as Outputs
Do not list files as `outputs` unless they are important. Outputs are important
if they are:
* used as an input by another target, or
@@ -66,6 +67,38 @@ if they are:
Example:
* An action runs a binary that creates an output as well as a log file. Do not
list the log file as an output.
+
+#### Where to Place Outputs
+**Option 1:** To make outputs visible in codesearch (e.g. generated sources):
+* use `$target_gen_dir/$target_name.$EXTENSION`.
+
+**Option 2:** Otherwise (for binary files):
+* use `$target_out_dir/$target_name.$EXTENSION`.
+
+**Option 3:** For outputs that are required at runtime
+(e.g. [runtime_deps](https://gn.googlesource.com/gn/+/master/docs/reference.md#runtime_deps)),
+options 1 & 2 do not work because those directories are not archived in
+builder/tester bot configurations. In this case:
+* use `$root_out_dir/gen.runtime` or `$root_out_dir/obj.runtime`.
+
+Example:
+```python
+# This .json file is used at runtime and thus cannot go in target_gen_dir.
+_target_dir_name = rebase_path(get_label_info(":$target_name", "dir"), "//")
+_output_path = "$root_out_dir/gen.runtime/$_target_dir_name/$target_name.json"
+```
+
+**Option 4:** For outputs that map 1:1 with executables, and whose paths cannot
+be derived at runtime:
+* use `$root_build_dir/YOUR_NAME_HERE/$target_name`.
+
+Examples:
+```python
+# Wrapper scripts for apks:
+_output_path = "$root_build_dir/bin/$target_name"
+# Metadata for apks. Used by binary size tools.
+_output_path = "$root_build_dir/size-info/${invoker.name}.apk.jar.info"
+```
## Best Practices for Python Actions
Outputs should be atomic and take advantage of `restat=1`.
diff --git a/chromium/build/download_nacl_toolchains.py b/chromium/build/download_nacl_toolchains.py
index ea9e0cdae89..4b61fc025ba 100755
--- a/chromium/build/download_nacl_toolchains.py
+++ b/chromium/build/download_nacl_toolchains.py
@@ -11,11 +11,6 @@ import sys
def Main(args):
- # Exit early if disable_nacl=1.
- if 'disable_nacl=1' in os.environ.get('GYP_DEFINES', ''):
- return 0
- if 'OS=android' in os.environ.get('GYP_DEFINES', ''):
- return 0
script_dir = os.path.dirname(os.path.abspath(__file__))
src_dir = os.path.dirname(script_dir)
nacl_dir = os.path.join(src_dir, 'native_client')
diff --git a/chromium/build/fuchsia/boot_data.py b/chromium/build/fuchsia/boot_data.py
index 64998661c3d..ac54746774e 100644
--- a/chromium/build/fuchsia/boot_data.py
+++ b/chromium/build/fuchsia/boot_data.py
@@ -41,20 +41,15 @@ def _TargetCpuToSdkBinPath(target_arch):
def _ProvisionSSH(output_dir):
- """Provisions the key files used by the SSH daemon, and generates a
- configuration file used by clients for connecting to SSH.
+ """Generates a keypair and configuration data for the SSH client.
+ Returns a path to the client public key."""
- Returns a tuple with:
- #0: the client configuration file
- #1: a list of file path pairs: (<path in image>, <path on build filesystem>).
- """
-
- host_key_path = output_dir + '/ssh_key'
+ host_key_path = os.path.join(output_dir, 'ssh_key')
host_pubkey_path = host_key_path + '.pub'
- id_key_path = output_dir + '/id_ed25519'
+ id_key_path = os.path.join(output_dir, 'id_ed25519')
id_pubkey_path = id_key_path + '.pub'
- known_hosts_path = output_dir + '/known_hosts'
- ssh_config_path = GetSSHConfigPath(output_dir)
+ known_hosts_path = os.path.join(output_dir, 'known_hosts')
+ ssh_config_path = os.path.join(output_dir, 'ssh_config')
logging.debug('Generating SSH credentials.')
if not os.path.isfile(host_key_path):
@@ -73,12 +68,7 @@ def _ProvisionSSH(output_dir):
if os.path.exists(known_hosts_path):
os.remove(known_hosts_path)
- return (
- ssh_config_path,
- (('ssh/ssh_host_ed25519_key', host_key_path),
- ('ssh/ssh_host_ed25519_key.pub', host_pubkey_path),
- ('ssh/authorized_keys', id_pubkey_path))
- )
+ return id_pubkey_path
def _MakeQcowDisk(output_dir, disk_path):
@@ -104,54 +94,20 @@ def GetSSHConfigPath(output_dir):
return output_dir + '/ssh_config'
-def ConfigureDataFVM(output_dir, output_type):
- """Builds the FVM image for the /data volume and prepopulates it
- with SSH keys.
-
- output_dir: Path to the output directory which will contain the FVM file.
- output_type: If FVM_TYPE_QCOW, then returns a path to the qcow2 FVM file,
- used for QEMU.
-
- If FVM_TYPE_SPARSE, then returns a path to the
- sparse/compressed FVM file."""
-
- logging.debug('Building /data partition FVM file.')
- # minfs expects absolute paths(bug:
- # https://fuchsia.atlassian.net/browse/ZX-2397)
- output_dir = os.path.abspath(output_dir)
- with tempfile.NamedTemporaryFile() as data_file:
- # Build up the minfs partition data and install keys into it.
- ssh_config, ssh_data = _ProvisionSSH(output_dir)
- with tempfile.NamedTemporaryFile() as manifest:
- for dest, src in ssh_data:
- manifest.write('%s=%s\n' % (dest, src))
- manifest.flush()
- minfs_path = os.path.join(common.SDK_ROOT, 'tools', 'minfs')
- subprocess.check_call([minfs_path, '%s@1G' % data_file.name, 'create'])
- subprocess.check_call([minfs_path, data_file.name, 'manifest',
- manifest.name])
-
- # Wrap the minfs partition in a FVM container.
- fvm_path = os.path.join(common.SDK_ROOT, 'tools', 'fvm')
- fvm_output_path = os.path.join(output_dir, 'fvm.data.blk')
- if os.path.exists(fvm_output_path):
- os.remove(fvm_output_path)
-
- if output_type == FVM_TYPE_SPARSE:
- cmd = [fvm_path, fvm_output_path, 'sparse', '--compress', 'lz4',
- '--data', data_file.name]
- else:
- cmd = [fvm_path, fvm_output_path, 'create', '--data', data_file.name]
-
- logging.debug(' '.join(cmd))
- subprocess.check_call(cmd)
-
- if output_type == FVM_TYPE_SPARSE:
- return fvm_output_path
- elif output_type == FVM_TYPE_QCOW:
- return _MakeQcowDisk(output_dir, fvm_output_path)
- else:
- raise Exception('Unknown output_type: %r' % output_type)
+def GetBootImage(output_dir, target_arch):
+ """"Gets a path to the Zircon boot image, with the SSH client public key
+ added."""
+
+ pubkey_path = _ProvisionSSH(output_dir)
+ zbi_tool = os.path.join(common.SDK_ROOT, 'tools', 'zbi')
+ image_source_path = GetTargetFile(target_arch, 'fuchsia.zbi')
+ image_dest_path = os.path.join(output_dir, 'gen', 'fuchsia-with-keys.zbi')
+
+  cmd = [zbi_tool, '-o', image_dest_path, image_source_path,
+         '-e', 'data/ssh/authorized_keys=' + pubkey_path]
+ subprocess.check_call(cmd)
+
+ return image_dest_path
def GetNodeName(output_dir):
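A hedged usage sketch of the new flow: `GetBootImage()` provisions the SSH keypair via `_ProvisionSSH()` and then shells out to the SDK `zbi` tool to embed the public key. The output directory and architecture below are hypothetical:
```python
# Illustrative only: build a key-embedded ZBI for an x64 target.
import boot_data

image_path = boot_data.GetBootImage('/tmp/fuchsia-out', 'x64')
print('Boot image with embedded authorized_keys:', image_path)
```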
diff --git a/chromium/build/fuchsia/common_args.py b/chromium/build/fuchsia/common_args.py
index 27f37ddb501..4867188966b 100644
--- a/chromium/build/fuchsia/common_args.py
+++ b/chromium/build/fuchsia/common_args.py
@@ -34,6 +34,9 @@ def AddCommonArgs(arg_parser):
'are located (must include build type).'))
common_args.add_argument('--target-cpu', required=True,
help='GN target_cpu setting for the build.')
+ common_args.add_argument('--target-staging-path',
+ help='target path under which to stage packages '
+ 'during deployment.', default='/data')
common_args.add_argument('--device', '-d', action='store_true', default=False,
help='Run on hardware device instead of QEMU.')
common_args.add_argument('--host', help='The IP of the target device. ' +
@@ -57,6 +60,10 @@ def AddCommonArgs(arg_parser):
common_args.add_argument('--qemu-cpu-cores', type=int, default=4,
help='Sets the number of CPU cores to provide if '
'launching in a VM with QEMU.'),
+ common_args.add_argument('--qemu-require-kvm', action='store_true',
+ help='Disables fall-back to emulated CPU if the '
+ 'host system does not support KVM acceleration.'),
+
def ConfigureLogging(args):
"""Configures the logging level based on command line |args|."""
@@ -88,7 +95,8 @@ def GetDeploymentTargetForArgs(args):
if not args.device:
return QemuTarget(args.output_directory, args.target_cpu,
- args.qemu_cpu_cores, system_log_file)
+ args.qemu_cpu_cores, system_log_file,
+ args.qemu_require_kvm)
else:
return DeviceTarget(args.output_directory, args.target_cpu, args.host,
args.port, args.ssh_config, system_log_file)
diff --git a/chromium/build/fuchsia/device_target.py b/chromium/build/fuchsia/device_target.py
index 862b10b6c5f..c901952befe 100644
--- a/chromium/build/fuchsia/device_target.py
+++ b/chromium/build/fuchsia/device_target.py
@@ -108,19 +108,21 @@ class DeviceTarget(target.Target):
bootserver_command = [
bootserver_path,
'-1',
- '--efi',
- EnsurePathExists(boot_data.GetTargetFile(self._GetTargetSdkArch(),
- 'local.esp.blk')),
'--fvm',
EnsurePathExists(boot_data.GetTargetFile(self._GetTargetSdkArch(),
'fvm.sparse.blk')),
- '--fvm',
- EnsurePathExists(
- boot_data.ConfigureDataFVM(self._output_dir,
- boot_data.FVM_TYPE_SPARSE)),
- EnsurePathExists(boot_data.GetTargetFile(self._GetTargetSdkArch(),
- 'fuchsia.zbi')),
- '--'] + boot_data.GetKernelArgs(self._output_dir)
+ EnsurePathExists(boot_data.GetBootImage(self._output_dir,
+ self._GetTargetSdkArch()))]
+
+ if self._GetTargetSdkArch() == 'x64':
+ bootserver_command += [
+ '--efi',
+ EnsurePathExists(boot_data.GetTargetFile(self._GetTargetSdkArch(),
+ 'local.esp.blk'))]
+
+ bootserver_command += ['--']
+ bootserver_command += boot_data.GetKernelArgs(self._output_dir)
+
logging.debug(' '.join(bootserver_command))
subprocess.check_call(bootserver_command)
diff --git a/chromium/build/fuchsia/exe_runner.py b/chromium/build/fuchsia/exe_runner.py
index 5eeaf2c2b18..feb96d0779f 100755
--- a/chromium/build/fuchsia/exe_runner.py
+++ b/chromium/build/fuchsia/exe_runner.py
@@ -12,7 +12,7 @@ import sys
from common_args import AddCommonArgs, ConfigureLogging, \
GetDeploymentTargetForArgs
-from run_package import RunPackage
+from run_package import RunPackage, RunPackageArgs
def main():
@@ -25,10 +25,11 @@ def main():
with GetDeploymentTargetForArgs(args) as target:
target.Start()
+
+ run_package_args = RunPackageArgs.FromCommonArgs(args)
return RunPackage(
args.output_directory, target, args.package, args.package_name,
- args.package_dep, args.child_args, args.include_system_logs,
- args.install_only, args.package_manifest)
+ args.package_dep, args.child_args, run_package_args)
if __name__ == '__main__':
diff --git a/chromium/build/fuchsia/fidlgen_js/BUILD.gn b/chromium/build/fuchsia/fidlgen_js/BUILD.gn
new file mode 100644
index 00000000000..4b2bb6400c5
--- /dev/null
+++ b/chromium/build/fuchsia/fidlgen_js/BUILD.gn
@@ -0,0 +1,63 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/fuchsia/fidl_library.gni")
+import("//testing/test.gni")
+
+test("fidlgen_js_unittests") {
+ testonly = true
+
+ sources = [
+ "test/fidlgen_js_unittest.cc",
+ ]
+
+ deps = [
+ ":fidljstest",
+ ":runtime",
+ "//base/test:test_support",
+ "//gin:gin_test",
+ "//testing/gtest",
+ "//v8",
+ ]
+
+ configs += [
+ "//tools/v8_context_snapshot:use_v8_context_snapshot",
+ "//v8:external_startup_data",
+ ]
+
+ data_deps = [
+ "//tools/v8_context_snapshot:v8_context_snapshot",
+ ]
+
+ data = [
+ "runtime/fidl.mjs",
+ ]
+}
+
+static_library("runtime") {
+ sources = [
+ "runtime/zircon.cc",
+ "runtime/zircon.h",
+ ]
+
+ deps = [
+ "//base",
+ "//gin",
+ "//third_party/fuchsia-sdk/sdk:async",
+ "//third_party/fuchsia-sdk/sdk:async_default",
+ "//v8",
+ ]
+}
+
+fidl_library("fidljstest") {
+ testonly = true
+ sources = [
+ "test/simple.fidl",
+ ]
+
+ languages = [
+ "cpp",
+ "js",
+ ]
+}
diff --git a/chromium/build/fuchsia/fidlgen_js/DEPS b/chromium/build/fuchsia/fidlgen_js/DEPS
new file mode 100644
index 00000000000..681254d0f3e
--- /dev/null
+++ b/chromium/build/fuchsia/fidlgen_js/DEPS
@@ -0,0 +1,4 @@
+include_rules = [
+ "+gin",
+ "+v8/include",
+]
diff --git a/chromium/build/fuchsia/fidlgen_js/fidl.py b/chromium/build/fuchsia/fidlgen_js/fidl.py
new file mode 100644
index 00000000000..6f8b99f4413
--- /dev/null
+++ b/chromium/build/fuchsia/fidlgen_js/fidl.py
@@ -0,0 +1,549 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This was generated (and can be regenerated) by pasting
+# zircon/system/host/fidl/schema.json from Fuchsia into
+# https://app.quicktype.io and choosing Python 2.7 output. The only manual
+# change is to modify the import path for Enum.
+
+from third_party.enum34 import Enum
+
+
+def from_str(x):
+ assert isinstance(x, (str, unicode))
+ return x
+
+
+def from_int(x):
+ assert isinstance(x, int) and not isinstance(x, bool)
+ return x
+
+
+def from_none(x):
+ assert x is None
+ return x
+
+
+def from_union(fs, x):
+ for f in fs:
+ try:
+ return f(x)
+ except:
+ pass
+ assert False
+
+
+def from_bool(x):
+ assert isinstance(x, bool)
+ return x
+
+
+def to_class(c, x):
+ assert isinstance(x, c)
+ return x.to_dict()
+
+
+def to_enum(c, x):
+ assert isinstance(x, c)
+ return x.value
+
+
+def from_list(f, x):
+ assert isinstance(x, list)
+ return [f(y) for y in x]
+
+
+def from_dict(f, x):
+ assert isinstance(x, dict)
+ return { k: f(v) for (k, v) in x.items() }
+
+
+class Attribute:
+ def __init__(self, name, value):
+ self.name = name
+ self.value = value
+
+ @staticmethod
+ def from_dict(obj):
+ assert isinstance(obj, dict)
+ name = from_str(obj.get(u"name"))
+ value = from_str(obj.get(u"value"))
+ return Attribute(name, value)
+
+ def to_dict(self):
+ result = {}
+ result[u"name"] = from_str(self.name)
+ result[u"value"] = from_str(self.value)
+ return result
+
+
+class TypeKind(Enum):
+ ARRAY = u"array"
+ HANDLE = u"handle"
+ IDENTIFIER = u"identifier"
+ PRIMITIVE = u"primitive"
+ REQUEST = u"request"
+ STRING = u"string"
+ VECTOR = u"vector"
+
+
+class TypeClass:
+ def __init__(self, element_count, element_type, kind, maybe_element_count, nullable, subtype, identifier):
+ self.element_count = element_count
+ self.element_type = element_type
+ self.kind = kind
+ self.maybe_element_count = maybe_element_count
+ self.nullable = nullable
+ self.subtype = subtype
+ self.identifier = identifier
+
+ @staticmethod
+ def from_dict(obj):
+ assert isinstance(obj, dict)
+ element_count = from_union([from_int, from_none], obj.get(u"element_count"))
+ element_type = from_union([TypeClass.from_dict, from_none], obj.get(u"element_type"))
+ kind = TypeKind(obj.get(u"kind"))
+ maybe_element_count = from_union([from_int, from_none], obj.get(u"maybe_element_count"))
+ nullable = from_union([from_bool, from_none], obj.get(u"nullable"))
+ subtype = from_union([from_str, from_none], obj.get(u"subtype"))
+ identifier = from_union([from_str, from_none], obj.get(u"identifier"))
+ return TypeClass(element_count, element_type, kind, maybe_element_count, nullable, subtype, identifier)
+
+ def to_dict(self):
+ result = {}
+ result[u"element_count"] = from_union([from_int, from_none], self.element_count)
+ result[u"element_type"] = from_union([lambda x: to_class(TypeClass, x), from_none], self.element_type)
+ result[u"kind"] = to_enum(TypeKind, self.kind)
+ result[u"maybe_element_count"] = from_union([from_int, from_none], self.maybe_element_count)
+ result[u"nullable"] = from_union([from_bool, from_none], self.nullable)
+ result[u"subtype"] = from_union([from_str, from_none], self.subtype)
+ result[u"identifier"] = from_union([from_str, from_none], self.identifier)
+ return result
+
+
+class ConstantKind(Enum):
+ IDENTIFIER = u"identifier"
+ LITERAL = u"literal"
+
+
+class LiteralKind(Enum):
+ DEFAULT = u"default"
+ FALSE = u"false"
+ NUMERIC = u"numeric"
+ STRING = u"string"
+ TRUE = u"true"
+
+
+class Literal:
+ def __init__(self, kind, value):
+ self.kind = kind
+ self.value = value
+
+ @staticmethod
+ def from_dict(obj):
+ assert isinstance(obj, dict)
+ kind = LiteralKind(obj.get(u"kind"))
+ value = from_union([from_str, from_none], obj.get(u"value"))
+ return Literal(kind, value)
+
+ def to_dict(self):
+ result = {}
+ result[u"kind"] = to_enum(LiteralKind, self.kind)
+ result[u"value"] = from_union([from_str, from_none], self.value)
+ return result
+
+
+class Constant:
+ def __init__(self, identifier, kind, literal):
+ self.identifier = identifier
+ self.kind = kind
+ self.literal = literal
+
+ @staticmethod
+ def from_dict(obj):
+ assert isinstance(obj, dict)
+ identifier = from_union([from_str, from_none], obj.get(u"identifier"))
+ kind = ConstantKind(obj.get(u"kind"))
+ literal = from_union([Literal.from_dict, from_none], obj.get(u"literal"))
+ return Constant(identifier, kind, literal)
+
+ def to_dict(self):
+ result = {}
+ result[u"identifier"] = from_union([from_str, from_none], self.identifier)
+ result[u"kind"] = to_enum(ConstantKind, self.kind)
+ result[u"literal"] = from_union([lambda x: to_class(Literal, x), from_none], self.literal)
+ return result
+
+
+class Const:
+ def __init__(self, maybe_attributes, name, type, value):
+ self.maybe_attributes = maybe_attributes
+ self.name = name
+ self.type = type
+ self.value = value
+
+ @staticmethod
+ def from_dict(obj):
+ assert isinstance(obj, dict)
+ maybe_attributes = from_union([lambda x: from_list(Attribute.from_dict, x), from_none], obj.get(u"maybe_attributes"))
+ name = from_str(obj.get(u"name"))
+ type = TypeClass.from_dict(obj.get(u"type"))
+ value = Constant.from_dict(obj.get(u"value"))
+ return Const(maybe_attributes, name, type, value)
+
+ def to_dict(self):
+ result = {}
+ result[u"maybe_attributes"] = from_union([lambda x: from_list(lambda x: to_class(Attribute, x), x), from_none], self.maybe_attributes)
+ result[u"name"] = from_str(self.name)
+ result[u"type"] = to_class(TypeClass, self.type)
+ result[u"value"] = to_class(Constant, self.value)
+ return result
+
+
+class DeclarationsMap(Enum):
+ CONST = u"const"
+ ENUM = u"enum"
+ INTERFACE = u"interface"
+ STRUCT = u"struct"
+ UNION = u"union"
+
+
+class EnumMember:
+ def __init__(self, name, value):
+ self.name = name
+ self.value = value
+
+ @staticmethod
+ def from_dict(obj):
+ assert isinstance(obj, dict)
+ name = from_str(obj.get(u"name"))
+ value = Constant.from_dict(obj.get(u"value"))
+ return EnumMember(name, value)
+
+ def to_dict(self):
+ result = {}
+ result[u"name"] = from_str(self.name)
+ result[u"value"] = to_class(Constant, self.value)
+ return result
+
+
+class IntegerType(Enum):
+ INT16 = u"int16"
+ INT32 = u"int32"
+ INT64 = u"int64"
+ INT8 = u"int8"
+ UINT16 = u"uint16"
+ UINT32 = u"uint32"
+ UINT64 = u"uint64"
+ UINT8 = u"uint8"
+
+
+class EnumDeclarationElement:
+ def __init__(self, maybe_attributes, members, name, type):
+ self.maybe_attributes = maybe_attributes
+ self.members = members
+ self.name = name
+ self.type = type
+
+ @staticmethod
+ def from_dict(obj):
+ assert isinstance(obj, dict)
+ maybe_attributes = from_union([lambda x: from_list(Attribute.from_dict, x), from_none], obj.get(u"maybe_attributes"))
+ members = from_list(EnumMember.from_dict, obj.get(u"members"))
+ name = from_str(obj.get(u"name"))
+ type = IntegerType(obj.get(u"type"))
+ return EnumDeclarationElement(maybe_attributes, members, name, type)
+
+ def to_dict(self):
+ result = {}
+ result[u"maybe_attributes"] = from_union([lambda x: from_list(lambda x: to_class(Attribute, x), x), from_none], self.maybe_attributes)
+ result[u"members"] = from_list(lambda x: to_class(EnumMember, x), self.members)
+ result[u"name"] = from_str(self.name)
+ result[u"type"] = to_enum(IntegerType, self.type)
+ return result
+
+
+class InterfaceMethodParameter:
+ def __init__(self, alignment, name, offset, size, type):
+ self.alignment = alignment
+ self.name = name
+ self.offset = offset
+ self.size = size
+ self.type = type
+
+ @staticmethod
+ def from_dict(obj):
+ assert isinstance(obj, dict)
+ alignment = from_int(obj.get(u"alignment"))
+ name = from_str(obj.get(u"name"))
+ offset = from_int(obj.get(u"offset"))
+ size = from_int(obj.get(u"size"))
+ type = TypeClass.from_dict(obj.get(u"type"))
+ return InterfaceMethodParameter(alignment, name, offset, size, type)
+
+ def to_dict(self):
+ result = {}
+ result[u"alignment"] = from_int(self.alignment)
+ result[u"name"] = from_str(self.name)
+ result[u"offset"] = from_int(self.offset)
+ result[u"size"] = from_int(self.size)
+ result[u"type"] = to_class(TypeClass, self.type)
+ return result
+
+
+class InterfaceMethod:
+ def __init__(self, has_request, has_response, maybe_attributes, maybe_request, maybe_request_alignment, maybe_request_size, maybe_response, maybe_response_alignment, maybe_response_size, name, ordinal):
+ self.has_request = has_request
+ self.has_response = has_response
+ self.maybe_attributes = maybe_attributes
+ self.maybe_request = maybe_request
+ self.maybe_request_alignment = maybe_request_alignment
+ self.maybe_request_size = maybe_request_size
+ self.maybe_response = maybe_response
+ self.maybe_response_alignment = maybe_response_alignment
+ self.maybe_response_size = maybe_response_size
+ self.name = name
+ self.ordinal = ordinal
+
+ @staticmethod
+ def from_dict(obj):
+ assert isinstance(obj, dict)
+ has_request = from_bool(obj.get(u"has_request"))
+ has_response = from_bool(obj.get(u"has_response"))
+ maybe_attributes = from_union([lambda x: from_list(Attribute.from_dict, x), from_none], obj.get(u"maybe_attributes"))
+ maybe_request = from_union([lambda x: from_list(InterfaceMethodParameter.from_dict, x), from_none], obj.get(u"maybe_request"))
+ maybe_request_alignment = from_union([from_int, from_none], obj.get(u"maybe_request_alignment"))
+ maybe_request_size = from_union([from_int, from_none], obj.get(u"maybe_request_size"))
+ maybe_response = from_union([lambda x: from_list(InterfaceMethodParameter.from_dict, x), from_none], obj.get(u"maybe_response"))
+ maybe_response_alignment = from_union([from_int, from_none], obj.get(u"maybe_response_alignment"))
+ maybe_response_size = from_union([from_int, from_none], obj.get(u"maybe_response_size"))
+ name = from_str(obj.get(u"name"))
+ ordinal = from_int(obj.get(u"ordinal"))
+ return InterfaceMethod(has_request, has_response, maybe_attributes, maybe_request, maybe_request_alignment, maybe_request_size, maybe_response, maybe_response_alignment, maybe_response_size, name, ordinal)
+
+ def to_dict(self):
+ result = {}
+ result[u"has_request"] = from_bool(self.has_request)
+ result[u"has_response"] = from_bool(self.has_response)
+ result[u"maybe_attributes"] = from_union([lambda x: from_list(lambda x: to_class(Attribute, x), x), from_none], self.maybe_attributes)
+ result[u"maybe_request"] = from_union([lambda x: from_list(lambda x: to_class(InterfaceMethodParameter, x), x), from_none], self.maybe_request)
+ result[u"maybe_request_alignment"] = from_union([from_int, from_none], self.maybe_request_alignment)
+ result[u"maybe_request_size"] = from_union([from_int, from_none], self.maybe_request_size)
+ result[u"maybe_response"] = from_union([lambda x: from_list(lambda x: to_class(InterfaceMethodParameter, x), x), from_none], self.maybe_response)
+ result[u"maybe_response_alignment"] = from_union([from_int, from_none], self.maybe_response_alignment)
+ result[u"maybe_response_size"] = from_union([from_int, from_none], self.maybe_response_size)
+ result[u"name"] = from_str(self.name)
+ result[u"ordinal"] = from_int(self.ordinal)
+ return result
+
+
+class Interface:
+ def __init__(self, maybe_attributes, methods, name):
+ self.maybe_attributes = maybe_attributes
+ self.methods = methods
+ self.name = name
+
+ @staticmethod
+ def from_dict(obj):
+ assert isinstance(obj, dict)
+ maybe_attributes = from_union([lambda x: from_list(Attribute.from_dict, x), from_none], obj.get(u"maybe_attributes"))
+ methods = from_list(InterfaceMethod.from_dict, obj.get(u"methods"))
+ name = from_str(obj.get(u"name"))
+ return Interface(maybe_attributes, methods, name)
+
+ def to_dict(self):
+ result = {}
+ result[u"maybe_attributes"] = from_union([lambda x: from_list(lambda x: to_class(Attribute, x), x), from_none], self.maybe_attributes)
+ result[u"methods"] = from_list(lambda x: to_class(InterfaceMethod, x), self.methods)
+ result[u"name"] = from_str(self.name)
+ return result
+
+
+class Library:
+ def __init__(self, declarations, name):
+ self.declarations = declarations
+ self.name = name
+
+ @staticmethod
+ def from_dict(obj):
+ assert isinstance(obj, dict)
+ declarations = from_dict(DeclarationsMap, obj.get(u"declarations"))
+ name = from_str(obj.get(u"name"))
+ return Library(declarations, name)
+
+ def to_dict(self):
+ result = {}
+ result[u"declarations"] = from_dict(lambda x: to_enum(DeclarationsMap, x), self.declarations)
+ result[u"name"] = from_str(self.name)
+ return result
+
+
+class StructMember:
+ def __init__(self, alignment, maybe_default_value, name, offset, size, type):
+ self.alignment = alignment
+ self.maybe_default_value = maybe_default_value
+ self.name = name
+ self.offset = offset
+ self.size = size
+ self.type = type
+
+ @staticmethod
+ def from_dict(obj):
+ assert isinstance(obj, dict)
+ alignment = from_int(obj.get(u"alignment"))
+ maybe_default_value = from_union([Constant.from_dict, from_none], obj.get(u"maybe_default_value"))
+ name = from_str(obj.get(u"name"))
+ offset = from_int(obj.get(u"offset"))
+ size = from_int(obj.get(u"size"))
+ type = TypeClass.from_dict(obj.get(u"type"))
+ return StructMember(alignment, maybe_default_value, name, offset, size, type)
+
+ def to_dict(self):
+ result = {}
+ result[u"alignment"] = from_int(self.alignment)
+ result[u"maybe_default_value"] = from_union([lambda x: to_class(Constant, x), from_none], self.maybe_default_value)
+ result[u"name"] = from_str(self.name)
+ result[u"offset"] = from_int(self.offset)
+ result[u"size"] = from_int(self.size)
+ result[u"type"] = to_class(TypeClass, self.type)
+ return result
+
+
+class Struct:
+ def __init__(self, max_handles, maybe_attributes, members, name, size):
+ self.max_handles = max_handles
+ self.maybe_attributes = maybe_attributes
+ self.members = members
+ self.name = name
+ self.size = size
+
+ @staticmethod
+ def from_dict(obj):
+ assert isinstance(obj, dict)
+ max_handles = from_union([from_int, from_none], obj.get(u"max_handles"))
+ maybe_attributes = from_union([lambda x: from_list(Attribute.from_dict, x), from_none], obj.get(u"maybe_attributes"))
+ members = from_list(StructMember.from_dict, obj.get(u"members"))
+ name = from_str(obj.get(u"name"))
+ size = from_int(obj.get(u"size"))
+ return Struct(max_handles, maybe_attributes, members, name, size)
+
+ def to_dict(self):
+ result = {}
+ result[u"max_handles"] = from_union([from_int, from_none], self.max_handles)
+ result[u"maybe_attributes"] = from_union([lambda x: from_list(lambda x: to_class(Attribute, x), x), from_none], self.maybe_attributes)
+ result[u"members"] = from_list(lambda x: to_class(StructMember, x), self.members)
+ result[u"name"] = from_str(self.name)
+ result[u"size"] = from_int(self.size)
+ return result
+
+
+class UnionMember:
+ def __init__(self, alignment, name, offset, size, type):
+ self.alignment = alignment
+ self.name = name
+ self.offset = offset
+ self.size = size
+ self.type = type
+
+ @staticmethod
+ def from_dict(obj):
+ assert isinstance(obj, dict)
+ alignment = from_int(obj.get(u"alignment"))
+ name = from_str(obj.get(u"name"))
+ offset = from_int(obj.get(u"offset"))
+ size = from_int(obj.get(u"size"))
+ type = TypeClass.from_dict(obj.get(u"type"))
+ return UnionMember(alignment, name, offset, size, type)
+
+ def to_dict(self):
+ result = {}
+ result[u"alignment"] = from_int(self.alignment)
+ result[u"name"] = from_str(self.name)
+ result[u"offset"] = from_int(self.offset)
+ result[u"size"] = from_int(self.size)
+ result[u"type"] = to_class(TypeClass, self.type)
+ return result
+
+
+class UnionDeclarationElement:
+ def __init__(self, alignment, max_handles, maybe_attributes, members, name, size):
+ self.alignment = alignment
+ self.max_handles = max_handles
+ self.maybe_attributes = maybe_attributes
+ self.members = members
+ self.name = name
+ self.size = size
+
+ @staticmethod
+ def from_dict(obj):
+ assert isinstance(obj, dict)
+ alignment = from_int(obj.get(u"alignment"))
+ max_handles = from_union([from_int, from_none], obj.get(u"max_handles"))
+ maybe_attributes = from_union([lambda x: from_list(Attribute.from_dict, x), from_none], obj.get(u"maybe_attributes"))
+ members = from_list(UnionMember.from_dict, obj.get(u"members"))
+ name = from_str(obj.get(u"name"))
+ size = from_int(obj.get(u"size"))
+ return UnionDeclarationElement(alignment, max_handles, maybe_attributes, members, name, size)
+
+ def to_dict(self):
+ result = {}
+ result[u"alignment"] = from_int(self.alignment)
+ result[u"max_handles"] = from_union([from_int, from_none], self.max_handles)
+ result[u"maybe_attributes"] = from_union([lambda x: from_list(lambda x: to_class(Attribute, x), x), from_none], self.maybe_attributes)
+ result[u"members"] = from_list(lambda x: to_class(UnionMember, x), self.members)
+ result[u"name"] = from_str(self.name)
+ result[u"size"] = from_int(self.size)
+ return result
+
+
+class Fidl:
+ def __init__(self, const_declarations, declaration_order, declarations, enum_declarations, interface_declarations, library_dependencies, name, struct_declarations, union_declarations, version):
+ self.const_declarations = const_declarations
+ self.declaration_order = declaration_order
+ self.declarations = declarations
+ self.enum_declarations = enum_declarations
+ self.interface_declarations = interface_declarations
+ self.library_dependencies = library_dependencies
+ self.name = name
+ self.struct_declarations = struct_declarations
+ self.union_declarations = union_declarations
+ self.version = version
+
+ @staticmethod
+ def from_dict(obj):
+ assert isinstance(obj, dict)
+ const_declarations = from_list(Const.from_dict, obj.get(u"const_declarations"))
+ declaration_order = from_list(from_str, obj.get(u"declaration_order"))
+ declarations = from_dict(DeclarationsMap, obj.get(u"declarations"))
+ enum_declarations = from_list(EnumDeclarationElement.from_dict, obj.get(u"enum_declarations"))
+ interface_declarations = from_list(Interface.from_dict, obj.get(u"interface_declarations"))
+ library_dependencies = from_list(Library.from_dict, obj.get(u"library_dependencies"))
+ name = from_str(obj.get(u"name"))
+ struct_declarations = from_list(Struct.from_dict, obj.get(u"struct_declarations"))
+ union_declarations = from_list(UnionDeclarationElement.from_dict, obj.get(u"union_declarations"))
+ version = from_str(obj.get(u"version"))
+ return Fidl(const_declarations, declaration_order, declarations, enum_declarations, interface_declarations, library_dependencies, name, struct_declarations, union_declarations, version)
+
+ def to_dict(self):
+ result = {}
+ result[u"const_declarations"] = from_list(lambda x: to_class(Const, x), self.const_declarations)
+ result[u"declaration_order"] = from_list(from_str, self.declaration_order)
+ result[u"declarations"] = from_dict(lambda x: to_enum(DeclarationsMap, x), self.declarations)
+ result[u"enum_declarations"] = from_list(lambda x: to_class(EnumDeclarationElement, x), self.enum_declarations)
+ result[u"interface_declarations"] = from_list(lambda x: to_class(Interface, x), self.interface_declarations)
+ result[u"library_dependencies"] = from_list(lambda x: to_class(Library, x), self.library_dependencies)
+ result[u"name"] = from_str(self.name)
+ result[u"struct_declarations"] = from_list(lambda x: to_class(Struct, x), self.struct_declarations)
+ result[u"union_declarations"] = from_list(lambda x: to_class(UnionDeclarationElement, x), self.union_declarations)
+ result[u"version"] = from_str(self.version)
+ return result
+
+
+def fidl_from_dict(s):
+ return Fidl.from_dict(s)
+
+
+def fidl_to_dict(x):
+ return to_class(Fidl, x)
+
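Every converter above follows the same quicktype shape: `from_dict()` asserts the raw JSON structure and `to_dict()` reverses it. A hedged round-trip sketch (the JSON snippet is invented for illustration, not taken from a real fidlc run):
```python
# Illustrative round trip through the converters above.
raw = {u'name': u'Doc', u'value': u'documentation comment'}
attr = Attribute.from_dict(raw)
assert attr.to_dict() == raw

# Loading a full library IR would look like this (path is hypothetical):
#   import json
#   with open('fidl.test.jstest.json') as f:
#       library = fidl_from_dict(json.load(f))
```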
diff --git a/chromium/build/fuchsia/fidlgen_js/gen.py b/chromium/build/fuchsia/fidlgen_js/gen.py
new file mode 100755
index 00000000000..484440e2d1c
--- /dev/null
+++ b/chromium/build/fuchsia/fidlgen_js/gen.py
@@ -0,0 +1,673 @@
+#!/usr/bin/env python
+
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import fidl
+import json
+
+
+class _CompoundIdentifier(object):
+
+ def __init__(self, library, name):
+ self.library = library
+ self.name = name
+
+
+def _ParseLibraryName(lib):
+ return lib.split('.')
+
+
+def _ParseCompoundIdentifier(ident):
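+  # Splits a FIDL compound identifier such as 'fidl.test.jstest/Simple'
+  # into library parts ['fidl', 'test', 'jstest'] and the name 'Simple'.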
+ parts = ident.split('/', 2)
+ raw_library = ''
+ raw_name = parts[0]
+ if len(parts) == 2:
+ raw_library, raw_name = parts
+ library = _ParseLibraryName(raw_library)
+ return _CompoundIdentifier(library, raw_name)
+
+
+def _ChangeIfReserved(name):
+ # TODO(crbug.com/883496): Remap any JS keywords.
+ return name
+
+
+def _CompileCompoundIdentifier(compound, ext=''):
+ result = _ChangeIfReserved(compound.name) + ext
+ return result
+
+
+def _CompileIdentifier(ident):
+ return _ChangeIfReserved(ident)
+
+
+def _GetUnderlyingPrimitiveType(t):
+ """Returns the underlying FIDL primitive type for a higher level type."""
+ if t.kind == fidl.TypeKind.PRIMITIVE:
+ return t.subtype
+ elif t.kind == fidl.TypeKind.STRING:
+ return 'string'
+ elif t.kind == fidl.TypeKind.IDENTIFIER:
+ # No underlying type is required because it will be implied by the type of
+    # the value that the identifier represents.
+ return None
+ else:
+ raise Exception(
+ 'expected primitive or identifier representing primitive underlying '
+ 'type, but got ' + str(t.kind))
+
+
+def _InlineSizeOfPrimitiveType(primitive_type):
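+  """Returns the wire-format inline size, in bytes, of a FIDL primitive."""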
+ return {
+ 'bool': 1,
+ 'float32': 4,
+ 'float64': 8,
+ 'int16': 2,
+ 'int32': 4,
+ 'int64': 8,
+ 'int8': 1,
+ 'uint16': 2,
+ 'uint32': 4,
+ 'uint64': 8,
+ 'uint8': 1,
+ }[primitive_type]
+
+
+def _JsTypeForPrimitiveType(t):
+ mapping = {
+ fidl.IntegerType.INT16: 'number',
+ fidl.IntegerType.INT32: 'number',
+ fidl.IntegerType.INT64: 'BigInt',
+ fidl.IntegerType.INT8: 'number',
+ fidl.IntegerType.UINT16: 'number',
+ fidl.IntegerType.UINT32: 'number',
+ fidl.IntegerType.UINT64: 'BigInt',
+ fidl.IntegerType.UINT8: 'number',
+ }
+ return mapping[t]
+
+
+def _BuildInlineSizeTable(fidl):
+ """Builds a mapping from type name to inline type size. These need to be
+ extracted beforehand because a vector<X> can be required during compilation
+ before seeing the compilation of X."""
+ result = {}
+ for enum in fidl.enum_declarations:
+ result[enum.name] = _InlineSizeOfPrimitiveType(enum.type.value)
+ for union in fidl.union_declarations:
+ result[union.name] = union.size
+ for struct in fidl.struct_declarations:
+ result[struct.name] = struct.size
+ return result
+
+
+class Compiler(object):
+
+ def __init__(self, fidl, output_file):
+ self.fidl = fidl
+ self.f = output_file
+ self.output_deferred_to_eof = ''
+ self.type_table_defined = set()
+ self.type_inline_size_by_name = _BuildInlineSizeTable(self.fidl)
+ # Used to hold the JS name for constants and enumerants. In particular,
+ # enums aren't scoped by name to their enum in the fidl json, but the JS
+ # bindings emit them as Enum.Something. So this maps from Something ->
+ # Enum.Something.
+ self.resolved_constant_name = {}
+
+ def Compile(self):
+ self._EmitHeader()
+ for c in self.fidl.const_declarations:
+ self._CompileConst(c)
+ for e in self.fidl.enum_declarations:
+ self._CompileEnum(e)
+ for u in self.fidl.union_declarations:
+ self._CompileUnion(u)
+ for s in self.fidl.struct_declarations:
+ self._CompileStruct(s)
+ for i in self.fidl.interface_declarations:
+ self._CompileInterface(i)
+
+ self.f.write(self.output_deferred_to_eof)
+
+ def _InlineSizeOfType(self, t):
+ if t.kind == fidl.TypeKind.PRIMITIVE:
+ return _InlineSizeOfPrimitiveType(t.subtype)
+ elif t.kind == fidl.TypeKind.STRING:
+ return 16
+ elif t.kind == fidl.TypeKind.IDENTIFIER:
+ size = self.type_inline_size_by_name.get(t.identifier)
+ if size is None:
+ raise Exception('expected ' + t.identifier +
+ ' to be in self.type_inline_size_by_name')
+ return size
+ elif t.kind == fidl.TypeKind.HANDLE:
+ return 4
+ else:
+ raise NotImplementedError(t.kind)
+
+ def _CompileConstant(self, val, primitive_type):
+ """primitive_type is the string representation of the underlying FIDL type
+ of the constant's value. Note that this is not a type object, but rather
+ the string name of a basic primitive type, e.g. 'int8' or 'uint64'."""
+ if val.kind == fidl.ConstantKind.IDENTIFIER:
+ js_name = self.resolved_constant_name.get(val.identifier)
+ if not js_name:
+        raise Exception('expected ' + val.identifier +
+ ' to be in self.resolved_constant_name')
+ return js_name
+ elif val.kind == fidl.ConstantKind.LITERAL:
+ lit_kind = val.literal.kind
+ if lit_kind == fidl.LiteralKind.STRING:
+ return json.dumps(val.literal.value)
+ elif lit_kind == fidl.LiteralKind.NUMERIC:
+ suffix = 'n' if primitive_type in ('int64', 'uint64') else ''
+ return val.literal.value + suffix
+ elif lit_kind == fidl.LiteralKind.TRUE:
+ return 'true'
+ elif lit_kind == fidl.LiteralKind.FALSE:
+ return 'false'
+ elif lit_kind == fidl.LiteralKind.DEFAULT:
+ return 'default'
+ else:
+ raise Exception('unexpected kind')
+
+ def _EmitHeader(self):
+ self.f.write('''// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+// WARNING: This file is machine generated by fidlgen_js.
+
+''')
+
+ def _CompileConst(self, const):
+ compound = _ParseCompoundIdentifier(const.name)
+ name = _CompileCompoundIdentifier(compound)
+ value = self._CompileConstant(const.value,
+ _GetUnderlyingPrimitiveType(const.type))
+ self.f.write('''/**
+ * @const
+ */
+const %(name)s = %(value)s;
+
+''' % {
+ 'name': name,
+ 'value': value
+ })
+ self.resolved_constant_name[const.name] = name
+
+ def _CompileEnum(self, enum):
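+    """Emits a JS enum object and records its members as resolved
+    constants."""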
+ compound = _ParseCompoundIdentifier(enum.name)
+ name = _CompileCompoundIdentifier(compound)
+ js_type = _JsTypeForPrimitiveType(enum.type)
+ data = {'js_type': js_type, 'type': enum.type.value, 'name': name}
+ self.f.write('''/**
+ * @enum {%(js_type)s}
+ */
+const %(name)s = {
+''' % data)
+ for member in enum.members:
+ # The 'type' of an enum isn't a real Type like most other places, but
+ # instead just a simple 'int8' or similar.
+ underlying_type = enum.type.value
+ self.f.write(
+ ''' %s: %s,\n''' %
+ (member.name, self._CompileConstant(member.value, underlying_type)))
+ fidl_constant_name = '.'.join(compound.library) + '/' + member.name
+ javascript_name = name + '.' + member.name
+ self.resolved_constant_name[fidl_constant_name] = javascript_name
+ self.f.write('};\n')
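+    # An enum is wire-compatible with its underlying integer type, so its
+    # type table simply aliases the primitive one.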
+ self.f.write('const _kTT_%(name)s = _kTT_%(type)s;\n\n' % data)
+
+ def _CompileUnion(self, union):
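+    """Emits a JS tagged-union class along with its enc/dec type tables."""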
+ compound = _ParseCompoundIdentifier(union.name)
+ name = _CompileCompoundIdentifier(compound)
+ member_names = []
+ enc_cases = []
+ dec_cases = []
+ for i, m in enumerate(union.members):
+ member_name = _ChangeIfReserved(m.name)
+ member_names.append(member_name)
+ member_type = self._CompileType(m.type)
+ enc_cases.append('''\
+ case %(index)s:
+ _kTT_%(member_type)s.enc(e, o + 4, v.%(member_name)s);
+ break;''' % {
+ 'index': i,
+ 'member_type': member_type,
+ 'member_name': member_name,
+ })
+ dec_cases.append('''\
+ case %(index)s:
+ result.set_%(member_name)s(_kTT_%(member_type)s.dec(d, o + 4));
+ break;''' % {
+ 'index': i,
+ 'member_type': member_type,
+ 'member_name': member_name,
+ })
+
+ self.f.write(
+ '''\
+const _kTT_%(name)s = {
+ enc: function(e, o, v) {
+ if (v.$tag === $fidl__kInvalidUnionTag) throw "invalid tag";
+ e.data.setUint32(o, v.$tag, $fidl__kLE);
+ switch (v.$tag) {
+%(enc_cases)s
+ }
+ },
+ dec: function(d, o) {
+ var tag = d.data.getUint32(o, $fidl__kLE);
+ var result = new %(name)s();
+ switch (tag) {
+%(dec_cases)s
+ default:
+ throw "invalid tag";
+ }
+ return result;
+ },
+};
+
+const _kTT_%(name)s_Nullable = {
+  enc: function(e, o, v) {
+    e.data.setUint32(o, v ? 0xffffffff : 0, $fidl__kLE);
+    e.data.setUint32(o + 4, v ? 0xffffffff : 0, $fidl__kLE);
+    if (v) {
+      var start = e.alloc(%(size)s);
+      _kTT_%(name)s.enc(e, start, v);
+    }
+  },
+ dec: function(d, o) {
+ if (d.data.getUint32(o, $fidl__kLE) === 0) {
+ return new %(name)s();
+ }
+ var pointer = d.data.getUint32(o + 4, $fidl__kLE);
+ var dataOffset = d.claimMemory(%(size)s);
+ return _kTT_%(name)s.dec(d, dataOffset);
+ },
+};
+
+/**
+ * @constructor
+ */
+function %(name)s() { this.reset(); }
+
+%(name)s.prototype.reset = function(i) {
+ this.$tag = (i === undefined) ? $fidl__kInvalidUnionTag : i;
+''' % {
+ 'name': name,
+ 'size': union.size,
+ 'enc_cases': '\n'.join(enc_cases),
+ 'dec_cases': '\n'.join(dec_cases),
+ })
+ for m in member_names:
+ self.f.write(' this.%s = null;\n' % m)
+    self.f.write('};\n\n')
+
+ for i, m in enumerate(member_names):
+ self.f.write('''\
+%(name)s.prototype.set_%(member_name)s = function(v) {
+ this.reset(%(index)s);
+ this.%(member_name)s = v;
+};
+
+%(name)s.prototype.is_%(member_name)s = function() {
+ return this.$tag === %(index)s;
+};
+
+''' % {
+ 'name': name,
+ 'member_name': m,
+ 'index': i,
+ })
+
+ def _CompileStruct(self, struct):
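+    """Emits a JS constructor and an enc/dec type table for a FIDL
+    struct."""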
+ compound = _ParseCompoundIdentifier(struct.name)
+ name = _CompileCompoundIdentifier(compound)
+ param_names = [_ChangeIfReserved(x.name) for x in struct.members]
+ # TODO(crbug.com/883496): @param and types.
+ self.f.write('''/**
+ * @constructor
+ * @struct
+ */
+function %(name)s(%(param_names)s) {
+''' % {
+ 'name': name,
+ 'param_names': ', '.join(param_names)
+ })
+ for member in struct.members:
+ member_name = _ChangeIfReserved(member.name)
+ value = '%(member_name)s'
+ if member.maybe_default_value:
+ underlying_type = _GetUnderlyingPrimitiveType(member.type)
+ value = (
+ '(%(member_name)s !== undefined) ? %(member_name)s : ' +
+ self._CompileConstant(member.maybe_default_value, underlying_type))
+ elif self.fidl.declarations.get(member.type.identifier) == \
+ fidl.DeclarationsMap.UNION:
+ union_compound = _ParseCompoundIdentifier(member.type.identifier)
+ union_name = _CompileCompoundIdentifier(union_compound)
+ value = ('(%(member_name)s !== undefined) ? %(member_name)s : ' + 'new '
+ + union_name + '()')
+ self.f.write((' this.%(member_name)s = ' + value + ';\n') %
+ {'member_name': member_name})
+ self.f.write('}\n\n')
+
+ self.f.write('''const _kTT_%(name)s = {
+ enc: function(e, o, v) {
+''' % {'name': name})
+
+ for member in struct.members:
+ element_ttname = self._CompileType(member.type)
+ self.f.write(
+ ' _kTT_%(element_ttname)s.enc('
+ 'e, o + %(offset)s, v.%(member_name)s);\n' % {
+ 'element_ttname': element_ttname,
+ 'offset': member.offset,
+ 'member_name': _ChangeIfReserved(member.name)
+ })
+
+ self.f.write(''' },
+ dec: function(d, o) {
+''')
+
+ for member in struct.members:
+ element_ttname = self._CompileType(member.type)
+ self.f.write(
+ ' var $temp_%(member_name)s = _kTT_%(element_ttname)s.dec('
+ 'd, o + %(offset)s);\n' % {
+ 'element_ttname': element_ttname,
+ 'offset': member.offset,
+ 'member_name': _ChangeIfReserved(member.name)
+ })
+ self.f.write(''' return new %(name)s(%(temp_names)s);
+ }
+};
+
+''' % {
+ 'name': name,
+ 'temp_names': ', '.join(['$temp_' + x for x in param_names])
+ })
+
+ def _CompileType(self, t):
+ """Ensures there's a type table for the given type, and returns the stem of
+ its name."""
+ if t.kind == fidl.TypeKind.PRIMITIVE:
+ return t.subtype
+ elif t.kind == fidl.TypeKind.STRING:
+ return 'String' + ('_Nullable' if t.nullable else '')
+ elif t.kind == fidl.TypeKind.IDENTIFIER:
+ compound = _ParseCompoundIdentifier(t.identifier)
+ name = _CompileCompoundIdentifier(compound)
+ return name + ('_Nullable' if t.nullable else '')
+ elif t.kind == fidl.TypeKind.HANDLE or t.kind == fidl.TypeKind.REQUEST:
+ return 'Handle'
+ elif t.kind == fidl.TypeKind.ARRAY:
+ element_ttname = self._CompileType(t.element_type)
+ ttname = 'ARR_%d_%s' % (t.element_count, element_ttname)
+ if ttname not in self.type_table_defined:
+ self.type_table_defined.add(ttname)
+ self.output_deferred_to_eof += ('''\
+const _kTT_%(ttname)s = {
+ enc: function(e, o, v) {
+ for (var i = 0; i < %(element_count)s; i++) {
+ _kTT_%(element_ttname)s.enc(e, o + (i * %(element_size)s), v[i]);
+ }
+ },
+ dec: function(d, o) {
+ var result = [];
+ for (var i = 0; i < %(element_count)s; i++) {
+ result.push(_kTT_%(element_ttname)s.dec(d, o + (i * %(element_size)s)));
+ }
+ return result;
+ },
+};
+
+''' % {
+ 'ttname': ttname,
+ 'element_ttname': element_ttname,
+ 'element_count': t.element_count,
+ 'element_size': self._InlineSizeOfType(t.element_type),
+ })
+ return ttname
+ elif t.kind == fidl.TypeKind.VECTOR:
+ element_ttname = self._CompileType(t.element_type)
+ ttname = ('VEC_' + ('Nullable_' if t.nullable else '') + element_ttname)
+ if t.nullable:
+ handle_null_enc = '''e.data.setUint32(o, 0, $fidl__kLE);
+ e.data.setUint32(o + 4, 0, $fidl__kLE);
+ e.data.setUint32(o + 8, 0, $fidl__kLE);
+ e.data.setUint32(o + 12, 0, $fidl__kLE);
+ return;
+'''
+ handle_null_dec = 'return null;'
+ else:
+ handle_null_enc = 'throw "non-null vector required";'
+ handle_null_dec = 'throw "non-null vector required";'
+
+ if ttname not in self.type_table_defined:
+ self.type_table_defined.add(ttname)
+ self.output_deferred_to_eof += ('''\
+const _kTT_%(ttname)s = {
+ enc: function(e, o, v) {
+ if (v === null || v === undefined) {
+ %(handle_null_enc)s
+ }
+ e.data.setUint32(o, v.length, $fidl__kLE);
+ e.data.setUint32(o + 4, 0, $fidl__kLE);
+ e.data.setUint32(o + 8, 0xffffffff, $fidl__kLE);
+ e.data.setUint32(o + 12, 0xffffffff, $fidl__kLE);
+ var start = e.alloc(v.length * %(element_size)s);
+ for (var i = 0; i < v.length; i++) {
+ _kTT_%(element_ttname)s.enc(e, start + (i * %(element_size)s), v[i]);
+ }
+ },
+ dec: function(d, o) {
+ var len = d.data.getUint32(o, $fidl__kLE);
+ var pointer = d.data.getUint32(o + 8, $fidl__kLE);
+ if (pointer === 0) {
+ %(handle_null_dec)s
+ }
+ var dataOffset = d.claimMemory(len * %(element_size)s);
+ var result = [];
+ for (var i = 0; i < len; i++) {
+ result.push(_kTT_%(element_ttname)s.dec(
+ d, dataOffset + (i * %(element_size)s)));
+ }
+ return result;
+ }
+};
+
+''' % {
+ 'ttname': ttname,
+ 'element_ttname': element_ttname,
+ 'element_size': self._InlineSizeOfType(t.element_type),
+ 'handle_null_enc': handle_null_enc,
+ 'handle_null_dec': handle_null_dec,
+ })
+ return ttname
+ else:
+ raise NotImplementedError(t.kind)
+
+ def _GenerateJsInterfaceForInterface(self, name, interface):
+ """Generates a JS @interface for the given FIDL interface."""
+ self.f.write('''/**
+ * @interface
+ */
+function %(name)s() {}
+
+''' % {'name': name})
+
+ # Define a JS interface part for the interface for typechecking.
+ for method in interface.methods:
+ method_name = _CompileIdentifier(method.name)
+ if method.has_request:
+ param_names = [_CompileIdentifier(x.name) for x in method.maybe_request]
+ if len(param_names):
+ self.f.write('/**\n')
+ # TODO(crbug.com/883496): Emit @param and @return type comments.
+ self.f.write(' */\n')
+ self.f.write(
+ '%(name)s.prototype.%(method_name)s = '
+ 'function(%(param_names)s) {};\n\n' % {
+ 'name': name,
+ 'method_name': method_name,
+ 'param_names': ', '.join(param_names)
+ })
+
+ # Emit message ordinals for later use.
+ for method in interface.methods:
+ method_name = _CompileIdentifier(method.name)
+ self.f.write(
+ 'const _k%(name)s_%(method_name)s_Ordinal = %(ordinal)s;\n' % {
+ 'name': name,
+ 'method_name': method_name,
+ 'ordinal': method.ordinal
+ })
+
+ self.f.write('\n')
+
+ def _GenerateJsProxyForInterface(self, name, interface):
+ """Generates the JS side implementation of a proxy class implementing the
+ given interface."""
+ proxy_name = name + 'Proxy'
+ self.f.write('''/**
+ * @constructor
+ * @implements %(name)s
+ */
+function %(proxy_name)s() {
+ this.channel = $ZX_HANDLE_INVALID;
+}
+
+%(proxy_name)s.prototype.$bind = function(channel) {
+ this.channel = channel;
+};
+
+%(proxy_name)s.prototype.$is_bound = function() {
+ return this.channel != $ZX_HANDLE_INVALID;
+};
+
+%(proxy_name)s.prototype.$request = function() {
+ if (this.$is_bound())
+ throw "Proxy already bound";
+ var pair = $ZxChannelCreate();
+ if (pair.status != $ZX_OK)
+ throw "ChannelPair creation failed";
+ this.channel = pair.first;
+ return pair.second;
+};
+
+%(proxy_name)s.prototype.$close = function() {
+ if (!this.$is_bound())
+ return;
+ var status = $zx_handle_close(this.channel);
+ if (status !== $ZX_OK) {
+ throw "close handle failed";
+ }
+ this.channel = $ZX_HANDLE_INVALID;
+};
+
+''' % {
+ 'name': name,
+ 'proxy_name': proxy_name
+ })
+ for method in interface.methods:
+ method_name = _CompileIdentifier(method.name)
+ if method.has_request:
+ type_tables = []
+ for param in method.maybe_request:
+ type_tables.append(self._CompileType(param.type))
+ param_names = [_CompileIdentifier(x.name) for x in method.maybe_request]
+ self.f.write(
+ '''\
+%(proxy_name)s.prototype.%(method_name)s = function(%(param_names)s) {
+ if (this.channel === $ZX_HANDLE_INVALID) {
+ throw "channel closed";
+ }
+ var $encoder = new $fidl_Encoder(_k%(name)s_%(method_name)s_Ordinal);
+ $encoder.alloc(%(size)s - $fidl_kMessageHeaderSize);
+''' % {
+ 'name': name,
+ 'proxy_name': proxy_name,
+ 'method_name': method_name,
+ 'param_names': ', '.join(param_names),
+ 'size': method.maybe_request_size
+ })
+
+ for param, ttname in zip(method.maybe_request, type_tables):
+ self.f.write(
+ '''\
+ _kTT_%(type_table)s.enc($encoder, %(offset)s, %(param_name)s);
+''' % {
+ 'type_table': ttname,
+ 'param_name': _CompileIdentifier(param.name),
+ 'offset': param.offset
+ })
+
+ self.f.write(''' var $writeResult = $ZxChannelWrite(this.channel,
+ $encoder.messageData(),
+ $encoder.messageHandles());
+ if ($writeResult !== $ZX_OK) {
+ throw "$ZxChannelWrite failed: " + $writeResult;
+ }
+''')
+
+ if method.has_response:
+ type_tables = []
+ for param in method.maybe_response:
+ type_tables.append(self._CompileType(param.type))
+ self.f.write('''
+ return $ZxObjectWaitOne(this.channel, $ZX_CHANNEL_READABLE, $ZX_TIME_INFINITE)
+ .then(() => new Promise(res => {
+ var $readResult = $ZxChannelRead(this.channel);
+ if ($readResult.status !== $ZX_OK) {
+ throw "channel read failed";
+ }
+
+ var $view = new DataView($readResult.data);
+
+ var $decoder = new $fidl_Decoder($view, $readResult.handles);
+ $decoder.claimMemory(%(size)s - $fidl_kMessageHeaderSize);
+''' % {'size': method.maybe_response_size})
+ for param, ttname in zip(method.maybe_response, type_tables):
+ self.f.write(
+ '''\
+ var %(param_name)s = _kTT_%(type_table)s.dec($decoder, %(offset)s);
+''' % {
+ 'type_table': ttname,
+ 'param_name': _CompileIdentifier(param.name),
+ 'offset': param.offset
+ })
+
+ self.f.write('''
+ res(%(args)s);
+ }));
+''' % {'args': ', '.join(x.name for x in method.maybe_response)})
+
+ self.f.write('''};
+
+''')
+
+ def _CompileInterface(self, interface):
+ compound = _ParseCompoundIdentifier(interface.name)
+ name = _CompileCompoundIdentifier(compound)
+ self._GenerateJsInterfaceForInterface(name, interface)
+ self._GenerateJsProxyForInterface(name, interface)
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('json')
+ parser.add_argument('--output', required=True)
+ args = parser.parse_args()
+
+ fidl_obj = fidl.fidl_from_dict(json.load(open(args.json, 'r')))
+ with open(args.output, 'w') as f:
+ c = Compiler(fidl_obj, f)
+ c.Compile()
+
+
+if __name__ == '__main__':
+ main()
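An illustrative invocation of the generator (the input is the JSON IR emitted by fidlc; both paths are hypothetical):
```python
# Illustrative only; assumes fidlc has already produced the JSON IR.
import subprocess

subprocess.check_call([
    'python', 'gen.py',
    'out/fidl/fidl.test.jstest.json',    # hypothetical fidlc JSON IR input
    '--output', 'out/gen/js/jstest.js',  # generated JS bindings
])
```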
diff --git a/chromium/build/fuchsia/fidlgen_js/runtime/fidl.mjs b/chromium/build/fuchsia/fidlgen_js/runtime/fidl.mjs
new file mode 100644
index 00000000000..722098b1431
--- /dev/null
+++ b/chromium/build/fuchsia/fidlgen_js/runtime/fidl.mjs
@@ -0,0 +1,270 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This is the JS runtime support library for code generated by fidlgen_js. It
+// mostly consists of helpers to facilitate encoding and decoding of FIDL
+// messages.
+
+const $fidl_kInitialBufferSize = 1024;
+
+const $fidl_kMessageHeaderSize = 16;
+const $fidl_kMessageTxidOffset = 0;
+const $fidl_kMessageOrdinalOffset = 12;
+
+const $fidl__kAlignment = 8;
+const $fidl__kAlignmentMask = 0x7;
+
+const $fidl__kLE = true;
+
+const $fidl__kUserspaceTxidMask = 0x7fffffff;
+const $fidl__kHandlePresent = 0xffffffff;
+const $fidl__kInvalidUnionTag = 0xffffffff;
+var $fidl__nextTxid = 1;
+
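+// Rounds |size| up to the next multiple of the 8-byte FIDL alignment,
+// e.g. $fidl__align(9) === 16 while $fidl__align(8) === 8.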
+function $fidl__align(size) {
+ return size + (($fidl__kAlignment - (size & $fidl__kAlignmentMask)) &
+ $fidl__kAlignmentMask);
+}
+
+/**
+ * @constructor
+ * @param {number} ordinal
+ */
+function $fidl_Encoder(ordinal) {
+ var buf = new ArrayBuffer($fidl_kInitialBufferSize);
+ this.data = new DataView(buf);
+ this.extent = 0;
+ this.handles = [];
+ this._encodeMessageHeader(ordinal);
+}
+
+/**
+ * @param {number} ordinal
+ */
+$fidl_Encoder.prototype._encodeMessageHeader = function(ordinal) {
+ this.alloc($fidl_kMessageHeaderSize);
+ var txid = $fidl__nextTxid++ & $fidl__kUserspaceTxidMask;
+ this.data.setUint32($fidl_kMessageTxidOffset, txid, $fidl__kLE);
+ this.data.setUint32($fidl_kMessageOrdinalOffset, ordinal, $fidl__kLE);
+};
+
+/**
+ * @param {number} size
+ */
+$fidl_Encoder.prototype.alloc = function(size) {
+ var offset = this.extent;
+ this._claimMemory($fidl__align(size));
+ return offset;
+};
+
+/**
+ * @param {number} claimSize
+ */
+$fidl_Encoder.prototype._claimMemory = function(claimSize) {
+ this.extent += claimSize;
+ if (this.extent > this.data.byteLength) {
+ var newSize = this.data.byteLength + claimSize;
+ newSize += newSize * 2;
+ this._grow(newSize);
+ }
+};
+
+/**
+ * @param {number} newSize
+ */
+$fidl_Encoder.prototype._grow = function(newSize) {
+ var newBuffer = new ArrayBuffer(newSize);
+ new Uint8Array(newBuffer).set(new Uint8Array(this.data.buffer));
+ this.data = new DataView(newBuffer);
+};
+
+/**
+ * @param {number} handle
+ */
+$fidl_Encoder.prototype.addHandle = function(handle) {
+ this.handles.push(handle);
+};
+
+$fidl_Encoder.prototype.messageData = function() {
+ return new DataView(this.data.buffer, 0, this.extent);
+};
+
+$fidl_Encoder.prototype.messageHandles = function() {
+ return this.handles;
+};
+
+
+/**
+ * @constructor
+ * @param {Array} data
+ * @param {Array} handles
+ */
+function $fidl_Decoder(data, handles) {
+ this.data = data;
+ this.handles = handles;
+ this.nextOffset = 0;
+ this.nextHandle = 0;
+ this.claimMemory($fidl_kMessageHeaderSize);
+}
+
+/**
+ * @param {number} size
+ */
+$fidl_Decoder.prototype.claimMemory = function(size) {
+ var result = this.nextOffset;
+ this.nextOffset = $fidl__align(this.nextOffset + size);
+ return result;
+}
+
+$fidl_Decoder.prototype.claimHandle = function() {
+ if (this.nextHandle >= this.handles.length)
+ throw "Attempt to claim more handles than are available";
+ return this.handles[this.nextHandle++];
+}
+
+
+// Type tables and encoding helpers for generated Proxy code.
+const _kTT_bool = {
+ enc: function(e, o, v) { e.data.setInt8(o, v ? 1 : 0); },
+ dec: function(d, o) { return d.data.getInt8(o) != 0; },
+};
+
+const _kTT_float32 = {
+ enc: function(e, o, v) { e.data.setFloat32(o, v, $fidl__kLE); },
+ dec: function(d, o) { return d.data.getFloat32(o, $fidl__kLE); },
+};
+
+const _kTT_float64 = {
+ enc: function(e, o, v) { e.data.setFloat64(o, v, $fidl__kLE); },
+ dec: function(d, o) { return d.data.getFloat64(o, $fidl__kLE); },
+};
+
+const _kTT_int8 = {
+ enc: function(e, o, v) { e.data.setInt8(o, v); },
+ dec: function(d, o) { return d.data.getInt8(o); },
+};
+
+const _kTT_int16 = {
+ enc: function(e, o, v) { e.data.setInt16(o, v, $fidl__kLE); },
+ dec: function(d, o) { return d.data.getInt16(o, $fidl__kLE); },
+};
+
+const _kTT_int32 = {
+ enc: function(e, o, v) { e.data.setUint32(o, v, $fidl__kLE); },
+ dec: function(d, o) { return d.data.getInt32(o, $fidl__kLE); },
+};
+
+const _kTT_int64 = {
+ enc: function(e, o, v) {
+ var bi = BigInt.asIntN(64, BigInt(v));
+ var x = Number(bi & 0xffffffffn);
+ var y = Number((bi >> 32n) & 0xffffffffn);
+ e.data.setInt32(o, x, $fidl__kLE);
+ e.data.setInt32(o + 4, y, $fidl__kLE);
+ },
+  dec: function(d, o) {
+    // Read the low word unsigned; the sign comes from the high word only.
+    var x = BigInt(d.data.getUint32(o, $fidl__kLE));
+    var y = BigInt(d.data.getInt32(o + 4, $fidl__kLE));
+    return BigInt.asIntN(64, x | (y << 32n));
+  },
+};
+
+const _kTT_uint8 = {
+ enc: function(e, o, v) { e.data.setUint8(o, v); },
+ dec: function(d, o) { return d.data.getUint8(o); },
+};
+
+const _kTT_uint16 = {
+ enc: function(e, o, v) { e.data.setUint16(o, v, $fidl__kLE); },
+ dec: function(d, o) { return d.data.getUint16(o, $fidl__kLE); },
+};
+
+const _kTT_uint32 = {
+ enc: function(e, o, v) { e.data.setUint32(o, v, $fidl__kLE); },
+ dec: function(d, o) { return d.data.getUint32(o, $fidl__kLE); },
+};
+
+const _kTT_uint64 = {
+ enc: function(e, o, v) {
+ var bi = BigInt.asUintN(64, BigInt(v));
+ var x = Number(bi & 0xffffffffn);
+ var y = Number((bi >> 32n) & 0xffffffffn);
+ e.data.setUint32(o, x, $fidl__kLE);
+ e.data.setUint32(o + 4, y, $fidl__kLE);
+ },
+ dec: function(d, o) {
+ var x = BigInt.asUintN(64, BigInt(d.data.getUint32(o, $fidl__kLE)));
+ var y = BigInt.asUintN(64, BigInt(d.data.getUint32(o + 4, $fidl__kLE)));
+ return x | (y << 32n);
+ },
+};
+
+const _kTT_Handle = {
+ enc: function(e, o, v) {
+ if (v === null || v === undefined) {
+ e.data.setUint32(o, 0, $fidl__kLE);
+ } else {
+ e.data.setUint32(o, $fidl__kHandlePresent, $fidl__kLE);
+ e.addHandle(v);
+ }
+ },
+ dec: function(d, o) {
+ var $present = d.data.getUint32(o, $fidl__kLE);
+ if ($present === 0) {
+ return 0;
+ } else {
+ if ($present !== $fidl__kHandlePresent)
+ throw "Expected UINT32_MAX to indicate handle presence";
+ return d.claimHandle();
+ }
+ },
+};
+
+const _kTT_String = {
+ enc: function(e, o, v) {
+ if (v === null || v === undefined) throw "non-null string required";
+    // Both size and data are uint64, but that's awkward in JS, so for now
+    // only 32-bit lengths are supported. The maximum size of a FIDL message
+    // fits well within 32 bits in any case.
+ var asUtf8 = $FidlJsStrToUtf8Array(v);
+ e.data.setUint32(o, asUtf8.length, $fidl__kLE);
+ e.data.setUint32(o + 4, 0, $fidl__kLE);
+ e.data.setUint32(o + 8, 0xffffffff, $fidl__kLE);
+ e.data.setUint32(o + 12, 0xffffffff, $fidl__kLE);
+ var body = e.alloc(asUtf8.length);
+ for (var i = 0; i < asUtf8.length; i++) {
+      e.data.setUint8(body + i, asUtf8[i]);
+ }
+ },
+ dec: function(d, o) {
+ var len = d.data.getUint32(o, $fidl__kLE);
+ var pointer = d.data.getUint32(o + 8, $fidl__kLE);
+ if (pointer === 0) throw "non-null string required";
+ var dataOffset = d.claimMemory(len);
+ return $FidlJsUtf8ArrayToStr(new DataView(d.data.buffer, dataOffset, len));
+ }
+};
+
+const _kTT_String_Nullable = {
+ enc: function(e, o, v) {
+ if (v === null || v === undefined) {
+ e.data.setUint32(o, 0, $fidl__kLE);
+ e.data.setUint32(o + 4, 0, $fidl__kLE);
+ e.data.setUint32(o + 8, 0, $fidl__kLE);
+ e.data.setUint32(o + 12, 0, $fidl__kLE);
+ } else {
+ _kTT_String.enc(e, o, v);
+ }
+ },
+  dec: function(d, o) {
+    var pointer = d.data.getUint32(o + 8, $fidl__kLE);
+    if (pointer === 0) {
+      return null;
+    }
+    return _kTT_String.dec(d, o);
+  }
+};
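The type tables above are consumed by generated proxy code. As an
illustrative sketch only (not generated output), a round trip through the
helpers might look as follows; it assumes the $fidl_Encoder constructor
takes the method ordinal, and kExampleOrdinal is a made-up value.

    // Sketch: encode one uint32 after the message header, then decode it.
    var kExampleOrdinal = 0x80001234;  // hypothetical method ordinal
    var e = new $fidl_Encoder(kExampleOrdinal);
    var off = e.alloc(4);              // first field slot after the header
    _kTT_uint32.enc(e, off, 42);
    var d = new $fidl_Decoder(e.messageData(), e.messageHandles());
    var decoded = _kTT_uint32.dec(d, d.claimMemory(4));  // 42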
diff --git a/chromium/build/fuchsia/fidlgen_js/runtime/zircon.cc b/chromium/build/fuchsia/fidlgen_js/runtime/zircon.cc
new file mode 100644
index 00000000000..3ef2e97fa29
--- /dev/null
+++ b/chromium/build/fuchsia/fidlgen_js/runtime/zircon.cc
@@ -0,0 +1,438 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "build/fuchsia/fidlgen_js/runtime/zircon.h"
+
+#include <lib/async/default.h>
+#include <lib/async/wait.h>
+#include <lib/zx/channel.h>
+#include <zircon/errors.h>
+#include <zircon/syscalls.h>
+#include <zircon/types.h>
+
+#include "base/bind.h"
+#include "base/threading/thread_checker.h"
+#include "gin/arguments.h"
+#include "gin/array_buffer.h"
+#include "gin/converter.h"
+#include "gin/data_object_builder.h"
+#include "gin/function_template.h"
+#include "gin/public/gin_embedders.h"
+
+namespace {
+
+fidljs::WaitSet& GetWaitsForIsolate(v8::Isolate* isolate) {
+ return *static_cast<fidljs::WaitSet*>(
+ isolate->GetData(gin::kEmbedderFuchsia));
+}
+
+} // namespace
+
+namespace fidljs {
+
+class WaitPromiseImpl : public async_wait_t {
+ public:
+ WaitPromiseImpl(v8::Isolate* isolate,
+ v8::Local<v8::Context> context,
+ v8::Local<v8::Promise::Resolver> resolver,
+ zx_handle_t handle,
+ zx_signals_t signals)
+ : async_wait_t({ASYNC_STATE_INIT, &WaitPromiseImpl::StaticOnSignaled,
+ handle, signals}),
+ isolate_(isolate),
+ wait_state_(WaitState::kCreated),
+ failed_start_status_(ZX_OK) {
+ context_.Reset(isolate_, context);
+ resolver_.Reset(isolate_, resolver);
+ }
+
+ ~WaitPromiseImpl() {
+ DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+
+ switch (wait_state_) {
+ case WaitState::kCreated:
+ // The wait never started, so reject the promise (but don't attempt to
+ // cancel the wait).
+ DCHECK_NE(failed_start_status_, ZX_OK);
+ RejectPromise(failed_start_status_, 0);
+ break;
+
+ case WaitState::kStarted:
+ // The wait was started, but has not yet completed. Cancel the wait and
+ // reject the promise. The object is being destructed here because it's
+ // been removed from the set of waits attached to the isolate, so
+ // we need not remove it.
+ CHECK_EQ(async_cancel_wait(async_get_default_dispatcher(), this),
+ ZX_OK);
+ RejectPromise(ZX_ERR_CANCELED, 0);
+ break;
+
+ case WaitState::kCompleted:
+ // The callback has already been called and so the promise has been
+ // resolved or rejected, and the wait has been removed from the
+ // dispatcher, so there's nothing to do.
+ break;
+ }
+ }
+
+ bool BeginWait() {
+ DCHECK_EQ(wait_state_, WaitState::kCreated);
+ zx_status_t status = async_begin_wait(async_get_default_dispatcher(), this);
+ if (status == ZX_OK) {
+ wait_state_ = WaitState::kStarted;
+ } else {
+ failed_start_status_ = status;
+ }
+ return status == ZX_OK;
+ }
+
+ private:
+ static void StaticOnSignaled(async_dispatcher_t* dispatcher,
+ async_wait_t* wait,
+ zx_status_t status,
+ const zx_packet_signal_t* signal) {
+ auto* self = static_cast<WaitPromiseImpl*>(wait);
+ self->OnSignaled(status, signal);
+ }
+
+ void OnSignaled(zx_status_t status, const zx_packet_signal_t* signal) {
+ DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+ DCHECK_EQ(wait_state_, WaitState::kStarted);
+ DCHECK_NE(status, ZX_ERR_CANCELED)
+ << "wait should have been canceled before shutdown";
+
+ wait_state_ = WaitState::kCompleted;
+
+ if (status == ZX_OK &&
+ (signal->observed & signal->trigger) == signal->trigger) {
+ ResolvePromise(signal->observed);
+ } else {
+ RejectPromise(status, signal->observed);
+ }
+
+ GetWaitsForIsolate(isolate_).erase(this);
+ // |this| has been deleted.
+ }
+
+ void ResolvePromise(zx_signals_t observed) {
+ v8::Local<v8::Promise::Resolver> resolver(resolver_.Get(isolate_));
+ v8::Local<v8::Context> context(context_.Get(isolate_));
+ v8::Local<v8::Object> value = gin::DataObjectBuilder(isolate_)
+ .Set("status", ZX_OK)
+ .Set("observed", observed)
+ .Build();
+ resolver->Resolve(context, value).ToChecked();
+ }
+
+ void RejectPromise(zx_status_t status, zx_signals_t observed) {
+ v8::Local<v8::Promise::Resolver> resolver(resolver_.Get(isolate_));
+ v8::Local<v8::Context> context(context_.Get(isolate_));
+ v8::Local<v8::Object> value = gin::DataObjectBuilder(isolate_)
+ .Set("status", status)
+ .Set("observed", observed)
+ .Build();
+ resolver->Reject(context, value).ToChecked();
+ }
+
+ v8::Isolate* isolate_;
+ v8::Global<v8::Context> context_;
+ v8::Global<v8::Promise::Resolver> resolver_;
+ enum class WaitState {
+ kCreated,
+ kStarted,
+ kCompleted,
+ } wait_state_;
+ zx_status_t failed_start_status_;
+
+ THREAD_CHECKER(thread_checker_);
+
+ DISALLOW_COPY_AND_ASSIGN(WaitPromiseImpl);
+};
+
+} // namespace fidljs
+
+namespace {
+
+v8::Local<v8::Promise> ZxObjectWaitOne(gin::Arguments* args) {
+ zx_handle_t handle;
+ if (!args->GetNext(&handle)) {
+ args->ThrowError();
+ return v8::Local<v8::Promise>();
+ }
+
+ zx_signals_t signals;
+ if (!args->GetNext(&signals)) {
+ args->ThrowError();
+ return v8::Local<v8::Promise>();
+ }
+
+ v8::MaybeLocal<v8::Promise::Resolver> maybe_resolver =
+ v8::Promise::Resolver::New(args->GetHolderCreationContext());
+ v8::Local<v8::Promise::Resolver> resolver;
+ if (maybe_resolver.ToLocal(&resolver)) {
+ auto wait = std::make_unique<fidljs::WaitPromiseImpl>(
+ args->isolate(), args->GetHolderCreationContext(), resolver, handle,
+ signals);
+ if (wait->BeginWait()) {
+      // The wait is always notified asynchronously, so it's OK to defer
+      // moving |wait| into the set of active waits until BeginWait() has
+      // succeeded.
+ GetWaitsForIsolate(args->isolate()).insert(std::move(wait));
+ }
+
+ // If BeginWait() fails, then |wait| will be deleted here, causing the
+ // returned promise to be rejected.
+ return resolver->GetPromise();
+ }
+
+ return v8::Local<v8::Promise>();
+}
+
+v8::Local<v8::Value> ZxChannelCreate(gin::Arguments* args) {
+ zx_handle_t channel0, channel1;
+ zx_status_t status = zx_channel_create(0, &channel0, &channel1);
+ if (status != ZX_OK) {
+ return gin::DataObjectBuilder(args->isolate())
+ .Set("status", status)
+ .Build();
+ }
+
+ return gin::DataObjectBuilder(args->isolate())
+ .Set("status", status)
+ .Set("first", channel0)
+ .Set("second", channel1)
+ .Build();
+}
+
+zx_status_t ZxChannelWrite(gin::Arguments* args) {
+ zx_handle_t handle;
+ if (!args->GetNext(&handle)) {
+ args->ThrowError();
+ return ZX_ERR_INVALID_ARGS;
+ }
+
+ gin::ArrayBufferView data;
+ if (!args->GetNext(&data)) {
+ args->ThrowError();
+ return ZX_ERR_INVALID_ARGS;
+ }
+
+ std::vector<zx_handle_t> handles;
+ if (!args->GetNext(&handles)) {
+ args->ThrowError();
+ return ZX_ERR_INVALID_ARGS;
+ }
+
+ zx_status_t status =
+ zx_channel_write(handle, 0, data.bytes(), data.num_bytes(),
+ handles.data(), handles.size());
+ return status;
+}
+
+v8::Local<v8::Object> ZxChannelRead(gin::Arguments* args) {
+ zx_handle_t handle;
+ if (!args->GetNext(&handle)) {
+ args->ThrowError();
+ return gin::DataObjectBuilder(args->isolate())
+ .Set("status", ZX_ERR_INVALID_ARGS)
+ .Build();
+ }
+ zx::unowned_channel ch(handle);
+
+ uint32_t data_size;
+ uint32_t num_handles;
+ zx_status_t status =
+ ch->read(0, nullptr, 0, &data_size, nullptr, 0, &num_handles);
+ DCHECK_EQ(status, ZX_ERR_BUFFER_TOO_SMALL);
+
+ std::vector<zx_handle_t> handles;
+ handles.resize(num_handles);
+
+ v8::Local<v8::ArrayBuffer> buf =
+ v8::ArrayBuffer::New(args->isolate(), data_size);
+ uint32_t actual_bytes, actual_handles;
+ status = ch->read(0, buf->GetContents().Data(), data_size, &actual_bytes,
+ handles.data(), handles.size(), &actual_handles);
+ DCHECK_EQ(actual_bytes, data_size);
+ DCHECK_EQ(actual_handles, num_handles);
+
+ if (status != ZX_OK) {
+ return gin::DataObjectBuilder(args->isolate())
+ .Set("status", status)
+ .Build();
+ }
+
+ return gin::DataObjectBuilder(args->isolate())
+ .Set("status", status)
+ .Set("data", buf)
+ .Set("handles", handles)
+ .Build();
+}
+
+v8::Local<v8::Value> StrToUtf8Array(gin::Arguments* args) {
+ std::string str;
+  // GetNext() converts the string from UCS-2 to UTF-8, so all that remains
+  // is to repackage the UTF-8 bytes as an array and return them.
+ if (!args->GetNext(&str)) {
+ args->ThrowError();
+ return v8::Local<v8::Object>();
+ }
+
+ // TODO(crbug.com/883496): Not sure how to make a Uint8Array to return here
+ // which would be a bit more efficient.
+ std::vector<int> data;
+ std::copy(str.begin(), str.end(), std::back_inserter(data));
+ return gin::ConvertToV8(args->isolate(), data);
+}
+
+v8::Local<v8::Value> Utf8ArrayToStr(gin::Arguments* args) {
+ gin::ArrayBufferView data;
+ if (!args->GetNext(&data)) {
+ args->ThrowError();
+ return v8::Local<v8::Value>();
+ }
+
+ // Get the UTF-8 out into a string, and then rely on ConvertToV8 to convert
+ // that to a UCS-2 string.
+ return gin::StringToV8(
+ args->isolate(), base::StringPiece(static_cast<const char*>(data.bytes()),
+ data.num_bytes()));
+}
+
+} // namespace
+
+namespace fidljs {
+
+ZxBindings::ZxBindings(v8::Isolate* isolate, v8::Local<v8::Object> global)
+ : isolate_(isolate), wait_set_(std::make_unique<WaitSet>()) {
+ DCHECK_EQ(isolate->GetData(gin::kEmbedderFuchsia), nullptr);
+ isolate->SetData(gin::kEmbedderFuchsia, wait_set_.get());
+
+#define SET_CONSTANT(k) \
+ global->Set(gin::StringToSymbol(isolate, "$" #k), \
+ gin::ConvertToV8(isolate, k))
+
+ // zx_status_t.
+ SET_CONSTANT(ZX_OK);
+ SET_CONSTANT(ZX_ERR_INTERNAL);
+ SET_CONSTANT(ZX_ERR_NOT_SUPPORTED);
+ SET_CONSTANT(ZX_ERR_NO_RESOURCES);
+ SET_CONSTANT(ZX_ERR_NO_MEMORY);
+ SET_CONSTANT(ZX_ERR_INTERNAL_INTR_RETRY);
+ SET_CONSTANT(ZX_ERR_INVALID_ARGS);
+ SET_CONSTANT(ZX_ERR_BAD_HANDLE);
+ SET_CONSTANT(ZX_ERR_WRONG_TYPE);
+ SET_CONSTANT(ZX_ERR_BAD_SYSCALL);
+ SET_CONSTANT(ZX_ERR_OUT_OF_RANGE);
+ SET_CONSTANT(ZX_ERR_BUFFER_TOO_SMALL);
+ SET_CONSTANT(ZX_ERR_BAD_STATE);
+ SET_CONSTANT(ZX_ERR_TIMED_OUT);
+ SET_CONSTANT(ZX_ERR_SHOULD_WAIT);
+ SET_CONSTANT(ZX_ERR_CANCELED);
+ SET_CONSTANT(ZX_ERR_PEER_CLOSED);
+ SET_CONSTANT(ZX_ERR_NOT_FOUND);
+ SET_CONSTANT(ZX_ERR_ALREADY_EXISTS);
+ SET_CONSTANT(ZX_ERR_ALREADY_BOUND);
+ SET_CONSTANT(ZX_ERR_UNAVAILABLE);
+ SET_CONSTANT(ZX_ERR_ACCESS_DENIED);
+ SET_CONSTANT(ZX_ERR_IO);
+ SET_CONSTANT(ZX_ERR_IO_REFUSED);
+ SET_CONSTANT(ZX_ERR_IO_DATA_INTEGRITY);
+ SET_CONSTANT(ZX_ERR_IO_DATA_LOSS);
+ SET_CONSTANT(ZX_ERR_IO_NOT_PRESENT);
+ SET_CONSTANT(ZX_ERR_IO_OVERRUN);
+ SET_CONSTANT(ZX_ERR_IO_MISSED_DEADLINE);
+ SET_CONSTANT(ZX_ERR_IO_INVALID);
+ SET_CONSTANT(ZX_ERR_BAD_PATH);
+ SET_CONSTANT(ZX_ERR_NOT_DIR);
+ SET_CONSTANT(ZX_ERR_NOT_FILE);
+ SET_CONSTANT(ZX_ERR_FILE_BIG);
+ SET_CONSTANT(ZX_ERR_NO_SPACE);
+ SET_CONSTANT(ZX_ERR_NOT_EMPTY);
+ SET_CONSTANT(ZX_ERR_STOP);
+ SET_CONSTANT(ZX_ERR_NEXT);
+ SET_CONSTANT(ZX_ERR_ASYNC);
+ SET_CONSTANT(ZX_ERR_PROTOCOL_NOT_SUPPORTED);
+ SET_CONSTANT(ZX_ERR_ADDRESS_UNREACHABLE);
+ SET_CONSTANT(ZX_ERR_ADDRESS_IN_USE);
+ SET_CONSTANT(ZX_ERR_NOT_CONNECTED);
+ SET_CONSTANT(ZX_ERR_CONNECTION_REFUSED);
+ SET_CONSTANT(ZX_ERR_CONNECTION_RESET);
+ SET_CONSTANT(ZX_ERR_CONNECTION_ABORTED);
+
+ v8::Local<v8::Context> context = isolate->GetCurrentContext();
+
+ // Handle APIs.
+ global
+ ->Set(context, gin::StringToSymbol(isolate, "$ZxObjectWaitOne"),
+ gin::CreateFunctionTemplate(isolate,
+ base::BindRepeating(ZxObjectWaitOne))
+ ->GetFunction(context)
+ .ToLocalChecked())
+ .ToChecked();
+ global
+ ->Set(context, gin::StringToSymbol(isolate, "$zx_handle_close"),
+ gin::CreateFunctionTemplate(isolate,
+ base::BindRepeating(zx_handle_close))
+ ->GetFunction(context)
+ .ToLocalChecked())
+ .ToChecked();
+ SET_CONSTANT(ZX_HANDLE_INVALID);
+ SET_CONSTANT(ZX_TIME_INFINITE);
+
+ // Channel APIs.
+ global
+ ->Set(context, gin::StringToSymbol(isolate, "$ZxChannelCreate"),
+ gin::CreateFunctionTemplate(isolate,
+ base::BindRepeating(&ZxChannelCreate))
+ ->GetFunction(context)
+ .ToLocalChecked())
+ .ToChecked();
+ global
+ ->Set(context, gin::StringToSymbol(isolate, "$ZxChannelWrite"),
+ gin::CreateFunctionTemplate(isolate,
+ base::BindRepeating(&ZxChannelWrite))
+ ->GetFunction(context)
+ .ToLocalChecked())
+ .ToChecked();
+ global
+ ->Set(context, gin::StringToSymbol(isolate, "$ZxChannelRead"),
+ gin::CreateFunctionTemplate(isolate,
+ base::BindRepeating(&ZxChannelRead))
+ ->GetFunction(context)
+ .ToLocalChecked())
+ .ToChecked();
+ SET_CONSTANT(ZX_CHANNEL_READABLE);
+ SET_CONSTANT(ZX_CHANNEL_WRITABLE);
+ SET_CONSTANT(ZX_CHANNEL_PEER_CLOSED);
+ SET_CONSTANT(ZX_CHANNEL_READ_MAY_DISCARD);
+ SET_CONSTANT(ZX_CHANNEL_MAX_MSG_BYTES);
+ SET_CONSTANT(ZX_CHANNEL_MAX_MSG_HANDLES);
+
+  // Utilities to ease string handling: convert between UCS-2 (JS) and UTF-8
+  // (FIDL).
+ global
+ ->Set(context, gin::StringToSymbol(isolate, "$FidlJsStrToUtf8Array"),
+ gin::CreateFunctionTemplate(isolate,
+ base::BindRepeating(&StrToUtf8Array))
+ ->GetFunction(context)
+ .ToLocalChecked())
+ .ToChecked();
+ global
+ ->Set(context, gin::StringToSymbol(isolate, "$FidlJsUtf8ArrayToStr"),
+ gin::CreateFunctionTemplate(isolate,
+ base::BindRepeating(&Utf8ArrayToStr))
+ ->GetFunction(context)
+ .ToLocalChecked())
+ .ToChecked();
+
+#undef SET_CONSTANT
+}
+
+ZxBindings::~ZxBindings() {
+ wait_set_->clear();
+ isolate_->SetData(gin::kEmbedderFuchsia, nullptr);
+}
+
+} // namespace fidljs
diff --git a/chromium/build/fuchsia/fidlgen_js/runtime/zircon.h b/chromium/build/fuchsia/fidlgen_js/runtime/zircon.h
new file mode 100644
index 00000000000..b54d35495c1
--- /dev/null
+++ b/chromium/build/fuchsia/fidlgen_js/runtime/zircon.h
@@ -0,0 +1,58 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef BUILD_FUCHSIA_FIDLGEN_JS_RUNTIME_ZIRCON_H_
+#define BUILD_FUCHSIA_FIDLGEN_JS_RUNTIME_ZIRCON_H_
+
+#include <memory>
+
+#include "base/containers/flat_set.h"
+#include "base/containers/unique_ptr_adapters.h"
+#include "base/macros.h"
+#include "v8/include/v8.h"
+
+namespace fidljs {
+
+class WaitPromiseImpl;
+
+// A WaitSet is associated with each Isolate and represents all outstanding
+// waits that are queued on the dispatcher.
+//
+// If the wait completes normally, the contained promise is resolved, the
+// WaitPromiseImpl is marked as completed, and then deleted (by removing it from
+// the pending set).
+//
+// If the caller shuts down with outstanding waits pending, the asynchronous
+// waits are canceled by clearing the set (which deletes all the
+// WaitPromiseImpls). If a WaitPromiseImpl has not completed when it is
+// destroyed, it cancels the outstanding wait in its destructor.
+//
+// WaitPromiseImpl is responsible for resolving or rejecting promises. If the
+// object was created but the wait never started, it will not have been added
+// to the wait set, and so it rejects the promise immediately on destruction.
+// Otherwise, the promise is resolved or rejected when the asynchronous wait
+// is signaled or canceled.
+using WaitSet =
+ base::flat_set<std::unique_ptr<WaitPromiseImpl>, base::UniquePtrComparator>;
+
+class ZxBindings {
+ public:
+  // Adds Zircon API bindings to |global|, for use by JavaScript callers.
+ ZxBindings(v8::Isolate* isolate, v8::Local<v8::Object> global);
+
+  // Cleans up the storage that the bindings attached to the isolate, and
+  // cancels any pending asynchronous waits. It is important that this be
+  // done before the v8 context is torn down.
+ ~ZxBindings();
+
+ private:
+ v8::Isolate* const isolate_;
+ std::unique_ptr<WaitSet> wait_set_;
+
+ DISALLOW_COPY_AND_ASSIGN(ZxBindings);
+};
+
+} // namespace fidljs
+
+#endif // BUILD_FUCHSIA_FIDLGEN_JS_RUNTIME_ZIRCON_H_
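For a sense of how these bindings are consumed, the sketch below exercises
the channel and wait APIs from script. It is illustrative only; the
$-prefixed names are the ones ZxBindings installs on the global object.

    // Sketch: create a channel pair, write to one end, await the other.
    var pair = $ZxChannelCreate();
    if (pair.status === $ZX_OK) {
      $ZxChannelWrite(pair.first, new Uint8Array([1, 2, 3]), []);
      $ZxObjectWaitOne(pair.second, $ZX_CHANNEL_READABLE)
          .then(function(result) {
            var msg = $ZxChannelRead(pair.second);  // {status, data, handles}
            $zx_handle_close(pair.first);
            $zx_handle_close(pair.second);
          });
    }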
diff --git a/chromium/build/fuchsia/fidlgen_js/third_party/__init__.py b/chromium/build/fuchsia/fidlgen_js/third_party/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
--- /dev/null
+++ b/chromium/build/fuchsia/fidlgen_js/third_party/__init__.py
diff --git a/chromium/build/fuchsia/fidlgen_js/third_party/enum34/LICENSE b/chromium/build/fuchsia/fidlgen_js/third_party/enum34/LICENSE
new file mode 100644
index 00000000000..9003b8850e7
--- /dev/null
+++ b/chromium/build/fuchsia/fidlgen_js/third_party/enum34/LICENSE
@@ -0,0 +1,32 @@
+Copyright (c) 2013, Ethan Furman.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+
+ Redistributions of source code must retain the above
+ copyright notice, this list of conditions and the
+ following disclaimer.
+
+ Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials
+ provided with the distribution.
+
+ Neither the name Ethan Furman nor the names of any
+ contributors may be used to endorse or promote products
+ derived from this software without specific prior written
+ permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGE.
diff --git a/chromium/build/fuchsia/fidlgen_js/third_party/enum34/README.chromium b/chromium/build/fuchsia/fidlgen_js/third_party/enum34/README.chromium
new file mode 100644
index 00000000000..4d0ef07c43e
--- /dev/null
+++ b/chromium/build/fuchsia/fidlgen_js/third_party/enum34/README.chromium
@@ -0,0 +1,15 @@
+Name: enum34
+Short Name: enum34
+URL: https://bitbucket.org/stoneleaf/enum34
+License: BSD
+License File: LICENSE
+Revision: f24487b
+Security Critical: no
+
+
+Description:
+
+'Enum' backported from Python 3.4 to earlier Python versions. Only LICENSE and
+__init__.py are taken; other packaging files, documentation, etc. have been
+removed.
+
+Only used at build time.
diff --git a/chromium/build/fuchsia/fidlgen_js/third_party/enum34/__init__.py b/chromium/build/fuchsia/fidlgen_js/third_party/enum34/__init__.py
new file mode 100644
index 00000000000..d6ffb3a40fe
--- /dev/null
+++ b/chromium/build/fuchsia/fidlgen_js/third_party/enum34/__init__.py
@@ -0,0 +1,837 @@
+"""Python Enumerations"""
+
+import sys as _sys
+
+__all__ = ['Enum', 'IntEnum', 'unique']
+
+version = 1, 1, 6
+
+pyver = float('%s.%s' % _sys.version_info[:2])
+
+try:
+ any
+except NameError:
+ def any(iterable):
+ for element in iterable:
+ if element:
+ return True
+ return False
+
+try:
+ from collections import OrderedDict
+except ImportError:
+ OrderedDict = None
+
+try:
+ basestring
+except NameError:
+    # In Python 2, basestring is the ancestor of both str and unicode;
+    # in Python 3 it's just str, but was missing in 3.1
+ basestring = str
+
+try:
+ unicode
+except NameError:
+ # In Python 3 unicode no longer exists (it's just str)
+ unicode = str
+
+class _RouteClassAttributeToGetattr(object):
+ """Route attribute access on a class to __getattr__.
+
+ This is a descriptor, used to define attributes that act differently when
+ accessed through an instance and through a class. Instance access remains
+ normal, but access to an attribute through a class will be routed to the
+ class's __getattr__ method; this is done by raising AttributeError.
+
+ """
+ def __init__(self, fget=None):
+ self.fget = fget
+
+ def __get__(self, instance, ownerclass=None):
+ if instance is None:
+ raise AttributeError()
+ return self.fget(instance)
+
+ def __set__(self, instance, value):
+ raise AttributeError("can't set attribute")
+
+ def __delete__(self, instance):
+ raise AttributeError("can't delete attribute")
+
+
+def _is_descriptor(obj):
+ """Returns True if obj is a descriptor, False otherwise."""
+ return (
+ hasattr(obj, '__get__') or
+ hasattr(obj, '__set__') or
+ hasattr(obj, '__delete__'))
+
+
+def _is_dunder(name):
+ """Returns True if a __dunder__ name, False otherwise."""
+ return (name[:2] == name[-2:] == '__' and
+ name[2:3] != '_' and
+ name[-3:-2] != '_' and
+ len(name) > 4)
+
+
+def _is_sunder(name):
+ """Returns True if a _sunder_ name, False otherwise."""
+ return (name[0] == name[-1] == '_' and
+ name[1:2] != '_' and
+ name[-2:-1] != '_' and
+ len(name) > 2)
+
+
+def _make_class_unpicklable(cls):
+ """Make the given class un-picklable."""
+ def _break_on_call_reduce(self, protocol=None):
+ raise TypeError('%r cannot be pickled' % self)
+ cls.__reduce_ex__ = _break_on_call_reduce
+ cls.__module__ = '<unknown>'
+
+
+class _EnumDict(dict):
+ """Track enum member order and ensure member names are not reused.
+
+ EnumMeta will use the names found in self._member_names as the
+ enumeration member names.
+
+ """
+ def __init__(self):
+ super(_EnumDict, self).__init__()
+ self._member_names = []
+
+ def __setitem__(self, key, value):
+ """Changes anything not dundered or not a descriptor.
+
+ If a descriptor is added with the same name as an enum member, the name
+ is removed from _member_names (this may leave a hole in the numerical
+ sequence of values).
+
+ If an enum member name is used twice, an error is raised; duplicate
+ values are not checked for.
+
+ Single underscore (sunder) names are reserved.
+
+        Note: in 3.x __order__ is simply discarded as an unnecessary
+        leftover from 2.x
+
+ """
+ if pyver >= 3.0 and key in ('_order_', '__order__'):
+ return
+ elif key == '__order__':
+ key = '_order_'
+ if _is_sunder(key):
+ if key != '_order_':
+ raise ValueError('_names_ are reserved for future Enum use')
+ elif _is_dunder(key):
+ pass
+ elif key in self._member_names:
+ # descriptor overwriting an enum?
+ raise TypeError('Attempted to reuse key: %r' % key)
+ elif not _is_descriptor(value):
+ if key in self:
+ # enum overwriting a descriptor?
+ raise TypeError('Key already defined as: %r' % self[key])
+ self._member_names.append(key)
+ super(_EnumDict, self).__setitem__(key, value)
+
+
+# Dummy value for Enum, as EnumMeta explicitly checks for it; but of course,
+# until EnumMeta finishes running the first time, the Enum class doesn't
+# exist. This is also why there are checks in EnumMeta like
+# `if Enum is not None`
+Enum = None
+
+
+class EnumMeta(type):
+ """Metaclass for Enum"""
+ @classmethod
+ def __prepare__(metacls, cls, bases):
+ return _EnumDict()
+
+ def __new__(metacls, cls, bases, classdict):
+ # an Enum class is final once enumeration items have been defined; it
+ # cannot be mixed with other types (int, float, etc.) if it has an
+ # inherited __new__ unless a new __new__ is defined (or the resulting
+ # class will fail).
+ if type(classdict) is dict:
+ original_dict = classdict
+ classdict = _EnumDict()
+ for k, v in original_dict.items():
+ classdict[k] = v
+
+ member_type, first_enum = metacls._get_mixins_(bases)
+ __new__, save_new, use_args = metacls._find_new_(classdict, member_type,
+ first_enum)
+ # save enum items into separate mapping so they don't get baked into
+ # the new class
+ members = dict((k, classdict[k]) for k in classdict._member_names)
+ for name in classdict._member_names:
+ del classdict[name]
+
+ # py2 support for definition order
+ _order_ = classdict.get('_order_')
+ if _order_ is None:
+ if pyver < 3.0:
+ try:
+ _order_ = [name for (name, value) in sorted(members.items(), key=lambda item: item[1])]
+ except TypeError:
+ _order_ = [name for name in sorted(members.keys())]
+ else:
+ _order_ = classdict._member_names
+ else:
+ del classdict['_order_']
+ if pyver < 3.0:
+ _order_ = _order_.replace(',', ' ').split()
+ aliases = [name for name in members if name not in _order_]
+ _order_ += aliases
+
+ # check for illegal enum names (any others?)
+ invalid_names = set(members) & set(['mro'])
+ if invalid_names:
+ raise ValueError('Invalid enum member name(s): %s' % (
+ ', '.join(invalid_names), ))
+
+ # save attributes from super classes so we know if we can take
+ # the shortcut of storing members in the class dict
+ base_attributes = set([a for b in bases for a in b.__dict__])
+ # create our new Enum type
+ enum_class = super(EnumMeta, metacls).__new__(metacls, cls, bases, classdict)
+ enum_class._member_names_ = [] # names in random order
+ if OrderedDict is not None:
+ enum_class._member_map_ = OrderedDict()
+ else:
+ enum_class._member_map_ = {} # name->value map
+ enum_class._member_type_ = member_type
+
+ # Reverse value->name map for hashable values.
+ enum_class._value2member_map_ = {}
+
+ # instantiate them, checking for duplicates as we go
+ # we instantiate first instead of checking for duplicates first in case
+ # a custom __new__ is doing something funky with the values -- such as
+ # auto-numbering ;)
+ if __new__ is None:
+ __new__ = enum_class.__new__
+ for member_name in _order_:
+ value = members[member_name]
+ if not isinstance(value, tuple):
+ args = (value, )
+ else:
+ args = value
+ if member_type is tuple: # special case for tuple enums
+ args = (args, ) # wrap it one more time
+ if not use_args or not args:
+ enum_member = __new__(enum_class)
+ if not hasattr(enum_member, '_value_'):
+ enum_member._value_ = value
+ else:
+ enum_member = __new__(enum_class, *args)
+ if not hasattr(enum_member, '_value_'):
+ enum_member._value_ = member_type(*args)
+ value = enum_member._value_
+ enum_member._name_ = member_name
+ enum_member.__objclass__ = enum_class
+ enum_member.__init__(*args)
+ # If another member with the same value was already defined, the
+ # new member becomes an alias to the existing one.
+ for name, canonical_member in enum_class._member_map_.items():
+ if canonical_member.value == enum_member._value_:
+ enum_member = canonical_member
+ break
+ else:
+ # Aliases don't appear in member names (only in __members__).
+ enum_class._member_names_.append(member_name)
+ # performance boost for any member that would not shadow
+ # a DynamicClassAttribute (aka _RouteClassAttributeToGetattr)
+ if member_name not in base_attributes:
+ setattr(enum_class, member_name, enum_member)
+ # now add to _member_map_
+ enum_class._member_map_[member_name] = enum_member
+ try:
+ # This may fail if value is not hashable. We can't add the value
+ # to the map, and by-value lookups for this value will be
+ # linear.
+ enum_class._value2member_map_[value] = enum_member
+ except TypeError:
+ pass
+
+
+ # If a custom type is mixed into the Enum, and it does not know how
+ # to pickle itself, pickle.dumps will succeed but pickle.loads will
+ # fail. Rather than have the error show up later and possibly far
+ # from the source, sabotage the pickle protocol for this class so
+ # that pickle.dumps also fails.
+ #
+ # However, if the new class implements its own __reduce_ex__, do not
+ # sabotage -- it's on them to make sure it works correctly. We use
+ # __reduce_ex__ instead of any of the others as it is preferred by
+ # pickle over __reduce__, and it handles all pickle protocols.
+ unpicklable = False
+ if '__reduce_ex__' not in classdict:
+ if member_type is not object:
+ methods = ('__getnewargs_ex__', '__getnewargs__',
+ '__reduce_ex__', '__reduce__')
+ if not any(m in member_type.__dict__ for m in methods):
+ _make_class_unpicklable(enum_class)
+ unpicklable = True
+
+
+ # double check that repr and friends are not the mixin's or various
+ # things break (such as pickle)
+ for name in ('__repr__', '__str__', '__format__', '__reduce_ex__'):
+ class_method = getattr(enum_class, name)
+ obj_method = getattr(member_type, name, None)
+ enum_method = getattr(first_enum, name, None)
+ if name not in classdict and class_method is not enum_method:
+ if name == '__reduce_ex__' and unpicklable:
+ continue
+ setattr(enum_class, name, enum_method)
+
+        # method resolution and ints are not playing nicely;
+        # Pythons earlier than 2.6 use __cmp__
+
+ if pyver < 2.6:
+
+ if issubclass(enum_class, int):
+ setattr(enum_class, '__cmp__', getattr(int, '__cmp__'))
+
+ elif pyver < 3.0:
+
+ if issubclass(enum_class, int):
+ for method in (
+ '__le__',
+ '__lt__',
+ '__gt__',
+ '__ge__',
+ '__eq__',
+ '__ne__',
+ '__hash__',
+ ):
+ setattr(enum_class, method, getattr(int, method))
+
+ # replace any other __new__ with our own (as long as Enum is not None,
+ # anyway) -- again, this is to support pickle
+ if Enum is not None:
+ # if the user defined their own __new__, save it before it gets
+ # clobbered in case they subclass later
+ if save_new:
+ setattr(enum_class, '__member_new__', enum_class.__dict__['__new__'])
+ setattr(enum_class, '__new__', Enum.__dict__['__new__'])
+ return enum_class
+
+ def __bool__(cls):
+ """
+ classes/types should always be True.
+ """
+ return True
+
+ def __call__(cls, value, names=None, module=None, type=None, start=1):
+ """Either returns an existing member, or creates a new enum class.
+
+ This method is used both when an enum class is given a value to match
+ to an enumeration member (i.e. Color(3)) and for the functional API
+ (i.e. Color = Enum('Color', names='red green blue')).
+
+ When used for the functional API: `module`, if set, will be stored in
+ the new class' __module__ attribute; `type`, if set, will be mixed in
+ as the first base class.
+
+ Note: if `module` is not set this routine will attempt to discover the
+ calling module by walking the frame stack; if this is unsuccessful
+ the resulting class will not be pickleable.
+
+ """
+ if names is None: # simple value lookup
+ return cls.__new__(cls, value)
+ # otherwise, functional API: we're creating a new Enum type
+ return cls._create_(value, names, module=module, type=type, start=start)
+
+ def __contains__(cls, member):
+ return isinstance(member, cls) and member.name in cls._member_map_
+
+ def __delattr__(cls, attr):
+ # nicer error message when someone tries to delete an attribute
+ # (see issue19025).
+ if attr in cls._member_map_:
+ raise AttributeError(
+ "%s: cannot delete Enum member." % cls.__name__)
+ super(EnumMeta, cls).__delattr__(attr)
+
+ def __dir__(self):
+ return (['__class__', '__doc__', '__members__', '__module__'] +
+ self._member_names_)
+
+ @property
+ def __members__(cls):
+ """Returns a mapping of member name->value.
+
+ This mapping lists all enum members, including aliases. Note that this
+ is a copy of the internal mapping.
+
+ """
+ return cls._member_map_.copy()
+
+ def __getattr__(cls, name):
+ """Return the enum member matching `name`
+
+ We use __getattr__ instead of descriptors or inserting into the enum
+ class' __dict__ in order to support `name` and `value` being both
+ properties for enum members (which live in the class' __dict__) and
+ enum members themselves.
+
+ """
+ if _is_dunder(name):
+ raise AttributeError(name)
+ try:
+ return cls._member_map_[name]
+ except KeyError:
+ raise AttributeError(name)
+
+ def __getitem__(cls, name):
+ return cls._member_map_[name]
+
+ def __iter__(cls):
+ return (cls._member_map_[name] for name in cls._member_names_)
+
+ def __reversed__(cls):
+ return (cls._member_map_[name] for name in reversed(cls._member_names_))
+
+ def __len__(cls):
+ return len(cls._member_names_)
+
+ __nonzero__ = __bool__
+
+ def __repr__(cls):
+ return "<enum %r>" % cls.__name__
+
+ def __setattr__(cls, name, value):
+ """Block attempts to reassign Enum members.
+
+ A simple assignment to the class namespace only changes one of the
+ several possible ways to get an Enum member from the Enum class,
+ resulting in an inconsistent Enumeration.
+
+ """
+ member_map = cls.__dict__.get('_member_map_', {})
+ if name in member_map:
+ raise AttributeError('Cannot reassign members.')
+ super(EnumMeta, cls).__setattr__(name, value)
+
+ def _create_(cls, class_name, names=None, module=None, type=None, start=1):
+ """Convenience method to create a new Enum class.
+
+ `names` can be:
+
+ * A string containing member names, separated either with spaces or
+ commas. Values are auto-numbered from 1.
+ * An iterable of member names. Values are auto-numbered from 1.
+ * An iterable of (member name, value) pairs.
+ * A mapping of member name -> value.
+
+ """
+ if pyver < 3.0:
+ # if class_name is unicode, attempt a conversion to ASCII
+ if isinstance(class_name, unicode):
+ try:
+ class_name = class_name.encode('ascii')
+ except UnicodeEncodeError:
+ raise TypeError('%r is not representable in ASCII' % class_name)
+ metacls = cls.__class__
+ if type is None:
+ bases = (cls, )
+ else:
+ bases = (type, cls)
+ classdict = metacls.__prepare__(class_name, bases)
+ _order_ = []
+
+ # special processing needed for names?
+ if isinstance(names, basestring):
+ names = names.replace(',', ' ').split()
+ if isinstance(names, (tuple, list)) and isinstance(names[0], basestring):
+ names = [(e, i+start) for (i, e) in enumerate(names)]
+
+ # Here, names is either an iterable of (name, value) or a mapping.
+ item = None # in case names is empty
+ for item in names:
+ if isinstance(item, basestring):
+ member_name, member_value = item, names[item]
+ else:
+ member_name, member_value = item
+ classdict[member_name] = member_value
+ _order_.append(member_name)
+ # only set _order_ in classdict if name/value was not from a mapping
+ if not isinstance(item, basestring):
+ classdict['_order_'] = ' '.join(_order_)
+ enum_class = metacls.__new__(metacls, class_name, bases, classdict)
+
+ # TODO: replace the frame hack if a blessed way to know the calling
+ # module is ever developed
+ if module is None:
+ try:
+ module = _sys._getframe(2).f_globals['__name__']
+ except (AttributeError, ValueError):
+ pass
+ if module is None:
+ _make_class_unpicklable(enum_class)
+ else:
+ enum_class.__module__ = module
+
+ return enum_class
+
+ @staticmethod
+ def _get_mixins_(bases):
+ """Returns the type for creating enum members, and the first inherited
+ enum class.
+
+ bases: the tuple of bases that was given to __new__
+
+ """
+ if not bases or Enum is None:
+ return object, Enum
+
+
+ # double check that we are not subclassing a class with existing
+ # enumeration members; while we're at it, see if any other data
+ # type has been mixed in so we can use the correct __new__
+ member_type = first_enum = None
+ for base in bases:
+ if (base is not Enum and
+ issubclass(base, Enum) and
+ base._member_names_):
+ raise TypeError("Cannot extend enumerations")
+ # base is now the last base in bases
+ if not issubclass(base, Enum):
+ raise TypeError("new enumerations must be created as "
+ "`ClassName([mixin_type,] enum_type)`")
+
+ # get correct mix-in type (either mix-in type of Enum subclass, or
+ # first base if last base is Enum)
+ if not issubclass(bases[0], Enum):
+ member_type = bases[0] # first data type
+ first_enum = bases[-1] # enum type
+ else:
+ for base in bases[0].__mro__:
+ # most common: (IntEnum, int, Enum, object)
+ # possible: (<Enum 'AutoIntEnum'>, <Enum 'IntEnum'>,
+ # <class 'int'>, <Enum 'Enum'>,
+ # <class 'object'>)
+ if issubclass(base, Enum):
+ if first_enum is None:
+ first_enum = base
+ else:
+ if member_type is None:
+ member_type = base
+
+ return member_type, first_enum
+
+ if pyver < 3.0:
+ @staticmethod
+ def _find_new_(classdict, member_type, first_enum):
+ """Returns the __new__ to be used for creating the enum members.
+
+ classdict: the class dictionary given to __new__
+ member_type: the data type whose __new__ will be used by default
+ first_enum: enumeration to check for an overriding __new__
+
+ """
+            # now find the correct __new__, checking to see if one was defined
+ # by the user; also check earlier enum classes in case a __new__ was
+ # saved as __member_new__
+ __new__ = classdict.get('__new__', None)
+ if __new__:
+ return None, True, True # __new__, save_new, use_args
+
+ N__new__ = getattr(None, '__new__')
+ O__new__ = getattr(object, '__new__')
+ if Enum is None:
+ E__new__ = N__new__
+ else:
+ E__new__ = Enum.__dict__['__new__']
+ # check all possibles for __member_new__ before falling back to
+ # __new__
+ for method in ('__member_new__', '__new__'):
+ for possible in (member_type, first_enum):
+ try:
+ target = possible.__dict__[method]
+ except (AttributeError, KeyError):
+ target = getattr(possible, method, None)
+ if target not in [
+ None,
+ N__new__,
+ O__new__,
+ E__new__,
+ ]:
+ if method == '__member_new__':
+ classdict['__new__'] = target
+ return None, False, True
+ if isinstance(target, staticmethod):
+ target = target.__get__(member_type)
+ __new__ = target
+ break
+ if __new__ is not None:
+ break
+ else:
+ __new__ = object.__new__
+
+ # if a non-object.__new__ is used then whatever value/tuple was
+ # assigned to the enum member name will be passed to __new__ and to the
+ # new enum member's __init__
+ if __new__ is object.__new__:
+ use_args = False
+ else:
+ use_args = True
+
+ return __new__, False, use_args
+ else:
+ @staticmethod
+ def _find_new_(classdict, member_type, first_enum):
+ """Returns the __new__ to be used for creating the enum members.
+
+ classdict: the class dictionary given to __new__
+ member_type: the data type whose __new__ will be used by default
+ first_enum: enumeration to check for an overriding __new__
+
+ """
+            # now find the correct __new__, checking to see if one was defined
+ # by the user; also check earlier enum classes in case a __new__ was
+ # saved as __member_new__
+ __new__ = classdict.get('__new__', None)
+
+ # should __new__ be saved as __member_new__ later?
+ save_new = __new__ is not None
+
+ if __new__ is None:
+ # check all possibles for __member_new__ before falling back to
+ # __new__
+ for method in ('__member_new__', '__new__'):
+ for possible in (member_type, first_enum):
+ target = getattr(possible, method, None)
+ if target not in (
+ None,
+ None.__new__,
+ object.__new__,
+ Enum.__new__,
+ ):
+ __new__ = target
+ break
+ if __new__ is not None:
+ break
+ else:
+ __new__ = object.__new__
+
+ # if a non-object.__new__ is used then whatever value/tuple was
+ # assigned to the enum member name will be passed to __new__ and to the
+ # new enum member's __init__
+ if __new__ is object.__new__:
+ use_args = False
+ else:
+ use_args = True
+
+ return __new__, save_new, use_args
+
+
+########################################################
+# In order to support Python 2 and 3 with a single
+# codebase we have to create the Enum methods separately
+# and then use the `type(name, bases, dict)` method to
+# create the class.
+########################################################
+temp_enum_dict = {}
+temp_enum_dict['__doc__'] = "Generic enumeration.\n\n Derive from this class to define new enumerations.\n\n"
+
+def __new__(cls, value):
+ # all enum instances are actually created during class construction
+ # without calling this method; this method is called by the metaclass'
+ # __call__ (i.e. Color(3) ), and by pickle
+ if type(value) is cls:
+ # For lookups like Color(Color.red)
+ value = value.value
+ #return value
+ # by-value search for a matching enum member
+ # see if it's in the reverse mapping (for hashable values)
+ try:
+ if value in cls._value2member_map_:
+ return cls._value2member_map_[value]
+ except TypeError:
+ # not there, now do long search -- O(n) behavior
+ for member in cls._member_map_.values():
+ if member.value == value:
+ return member
+ raise ValueError("%s is not a valid %s" % (value, cls.__name__))
+temp_enum_dict['__new__'] = __new__
+del __new__
+
+def __repr__(self):
+ return "<%s.%s: %r>" % (
+ self.__class__.__name__, self._name_, self._value_)
+temp_enum_dict['__repr__'] = __repr__
+del __repr__
+
+def __str__(self):
+ return "%s.%s" % (self.__class__.__name__, self._name_)
+temp_enum_dict['__str__'] = __str__
+del __str__
+
+if pyver >= 3.0:
+ def __dir__(self):
+ added_behavior = [
+ m
+ for cls in self.__class__.mro()
+ for m in cls.__dict__
+ if m[0] != '_' and m not in self._member_map_
+ ]
+ return (['__class__', '__doc__', '__module__', ] + added_behavior)
+ temp_enum_dict['__dir__'] = __dir__
+ del __dir__
+
+def __format__(self, format_spec):
+ # mixed-in Enums should use the mixed-in type's __format__, otherwise
+ # we can get strange results with the Enum name showing up instead of
+ # the value
+
+ # pure Enum branch
+ if self._member_type_ is object:
+ cls = str
+ val = str(self)
+ # mix-in branch
+ else:
+ cls = self._member_type_
+ val = self.value
+ return cls.__format__(val, format_spec)
+temp_enum_dict['__format__'] = __format__
+del __format__
+
+
+####################################
+# Pythons earlier than 2.6 use __cmp__
+
+if pyver < 2.6:
+
+ def __cmp__(self, other):
+ if type(other) is self.__class__:
+ if self is other:
+ return 0
+ return -1
+ return NotImplemented
+ temp_enum_dict['__cmp__'] = __cmp__
+ del __cmp__
+
+else:
+
+ def __le__(self, other):
+ raise TypeError("unorderable types: %s() <= %s()" % (self.__class__.__name__, other.__class__.__name__))
+ temp_enum_dict['__le__'] = __le__
+ del __le__
+
+ def __lt__(self, other):
+ raise TypeError("unorderable types: %s() < %s()" % (self.__class__.__name__, other.__class__.__name__))
+ temp_enum_dict['__lt__'] = __lt__
+ del __lt__
+
+ def __ge__(self, other):
+ raise TypeError("unorderable types: %s() >= %s()" % (self.__class__.__name__, other.__class__.__name__))
+ temp_enum_dict['__ge__'] = __ge__
+ del __ge__
+
+ def __gt__(self, other):
+ raise TypeError("unorderable types: %s() > %s()" % (self.__class__.__name__, other.__class__.__name__))
+ temp_enum_dict['__gt__'] = __gt__
+ del __gt__
+
+
+def __eq__(self, other):
+ if type(other) is self.__class__:
+ return self is other
+ return NotImplemented
+temp_enum_dict['__eq__'] = __eq__
+del __eq__
+
+def __ne__(self, other):
+ if type(other) is self.__class__:
+ return self is not other
+ return NotImplemented
+temp_enum_dict['__ne__'] = __ne__
+del __ne__
+
+def __hash__(self):
+ return hash(self._name_)
+temp_enum_dict['__hash__'] = __hash__
+del __hash__
+
+def __reduce_ex__(self, proto):
+ return self.__class__, (self._value_, )
+temp_enum_dict['__reduce_ex__'] = __reduce_ex__
+del __reduce_ex__
+
+# _RouteClassAttributeToGetattr is used to provide access to the `name`
+# and `value` properties of enum members while keeping some measure of
+# protection from modification, while still allowing for an enumeration
+# to have members named `name` and `value`. This works because enumeration
+# members are not set directly on the enum class -- __getattr__ is
+# used to look them up.
+
+@_RouteClassAttributeToGetattr
+def name(self):
+ return self._name_
+temp_enum_dict['name'] = name
+del name
+
+@_RouteClassAttributeToGetattr
+def value(self):
+ return self._value_
+temp_enum_dict['value'] = value
+del value
+
+@classmethod
+def _convert(cls, name, module, filter, source=None):
+ """
+ Create a new Enum subclass that replaces a collection of global constants
+ """
+ # convert all constants from source (or module) that pass filter() to
+ # a new Enum called name, and export the enum and its members back to
+ # module;
+ # also, replace the __reduce_ex__ method so unpickling works in
+ # previous Python versions
+ module_globals = vars(_sys.modules[module])
+ if source:
+ source = vars(source)
+ else:
+ source = module_globals
+ members = dict((name, value) for name, value in source.items() if filter(name))
+ cls = cls(name, members, module=module)
+ cls.__reduce_ex__ = _reduce_ex_by_name
+ module_globals.update(cls.__members__)
+ module_globals[name] = cls
+ return cls
+temp_enum_dict['_convert'] = _convert
+del _convert
+
+Enum = EnumMeta('Enum', (object, ), temp_enum_dict)
+del temp_enum_dict
+
+# Enum has now been created
+###########################
+
+class IntEnum(int, Enum):
+ """Enum where members are also (and must be) ints"""
+
+def _reduce_ex_by_name(self, proto):
+ return self.name
+
+def unique(enumeration):
+ """Class decorator that ensures only unique members exist in an enumeration."""
+ duplicates = []
+ for name, member in enumeration.__members__.items():
+ if name != member.name:
+ duplicates.append((name, member.name))
+ if duplicates:
+ duplicate_names = ', '.join(
+ ["%s -> %s" % (alias, name) for (alias, name) in duplicates]
+ )
+ raise ValueError('duplicate names found in %r: %s' %
+ (enumeration, duplicate_names)
+ )
+ return enumeration
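Since the backport mirrors the Python 3.4 stdlib enum API, usage follows the
familiar class-based or functional style. A minimal sketch; the import path
is hypothetical and depends on how the module lands on sys.path.

    from enum34 import Enum, IntEnum, unique  # hypothetical import path

    @unique
    class Color(Enum):
        red = 1
        green = 2
        blue = 3

    # Functional API: values are auto-numbered from 1.
    Signal = IntEnum('Signal', 'READABLE WRITABLE PEER_CLOSED')
    assert Color(1) is Color.red
    assert Signal.WRITABLE == 2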
diff --git a/chromium/build/fuchsia/linux.sdk.sha1 b/chromium/build/fuchsia/linux.sdk.sha1
index eb5baf9ffe2..97cfbc9ec3b 100644
--- a/chromium/build/fuchsia/linux.sdk.sha1
+++ b/chromium/build/fuchsia/linux.sdk.sha1
@@ -1 +1 @@
-d5cb91035bcf04ae5cb23f3cce9edb8cdfdd86f9 \ No newline at end of file
+fd85194ec253031ab0625905e694e273234cd02c \ No newline at end of file
diff --git a/chromium/build/fuchsia/mac.sdk.sha1 b/chromium/build/fuchsia/mac.sdk.sha1
index b6accb8d1a8..6c138b16b7c 100644
--- a/chromium/build/fuchsia/mac.sdk.sha1
+++ b/chromium/build/fuchsia/mac.sdk.sha1
@@ -1 +1 @@
-6cac70796e4f58c3c3ec9ae2e9e97731456e5311 \ No newline at end of file
+0519779b1f2f963e622edca3ff409409b9b66452 \ No newline at end of file
diff --git a/chromium/build/fuchsia/qemu_target.py b/chromium/build/fuchsia/qemu_target.py
index bad8c6bb3c6..92324535154 100644
--- a/chromium/build/fuchsia/qemu_target.py
+++ b/chromium/build/fuchsia/qemu_target.py
@@ -35,7 +35,7 @@ def _GetAvailableTcpPort():
class QemuTarget(target.Target):
def __init__(self, output_dir, target_cpu, cpu_cores, system_log_file,
- ram_size_mb=2048):
+ require_kvm, ram_size_mb=2048):
"""output_dir: The directory which will contain the files that are
generated to support the QEMU deployment.
target_cpu: The emulated target CPU architecture.
@@ -45,6 +45,7 @@ class QemuTarget(target.Target):
self._ram_size_mb = ram_size_mb
self._system_log_file = system_log_file
self._cpu_cores = cpu_cores
+ self._require_kvm = require_kvm
def __enter__(self):
return self
@@ -76,21 +77,15 @@ class QemuTarget(target.Target):
boot_data.GetTargetFile(self._GetTargetSdkArch(),
'qemu-kernel.bin')),
'-initrd', EnsurePathExists(
- boot_data.GetTargetFile(self._GetTargetSdkArch(),
- 'fuchsia.zbi')),
+ boot_data.GetBootImage(self._output_dir, self._GetTargetSdkArch())),
'-smp', str(self._cpu_cores),
# Attach the blobstore and data volumes. Use snapshot mode to discard
# any changes.
'-snapshot',
- '-drive', 'file=%s,format=qcow2,if=none,id=data,snapshot=on' %
- EnsurePathExists(os.path.join(self._output_dir,
- 'fvm.blk.qcow2')),
'-drive', 'file=%s,format=qcow2,if=none,id=blobstore,snapshot=on' %
- EnsurePathExists(
- boot_data.ConfigureDataFVM(self._output_dir,
- boot_data.FVM_TYPE_QCOW)),
- '-device', 'virtio-blk-pci,drive=data',
+ EnsurePathExists(
+ os.path.join(self._output_dir, 'fvm.blk.qcow2')),
'-device', 'virtio-blk-pci,drive=blobstore',
# Use stdio for the guest OS only; don't attach the QEMU interactive
@@ -101,11 +96,10 @@ class QemuTarget(target.Target):
'-append', ' '.join(kernel_args)
]
- # Configure the machine & CPU to emulate, based on the target architecture.
+ # Configure the machine to emulate, based on the target architecture.
if self._target_cpu == 'arm64':
qemu_command.extend([
'-machine','virt',
- '-cpu', 'cortex-a53',
])
netdev_type = 'virtio-net-pci'
else:
@@ -114,15 +108,21 @@ class QemuTarget(target.Target):
])
netdev_type = 'e1000'
- # On Linux, enable lightweight virtualization (KVM) if the host and guest
- # architectures are the same.
- if sys.platform.startswith('linux'):
- if self._target_cpu == 'arm64' and platform.machine() == 'aarch64':
- qemu_command.append('-enable-kvm')
- elif self._target_cpu == 'x64' and platform.machine() == 'x86_64':
- qemu_command.extend([
- '-enable-kvm', '-cpu', 'host,migratable=no',
- ])
+ # Configure the CPU to emulate.
+ # On Linux, we can enable lightweight virtualization (KVM) if the host and
+ # guest architectures are the same.
+ enable_kvm = self._require_kvm or (sys.platform.startswith('linux') and (
+ (self._target_cpu == 'arm64' and platform.machine() == 'aarch64') or
+ (self._target_cpu == 'x64' and platform.machine() == 'x86_64')) and
+ os.access('/dev/kvm', os.R_OK | os.W_OK))
+ if enable_kvm:
+ qemu_command.extend(['-enable-kvm', '-cpu', 'host,migratable=no'])
+ else:
+ logging.warning('Unable to launch QEMU with KVM acceleration.')
+ if self._target_cpu == 'arm64':
+ qemu_command.extend(['-cpu', 'cortex-a53'])
+ else:
+ qemu_command.extend(['-cpu', 'Haswell,+smap,-check,-fsgsbase'])
# Configure virtual network. It is used in the tests to connect to
# testserver running on the host.
@@ -160,6 +160,8 @@ class QemuTarget(target.Target):
self._WaitUntilReady();
def _IsQemuStillRunning(self):
+ if not self._qemu_process:
+ return False
return os.waitpid(self._qemu_process.pid, os.WNOHANG)[0] == 0
def _GetEndpoint(self):
diff --git a/chromium/build/fuchsia/run_package.py b/chromium/build/fuchsia/run_package.py
index d61deb649f3..077ff28defd 100644
--- a/chromium/build/fuchsia/run_package.py
+++ b/chromium/build/fuchsia/run_package.py
@@ -58,7 +58,7 @@ def _ReadMergedLines(streams):
del streams_by_fd[fileno]
-def DrainStreamToStdout(stream, quit_event):
+def _DrainStreamToStdout(stream, quit_event):
"""Outputs the contents of |stream| until |quit_event| is set."""
while not quit_event.is_set():
@@ -70,34 +70,57 @@ def DrainStreamToStdout(stream, quit_event):
print line.rstrip()
+class RunPackageArgs:
+ """RunPackage() configuration arguments structure.
+
+ install_only: If set, skips the package execution step.
+  symbolizer_config: A newline-delimited list of source files contained
+      in the package. Omitting this parameter disables symbolization.
+  system_logging: If set, connects a system log reader to the target.
+  target_staging_path: Path to which package FARs will be staged during
+      installation. Defaults to staging into '/data'.
+ """
+ def __init__(self):
+ self.install_only = False
+ self.symbolizer_config = None
+ self.system_logging = False
+ self.target_staging_path = '/data'
+
+ @staticmethod
+ def FromCommonArgs(args):
+ run_package_args = RunPackageArgs()
+ run_package_args.install_only = args.install_only
+ run_package_args.symbolizer_config = args.package_manifest
+ run_package_args.system_logging = args.include_system_logs
+ run_package_args.target_staging_path = args.target_staging_path
+ return run_package_args
+
+
def RunPackage(output_dir, target, package_path, package_name, package_deps,
- run_args, system_logging, install_only, symbolizer_config=None):
+ package_args, args):
"""Copies the Fuchsia package at |package_path| to the target,
- executes it with |run_args|, and symbolizes its output.
+ executes it with |package_args|, and symbolizes its output.
output_dir: The path containing the build output files.
target: The deployment Target object that will run the package.
package_path: The path to the .far package file.
package_name: The name of app specified by package metadata.
- run_args: The arguments which will be passed to the Fuchsia process.
- system_logging: If set, connects a system log reader to the target.
- install_only: If set, skips the package execution step.
- symbolizer_config: A newline delimited list of source files contained
- in the package. Omitting this parameter will disable
- symbolization.
+ package_args: The arguments which will be passed to the Fuchsia process.
+ args: Structure of arguments to configure how the package will be run.
Returns the exit code of the remote package process."""
- system_logger = _AttachKernelLogReader(target) if system_logging else None
+ system_logger = (
+ _AttachKernelLogReader(target) if args.system_logging else None)
try:
if system_logger:
# Spin up a thread to asynchronously dump the system log to stdout
# for easier diagnoses of early, pre-execution failures.
log_output_quit_event = multiprocessing.Event()
log_output_thread = threading.Thread(
- target=lambda: DrainStreamToStdout(system_logger.stdout,
- log_output_quit_event))
+ target=lambda: _DrainStreamToStdout(system_logger.stdout,
+ log_output_quit_event))
log_output_thread.daemon = True
log_output_thread.start()
@@ -105,7 +128,8 @@ def RunPackage(output_dir, target, package_path, package_name, package_deps,
logging.info('Installing ' + os.path.basename(next_package_path) + '.')
# Copy the package archive.
- install_path = os.path.join('/data', os.path.basename(next_package_path))
+ install_path = os.path.join(args.target_staging_path,
+ os.path.basename(next_package_path))
target.PutFile(next_package_path, install_path)
# Install the package.
@@ -125,12 +149,12 @@ def RunPackage(output_dir, target, package_path, package_name, package_deps,
log_output_quit_event.set()
log_output_thread.join(timeout=_JOIN_TIMEOUT_SECS)
- if install_only:
+ if args.install_only:
logging.info('Installation complete.')
return
logging.info('Running application.')
- command = ['run', package_name] + run_args
+ command = ['run', package_name] + package_args
process = target.RunCommandPiped(command,
stdin=open(os.devnull, 'r'),
stdout=subprocess.PIPE,
@@ -141,9 +165,9 @@ def RunPackage(output_dir, target, package_path, package_name, package_deps,
else:
task_output = process.stdout
- if symbolizer_config:
+ if args.symbolizer_config:
# Decorate the process output stream with the symbolizer.
- output = FilterStream(task_output, package_name, symbolizer_config,
+ output = FilterStream(task_output, package_name, args.symbolizer_config,
output_dir)
else:
logging.warn('Symbolization is DISABLED.')
@@ -168,5 +192,4 @@ def RunPackage(output_dir, target, package_path, package_name, package_deps,
log_output_thread.join()
system_logger.kill()
-
return process.returncode
diff --git a/chromium/build/fuchsia/test_runner.py b/chromium/build/fuchsia/test_runner.py
index d6cdff23b60..f9caabcfd01 100755
--- a/chromium/build/fuchsia/test_runner.py
+++ b/chromium/build/fuchsia/test_runner.py
@@ -18,7 +18,7 @@ import time
from common_args import AddCommonArgs, ConfigureLogging, GetDeploymentTargetForArgs
from net_test_server import SetupTestServer
-from run_package import RunPackage
+from run_package import RunPackage, RunPackageArgs
DEFAULT_TEST_CONCURRENCY = 4
TEST_RESULT_PATH = '/data/test_summary.json'
@@ -32,6 +32,9 @@ def main():
parser.add_argument('--gtest_repeat',
help='GTest repeat value to use. This also disables the '
'test launcher timeout.')
+ parser.add_argument('--test-launcher-retry-limit',
+                      help='Number of times that the test suite will retry failing '
+ 'tests. This is multiplicative with --gtest_repeat.')
parser.add_argument('--gtest_break_on_failure', action='store_true',
default=False,
help='Should GTest break on failure; useful with '
@@ -79,6 +82,9 @@ def main():
if args.gtest_repeat:
child_args.append('--gtest_repeat=' + args.gtest_repeat)
child_args.append('--test-launcher-timeout=-1')
+ if args.test_launcher_retry_limit:
+ child_args.append(
+ '--test-launcher-retry-limit=' + args.test_launcher_retry_limit)
if args.gtest_break_on_failure:
child_args.append('--gtest_break_on_failure')
if args.child_args:
@@ -98,10 +104,10 @@ def main():
if args.enable_test_server:
test_server = SetupTestServer(target, test_concurrency)
+ run_package_args = RunPackageArgs.FromCommonArgs(args)
returncode = RunPackage(
args.output_directory, target, args.package, args.package_name,
- args.package_dep, child_args, args.include_system_logs,
- args.install_only, args.package_manifest)
+ args.package_dep, child_args, run_package_args)
if forwarder:
forwarder.terminate()
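
The new flag composes with the existing --gtest_repeat handling; a small sketch with invented argument values, mirroring the logic added above:

    child_args = []
    gtest_repeat = '3'   # hypothetical --gtest_repeat value
    retry_limit = '2'    # hypothetical --test-launcher-retry-limit value
    if gtest_repeat:
      child_args.append('--gtest_repeat=' + gtest_repeat)
      child_args.append('--test-launcher-timeout=-1')
    if retry_limit:
      child_args.append('--test-launcher-retry-limit=' + retry_limit)
    # child_args is then forwarded to the test process via RunPackage():
    assert child_args == ['--gtest_repeat=3', '--test-launcher-timeout=-1',
                          '--test-launcher-retry-limit=2']
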
diff --git a/chromium/build/gn_helpers.py b/chromium/build/gn_helpers.py
index a9d1e2ee91a..25f1240c73a 100644
--- a/chromium/build/gn_helpers.py
+++ b/chromium/build/gn_helpers.py
@@ -171,6 +171,19 @@ class GNValueParser(object):
while not self.IsDone() and self.input[self.cur] in ' \t\n':
self.cur += 1
+ def ConsumeComment(self):
+ if self.IsDone() or self.input[self.cur] != '#':
+ return
+
+ # Consume each comment, line by line.
+ while not self.IsDone() and self.input[self.cur] == '#':
+ # Consume the rest of the comment, up until the end of the line.
+ while not self.IsDone() and self.input[self.cur] != '\n':
+ self.cur += 1
+ # Move the cursor to the next line (if there is one).
+ if not self.IsDone():
+ self.cur += 1
+
def Parse(self):
"""Converts a string representing a printed GN value to the Python type.
@@ -203,6 +216,7 @@ class GNValueParser(object):
d = {}
self.ConsumeWhitespace()
+ self.ConsumeComment()
while not self.IsDone():
ident = self._ParseIdent()
self.ConsumeWhitespace()
@@ -212,6 +226,7 @@ class GNValueParser(object):
self.ConsumeWhitespace()
val = self._ParseAllowTrailing()
self.ConsumeWhitespace()
+ self.ConsumeComment()
d[ident] = val
return d
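
With comment handling in place, FromGNArgs can consume an args.gn file verbatim; a small usage sketch, mirroring the new unit test below:

    import gn_helpers

    args_gn = ('# Top-level comment.\n'
               'is_debug = false\n'
               'use_goma = true  # In-line comment.\n')
    assert gn_helpers.FromGNArgs(args_gn) == {'is_debug': False,
                                              'use_goma': True}
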
diff --git a/chromium/build/gn_helpers_unittest.py b/chromium/build/gn_helpers_unittest.py
index cc6018a1721..43c084b3aae 100644
--- a/chromium/build/gn_helpers_unittest.py
+++ b/chromium/build/gn_helpers_unittest.py
@@ -85,6 +85,15 @@ class UnitTest(unittest.TestCase):
self.assertEqual(gn_helpers.FromGNArgs('foo="bar baz"'),
{'foo': 'bar baz'})
+ # Comments should work (and be ignored).
+ gn_args_lines = [
+ '# Top-level comment.',
+ 'foo = true',
+ 'bar = 1 # In-line comment.',
+ ]
+ self.assertEqual(gn_helpers.FromGNArgs('\n'.join(gn_args_lines)),
+ {'foo': True, 'bar': 1})
+
# Lists should work.
self.assertEqual(gn_helpers.FromGNArgs('foo=[1, 2, 3]'),
{'foo': [1, 2, 3]})
diff --git a/chromium/build/gyp_chromium.py b/chromium/build/gyp_chromium.py
index ab2e470dce3..1fd8ba29d0b 100644
--- a/chromium/build/gyp_chromium.py
+++ b/chromium/build/gyp_chromium.py
@@ -4,11 +4,8 @@
"""This script is now only used by the closure_compilation builders."""
-import argparse
-import glob
import gyp_environment
import os
-import shlex
import sys
script_dir = os.path.dirname(os.path.realpath(__file__))
@@ -18,41 +15,6 @@ sys.path.insert(0, os.path.join(chrome_src, 'tools', 'gyp', 'pylib'))
import gyp
-def ProcessGypDefinesItems(items):
- """Converts a list of strings to a list of key-value pairs."""
- result = []
- for item in items:
- tokens = item.split('=', 1)
- # Some GYP variables have hyphens, which we don't support.
- if len(tokens) == 2:
- result += [(tokens[0], tokens[1])]
- else:
- # No value supplied, treat it as a boolean and set it. Note that we
- # use the string '1' here so we have a consistent definition whether
- # you do 'foo=1' or 'foo'.
- result += [(tokens[0], '1')]
- return result
-
-
-def GetSupplementalFiles():
- return []
-
-
-def GetGypVars(_):
- """Returns a dictionary of all GYP vars."""
- # GYP defines from the environment.
- env_items = ProcessGypDefinesItems(
- shlex.split(os.environ.get('GYP_DEFINES', '')))
-
- # GYP defines from the command line.
- parser = argparse.ArgumentParser()
- parser.add_argument('-D', dest='defines', action='append', default=[])
- cmdline_input_items = parser.parse_known_args()[0].defines
- cmdline_items = ProcessGypDefinesItems(cmdline_input_items)
-
- return dict(env_items + cmdline_items)
-
-
def main():
gyp_environment.SetEnvironment()
diff --git a/chromium/build/gyp_environment.py b/chromium/build/gyp_environment.py
index 51b9136f241..cde333f4b08 100644
--- a/chromium/build/gyp_environment.py
+++ b/chromium/build/gyp_environment.py
@@ -8,15 +8,12 @@ gyp_chromium and landmines.py which run at different stages of runhooks. To
make sure settings are consistent between them, all setup should happen here.
"""
-import gyp_helper
import os
import sys
import vs_toolchain
def SetEnvironment():
"""Sets defaults for GYP_* variables."""
- gyp_helper.apply_chromium_gyp_env()
-
# Default to ninja on linux and windows, but only if no generator has
# explicitly been set.
# Also default to ninja on mac, but only when not building chrome/ios.
diff --git a/chromium/build/gyp_helper.py b/chromium/build/gyp_helper.py
deleted file mode 100644
index c840f2d6dc2..00000000000
--- a/chromium/build/gyp_helper.py
+++ /dev/null
@@ -1,68 +0,0 @@
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file helps gyp_chromium and landmines correctly set up the gyp
-# environment from chromium.gyp_env on disk
-
-import os
-
-SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
-CHROME_SRC = os.path.dirname(SCRIPT_DIR)
-
-
-def apply_gyp_environment_from_file(file_path):
- """Reads in a *.gyp_env file and applies the valid keys to os.environ."""
- if not os.path.exists(file_path):
- return
- with open(file_path, 'rU') as f:
- file_contents = f.read()
- try:
- file_data = eval(file_contents, {'__builtins__': None}, None)
- except SyntaxError, e:
- e.filename = os.path.abspath(file_path)
- raise
- supported_vars = (
- 'CC',
- 'CC_wrapper',
- 'CC.host_wrapper',
- 'CHROMIUM_GYP_FILE',
- 'CHROMIUM_GYP_SYNTAX_CHECK',
- 'CXX',
- 'CXX_wrapper',
- 'CXX.host_wrapper',
- 'GYP_DEFINES',
- 'GYP_GENERATOR_FLAGS',
- 'GYP_CROSSCOMPILE',
- 'GYP_GENERATOR_OUTPUT',
- 'GYP_GENERATORS',
- 'GYP_INCLUDE_FIRST',
- 'GYP_INCLUDE_LAST',
- 'GYP_MSVS_VERSION',
- )
- for var in supported_vars:
- file_val = file_data.get(var)
- if file_val:
- if var in os.environ:
- behavior = 'replaces'
- if var == 'GYP_DEFINES':
- result = file_val + ' ' + os.environ[var]
- behavior = 'merges with, and individual components override,'
- else:
- result = os.environ[var]
- print 'INFO: Environment value for "%s" %s value in %s' % (
- var, behavior, os.path.abspath(file_path)
- )
- string_padding = max(len(var), len(file_path), len('result'))
- print ' %s: %s' % (var.rjust(string_padding), os.environ[var])
- print ' %s: %s' % (file_path.rjust(string_padding), file_val)
- os.environ[var] = result
- else:
- os.environ[var] = file_val
-
-
-def apply_chromium_gyp_env():
- if 'SKIP_CHROMIUM_GYP_ENV' not in os.environ:
- # Update the environment based on chromium.gyp_env
- path = os.path.join(os.path.dirname(CHROME_SRC), 'chromium.gyp_env')
- apply_gyp_environment_from_file(path)
diff --git a/chromium/build/gypi_to_gn.py b/chromium/build/gypi_to_gn.py
deleted file mode 100644
index 2a3a72ac0f9..00000000000
--- a/chromium/build/gypi_to_gn.py
+++ /dev/null
@@ -1,192 +0,0 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Converts a given gypi file to a python scope and writes the result to stdout.
-
-USING THIS SCRIPT IN CHROMIUM
-
-Forking Python to run this script in the middle of GN is slow, especially on
-Windows, and it makes both the GYP and GN files harder to follow. You can't
-use "git grep" to find files in the GN build any more, and tracking everything
-in GYP down requires a level of indirection. Any calls will have to be removed
-and cleaned up once the GYP-to-GN transition is complete.
-
-As a result, we only use this script when the list of files is large and
-frequently-changing. In these cases, having one canonical list outweights the
-downsides.
-
-As of this writing, the GN build is basically complete. It's likely that all
-large and frequently changing targets where this is appropriate use this
-mechanism already. And since we hope to turn down the GYP build soon, the time
-horizon is also relatively short. As a result, it is likely that no additional
-uses of this script should ever be added to the build. During this later part
-of the transition period, we should be focusing more and more on the absolute
-readability of the GN build.
-
-
-HOW TO USE
-
-It is assumed that the file contains a toplevel dictionary, and this script
-will return that dictionary as a GN "scope" (see example below). This script
-does not know anything about GYP and it will not expand variables or execute
-conditions.
-
-It will strip conditions blocks.
-
-A variables block at the top level will be flattened so that the variables
-appear in the root dictionary. This way they can be returned to the GN code.
-
-Say your_file.gypi looked like this:
- {
- 'sources': [ 'a.cc', 'b.cc' ],
- 'defines': [ 'ENABLE_DOOM_MELON' ],
- }
-
-You would call it like this:
- gypi_values = exec_script("//build/gypi_to_gn.py",
- [ rebase_path("your_file.gypi") ],
- "scope",
- [ "your_file.gypi" ])
-
-Notes:
- - The rebase_path call converts the gypi file from being relative to the
- current build file to being system absolute for calling the script, which
- will have a different current directory than this file.
-
- - The "scope" parameter tells GN to interpret the result as a series of GN
- variable assignments.
-
- - The last file argument to exec_script tells GN that the given file is a
- dependency of the build so Ninja can automatically re-run GN if the file
- changes.
-
-Read the values into a target like this:
- component("mycomponent") {
- sources = gypi_values.sources
- defines = gypi_values.defines
- }
-
-Sometimes your .gypi file will include paths relative to a different
-directory than the current .gn file. In this case, you can rebase them to
-be relative to the current directory.
- sources = rebase_path(gypi_values.sources, ".",
- "//path/gypi/input/values/are/relative/to")
-
-This script will tolerate the presence or absence of a 'variables' block in
-the toplevel dictionary. If the toplevel dictionary just contains one item
-called 'variables', it will be collapsed away and the result will be the
-contents of that dictionary. Some
-.gypi files are written with or without this, depending on how they expect to
-be embedded into a .gyp file.
-
-This script also has the ability to replace certain substrings in the input.
-Generally this is used to emulate GYP variable expansion. If you passed the
-argument "--replace=<(foo)=bar" then all instances of "<(foo)" in strings in
-the input will be replaced with "bar":
-
- gypi_values = exec_script("//build/gypi_to_gn.py",
- [ rebase_path("your_file.gypi"),
- "--replace=<(foo)=bar"],
- "scope",
- [ "your_file.gypi" ])
-
-"""
-
-import gn_helpers
-from optparse import OptionParser
-import sys
-
-def LoadPythonDictionary(path):
- file_string = open(path).read()
- try:
- file_data = eval(file_string, {'__builtins__': None}, None)
- except SyntaxError, e:
- e.filename = path
- raise
- except Exception, e:
- raise Exception("Unexpected error while reading %s: %s" % (path, str(e)))
-
- assert isinstance(file_data, dict), "%s does not eval to a dictionary" % path
-
- # Flatten any variables to the top level.
- if 'variables' in file_data:
- file_data.update(file_data['variables'])
- del file_data['variables']
-
- # Strip all elements that this script can't process.
- elements_to_strip = [
- 'conditions',
- 'target_conditions',
- 'target_defaults',
- 'targets',
- 'includes',
- 'actions',
- ]
- for element in elements_to_strip:
- if element in file_data:
- del file_data[element]
-
- return file_data
-
-
-def ReplaceSubstrings(values, search_for, replace_with):
- """Recursively replaces substrings in a value.
-
- Replaces all substrings of the "search_for" with "replace_with" for all
- strings occurring in "values". This is done by recursively iterating into
- lists as well as the keys and values of dictionaries."""
- if isinstance(values, str):
- return values.replace(search_for, replace_with)
-
- if isinstance(values, list):
- return [ReplaceSubstrings(v, search_for, replace_with) for v in values]
-
- if isinstance(values, dict):
- # For dictionaries, do the search for both the key and values.
- result = {}
- for key, value in values.items():
- new_key = ReplaceSubstrings(key, search_for, replace_with)
- new_value = ReplaceSubstrings(value, search_for, replace_with)
- result[new_key] = new_value
- return result
-
- # Assume everything else is unchanged.
- return values
-
-def main():
- parser = OptionParser()
- parser.add_option("-r", "--replace", action="append",
- help="Replaces substrings. If passed a=b, replaces all substrs a with b.")
- (options, args) = parser.parse_args()
-
- if len(args) != 1:
- raise Exception("Need one argument which is the .gypi file to read.")
-
- data = LoadPythonDictionary(args[0])
- if options.replace:
- # Do replacements for all specified patterns.
- for replace in options.replace:
- split = replace.split('=')
- # Allow "foo=" to replace with nothing.
- if len(split) == 1:
- split.append('')
- assert len(split) == 2, "Replacement must be of the form 'key=value'."
- data = ReplaceSubstrings(data, split[0], split[1])
-
- # Sometimes .gypi files use the GYP syntax with percents at the end of the
- # variable name (to indicate not to overwrite a previously-defined value):
- # 'foo%': 'bar',
- # Convert these to regular variables.
- for key in data:
- if len(key) > 1 and key[len(key) - 1] == '%':
- data[key[:-1]] = data[key]
- del data[key]
-
- print gn_helpers.ToGNString(data)
-
-if __name__ == '__main__':
- try:
- main()
- except Exception, e:
- print str(e)
- sys.exit(1)
diff --git a/chromium/build/install-build-deps.sh b/chromium/build/install-build-deps.sh
index cf4050dfe3e..78033f00b01 100755
--- a/chromium/build/install-build-deps.sh
+++ b/chromium/build/install-build-deps.sh
@@ -289,6 +289,7 @@ backwards_compatible_list="\
libgl1-mesa-dev
libgl1-mesa-glx:i386
libgles2-mesa-dev
+ libgtk-3-0:i386
libgtk2.0-0
libgtk2.0-0:i386
libgtk2.0-dev
@@ -388,7 +389,6 @@ nacl_list="\
libfontconfig1:i386
libglib2.0-0:i386
libgpm2:i386
- libgtk-3-0:i386
libncurses5:i386
lib32ncurses5-dev
libnss3:i386
diff --git a/chromium/build/linux/chrome.safestack.map b/chromium/build/linux/chrome.safestack.map
deleted file mode 100644
index 76b9f5bc45d..00000000000
--- a/chromium/build/linux/chrome.safestack.map
+++ /dev/null
@@ -1,93 +0,0 @@
-# This is a separate SafeStack version script to avoid accidentally exporting
-# a pthread_create symbol in the default build
-{
-global:
- __bss_start;
- __data_start;
- data_start;
- _edata;
- _end;
- _IO_stdin_used;
-
- # Initialization and finalization functions for static global
- # variables.
- _fini;
- _init;
- __libc_csu_fini;
- __libc_csu_init;
-
- # Chrome's main function. Exported for historical purposes.
- ChromeMain;
-
- # Program entry point.
- _start;
-
- # Memory allocation symbols. We want chrome and any libraries to
- # share the same heap, so it is correct to export these symbols.
- calloc;
- cfree;
- free;
- __free_hook;
- __libc_calloc;
- __libc_cfree;
- __libc_free;
- __libc_malloc;
- __libc_memalign;
- __libc_pvalloc;
- __libc_realloc;
- __libc_valloc;
- mallinfo;
- malloc;
- __malloc_hook;
- malloc_size;
- malloc_stats;
- malloc_usable_size;
- mallopt;
- memalign;
- __memalign_hook;
- __posix_memalign;
- posix_memalign;
- pvalloc;
- realloc;
- __realloc_hook;
- valloc;
-
- # Various flavors of operator new and operator delete.
- _ZdaPv;
- _ZdaPvm;
- _ZdaPvmSt11align_val_t;
- _ZdaPvRKSt9nothrow_t;
- _ZdaPvSt11align_val_t;
- _ZdaPvSt11align_val_tRKSt9nothrow_t;
- _ZdlPv;
- _ZdlPvm;
- _ZdlPvmSt11align_val_t;
- _ZdlPvRKSt9nothrow_t;
- _ZdlPvSt11align_val_t;
- _ZdlPvSt11align_val_tRKSt9nothrow_t;
- _Znam;
- _ZnamRKSt9nothrow_t;
- _ZnamSt11align_val_t;
- _ZnamSt11align_val_tRKSt9nothrow_t;
- _Znwm;
- _ZnwmRKSt9nothrow_t;
- _ZnwmSt11align_val_t;
- _ZnwmSt11align_val_tRKSt9nothrow_t;
-
- # Various flavors of localtime(). These are exported by the chrome
- # sandbox to intercept calls to localtime(), which would otherwise
- # fail in untrusted processes that don't have permission to read
- # /etc/localtime. These overrides forward the request to the browser
- # process, which uses dlsym(localtime) to make the real calls.
- localtime;
- localtime64;
- localtime64_r;
- localtime_r;
-
- # The SafeStack runtime overrides thread creation routines to allocate shadow
- # stacks on thread creation.
- pthread_create;
-
-local:
- *;
-};
diff --git a/chromium/build/mac_toolchain.py b/chromium/build/mac_toolchain.py
index 24ee355e262..9f9d2745f2c 100755
--- a/chromium/build/mac_toolchain.py
+++ b/chromium/build/mac_toolchain.py
@@ -31,6 +31,13 @@ MAC_TOOLCHAIN_VERSION = '8E2002'
# 16 is the major version number for macOS 10.12.
MAC_MINIMUM_OS_VERSION = 16
+# The toolchain will not be downloaded if the maximum OS version is exceeded.
+# 17 is the major version number for macOS 10.13. Xcode 8 does not run on macOS
+# 10.14.
+# TODO(https://crbug.com/780980): Once we build with 10.13 SDK, Xcode 9, we
+# should be able to remove this upper bound.
+MAC_MAXIMUM_OS_VERSION = 17
+
MAC_TOOLCHAIN_INSTALLER = 'mac_toolchain'
# Absolute path to src/ directory.
@@ -46,7 +53,9 @@ STAMP_FILE = os.path.join(TOOLCHAIN_ROOT, 'toolchain_build_revision')
def PlatformMeetsHermeticXcodeRequirements():
- return int(platform.release().split('.')[0]) >= MAC_MINIMUM_OS_VERSION
+ major_version = int(platform.release().split('.')[0])
+ return (major_version >= MAC_MINIMUM_OS_VERSION and
+ major_version <= MAC_MAXIMUM_OS_VERSION)
def _UseHermeticToolchain():
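
PlatformMeetsHermeticXcodeRequirements now bounds the Darwin kernel major version reported by platform.release() on both ends; for reference, a sketch of the Darwin-to-macOS mapping those constants encode:

    import platform

    def darwin_major(release=None):
      # platform.release() returns the Darwin kernel version, e.g. '17.7.0'.
      return int((release or platform.release()).split('.')[0])

    # Darwin 16.x is macOS 10.12, Darwin 17.x is macOS 10.13.
    assert darwin_major('16.7.0') == 16  # macOS 10.12: allowed (minimum).
    assert darwin_major('18.2.0') == 18  # macOS 10.14: rejected (above maximum).
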
diff --git a/chromium/build/sanitizers/OWNERS b/chromium/build/sanitizers/OWNERS
index 3059b0e42aa..e9a248c4af4 100644
--- a/chromium/build/sanitizers/OWNERS
+++ b/chromium/build/sanitizers/OWNERS
@@ -1,4 +1,10 @@
-glider@chromium.org
+ochang@chromium.org
eugenis@chromium.org
+glider@chromium.org
+inferno@chromium.org
+mbarbella@chromium.org
+metzman@chromium.org
+mmoroz@chromium.org
+rnk@chromium.org
per-file tsan_suppressions.cc=*
per-file lsan_suppressions.cc=*
diff --git a/chromium/build/secondary/third_party/android_tools/AndroidManifest.xml.jinja2 b/chromium/build/secondary/third_party/android_tools/AndroidManifest.xml.jinja2
new file mode 100644
index 00000000000..81083cc4f3f
--- /dev/null
+++ b/chromium/build/secondary/third_party/android_tools/AndroidManifest.xml.jinja2
@@ -0,0 +1,14 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright 2018 The Chromium Authors. All rights reserved.
+ Use of this source code is governed by a BSD-style license that can be
+ found in the LICENSE file. -->
+
+<!-- package name must be unique -->
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+ xmlns:tools="http://schemas.android.com/tools"
+ package="com.android.test_{{ library_name }}">
+ <application>
+ <uses-library android:name="android.test.{{ library_name }}"
+ android:required="false" />
+ </application>
+</manifest>
diff --git a/chromium/build/secondary/third_party/android_tools/BUILD.gn b/chromium/build/secondary/third_party/android_tools/BUILD.gn
index 902bdbf5956..bfc217fd54f 100644
--- a/chromium/build/secondary/third_party/android_tools/BUILD.gn
+++ b/chromium/build/secondary/third_party/android_tools/BUILD.gn
@@ -35,18 +35,48 @@ if (enable_java_templates) {
jar_path = android_sdk_jar
}
+ template("android_test_jar_with_manifest") {
+ _library_name = invoker.library_name
+
+ _manifest_output_path = "$target_gen_dir/$target_name/AndroidManifest.xml"
+ _manifest_target = "${target_name}__manifest"
+ jinja_template(_manifest_target) {
+ testonly = true
+ input = "//build/secondary/third_party/android_tools/AndroidManifest.xml.jinja2"
+ output = _manifest_output_path
+ variables = [ "library_name=$_library_name" ]
+ }
+
+ _resources_target = "${target_name}__resources"
+ android_resources(_resources_target) {
+ testonly = true
+ resource_dirs = []
+ android_manifest = _manifest_output_path
+ android_manifest_dep = ":$_manifest_target"
+ }
+
+ java_group(target_name) {
+ testonly = true
+ deps = [
+ ":$_resources_target",
+ ]
+ input_jars_paths =
+ [ "${android_sdk}/optional/android.test.${_library_name}.jar" ]
+ }
+ }
+
# The android test libraries below are part of the main SDK jar
# and are linked by default on O and below. Starting in P, they
# exist in their own libraries that are present on device and are
# available to be linked against but aren't linked by default.
- android_system_java_prebuilt("android_test_base_java") {
- jar_path = "${android_sdk}/optional/android.test.base.jar"
+ android_test_jar_with_manifest("android_test_base_java") {
+ library_name = "base"
}
- android_system_java_prebuilt("android_test_mock_java") {
- jar_path = "${android_sdk}/optional/android.test.mock.jar"
+ android_test_jar_with_manifest("android_test_mock_java") {
+ library_name = "mock"
}
- android_system_java_prebuilt("android_test_runner_java") {
- jar_path = "${android_sdk}/optional/android.test.runner.jar"
+ android_test_jar_with_manifest("android_test_runner_java") {
+ library_name = "runner"
}
android_library("android_support_chromium_java") {
diff --git a/chromium/build/toolchain/OWNERS b/chromium/build/toolchain/OWNERS
index 85d8d311602..b329d48bfba 100644
--- a/chromium/build/toolchain/OWNERS
+++ b/chromium/build/toolchain/OWNERS
@@ -1,2 +1,8 @@
dpranke@chromium.org
scottmg@chromium.org
+
+# Clang Static Analyzer.
+per-file clang_static_analyzer*=mmoroz@chromium.org
+
+# Code Coverage.
+per-file *code_coverage*=mmoroz@chromium.org
diff --git a/chromium/build/toolchain/clang_code_coverage_wrapper.py b/chromium/build/toolchain/clang_code_coverage_wrapper.py
new file mode 100755
index 00000000000..f6f9984740b
--- /dev/null
+++ b/chromium/build/toolchain/clang_code_coverage_wrapper.py
@@ -0,0 +1,83 @@
+#!/usr/bin/env python
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Adds code coverage flags to the invocations of the Clang C/C++ compiler.
+
+This script is used to instrument a subset of the source files, and the list of
+files to instrument is specified by an input file that is passed to this script
+as a command-line argument.
+
+The path to the coverage instrumentation input file should be relative to the
+root build directory. The file consists of one source-file path per line, and
+those paths must also be relative to the root build directory, e.g.
+../../base/task/post_task.cc for build directory 'out/Release'.
+
+One caveat with this compiler wrapper is that it may introduce unexpected
+behaviors in incremental builds when the file path to the coverage
+instrumentation input file changes between consecutive runs, so callers of this
+script are strongly advised to always use the same path such as
+"${root_build_dir}/coverage_instrumentation_input.txt".
+
+It's worth noting that on try job builders, if the contents of the
+instrumentation file change so that a file no longer needs to be instrumented,
+the file will be recompiled automatically: if try job B runs after try job A,
+the files that were instrumented in A will be updated (i.e., reverted to the
+checked-in version) in B, and so ninja will consider them out of date and
+recompile them.
+
+Example usage:
+ clang_code_coverage_wrapper.py \\
+ --files-to-instrument=coverage_instrumentation_input.txt
+"""
+
+import argparse
+import os
+import subprocess
+import sys
+
+# Flags used to enable coverage instrumentation.
+_COVERAGE_FLAGS = ['-fprofile-instr-generate', '-fcoverage-mapping']
+
+
+def main():
+ # TODO(crbug.com/898695): Make this wrapper work on Windows platform.
+ arg_parser = argparse.ArgumentParser()
+ arg_parser.usage = __doc__
+ arg_parser.add_argument(
+ '--files-to-instrument',
+ type=str,
+ required=True,
+ help='Path to a file that contains a list of file names to instrument.')
+ arg_parser.add_argument('args', nargs=argparse.REMAINDER)
+ parsed_args = arg_parser.parse_args()
+
+ if not os.path.isfile(parsed_args.files_to_instrument):
+ raise Exception('Path to the coverage instrumentation file: "%s" doesn\'t '
+ 'exist.' % parsed_args.files_to_instrument)
+
+ compile_command = parsed_args.args
+ try:
+    # The command is assumed to use Clang as the compiler, and the path to the
+    # source file to follow the -c argument; that path is relative to the root
+    # build directory. For example:
+ # clang++ -fvisibility=hidden -c ../../base/files/file_path.cc -o \
+ # obj/base/base/file_path.o
+ index_dash_c = compile_command.index('-c')
+ except ValueError:
+ print '-c argument is not found in the compile command.'
+ raise
+
+ if index_dash_c + 1 >= len(compile_command):
+ raise Exception('Source file to be compiled is missing from the command.')
+
+ compile_source_file = compile_command[index_dash_c + 1]
+ with open(parsed_args.files_to_instrument) as f:
+ if compile_source_file + '\n' in f.read():
+ compile_command.extend(_COVERAGE_FLAGS)
+
+ return subprocess.call(compile_command)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
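
The wrapper only appends _COVERAGE_FLAGS when the path following -c exactly matches a line in the input file. A hypothetical driver exercising that decision (file and target names invented for illustration):

    import subprocess

    # Instrumentation list: one path per line, relative to the build directory.
    with open('coverage_instrumentation_input.txt', 'w') as f:
      f.write('../../base/task/post_task.cc\n')

    # Only the first compile gets -fprofile-instr-generate -fcoverage-mapping.
    for source in ['../../base/task/post_task.cc',
                   '../../base/files/file_path.cc']:
      subprocess.call(
          ['python', 'clang_code_coverage_wrapper.py',
           '--files-to-instrument=coverage_instrumentation_input.txt',
           'clang++', '-c', source, '-o', source.split('/')[-1] + '.o'])
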
diff --git a/chromium/build/toolchain/clang_static_analyzer_wrapper.py b/chromium/build/toolchain/clang_static_analyzer_wrapper.py
index 1c54d7267cd..a9882a3f51e 100755
--- a/chromium/build/toolchain/clang_static_analyzer_wrapper.py
+++ b/chromium/build/toolchain/clang_static_analyzer_wrapper.py
@@ -25,7 +25,6 @@ analyzer_option_flags = [
'-fdiagnostics-show-option',
'-analyzer-checker=cplusplus',
'-analyzer-opt-analyze-nested-blocks',
- '-analyzer-eagerly-assume',
'-analyzer-output=text',
'-analyzer-config',
'suppress-c++-stdlib=true',
diff --git a/chromium/build/toolchain/gcc_toolchain.gni b/chromium/build/toolchain/gcc_toolchain.gni
index e7c88708c36..d09981b6abe 100644
--- a/chromium/build/toolchain/gcc_toolchain.gni
+++ b/chromium/build/toolchain/gcc_toolchain.gni
@@ -4,6 +4,7 @@
import("//build/config/clang/clang.gni")
import("//build/config/compiler/compiler.gni")
+import("//build/config/coverage/coverage.gni")
import("//build/config/sanitizers/sanitizers.gni")
import("//build/config/v8_target_cpu.gni")
import("//build/toolchain/cc_wrapper.gni")
@@ -175,40 +176,51 @@ template("gcc_toolchain") {
if (toolchain_uses_goma &&
(!has_gomacc_path || invoker_toolchain_args.current_os != "chromeos")) {
goma_path = "$goma_dir/gomacc"
+ compiler_prefix = "${goma_path} "
+ } else {
+ compiler_prefix = "${toolchain_cc_wrapper} "
+ }
- # Use the static analysis script if static analysis is turned on
- # AND the tool has not opted out by setting
- # 'is_clang_static_analysis_supported' to false.
- if (is_clang && use_clang_static_analyzer &&
- (!defined(invoker.is_clang_analysis_supported) ||
- invoker.is_clang_analysis_supported)) {
- compiler_prefix = "${analyzer_wrapper} ${goma_path} "
-
- # Create a distinct variable for "asm", since analysis runs pass
- # a bunch of flags to clang/clang++ that are nonsensical on assembler
- # runs.
- asm = "${goma_path} ${invoker.cc}"
- } else {
- compiler_prefix = "${goma_path} "
- }
+  # Create a distinct variable for "asm", since analysis runs pass a bunch
+ # of flags to clang/clang++ that are nonsensical on assembler runs.
+ asm_prefix = compiler_prefix
+
+ # Use the static analysis script if static analysis is turned on
+ # AND the tool has not opted out by setting
+ # 'is_clang_static_analysis_supported' to false.
+ if (is_clang && use_clang_static_analyzer &&
+ (!defined(invoker.is_clang_analysis_supported) ||
+ invoker.is_clang_analysis_supported)) {
+ compiler_prefix = "${analyzer_wrapper} " + compiler_prefix
+ }
+
+ if (defined(toolchain_args.coverage_instrumentation_input_file)) {
+ toolchain_coverage_instrumentation_input_file =
+ toolchain_args.coverage_instrumentation_input_file
} else {
- if (is_clang && use_clang_static_analyzer &&
- (!defined(invoker.is_clang_analysis_supported) ||
- invoker.is_clang_analysis_supported)) {
- compiler_prefix = "${analyzer_wrapper} "
- asm = invoker.cc
- } else {
- compiler_prefix = "${toolchain_cc_wrapper} "
- }
+ toolchain_coverage_instrumentation_input_file =
+ coverage_instrumentation_input_file
+ }
+ _use_clang_coverage_wrapper =
+ toolchain_coverage_instrumentation_input_file != ""
+ if (_use_clang_coverage_wrapper) {
+ assert(!use_clang_static_analyzer,
+ "Clang static analyzer wrapper and Clang code coverage wrapper " +
+ "cannot be used together.")
+
+ _coverage_wrapper =
+ rebase_path("//build/toolchain/clang_code_coverage_wrapper.py",
+ root_build_dir) + " --files-to-instrument=" +
+ rebase_path(toolchain_coverage_instrumentation_input_file,
+ root_build_dir)
+ compiler_prefix = "${_coverage_wrapper} " + compiler_prefix
}
cc = compiler_prefix + invoker.cc
cxx = compiler_prefix + invoker.cxx
+ asm = asm_prefix + invoker.cc
ar = invoker.ar
ld = invoker.ld
- if (!defined(asm)) {
- asm = cc
- }
if (defined(invoker.readelf)) {
readelf = invoker.readelf
} else {
diff --git a/chromium/build/toolchain/mac/BUILD.gn b/chromium/build/toolchain/mac/BUILD.gn
index 441ff0f9acc..bd37cc76a56 100644
--- a/chromium/build/toolchain/mac/BUILD.gn
+++ b/chromium/build/toolchain/mac/BUILD.gn
@@ -8,6 +8,7 @@
import("../goma.gni")
import("//build/config/clang/clang.gni")
+import("//build/config/coverage/coverage.gni")
if (is_ios) {
import("//build/config/ios/ios_sdk.gni")
}
@@ -135,6 +136,29 @@ template("mac_toolchain") {
ld = cxx
}
+ if (defined(toolchain_args.coverage_instrumentation_input_file)) {
+ toolchain_coverage_instrumentation_input_file =
+ toolchain_args.coverage_instrumentation_input_file
+ } else {
+ toolchain_coverage_instrumentation_input_file =
+ coverage_instrumentation_input_file
+ }
+ _use_clang_coverage_wrapper =
+ toolchain_coverage_instrumentation_input_file != ""
+ if (_use_clang_coverage_wrapper) {
+ assert(!use_clang_static_analyzer,
+ "Clang static analyzer wrapper and Clang code coverage wrapper " +
+ "cannot be used together.")
+
+ _coverage_wrapper =
+ rebase_path("//build/toolchain/clang_code_coverage_wrapper.py",
+ root_build_dir) + " --files-to-instrument=" +
+ rebase_path(toolchain_coverage_instrumentation_input_file,
+ root_build_dir)
+ cc = _coverage_wrapper + " ${cc}"
+ cxx = _coverage_wrapper + " ${cxx}"
+ }
+
linker_driver =
"TOOL_VERSION=${tool_versions.linker_driver} " +
rebase_path("//build/toolchain/mac/linker_driver.py", root_build_dir)
diff --git a/chromium/build/toolchain/nacl_toolchain.gni b/chromium/build/toolchain/nacl_toolchain.gni
index 566f0714903..10f4d99f539 100644
--- a/chromium/build/toolchain/nacl_toolchain.gni
+++ b/chromium/build/toolchain/nacl_toolchain.gni
@@ -54,6 +54,7 @@ template("nacl_toolchain") {
# We do not support clang code coverage in the NaCl toolchains.
use_clang_coverage = false
+ coverage_instrumentation_input_file = ""
}
}
}
diff --git a/chromium/build/toolchain/toolchain.gni b/chromium/build/toolchain/toolchain.gni
index 7a87019b8b8..83efcd4f114 100644
--- a/chromium/build/toolchain/toolchain.gni
+++ b/chromium/build/toolchain/toolchain.gni
@@ -23,8 +23,6 @@ declare_args() {
use_xcode_clang = is_ios && is_official_build
# Used for binary size analysis.
- # Currently disabled on LLD because of a bug (fixed upstream).
- # See https://crbug.com/716209.
generate_linker_map = is_android && is_official_build
# Use absolute file paths in the compiler diagnostics and __FILE__ macro
diff --git a/chromium/build/toolchain/win/BUILD.gn b/chromium/build/toolchain/win/BUILD.gn
index 3c6c9d05d2c..07319b79899 100644
--- a/chromium/build/toolchain/win/BUILD.gn
+++ b/chromium/build/toolchain/win/BUILD.gn
@@ -6,6 +6,7 @@ import("//build/config/clang/clang.gni")
import("//build/config/compiler/compiler.gni")
import("//build/config/sanitizers/sanitizers.gni")
import("//build/config/win/visual_studio_version.gni")
+import("//build/toolchain/cc_wrapper.gni")
import("//build/toolchain/clang_static_analyzer.gni")
import("//build/toolchain/goma.gni")
import("//build/toolchain/toolchain.gni")
@@ -28,8 +29,14 @@ if (use_goma) {
} else {
goma_prefix = "$goma_dir/gomacc "
}
+ clang_prefix = goma_prefix
} else {
goma_prefix = ""
+ if (cc_wrapper != "") {
+ clang_prefix = cc_wrapper + " "
+ } else {
+ clang_prefix = ""
+ }
}
# Copy the VS runtime DLL for the default toolchain to the root build directory
@@ -49,6 +56,12 @@ if (current_toolchain == default_toolchain) {
])
}
+if (host_os == "win") {
+ clang_cl = "clang-cl.exe"
+} else {
+ clang_cl = "clang-cl"
+}
+
# Parameters:
# environment: File name of environment file.
#
@@ -192,7 +205,7 @@ template("msvc_toolchain") {
}
tool("rc") {
- command = "$python_path $tool_wrapper_path rc-wrapper $env rc.exe {{defines}} {{include_dirs}} /fo{{output}} {{source}}"
+ command = "$python_path $tool_wrapper_path rc-wrapper $env rc.exe /nologo {{defines}} {{include_dirs}} /fo{{output}} {{source}}"
depsformat = "msvc"
outputs = [
"$object_subdir/{{source_name_part}}.res",
@@ -201,12 +214,32 @@ template("msvc_toolchain") {
}
tool("asm") {
- if (toolchain_args.current_cpu == "x64") {
- ml = "ml64.exe"
+ if (toolchain_args.current_cpu == "arm64") {
+ prefix = rebase_path("$clang_base_path/bin", root_build_dir)
+ ml = "${clang_prefix}${prefix}/${clang_cl} --target=arm64-windows"
} else {
- ml = "ml.exe"
+ if (toolchain_args.current_cpu == "x64") {
+ ml = "ml64.exe /nologo"
+ } else {
+ ml = "ml.exe /nologo"
+ }
+ if (use_lld) {
+ # Wrap ml(64).exe with a script that makes its output deterministic.
+ # It's lld only because the script zaps obj Timestamp which
+ # link.exe /incremental looks at.
+ # TODO(https://crbug.com/762167): If we end up writing an llvm-ml64,
+ # make sure it has deterministic output (maybe with /Brepro or
+ # something) and remove this wrapper.
+ ml_py = rebase_path("ml.py", root_build_dir)
+ ml = "$python_path $ml_py $ml"
+ }
+ }
+ command = "$python_path $tool_wrapper_path asm-wrapper $env $ml {{defines}} {{include_dirs}} {{asmflags}} "
+ if (toolchain_args.current_cpu == "arm64") {
+ command += "-c -o{{output}} {{source}}"
+ } else {
+ command += "/c /Fo{{output}} {{source}}"
}
- command = "$python_path $tool_wrapper_path asm-wrapper $env $ml {{defines}} {{include_dirs}} {{asmflags}} /c /Fo{{output}} {{source}}"
description = "ASM {{output}}"
outputs = [
"$object_subdir/{{source_name_part}}.obj",
@@ -332,12 +365,6 @@ template("msvc_toolchain") {
}
}
-if (host_os == "win") {
- clang_cl = "clang-cl.exe"
-} else {
- clang_cl = "clang-cl"
-}
-
if (target_cpu == "x86" || target_cpu == "x64") {
win_build_host_cpu = target_cpu
} else {
@@ -377,7 +404,7 @@ if (win_build_host_cpu != "x64") {
msvc_toolchain("win_clang_" + win_build_host_cpu) {
environment = "environment." + win_build_host_cpu
prefix = rebase_path("$clang_base_path/bin", root_build_dir)
- cl = "${goma_prefix}$prefix/${clang_cl}"
+ cl = "${clang_prefix}$prefix/${clang_cl}"
sys_include_flags = "${build_cpu_toolchain_data.include_flags_imsvc}"
if (host_os != "win") {
# For win cross build.
@@ -392,25 +419,28 @@ if (win_build_host_cpu != "x64") {
}
}
-# 64-bit toolchains.
-x64_toolchain_data = exec_script("setup_toolchain.py",
- [
- visual_studio_path,
- windows_sdk_path,
- visual_studio_runtime_dirs,
- "win",
- "x64",
- "environment.x64",
- ],
- "scope")
-
-template("win_x64_toolchains") {
+# 64-bit toolchains, including x64 and arm64.
+template("win_64bit_toolchains") {
+ assert(defined(invoker.toolchain_arch))
+ toolchain_arch = invoker.toolchain_arch
+
+ win_64bit_toolchain_data = exec_script("setup_toolchain.py",
+ [
+ visual_studio_path,
+ windows_sdk_path,
+ visual_studio_runtime_dirs,
+ "win",
+ toolchain_arch,
+ "environment." + toolchain_arch,
+ ],
+ "scope")
+
msvc_toolchain(target_name) {
- environment = "environment.x64"
- cl = "${goma_prefix}\"${x64_toolchain_data.vc_bin_dir}/cl.exe\""
+ environment = "environment." + toolchain_arch
+ cl = "${goma_prefix}\"${win_64bit_toolchain_data.vc_bin_dir}/cl.exe\""
if (host_os != "win") {
# For win cross build
- sys_lib_flags = "${x64_toolchain_data.libpath_flags}"
+ sys_lib_flags = "${win_64bit_toolchain_data.libpath_flags}"
}
toolchain_args = {
@@ -419,18 +449,21 @@ template("win_x64_toolchains") {
}
is_clang = false
current_os = "win"
- current_cpu = "x64"
+ current_cpu = toolchain_arch
}
}
msvc_toolchain("win_clang_" + target_name) {
- environment = "environment.x64"
+ environment = "environment." + toolchain_arch
prefix = rebase_path("$clang_base_path/bin", root_build_dir)
- cl = "${goma_prefix}$prefix/${clang_cl}"
- sys_include_flags = "${x64_toolchain_data.include_flags_imsvc}"
+ cl = "${clang_prefix}$prefix/${clang_cl}"
+ if (toolchain_arch == "arm64") {
+ cl += " --target=arm64-windows"
+ }
+ sys_include_flags = "${win_64bit_toolchain_data.include_flags_imsvc}"
if (host_os != "win") {
# For win cross build
- sys_lib_flags = "${x64_toolchain_data.libpath_flags}"
+ sys_lib_flags = "${win_64bit_toolchain_data.libpath_flags}"
}
toolchain_args = {
@@ -439,23 +472,34 @@ template("win_x64_toolchains") {
}
is_clang = true
current_os = "win"
- current_cpu = "x64"
+ current_cpu = toolchain_arch
}
}
}
-win_x64_toolchains("x64") {
+win_64bit_toolchains("x64") {
+ toolchain_arch = "x64"
toolchain_args = {
# Use the defaults.
}
}
+if (target_cpu == "arm64") {
+ win_64bit_toolchains("arm64") {
+ toolchain_arch = "arm64"
+ toolchain_args = {
+ # Use the defaults.
+ }
+ }
+}
+
# The nacl_win64 toolchain is nearly identical to the plain x64 toolchain.
# It's used solely for building nacl64.exe (//components/nacl/broker:nacl64).
# The only reason it's a separate toolchain is so that it can force
# is_component_build to false in the toolchain_args() block, because
# building nacl64.exe in component style does not work.
-win_x64_toolchains("nacl_win64") {
+win_64bit_toolchains("nacl_win64") {
+ toolchain_arch = "x64"
toolchain_args = {
is_component_build = false
}
diff --git a/chromium/build/toolchain/win/midl.gni b/chromium/build/toolchain/win/midl.gni
index 9ff29c676bd..b46f4cd538c 100644
--- a/chromium/build/toolchain/win/midl.gni
+++ b/chromium/build/toolchain/win/midl.gni
@@ -77,6 +77,9 @@ template("midl") {
} else if (current_cpu == "x64") {
win_tool_arch = "environment.x64"
idl_target_platform = "x64"
+ } else if (current_cpu == "arm64") {
+ win_tool_arch = "environment.arm64"
+ idl_target_platform = "arm64"
} else {
assert(false, "Need environment for this arch")
}
diff --git a/chromium/build/toolchain/win/ml.py b/chromium/build/toolchain/win/ml.py
new file mode 100755
index 00000000000..877c584c577
--- /dev/null
+++ b/chromium/build/toolchain/win/ml.py
@@ -0,0 +1,287 @@
+#!/usr/bin/env python
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Wraps ml.exe or ml64.exe and postprocesses the output to be deterministic.
+Sets timestamp in .obj file to 0, hence incompatible with link.exe /incremental.
+
+Use by prefixing the ml(64).exe invocation with this script:
+ python ml.py ml.exe [args...]"""
+
+import array
+import collections
+import struct
+import subprocess
+import sys
+
+
+class Struct(object):
+ """A thin wrapper around the struct module that returns a namedtuple"""
+ def __init__(self, name, *args):
+ """Pass the name of the return type, and then an interleaved list of
+ format strings as used by the struct module and of field names."""
+ self.fmt = '<' + ''.join(args[0::2])
+ self.type = collections.namedtuple(name, args[1::2])
+
+ def pack_into(self, buffer, offset, data):
+ return struct.pack_into(self.fmt, buffer, offset, *data)
+
+ def unpack_from(self, buffer, offset=0):
+ return self.type(*struct.unpack_from(self.fmt, buffer, offset))
+
+ def size(self):
+ return struct.calcsize(self.fmt)
+
+
+def Subtract(nt, **kwargs):
+ """Subtract(nt, f=2) returns a new namedtuple with 2 subtracted from nt.f"""
+ return nt._replace(**{k: getattr(nt, k) - v for k, v in kwargs.iteritems()})
+
+
+def MakeDeterministic(objdata):
+ # Takes data produced by ml(64).exe (without any special flags) and
+ # 1. Sets the timestamp to 0
+ # 2. Strips the .debug$S section (which contains an unwanted absolute path)
+
+ # This makes several assumptions about ml's output:
+ # - Section data is in the same order as the corresponding section headers:
+  #   section headers preceding the .debug$S section header have their data
+  #   preceding the .debug$S section data; likewise for section headers
+ # following the .debug$S section.
+ # - The .debug$S section contains only the absolute path to the obj file and
+  #   nothing else; in particular, there's only a single entry in the symbol
+  #   table referring to the .debug$S section.
+ # - There are no COFF line number entries.
+ # - There's no IMAGE_SYM_CLASS_CLR_TOKEN symbol.
+ # These seem to hold in practice; if they stop holding this script needs to
+ # become smarter.
+
+ objdata = array.array('c', objdata) # Writable, e.g. via struct.pack_into.
+
+ # Read coff header.
+ COFFHEADER = Struct('COFFHEADER',
+ 'H', 'Machine',
+ 'H', 'NumberOfSections',
+ 'I', 'TimeDateStamp',
+ 'I', 'PointerToSymbolTable',
+ 'I', 'NumberOfSymbols',
+
+ 'H', 'SizeOfOptionalHeader',
+ 'H', 'Characteristics')
+ coff_header = COFFHEADER.unpack_from(objdata)
+ assert coff_header.SizeOfOptionalHeader == 0 # Only set for binaries.
+
+ # Read section headers following coff header.
+ SECTIONHEADER = Struct('SECTIONHEADER',
+ '8s', 'Name',
+ 'I', 'VirtualSize',
+ 'I', 'VirtualAddress',
+
+ 'I', 'SizeOfRawData',
+ 'I', 'PointerToRawData',
+ 'I', 'PointerToRelocations',
+ 'I', 'PointerToLineNumbers',
+
+ 'H', 'NumberOfRelocations',
+ 'H', 'NumberOfLineNumbers',
+ 'I', 'Characteristics')
+ section_headers = []
+ debug_section_index = -1
+ for i in range(0, coff_header.NumberOfSections):
+ section_header = SECTIONHEADER.unpack_from(
+ objdata, offset=COFFHEADER.size() + i * SECTIONHEADER.size())
+ assert not section_header[0].startswith('/') # Support short names only.
+ section_headers.append(section_header)
+
+ if section_header.Name == '.debug$S':
+ assert debug_section_index == -1
+ debug_section_index = i
+ assert debug_section_index != -1
+
+ data_start = COFFHEADER.size() + len(section_headers) * SECTIONHEADER.size()
+
+ # Verify the .debug$S section looks like we expect.
+ assert section_headers[debug_section_index].Name == '.debug$S'
+ assert section_headers[debug_section_index].VirtualSize == 0
+ assert section_headers[debug_section_index].VirtualAddress == 0
+ debug_size = section_headers[debug_section_index].SizeOfRawData
+ debug_offset = section_headers[debug_section_index].PointerToRawData
+ assert section_headers[debug_section_index].PointerToRelocations == 0
+ assert section_headers[debug_section_index].PointerToLineNumbers == 0
+ assert section_headers[debug_section_index].NumberOfRelocations == 0
+ assert section_headers[debug_section_index].NumberOfLineNumbers == 0
+
+  # Make sure sections in front of .debug$S have their data preceding it.
+ for header in section_headers[:debug_section_index]:
+ assert header.PointerToRawData < debug_offset
+ assert header.PointerToRelocations < debug_offset
+ assert header.PointerToLineNumbers < debug_offset
+
+  # Make sure sections after .debug$S have their data following it.
+ for header in section_headers[debug_section_index + 1:]:
+ # Make sure the .debug$S data is at the very end of section data:
+ assert header.PointerToRawData > debug_offset
+ assert header.PointerToRelocations == 0
+ assert header.PointerToLineNumbers == 0
+
+ # Make sure the first non-empty section's data starts right after the section
+ # headers.
+ for section_header in section_headers:
+ if section_header.PointerToRawData == 0:
+ assert section_header.PointerToRelocations == 0
+ assert section_header.PointerToLineNumbers == 0
+ continue
+ assert section_header.PointerToRawData == data_start
+ break
+
+ # Make sure the symbol table (and hence, string table) appear after the last
+ # section:
+ assert (coff_header.PointerToSymbolTable >=
+ section_headers[-1].PointerToRawData + section_headers[-1].SizeOfRawData)
+
+ # The symbol table contains a symbol for the no-longer-present .debug$S
+ # section. If we leave it there, lld-link will complain:
+ #
+ # lld-link: error: .debug$S should not refer to non-existent section 5
+ #
+ # so we need to remove that symbol table entry as well. This shifts symbol
+ # entries around and we need to update symbol table indices in:
+ # - relocations
+ # - line number records (never present)
+  # - aux symbol entries (never present in ml output)
+ SYM = Struct('SYM',
+ '8s', 'Name',
+ 'I', 'Value',
+ 'h', 'SectionNumber', # Note: Signed!
+ 'H', 'Type',
+
+ 'B', 'StorageClass',
+ 'B', 'NumberOfAuxSymbols')
+ i = 0
+ debug_sym = -1
+ while i < coff_header.NumberOfSymbols:
+ sym_offset = coff_header.PointerToSymbolTable + i * SYM.size()
+ sym = SYM.unpack_from(objdata, sym_offset)
+
+ # 107 is IMAGE_SYM_CLASS_CLR_TOKEN, which has aux entry "CLR Token
+ # Definition", which contains a symbol index. Check it's never present.
+ assert sym.StorageClass != 107
+
+ # Note: sym.SectionNumber is 1-based, debug_section_index is 0-based.
+ if sym.SectionNumber - 1 == debug_section_index:
+ assert debug_sym == -1, 'more than one .debug$S symbol found'
+ debug_sym = i
+ # Make sure the .debug$S symbol looks like we expect.
+ # In particular, it should have exactly one aux symbol.
+ assert sym.Name == '.debug$S'
+ assert sym.Value == 0
+ assert sym.Type == 0
+ assert sym.StorageClass == 3
+ assert sym.NumberOfAuxSymbols == 1
+ elif sym.SectionNumber > debug_section_index:
+ sym = Subtract(sym, SectionNumber=1)
+ SYM.pack_into(objdata, sym_offset, sym)
+ i += 1 + sym.NumberOfAuxSymbols
+ assert debug_sym != -1, '.debug$S symbol not found'
+
+ # Note: Usually the .debug$S section is the last, but for files saying
+ # `includelib foo.lib`, like safe_terminate_process.asm in 32-bit builds,
+ # this isn't true: .drectve is after .debug$S.
+
+ # Update symbol table indices in relocations.
+ # There are a few processor types that have one or two relocation types
+ # where SymbolTableIndex has a different meaning, but not for x86.
+ REL = Struct('REL',
+ 'I', 'VirtualAddress',
+ 'I', 'SymbolTableIndex',
+ 'H', 'Type')
+ for header in section_headers[0:debug_section_index]:
+ for j in range(0, header.NumberOfRelocations):
+ rel_offset = header.PointerToRelocations + j * REL.size()
+ rel = REL.unpack_from(objdata, rel_offset)
+ assert rel.SymbolTableIndex != debug_sym
+ if rel.SymbolTableIndex > debug_sym:
+ rel = Subtract(rel, SymbolTableIndex=2)
+ REL.pack_into(objdata, rel_offset, rel)
+
+ # Update symbol table indices in line numbers -- just check they don't exist.
+ for header in section_headers:
+ assert header.NumberOfLineNumbers == 0
+
+  # Now that all indices are updated, remove the symbol table entry referring
+  # to .debug$S and its aux entry.
+ # .debug$S and its aux entry.
+ del objdata[coff_header.PointerToSymbolTable + debug_sym * SYM.size():
+ coff_header.PointerToSymbolTable + (debug_sym + 2) * SYM.size()]
+
+ # Now we know that it's safe to write out the input data, with just the
+ # timestamp overwritten to 0, the last section header cut out (and the
+ # offsets of all other section headers decremented by the size of that
+ # one section header), and the last section's data cut out. The symbol
+ # table offset needs to be reduced by one section header and the size of
+ # the missing section.
+  # (The COFF spec only requires on-disk sections to be aligned in image
+  # files; for obj files it's not required. If that wasn't the case, deleting
+  # slices of data would not generally be safe.)
+
+ # Update section offsets and remove .debug$S section data.
+ for i in range(0, debug_section_index):
+ header = section_headers[i]
+ if header.SizeOfRawData:
+ header = Subtract(header, PointerToRawData=SECTIONHEADER.size())
+ if header.NumberOfRelocations:
+ header = Subtract(header, PointerToRelocations=SECTIONHEADER.size())
+ if header.NumberOfLineNumbers:
+ header = Subtract(header, PointerToLineNumbers=SECTIONHEADER.size())
+ SECTIONHEADER.pack_into(
+ objdata, COFFHEADER.size() + i * SECTIONHEADER.size(), header)
+ for i in range(debug_section_index + 1, len(section_headers)):
+ header = section_headers[i]
+ shift = SECTIONHEADER.size() + debug_size
+ if header.SizeOfRawData:
+ header = Subtract(header, PointerToRawData=shift)
+ if header.NumberOfRelocations:
+ header = Subtract(header, PointerToRelocations=shift)
+ if header.NumberOfLineNumbers:
+ header = Subtract(header, PointerToLineNumbers=shift)
+ SECTIONHEADER.pack_into(
+ objdata, COFFHEADER.size() + i * SECTIONHEADER.size(), header)
+
+ del objdata[debug_offset:debug_offset + debug_size]
+
+ # Finally, remove .debug$S section header and update coff header.
+ coff_header = coff_header._replace(TimeDateStamp=0)
+ coff_header = Subtract(coff_header,
+ NumberOfSections=1,
+ PointerToSymbolTable=SECTIONHEADER.size() + debug_size,
+ NumberOfSymbols=2)
+ COFFHEADER.pack_into(objdata, 0, coff_header)
+
+ del objdata[
+ COFFHEADER.size() + debug_section_index * SECTIONHEADER.size():
+ COFFHEADER.size() + (debug_section_index + 1) * SECTIONHEADER.size()]
+
+ # All done!
+ return objdata.tostring()
+
+
+def main():
+ ml_result = subprocess.call(sys.argv[1:])
+ if ml_result != 0:
+ return ml_result
+
+ objfile = None
+ for i in range(1, len(sys.argv)):
+ if sys.argv[i].startswith('/Fo'):
+ objfile = sys.argv[i][len('/Fo'):]
+ assert objfile, 'failed to find ml output'
+
+ with open(objfile, 'rb') as f:
+ objdata = f.read()
+ objdata = MakeDeterministic(objdata)
+ with open(objfile, 'wb') as f:
+ f.write(objdata)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
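
The Struct helper pairs a struct format string with a namedtuple so headers can be read and rewritten symmetrically. A small round-trip sketch, assuming ml.py is importable from the current directory:

    import array
    import struct

    from ml import Struct

    COFFHEADER = Struct('COFFHEADER',
                        'H', 'Machine',
                        'H', 'NumberOfSections',
                        'I', 'TimeDateStamp',
                        'I', 'PointerToSymbolTable',
                        'I', 'NumberOfSymbols',
                        'H', 'SizeOfOptionalHeader',
                        'H', 'Characteristics')

    # 0x8664 is IMAGE_FILE_MACHINE_AMD64; the other fields are dummy values.
    objdata = array.array(
        'c', struct.pack('<HHIIIHH', 0x8664, 1, 12345, 20, 0, 0, 0))
    header = COFFHEADER.unpack_from(objdata)
    COFFHEADER.pack_into(objdata, 0, header._replace(TimeDateStamp=0))
    assert COFFHEADER.unpack_from(objdata).TimeDateStamp == 0
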
diff --git a/chromium/build/toolchain/win/rc/rc.py b/chromium/build/toolchain/win/rc/rc.py
index 73da888806f..23387621c02 100755
--- a/chromium/build/toolchain/win/rc/rc.py
+++ b/chromium/build/toolchain/win/rc/rc.py
@@ -11,6 +11,7 @@ options:
-I<dir> Add include path.
-D<sym> Define a macro for the preprocessor.
/fo<out> Set path of output .res file.
+/nologo Ignored (rc.py doesn't print a logo by default).
/showIncludes Print referenced header and resource files."""
from __future__ import print_function
@@ -51,6 +52,8 @@ def ParseFlags():
file=sys.stderr)
sys.exit(1)
output = flag[3:]
+ elif flag == '/nologo':
+ pass
elif flag == '/showIncludes':
show_includes = True
elif (flag.startswith('-') or
diff --git a/chromium/build/toolchain/win/setup_toolchain.py b/chromium/build/toolchain/win/setup_toolchain.py
index 1ba5533c3ef..2c8675206f3 100644
--- a/chromium/build/toolchain/win/setup_toolchain.py
+++ b/chromium/build/toolchain/win/setup_toolchain.py
@@ -149,9 +149,6 @@ def _LoadToolchainEnv(cpu, sdk_dir, target_store):
# Store target must come before any SDK version declaration
if (target_store):
args.append(['store'])
- # Chromium requires the 10.0.17134.0 SDK - previous versions don't have
- # all of the required declarations.
- args.append('10.0.17134.0')
variables = _LoadEnvFromBat(args)
return _ExtractImportantEnvironment(variables)
diff --git a/chromium/build/toolchain/win/tool_wrapper.py b/chromium/build/toolchain/win/tool_wrapper.py
index cb0393ecd50..6f01ebf4026 100644
--- a/chromium/build/toolchain/win/tool_wrapper.py
+++ b/chromium/build/toolchain/win/tool_wrapper.py
@@ -168,16 +168,16 @@ class WinTool(object):
def ExecAsmWrapper(self, arch, *args):
"""Filter logo banner from invocations of asm.exe."""
env = self._GetEnv(arch)
+ if sys.platform == 'win32':
+      # Windows ARM64 uses clang-cl as the assembler, which uses '/' as the
+      # path separator; convert it to '\\' when running on Windows.
+ args = list(args) # *args is a tuple by default, which is read-only
+ args[0] = args[0].replace('/', '\\')
popen = subprocess.Popen(args, shell=True, env=env,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out, _ = popen.communicate()
for line in out.splitlines():
- # Split to avoid triggering license checks:
- if (not line.startswith('Copy' + 'right (C' +
- ') Microsoft Corporation') and
- not line.startswith('Microsoft (R) Macro Assembler') and
- not line.startswith(' Assembling: ') and
- line):
+ if not line.startswith(' Assembling: '):
print line
return popen.returncode
@@ -215,18 +215,7 @@ class WinTool(object):
# 2. Run Microsoft rc.exe.
if sys.platform == 'win32' and rc_exe_exit_code == 0:
- popen = subprocess.Popen(args, shell=True, env=env,
- stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
- out, _ = popen.communicate()
- # Filter logo banner from invocations of rc.exe. Older versions of RC
- # don't support the /nologo flag.
- for line in out.splitlines():
- if (not line.startswith('Microsoft (R) Windows (R) Resource Compiler')
- and not line.startswith('Copy' + 'right (C' +
- ') Microsoft Corporation')
- and line):
- print line
- rc_exe_exit_code = popen.returncode
+ rc_exe_exit_code = subprocess.call(args, shell=True, env=env)
# Assert Microsoft rc.exe and rc.py produced identical .res files.
if rc_exe_exit_code == 0:
import filecmp
diff --git a/chromium/build/util/LASTCHANGE b/chromium/build/util/LASTCHANGE
index 0626cbccf80..61256e0ce8d 100644
--- a/chromium/build/util/LASTCHANGE
+++ b/chromium/build/util/LASTCHANGE
@@ -1 +1 @@
-LASTCHANGE=cc99454a94d72bc0ae0a87712cf0c6a106e03903-refs/branch-heads/3578@{#947}
+LASTCHANGE=38c6381358a1ec7a9fc161a038fb9d7407334aa4-refs/branch-heads/3626@{#858}
diff --git a/chromium/build/util/LASTCHANGE.committime b/chromium/build/util/LASTCHANGE.committime
index 5dc3d81ed37..a3839706afd 100644
--- a/chromium/build/util/LASTCHANGE.committime
+++ b/chromium/build/util/LASTCHANGE.committime
@@ -1 +1 @@
-1548292276 \ No newline at end of file
+1550020232 \ No newline at end of file
diff --git a/chromium/build/vs_toolchain.py b/chromium/build/vs_toolchain.py
index 32bad7f469b..e613b88e535 100755
--- a/chromium/build/vs_toolchain.py
+++ b/chromium/build/vs_toolchain.py
@@ -13,6 +13,7 @@ import shutil
import stat
import subprocess
import sys
+
from gn_helpers import ToGNString
@@ -26,10 +27,12 @@ CURRENT_DEFAULT_TOOLCHAIN_VERSION = '2017'
def SetEnvironmentAndGetRuntimeDllDirs():
"""Sets up os.environ to use the depot_tools VS toolchain with gyp, and
- returns the location of the VS runtime DLLs so they can be copied into
+ returns the location of the VC runtime DLLs so they can be copied into
the output directory after gyp generation.
- Return value is [x64path, x86path] or None
+  Return value is [x64path, x86path, 'Arm64Unused'] or None. The arm64 path is
+  generated separately because there are multiple folders for the arm64 VC
+ runtime.
"""
vs_runtime_dll_dirs = None
depot_tools_win_toolchain = \
@@ -52,9 +55,14 @@ def SetEnvironmentAndGetRuntimeDllDirs():
win_sdk = toolchain_data['win8sdk']
wdk = toolchain_data['wdk']
# TODO(scottmg): The order unfortunately matters in these. They should be
- # split into separate keys for x86 and x64. (See CopyDlls call below).
+ # split into separate keys for x64/x86/arm64. (See CopyDlls call below).
# http://crbug.com/345992
vs_runtime_dll_dirs = toolchain_data['runtime_dirs']
+ # The number of runtime_dirs in the toolchain_data was two (x64/x86) but
+    # changed to three (x64/x86/arm64), and this code needs to handle both
+    # possibilities, since the toolchain data can change independently of
+    # this code.
+ if len(vs_runtime_dll_dirs) == 2:
+ vs_runtime_dll_dirs.append('Arm64Unused')
os.environ['GYP_MSVS_OVERRIDE_PATH'] = toolchain
os.environ['GYP_MSVS_VERSION'] = version
@@ -76,9 +84,14 @@ def SetEnvironmentAndGetRuntimeDllDirs():
# directory ensures that they are available when needed.
bitness = platform.architecture()[0]
# When running 64-bit python the x64 DLLs will be in System32
+ # ARM64 binaries will not be available in the system directories because we
+ # don't build on ARM64 machines.
x64_path = 'System32' if bitness == '64bit' else 'Sysnative'
x64_path = os.path.join(os.path.expandvars('%windir%'), x64_path)
- vs_runtime_dll_dirs = [x64_path, os.path.expandvars('%windir%/SysWOW64')]
+ vs_runtime_dll_dirs = [x64_path,
+ os.path.join(os.path.expandvars('%windir%'),
+ 'SysWOW64'),
+ 'Arm64Unused']
return vs_runtime_dll_dirs
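
The System32/Sysnative choice above is driven by WOW64 file-system redirection: a 32-bit process that opens %windir%\System32 is silently redirected to SysWOW64, so the 'Sysnative' alias is the only way for 32-bit Python to reach the native directory. A hedged, standalone sketch of that resolution (not the script's code):

import os
import platform

def native_system32():
  # 64-bit Python sees the real System32; 32-bit Python must go through
  # the Sysnative alias to escape WOW64 redirection.
  bitness = platform.architecture()[0]
  subdir = 'System32' if bitness == '64bit' else 'Sysnative'
  return os.path.join(os.path.expandvars('%windir%'), subdir)
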
@@ -130,7 +143,6 @@ def DetectVisualStudioPath():
raise Exception(('Visual Studio version %s (from GYP_MSVS_VERSION)'
' not supported. Supported versions are: %s') % (
version_as_year, ', '.join(year_to_version.keys())))
- version = year_to_version[version_as_year]
if version_as_year == '2017':
# The VC++ 2017 install location needs to be located using COM instead of
# the registry. For details see:
@@ -176,6 +188,16 @@ def _CopyRuntimeImpl(target, source, verbose=True):
def _CopyUCRTRuntime(target_dir, source_dir, target_cpu, dll_pattern, suffix):
"""Copy both the msvcp and vccorlib runtime DLLs, only if the target doesn't
exist, but the target directory does exist."""
+ if target_cpu == 'arm64':
+ # Windows ARM64 VCRuntime is located at {toolchain_root}/VC/Redist/MSVC/
+ # {x.y.z}/[debug_nonredist/]arm64/Microsoft.VC141.CRT/.
+ vc_redist_root = FindVCRedistRoot()
+ if suffix.startswith('.'):
+ source_dir = os.path.join(vc_redist_root,
+ 'arm64', 'Microsoft.VC141.CRT')
+ else:
+ source_dir = os.path.join(vc_redist_root, 'debug_nonredist',
+ 'arm64', 'Microsoft.VC141.DebugCRT')
for file_part in ('msvcp', 'vccorlib', 'vcruntime'):
dll = dll_pattern % file_part
target = os.path.join(target_dir, dll)
@@ -190,16 +212,35 @@ def _CopyUCRTRuntime(target_dir, source_dir, target_cpu, dll_pattern, suffix):
os.environ.get('WINDOWSSDKDIR',
os.path.expandvars('%ProgramFiles(x86)%'
'\\Windows Kits\\10')))
- ucrt_dll_dirs = os.path.join(win_sdk_dir, 'Redist', 'ucrt', 'DLLs',
- target_cpu)
- ucrt_files = glob.glob(os.path.join(ucrt_dll_dirs, 'api-ms-win-*.dll'))
- assert len(ucrt_files) > 0
- for ucrt_src_file in ucrt_files:
- file_part = os.path.basename(ucrt_src_file)
- ucrt_dst_file = os.path.join(target_dir, file_part)
- _CopyRuntimeImpl(ucrt_dst_file, ucrt_src_file, False)
- _CopyRuntimeImpl(os.path.join(target_dir, 'ucrtbase' + suffix),
- os.path.join(source_dir, 'ucrtbase' + suffix))
+ # ARM64 doesn't have a redist for the ucrt DLLs because they are always
+ # present in the OS.
+ if target_cpu != 'arm64':
+ ucrt_dll_dirs = os.path.join(win_sdk_dir, 'Redist', 'ucrt', 'DLLs',
+ target_cpu)
+ ucrt_files = glob.glob(os.path.join(ucrt_dll_dirs, 'api-ms-win-*.dll'))
+ assert len(ucrt_files) > 0
+ for ucrt_src_file in ucrt_files:
+ file_part = os.path.basename(ucrt_src_file)
+ ucrt_dst_file = os.path.join(target_dir, file_part)
+ _CopyRuntimeImpl(ucrt_dst_file, ucrt_src_file, False)
+ # We must copy ucrtbase.dll for x64/x86, and ucrtbased.dll for all CPU types.
+ if target_cpu != 'arm64' or not suffix.startswith('.'):
+ if not suffix.startswith('.'):
+ # ucrtbased.dll is located at {win_sdk_dir}/bin/{a.b.c.d}/{target_cpu}/
+ # ucrt/.
+ sdk_redist_root = os.path.join(win_sdk_dir, 'bin')
+ sdk_bin_sub_dirs = os.listdir(sdk_redist_root)
+ # Select the most recent SDK if there are multiple versions installed.
+ sdk_bin_sub_dirs.sort(reverse=True)
+ for directory in sdk_bin_sub_dirs:
+ sdk_redist_root_version = os.path.join(sdk_redist_root, directory)
+ if not os.path.isdir(sdk_redist_root_version):
+ continue
+      if re.match(r'10\.\d+\.\d+\.\d+', directory):
+ source_dir = os.path.join(sdk_redist_root_version, target_cpu, 'ucrt')
+ break
+ _CopyRuntimeImpl(os.path.join(target_dir, 'ucrtbase' + suffix),
+ os.path.join(source_dir, 'ucrtbase' + suffix))
def FindVCToolsRoot():
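
The SDK lookup above picks the newest '10.x.y.z' bin directory by a reverse string sort, which orders '10.0.9.0' after '10.0.10.0'. A hedged alternative sketch (not the code above) that sorts numerically instead; 'newest_sdk_bin_dir' is an illustrative name:

import os
import re

def newest_sdk_bin_dir(sdk_bin_root):
  # Collect version directories as integer tuples so 10.0.10 > 10.0.9.
  versions = []
  for name in os.listdir(sdk_bin_root):
    if (re.match(r'10\.\d+\.\d+\.\d+$', name) and
        os.path.isdir(os.path.join(sdk_bin_root, name))):
      versions.append(tuple(int(part) for part in name.split('.')))
  if not versions:
    return None
  return os.path.join(sdk_bin_root, '.'.join(str(p) for p in max(versions)))
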
@@ -225,6 +266,29 @@ def FindVCToolsRoot():
raise Exception('Unable to find the VC tools directory.')
+def FindVCRedistRoot():
+  """In VS2017, Redist binaries are located in
+  {toolchain_root}/VC/Redist/MSVC/{x.y.z}/{target_cpu}/. The {x.y.z} part is
+  likely to change on any minor update of the toolchain, so we don't hardcode
+  it here (except for the major version number).
+
+ This returns the '{toolchain_root}/VC/Redist/MSVC/{x.y.z}/' path.
+
+ This function should only be called when using VS2017.
+ """
+ assert GetVisualStudioVersion() == '2017'
+ SetEnvironmentAndGetRuntimeDllDirs()
+ assert ('GYP_MSVS_OVERRIDE_PATH' in os.environ)
+ vc_redist_msvc_root = os.path.join(os.environ['GYP_MSVS_OVERRIDE_PATH'],
+ 'VC', 'Redist', 'MSVC')
+ for directory in os.listdir(vc_redist_msvc_root):
+ if not os.path.isdir(os.path.join(vc_redist_msvc_root, directory)):
+ continue
+    if re.match(r'14\.\d+\.\d+', directory):
+ return os.path.join(vc_redist_msvc_root, directory)
+ raise Exception('Unable to find the VC redist directory')
+
+
def _CopyPGORuntime(target_dir, target_cpu):
"""Copy the runtime dependencies required during a PGO build.
"""
@@ -238,6 +302,7 @@ def _CopyPGORuntime(target_dir, target_cpu):
# from HostX86/x86.
pgo_x86_runtime_dir = os.path.join(pgo_runtime_root, 'HostX86', 'x86')
pgo_x64_runtime_dir = os.path.join(pgo_runtime_root, 'HostX64', 'x64')
+ pgo_arm64_runtime_dir = os.path.join(pgo_runtime_root, 'arm64')
else:
raise Exception('Unexpected toolchain version: %s.' % env_version)
@@ -250,8 +315,10 @@ def _CopyPGORuntime(target_dir, target_cpu):
source = os.path.join(pgo_x86_runtime_dir, runtime)
elif target_cpu == 'x64':
source = os.path.join(pgo_x64_runtime_dir, runtime)
+ elif target_cpu == 'arm64':
+ source = os.path.join(pgo_arm64_runtime_dir, runtime)
else:
- raise NotImplementedError("Unexpected target_cpu value: " + target_cpu)
+ raise NotImplementedError('Unexpected target_cpu value: ' + target_cpu)
if not os.path.exists(source):
raise Exception('Unable to find %s.' % source)
_CopyRuntimeImpl(os.path.join(target_dir, runtime), source)
@@ -260,7 +327,7 @@ def _CopyPGORuntime(target_dir, target_cpu):
def _CopyRuntime(target_dir, source_dir, target_cpu, debug):
"""Copy the VS runtime DLLs, only if the target doesn't exist, but the target
directory does exist. Handles VS 2015 and VS 2017."""
- suffix = "d.dll" if debug else ".dll"
+ suffix = 'd.dll' if debug else '.dll'
# VS 2017 uses the same CRT DLLs as VS 2015.
_CopyUCRTRuntime(target_dir, source_dir, target_cpu, '%s140' + suffix,
suffix)
@@ -270,7 +337,7 @@ def CopyDlls(target_dir, configuration, target_cpu):
"""Copy the VS runtime DLLs into the requested directory as needed.
configuration is one of 'Debug' or 'Release'.
- target_cpu is one of 'x86' or 'x64'.
+ target_cpu is one of 'x86', 'x64' or 'arm64'.
The debug configuration gets both the debug and release DLLs; the
release config only the latter.
@@ -279,8 +346,15 @@ def CopyDlls(target_dir, configuration, target_cpu):
if not vs_runtime_dll_dirs:
return
- x64_runtime, x86_runtime = vs_runtime_dll_dirs
- runtime_dir = x64_runtime if target_cpu == 'x64' else x86_runtime
+ x64_runtime, x86_runtime, arm64_runtime = vs_runtime_dll_dirs
+ if target_cpu == 'x64':
+ runtime_dir = x64_runtime
+ elif target_cpu == 'x86':
+ runtime_dir = x86_runtime
+ elif target_cpu == 'arm64':
+ runtime_dir = arm64_runtime
+ else:
+ raise Exception('Unknown target_cpu: ' + target_cpu)
_CopyRuntime(target_dir, runtime_dir, target_cpu, debug=False)
if configuration == 'Debug':
_CopyRuntime(target_dir, runtime_dir, target_cpu, debug=True)
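
A hedged sketch of an equivalent table-driven dispatch for the if/elif chain above; 'vs_runtime_dll_dirs' is the three-entry list described earlier and the helper name is illustrative:

def runtime_dir_for_cpu(vs_runtime_dll_dirs, target_cpu):
  x64_runtime, x86_runtime, arm64_runtime = vs_runtime_dll_dirs
  dirs = {'x64': x64_runtime, 'x86': x86_runtime, 'arm64': arm64_runtime}
  try:
    return dirs[target_cpu]
  except KeyError:
    raise Exception('Unknown target_cpu: ' + target_cpu)
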
@@ -293,7 +367,7 @@ def CopyDlls(target_dir, configuration, target_cpu):
def _CopyDebugger(target_dir, target_cpu):
"""Copy dbghelp.dll and dbgcore.dll into the requested directory as needed.
- target_cpu is one of 'x86' or 'x64'.
+ target_cpu is one of 'x86', 'x64' or 'arm64'.
dbghelp.dll is used when Chrome needs to symbolize stacks. Copying this file
from the SDK directory avoids using the system copy of dbghelp.dll which then
@@ -413,7 +487,7 @@ def Update(force=False):
def NormalizePath(path):
- while path.endswith("\\"):
+ while path.endswith('\\'):
path = path[:-1]
return path
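
A hedged note on the loop above: assuming only trailing backslashes need trimming, it is equivalent to a single rstrip call, as in this minimal sketch:

def normalize_path(path):
  # rstrip('\\') removes every trailing backslash, matching the while loop.
  return path.rstrip('\\')
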
diff --git a/chromium/build/win/merge_pgc_files.py b/chromium/build/win/merge_pgc_files.py
deleted file mode 100755
index 804c4ea5e4c..00000000000
--- a/chromium/build/win/merge_pgc_files.py
+++ /dev/null
@@ -1,144 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Merge the PGC files generated during the profiling step to the PGD database.
-
-This is required to workaround a flakyness in pgomgr.exe where it can run out
-of address space while trying to merge all the PGC files at the same time.
-"""
-
-import glob
-import json
-import optparse
-import os
-import subprocess
-import sys
-
-
-script_dir = os.path.dirname(os.path.realpath(__file__))
-sys.path.insert(0, os.path.join(script_dir, os.pardir))
-
-import vs_toolchain
-
-
-# Number of PGC files to merge in each iteration. Merging the files one by
-# one is really slow, but merging more than 10 at a time doesn't seem to
-# reduce the total time much further (measured while merging 180 files).
-#
-# Number of pgc merged per iteration | Time (in min)
-# 1 | 27.2
-# 10 | 12.8
-# 20 | 12.0
-# 30 | 11.5
-# 40 | 11.4
-# 50 | 11.5
-# 60 | 11.6
-# 70 | 11.6
-# 80 | 11.7
-#
-# TODO(sebmarchand): Measure the memory usage of pgomgr.exe to see how it gets
-# affected by the number of PGC files.
-_BATCH_SIZE_DEFAULT = 10
-
-
-def find_pgomgr(chrome_checkout_dir):
- """Find pgomgr.exe."""
- win_toolchain_json_file = os.path.join(chrome_checkout_dir, 'build',
- 'win_toolchain.json')
- if not os.path.exists(win_toolchain_json_file):
- raise Exception('The toolchain JSON file is missing.')
- with open(win_toolchain_json_file) as temp_f:
- toolchain_data = json.load(temp_f)
- if not os.path.isdir(toolchain_data['path']):
- raise Exception('The toolchain JSON file is invalid.')
-
-  # Always use the x64 version of pgomgr (the x86 one doesn't work in the
-  # bot environment).
- pgomgr_dir = None
- if toolchain_data['version'] == '2017':
- vc_tools_root = vs_toolchain.FindVCToolsRoot()
- pgomgr_dir = os.path.join(vc_tools_root, 'HostX64', 'x64')
-
- pgomgr_path = os.path.join(pgomgr_dir, 'pgomgr.exe')
- if not os.path.exists(pgomgr_path):
- raise Exception('pgomgr.exe is missing from %s.' % pgomgr_dir)
-
- return pgomgr_path
-
-
-def merge_pgc_files(pgomgr_path, files, pgd_path):
-  """Merge all the PGC files in |files| into |pgd_path|."""
- merge_command = [
- pgomgr_path,
- '/merge'
- ]
- merge_command.extend(files)
- merge_command.append(pgd_path)
- proc = subprocess.Popen(merge_command, stdout=subprocess.PIPE)
- stdout, _ = proc.communicate()
- print stdout
- return proc.returncode
-
-
-def main():
- parser = optparse.OptionParser(usage='%prog [options]')
- parser.add_option('--checkout-dir', help='The Chrome checkout directory.')
- parser.add_option('--target-cpu', help='[DEPRECATED] The target\'s bitness.')
- parser.add_option('--build-dir', help='Chrome build directory.')
- parser.add_option('--binary-name', help='The binary for which the PGC files '
- 'should be merged, without extension.')
- parser.add_option('--files-per-iter', help='The number of PGC files to merge '
- 'in each iteration, default to %d.' % _BATCH_SIZE_DEFAULT,
- type='int', default=_BATCH_SIZE_DEFAULT)
- options, _ = parser.parse_args()
-
- if not options.checkout_dir:
- parser.error('--checkout-dir is required')
- if not options.build_dir:
- parser.error('--build-dir is required')
- if not options.binary_name:
- parser.error('--binary-name is required')
-
- # Starts by finding pgomgr.exe.
- pgomgr_path = find_pgomgr(options.checkout_dir)
-
- pgc_files = glob.glob(os.path.join(options.build_dir,
- '%s*.pgc' % options.binary_name))
- pgd_file = os.path.join(options.build_dir, '%s.pgd' % options.binary_name)
-
- def _split_in_chunks(items, chunk_size):
- """Split |items| in chunks of size |chunk_size|.
-
- Source: http://stackoverflow.com/a/312464
- """
- for i in xrange(0, len(items), chunk_size):
-      yield items[i:i + chunk_size]
-
-  for chunk in _split_in_chunks(pgc_files, options.files_per_iter):
- files_to_merge = []
- for pgc_file in chunk:
- files_to_merge.append(
- os.path.join(options.build_dir, os.path.basename(pgc_file)))
- ret = merge_pgc_files(pgomgr_path, files_to_merge, pgd_file)
-    # pgomgr.exe sometimes fails when merging too many files at the same time
-    # (it usually complains that a stream is missing; merging the same file
-    # individually works), so fall back to merging the PGCs from this batch
-    # one at a time. Don't fail the build if we can't merge a file.
-    # TODO(sebmarchand): Report this to Microsoft and check whether this still
-    # happens with VS2017.
- if ret != 0:
-      print ('Error while trying to merge several PGC files at the same '
-             'time; falling back to merging them one by one.')
- for pgc_file in chunk:
- ret = merge_pgc_files(
- pgomgr_path,
- [os.path.join(options.build_dir, os.path.basename(pgc_file))],
- pgd_file
- )
- if ret != 0:
- print 'Error while trying to merge %s, continuing.' % pgc_file
-
-
-if __name__ == '__main__':
- sys.exit(main())
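
A hedged sketch of the batch-then-fallback strategy the deleted script implemented: merge PGC files in chunks, then retry one at a time when a chunk fails. Here 'merge' stands in for the pgomgr.exe invocation and the helper name is illustrative:

def merge_in_batches(merge, pgc_files, pgd_file, batch_size=10):
  for start in range(0, len(pgc_files), batch_size):
    chunk = pgc_files[start:start + batch_size]
    if merge(chunk, pgd_file) != 0:
      # The batch merge failed; retry each file individually and keep going.
      for pgc_file in chunk:
        if merge([pgc_file], pgd_file) != 0:
          print('Error while merging %s, continuing.' % pgc_file)
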
diff --git a/chromium/build/win/reorder-imports.py b/chromium/build/win/reorder-imports.py
index c4b294d33b2..ee27ed19cc2 100755
--- a/chromium/build/win/reorder-imports.py
+++ b/chromium/build/win/reorder-imports.py
@@ -36,7 +36,7 @@ def reorder_imports(input_dir, output_dir, architecture):
# through the Structure, while other data must bet set through
# the set_bytes_*() methods.
pe = pefile.PE(input_image, fast_load=True)
- if architecture == 'x64':
+ if architecture == 'x64' or architecture == 'arm64':
assert pe.PE_TYPE == pefile.OPTIONAL_HEADER_MAGIC_PE_PLUS
else:
assert pe.PE_TYPE == pefile.OPTIONAL_HEADER_MAGIC_PE
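
PE32+ (the OPTIONAL_HEADER magic asserted above) covers every 64-bit architecture, which is why x64 and arm64 images share the same branch. A hedged sketch of that check using the pefile package, with an illustrative helper name:

import pefile

def is_pe32_plus(image_path):
  # fast_load skips directory parsing; the optional-header magic is enough
  # to distinguish PE32 (32-bit) from PE32+ (64-bit) images.
  pe = pefile.PE(image_path, fast_load=True)
  return pe.PE_TYPE == pefile.OPTIONAL_HEADER_MAGIC_PE_PLUS
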
diff --git a/chromium/build/win/run_pgo_profiling_benchmarks.py b/chromium/build/win/run_pgo_profiling_benchmarks.py
deleted file mode 100644
index 163682a6338..00000000000
--- a/chromium/build/win/run_pgo_profiling_benchmarks.py
+++ /dev/null
@@ -1,123 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utility script to run the benchmarks during the profiling step of a PGO
-build.
-"""
-
-import json
-import optparse
-import os
-import subprocess
-import sys
-
-# Make sure that we're running as admin; this is required to run the Telemetry
-# benchmarks.
-from win32com.shell import shell
-if not shell.IsUserAnAdmin():
- raise Exception('This script has to be run as admin.')
-
-
-_SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
-_CHROME_BUILD_DIR = os.path.dirname(_SCRIPT_DIR)
-_CHROME_SRC_DIR = os.path.dirname(_CHROME_BUILD_DIR)
-
-
-# The set of benchmarks that we run during the profiling step.
-_BENCHMARKS_TO_RUN = {
- 'blink_perf.bindings',
- 'blink_perf.canvas',
- 'blink_perf.css',
- 'blink_perf.dom',
- 'blink_perf.paint',
- 'blink_perf.svg',
- 'blink_style.top_25',
- 'dromaeo.cssqueryjquery',
- 'dromaeo.domcoreattr',
- 'dromaeo.domcoremodify',
- 'dromaeo.domcorequery',
- 'dromaeo.domcoretraverse',
- 'dromaeo.jslibattrprototype',
- 'dromaeo.jslibeventprototype',
- 'dromaeo.jslibmodifyprototype',
- 'dromaeo.jslibstyleprototype',
- 'dromaeo.jslibtraversejquery',
- 'dromaeo.jslibtraverseprototype',
- 'media.tough_video_cases',
- 'octane',
- 'smoothness.top_25_smooth',
- 'storage.indexeddb_endure_tracing',
- 'sunspider',
-}
-
-
-def RunBenchmarks(options):
- """Run the benchmarks."""
- # Find the run_benchmark script.
- chrome_run_benchmark_script = os.path.join(_CHROME_SRC_DIR, 'tools',
- 'perf', 'run_benchmark')
- if not os.path.exists(chrome_run_benchmark_script):
- raise Exception('Unable to find the run_benchmark script '
- '(%s doesn\'t exist) ' % chrome_run_benchmark_script)
-
- # Augment the PATH to make sure that the benchmarking script can find
- # pgosweep.exe and its runtime libraries.
- env = os.environ.copy()
- env['PATH'] = str(os.pathsep.join([options.build_dir, os.environ['PATH']]))
- env['PogoSafeMode'] = '1'
-  # Apply a scaling factor of 0.5 to the PGO profiling buffers for 32-bit
-  # builds; without this the buffers will be too large and the process will
-  # fail to start. See crbug.com/632864#c22.
- if options.target_cpu == 'x86':
- env['VCPROFILE_ALLOC_SCALE'] = '0.5'
-
- # Run all the benchmarks.
- # TODO(sebmarchand): Make this run in parallel.
- for benchmark in _BENCHMARKS_TO_RUN:
- try:
- benchmark_command = [
- sys.executable,
- chrome_run_benchmark_script,
- '--browser', options.browser_type,
- ]
- # Automatically set the arguments to run this script on a local build.
- if options.browser_type == 'exact':
- benchmark_command += [
- '--browser-executable', os.path.join(options.build_dir, 'chrome.exe')
- ]
- benchmark_command += [
- '--profiler', 'win_pgo_profiler',
- benchmark
- ]
- subprocess.check_call(benchmark_command, env=env)
- except:
- print ('Error while trying to run the %s benchmark, continuing.' %
- benchmark)
- continue
-
- return 0
-
-
-def main():
- parser = optparse.OptionParser(usage='%prog [options]')
- parser.add_option(
- '--browser-type', help='The browser type (to be passed to Telemetry\'s '
- 'benchmark runner).')
- # TODO(sebmarchand): Parse the args.gn file to automatically set this value.
- parser.add_option('--target-cpu', help='The target\'s bitness.')
- parser.add_option('--build-dir', help='Chrome build directory.')
- options, _ = parser.parse_args()
-
- if not options.target_cpu:
- parser.error('--target-cpu is required')
- if not options.build_dir:
- parser.error('--build-dir is required')
- if not options.browser_type:
- options.browser_type = 'exact'
-
- return RunBenchmarks(options)
-
-
-if __name__ == '__main__':
- sys.exit(main())
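
A hedged sketch of the environment-augmentation pattern the deleted script used: prepend the build directory to PATH so a child process can find pgosweep.exe and its runtime libraries. 'build_dir' and the helper name are illustrative:

import os
import subprocess

def run_with_build_dir_on_path(command, build_dir):
  # Copy the environment so the parent process is left untouched.
  env = os.environ.copy()
  env['PATH'] = os.pathsep.join([build_dir, env['PATH']])
  return subprocess.check_call(command, env=env)
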