Diffstat (limited to 'chromium/build/android')
49 files changed, 1495 insertions, 2163 deletions
diff --git a/chromium/build/android/BUILD.gn b/chromium/build/android/BUILD.gn index b07bf34d82c..dd501338b66 100644 --- a/chromium/build/android/BUILD.gn +++ b/chromium/build/android/BUILD.gn @@ -34,7 +34,9 @@ if (enable_java_templates) { java_prebuilt("sun_tools_java") { jar_path = sun_tools_jar_path - jar_dep = ":find_sun_tools_jar" + deps = [ + ":find_sun_tools_jar", + ] } generate_interface_jar("android_ijar") { diff --git a/chromium/build/android/apk_operations.py b/chromium/build/android/apk_operations.py index 70c132ad660..f3d4a20f7ff 100755 --- a/chromium/build/android/apk_operations.py +++ b/chromium/build/android/apk_operations.py @@ -108,9 +108,9 @@ def _LaunchUrl(devices, package_name, argv=None, command_line_flags_file=None, debug_process_name = _NormalizeProcessName(debug_process_name, package_name) def launch(device): - # Set debug app in order to enable reading command line flags on user - # builds. - cmd = ['am', 'set-debug-app', debug_process_name] + # --persistent is required to have Settings.Global.DEBUG_APP be set, which + # we currently use to allow reading of flags. https://crbug.com/784947 + cmd = ['am', 'set-debug-app', '--persistent', debug_process_name] if wait_for_java_debugger: cmd[-1:-1] = ['-w'] # Ignore error since it will fail if apk is not debuggable. diff --git a/chromium/build/android/avd.py b/chromium/build/android/avd.py deleted file mode 100755 index 788ceaf053a..00000000000 --- a/chromium/build/android/avd.py +++ /dev/null @@ -1,150 +0,0 @@ -#!/usr/bin/env python -# Copyright (c) 2013 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Launches Android Virtual Devices with a set configuration for testing Chrome. - -The script will launch a specified number of Android Virtual Devices (AVD's). -""" - -import argparse -import logging -import os -import re -import sys - -import devil_chromium -import install_emulator_deps - -from devil.utils import cmd_helper -from pylib import constants -from pylib.utils import emulator - -def main(argv): - # ANDROID_SDK_ROOT needs to be set to the location of the SDK used to launch - # the emulator to find the system images upon launch. - emulator_sdk = constants.ANDROID_SDK_ROOT - os.environ['ANDROID_SDK_ROOT'] = emulator_sdk - - arg_parser = argparse.ArgumentParser(description='AVD script.') - sub_parsers = arg_parser.add_subparsers(title='subparser', dest='command') - sub_parsers.add_parser( - 'kill', help='Shutdown all existing emulators') - sub_parsers.add_parser( - 'delete', help='Deleting all the avd files') - wait_parser = sub_parsers.add_parser( - 'wait', help='Wait for emulators to finish booting') - wait_parser.add_argument('-n', '--num', dest='wait_num', - help='Number of emulators to wait for', type=int, - default=1) - run_parser = sub_parsers.add_parser('run', help='Run emulators') - run_parser.add_argument('--name', help='Optinaly, name of existing AVD to ' - 'launch. 
If not specified, AVD\'s will be created') - run_parser.add_argument('-n', '--num', dest='emulator_count', - help='Number of emulators to launch (default is 1).', - type=int, default='1') - run_parser.add_argument('--abi', default='x86', - help='Platform of emulators to launch (x86 default)') - run_parser.add_argument('--api-level', dest='api_level', - help='API level for the image', - type=int, default=constants.ANDROID_SDK_VERSION) - run_parser.add_argument('--sdcard-size', dest='sdcard_size', - default=emulator.DEFAULT_SDCARD_SIZE, - help='Set sdcard size of the emulators' - ' e.g. --sdcard-size=512M') - run_parser.add_argument('--partition-size', dest='partition_size', - default=emulator.DEFAULT_STORAGE_SIZE, - help='Default internal storage size' - ' e.g. --partition-size=1024M') - run_parser.add_argument('--launch-without-kill', action='store_false', - dest='kill_and_launch', default=True, - help='Kill all emulators at launch') - run_parser.add_argument('--enable-kvm', action='store_true', - dest='enable_kvm', default=False, - help='Enable kvm for faster x86 emulator run') - run_parser.add_argument('--headless', action='store_true', - dest='headless', default=False, - help='Launch an emulator with no UI.') - - arguments = arg_parser.parse_args(argv[1:]) - - logging.root.setLevel(logging.INFO) - - devil_chromium.Initialize() - - if arguments.command == 'kill': - logging.info('Killing all existing emulator and existing the program') - emulator.KillAllEmulators() - elif arguments.command == 'delete': - emulator.DeleteAllTempAVDs() - elif arguments.command == 'wait': - emulator.WaitForEmulatorLaunch(arguments.wait_num) - else: - # Check if SDK exist in ANDROID_SDK_ROOT - if not install_emulator_deps.CheckSDK(): - raise Exception('Emulator SDK not installed in %s' - % constants.ANDROID_SDK_ROOT) - - # Check if KVM is enabled for x86 AVD - if arguments.abi == 'x86': - if not install_emulator_deps.CheckKVM(): - logging.warning('KVM is not installed or enabled') - arguments.enable_kvm = False - - # Check if targeted system image exist - if not install_emulator_deps.CheckSystemImage(arguments.abi, - arguments.api_level): - logging.critical('ERROR: System image for %s AVD not installed. Run ' - 'install_emulator_deps.py', arguments.abi) - return 1 - - # If AVD is specified, check that the SDK has the required target. If not, - # check that the SDK has the desired target for the temporary AVD's. - api_level = arguments.api_level - if arguments.name: - android = os.path.join(constants.ANDROID_SDK_ROOT, 'tools', - 'android') - avds_output = cmd_helper.GetCmdOutput([android, 'list', 'avd']) - names = re.findall(r'Name: (\w+)', avds_output) - api_levels = re.findall(r'API level (\d+)', avds_output) - try: - avd_index = names.index(arguments.name) - except ValueError: - logging.critical('ERROR: Specified AVD %s does not exist.', - arguments.name) - return 1 - api_level = int(api_levels[avd_index]) - - if not install_emulator_deps.CheckSDKPlatform(api_level): - logging.critical('ERROR: Emulator SDK missing required target for API %d.' 
- ' Run install_emulator_deps.py.') - return 1 - - if arguments.name: - emulator.LaunchEmulator( - arguments.name, - arguments.abi, - enable_kvm=arguments.enable_kvm, - kill_and_launch=arguments.reset_and_launch, - sdcard_size=arguments.sdcard_size, - storage_size=arguments.partition_size, - headless=arguments.headless - ) - else: - emulator.LaunchTempEmulators( - arguments.emulator_count, - arguments.abi, - arguments.api_level, - enable_kvm=arguments.enable_kvm, - kill_and_launch=arguments.kill_and_launch, - sdcard_size=arguments.sdcard_size, - storage_size=arguments.partition_size, - wait_for_boot=True, - headless=arguments.headless - ) - logging.info('Emulator launch completed') - return 0 - -if __name__ == '__main__': - sys.exit(main(sys.argv)) diff --git a/chromium/build/android/binary_size/apk_downloader.py b/chromium/build/android/binary_size/apk_downloader.py index b0787d8ed63..132325bc5b0 100755 --- a/chromium/build/android/binary_size/apk_downloader.py +++ b/chromium/build/android/binary_size/apk_downloader.py @@ -20,7 +20,7 @@ import find_depot_tools # pylint: disable=import-error,unused-import import download_from_google_storage import upload_to_google_storage -CURRENT_MILESTONE = '63' +CURRENT_MILESTONE = '64' DEFAULT_BUCKET = 'gs://chromium-android-tools/apks' DEFAULT_DOWNLOAD_PATH = os.path.join(os.path.dirname(__file__), 'apks') DEFAULT_BUILDER = 'Android_Builder' diff --git a/chromium/build/android/binary_size/apks/Android_Builder/64/ChromeModernPublic.apk.sha1 b/chromium/build/android/binary_size/apks/Android_Builder/64/ChromeModernPublic.apk.sha1 new file mode 100644 index 00000000000..92e9519e369 --- /dev/null +++ b/chromium/build/android/binary_size/apks/Android_Builder/64/ChromeModernPublic.apk.sha1 @@ -0,0 +1 @@ +98af8f2265f68dea89eecd1d93410fd36bf233c4
\ No newline at end of file
diff --git a/chromium/build/android/binary_size/apks/Android_Builder/64/ChromePublic.apk.sha1 b/chromium/build/android/binary_size/apks/Android_Builder/64/ChromePublic.apk.sha1
new file mode 100644
index 00000000000..7bbf28ae224
--- /dev/null
+++ b/chromium/build/android/binary_size/apks/Android_Builder/64/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+d5c209e967fa2f6b98fd4e3d6882a85ad67a9d87
\ No newline at end of file
diff --git a/chromium/build/android/binary_size/apks/Android_Builder/64/MonochromePublic.apk.sha1 b/chromium/build/android/binary_size/apks/Android_Builder/64/MonochromePublic.apk.sha1
new file mode 100644
index 00000000000..123d071a52d
--- /dev/null
+++ b/chromium/build/android/binary_size/apks/Android_Builder/64/MonochromePublic.apk.sha1
@@ -0,0 +1 @@
+a5059a9bbe1dd9b6750bee7c2728a366bf9864e9
\ No newline at end of file
diff --git a/chromium/build/android/binary_size/apks/Android_arm64_Builder/64/ChromeModernPublic.apk.sha1 b/chromium/build/android/binary_size/apks/Android_arm64_Builder/64/ChromeModernPublic.apk.sha1
new file mode 100644
index 00000000000..e0df57bb819
--- /dev/null
+++ b/chromium/build/android/binary_size/apks/Android_arm64_Builder/64/ChromeModernPublic.apk.sha1
@@ -0,0 +1 @@
+3e4b8fd75aea0a22fa769c6686dd153d72741e91
\ No newline at end of file
diff --git a/chromium/build/android/binary_size/apks/Android_arm64_Builder/64/ChromePublic.apk.sha1 b/chromium/build/android/binary_size/apks/Android_arm64_Builder/64/ChromePublic.apk.sha1
new file mode 100644
index 00000000000..e65c26f9abf
--- /dev/null
+++ b/chromium/build/android/binary_size/apks/Android_arm64_Builder/64/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+b7b222a6650e98dda980dd9bc922763e4102d0b6
\ No newline at end of file diff --git a/chromium/build/android/binary_size/apks/README.md b/chromium/build/android/binary_size/apks/README.md index b09596881df..e651ed701cf 100644 --- a/chromium/build/android/binary_size/apks/README.md +++ b/chromium/build/android/binary_size/apks/README.md @@ -39,3 +39,4 @@ build/android/binary_size/apk_downloader.py --update 63 508578 --update 62 49918 * [M61](https://crrev.com/488528) * [M62](https://crrev.com/499187) * [M63](https://crrev.com/508578) + * [M64](https://crrev.com/520840) diff --git a/chromium/build/android/buildhooks/BUILD.gn b/chromium/build/android/buildhooks/BUILD.gn index e4c708a8799..6500914319a 100644 --- a/chromium/build/android/buildhooks/BUILD.gn +++ b/chromium/build/android/buildhooks/BUILD.gn @@ -6,6 +6,7 @@ import("//build/config/android/rules.gni") import("//build/config/android/config.gni") java_library("build_hooks_java") { + emma_never_instrument = true java_files = [ "java/org/chromium/build/BuildHooks.java" ] # Make all targets pull in the try-with-resources support files. @@ -34,6 +35,7 @@ java_cpp_template("base_build_hooks_config") { build_hooks_android_impl = "java/org/chromium/build/BuildHooksAndroidImpl.java" android_library("build_hooks_android_java") { + emma_never_instrument = true java_files = [ "java/org/chromium/build/BuildHooksAndroid.java", build_hooks_android_impl, @@ -46,6 +48,7 @@ android_library("build_hooks_android_java") { # This default implementation is used if an android_apk target doesn't # specify a different implementation via build_hooks_android_impl_deps. android_library("build_hooks_android_impl_java") { + emma_never_instrument = true java_files = [ build_hooks_android_impl ] deps = [ ":build_hooks_android_java", diff --git a/chromium/build/android/buildhooks/OWNERS b/chromium/build/android/buildhooks/OWNERS index a512a5a43d3..c964495a78c 100644 --- a/chromium/build/android/buildhooks/OWNERS +++ b/chromium/build/android/buildhooks/OWNERS @@ -1,5 +1,4 @@ agrieve@chromium.org estevenson@chromium.org -zpeng@chromium.org # COMPONENT: Build diff --git a/chromium/build/android/bytecode/BUILD.gn b/chromium/build/android/bytecode/BUILD.gn index 3a2069019c4..3e10b8d6c0a 100644 --- a/chromium/build/android/bytecode/BUILD.gn +++ b/chromium/build/android/bytecode/BUILD.gn @@ -7,6 +7,7 @@ import("//build/config/android/rules.gni") assert(current_toolchain == default_toolchain) java_binary("java_bytecode_rewriter") { + emma_never_instrument = true java_files = [ "java/org/chromium/bytecode/AssertionEnablerClassAdapter.java", "java/org/chromium/bytecode/ByteCodeProcessor.java", diff --git a/chromium/build/android/bytecode/OWNERS b/chromium/build/android/bytecode/OWNERS index a512a5a43d3..c964495a78c 100644 --- a/chromium/build/android/bytecode/OWNERS +++ b/chromium/build/android/bytecode/OWNERS @@ -1,5 +1,4 @@ agrieve@chromium.org estevenson@chromium.org -zpeng@chromium.org # COMPONENT: Build diff --git a/chromium/build/android/gradle/android.jinja b/chromium/build/android/gradle/android.jinja index 2fa40d1d880..3fad6516cc2 100644 --- a/chromium/build/android/gradle/android.jinja +++ b/chromium/build/android/gradle/android.jinja @@ -70,8 +70,13 @@ android { {{ expand_sourceset(main, 'main') }} {{ expand_sourceset(test, 'test') }} -{{ expand_sourceset(android_test, 'androidTest') }} + +{% if android_test is defined %} +{% for t in android_test %} +{{ expand_sourceset(t, 'androidTest') }} +{% endfor %} } +{% endif %} defaultConfig { vectorDrawables.useSupportLibrary = true diff --git 
a/chromium/build/android/gradle/generate_gradle.py b/chromium/build/android/gradle/generate_gradle.py index 74be0df517c..e6a8906009d 100755 --- a/chromium/build/android/gradle/generate_gradle.py +++ b/chromium/build/android/gradle/generate_gradle.py @@ -7,6 +7,7 @@ import argparse import codecs +import collections import glob import logging import os @@ -151,7 +152,7 @@ class _ProjectEntry(object): self._build_config = None self._java_files = None self._all_entries = None - self.android_test_entry = None + self.android_test_entries = None @classmethod def FromGnTarget(cls, gn_target): @@ -550,7 +551,7 @@ def _GenerateGradleFile(entry, generator, build_vars, source_properties, target_type = 'java_library' elif deps_info['type'] == 'java_binary': target_type = 'java_binary' - variables['main_class'] = gradle['main_class'] + variables['main_class'] = deps_info.get('main_class') elif deps_info['type'] == 'junit_binary': target_type = 'android_junit' variables['sourceSetName'] = 'test' @@ -565,13 +566,14 @@ def _GenerateGradleFile(entry, generator, build_vars, source_properties, if bootclasspath: # Must use absolute path here. variables['bootclasspath'] = _RebasePath(bootclasspath) - if entry.android_test_entry: - variables['android_test'] = generator.Generate( - entry.android_test_entry) - for key, value in variables['android_test'].iteritems(): - if isinstance(value, list): - variables['android_test'][key] = sorted( - set(value) - set(variables['main'][key])) + if entry.android_test_entries: + variables['android_test'] = [] + for e in entry.android_test_entries: + test_entry = generator.Generate(e) + variables['android_test'].append(test_entry) + for key, value in test_entry.iteritems(): + if isinstance(value, list): + test_entry[key] = sorted(set(value) - set(variables['main'][key])) return jinja_processor.Render( _TemplatePath(target_type.split('_')[0]), variables) @@ -607,10 +609,10 @@ def _GenerateModuleAll(gradle_output_dir, generator, build_vars, 'java_excludes': ['**/*.java'], 'res_dirs': Relativize(res_dirs), } - variables['android_test'] = { + variables['android_test'] = [{ 'java_dirs': Relativize(test_java_dirs), 'java_excludes': ['**/*.java'], - } + }] data = jinja_processor.Render( _TemplatePath(target_type.split('_')[0]), variables) _WriteFile( @@ -685,22 +687,22 @@ def _CombineTestEntries(entries): - e.g. base_junit_tests > base_junit_test_support > base_java """ combined_entries = [] - android_test_entries = {} + android_test_entries = collections.defaultdict(list) for entry in entries: target_name = entry.GnTarget() if (target_name.endswith('_test_apk__apk') and 'apk_under_test' in entry.Gradle()): apk_name = entry.Gradle()['apk_under_test'] - android_test_entries[apk_name] = entry + android_test_entries[apk_name].append(entry) else: combined_entries.append(entry) for entry in combined_entries: target_name = entry.DepsInfo()['name'] if target_name in android_test_entries: - entry.android_test_entry = android_test_entries[target_name] + entry.android_test_entries = android_test_entries[target_name] del android_test_entries[target_name] # Add unmatched test entries as individual targets. 
- combined_entries.extend(android_test_entries.values()) + combined_entries.extend(e for l in android_test_entries.values() for e in l) return combined_entries @@ -744,6 +746,23 @@ def main(): action='store_true', help='Generate a project that is compatible with ' 'Android Studio 3.1 Canary.') + sdk_group = parser.add_mutually_exclusive_group() + sdk_group.add_argument('--sdk', + choices=['AndroidStudioCurrent', + 'AndroidStudioDefault', + 'ChromiumSdkRoot'], + default='ChromiumSdkRoot', + help="Set the project's SDK root. This can be set to " + "Android Studio's current SDK root, the default " + "Android Studio SDK root, or Chromium's SDK " + "root. The default is Chromium's SDK root, but " + "using this means that updates and additions to " + "the SDK (e.g. installing emulators), will " + "modify this root, hence possibly causing " + "conflicts on the next repository sync.") + sdk_group.add_argument('--sdk-path', + help='An explict path for the SDK root, setting this ' + 'is an alternative to setting the --sdk option') args = parser.parse_args() if args.output_directory: constants.SetOutputDirectory(args.output_directory) @@ -768,7 +787,11 @@ def main(): args.canary) logging.warning('Creating project at: %s', generator.project_dir) - args.all = args.all or not args.split_projects + # Generate for "all targets" by default when not using --split-projects (too + # slow), and when no --target has been explicitly set. "all targets" means all + # java targets that are depended on by an apk or java_binary (leaf + # java_library targets will not be included). + args.all = args.all or (not args.split_projects and not args.targets) targets_from_args = set(args.targets or _DEFAULT_TARGETS) if args.extra_targets: @@ -840,9 +863,15 @@ def main(): _WriteFile(os.path.join(generator.project_dir, 'settings.gradle'), _GenerateSettingsGradle(project_entries, add_all_module)) - sdk_path = _RebasePath(build_vars['android_sdk_root']) - _WriteFile(os.path.join(generator.project_dir, 'local.properties'), - _GenerateLocalProperties(sdk_path)) + if args.sdk != "AndroidStudioCurrent": + if args.sdk_path: + sdk_path = _RebasePath(args.sdk_path) + elif args.sdk == "AndroidStudioDefault": + sdk_path = os.path.expanduser('~/Android/Sdk') + else: + sdk_path = _RebasePath(build_vars['android_sdk_root']) + _WriteFile(os.path.join(generator.project_dir, 'local.properties'), + _GenerateLocalProperties(sdk_path)) if generated_inputs: logging.warning('Building generated source files...') diff --git a/chromium/build/android/gradle/java.jinja b/chromium/build/android/gradle/java.jinja index b649149ad57..92fe575af85 100644 --- a/chromium/build/android/gradle/java.jinja +++ b/chromium/build/android/gradle/java.jinja @@ -29,8 +29,10 @@ sourceCompatibility = JavaVersion.VERSION_1_8 targetCompatibility = JavaVersion.VERSION_1_8 {% if template_type == 'java_binary' %} -mainClassName = "{{ main_class }}" applicationName = "{{ target_name }}" +{% if main_class %} +mainClassName = "{{ main_class }}" +{% endif %} {% endif %} {% if template_type in ('java_binary', 'java_library') %} archivesBaseName = "{{ target_name }}" diff --git a/chromium/build/android/gradle/root.jinja b/chromium/build/android/gradle/root.jinja index 76b04469efb..d3a259f7397 100644 --- a/chromium/build/android/gradle/root.jinja +++ b/chromium/build/android/gradle/root.jinja @@ -12,7 +12,7 @@ buildscript { } dependencies { {% if canary %} - classpath "com.android.tools.build:gradle:3.1.0-alpha01" + classpath "com.android.tools.build:gradle:3.1.0-alpha04" {% else %} 
classpath "com.android.tools.build:gradle:3.0.0" {% endif %} diff --git a/chromium/build/android/gyp/apkbuilder.py b/chromium/build/android/gyp/apkbuilder.py index 8afd6b0cf92..1aadf2b2290 100755 --- a/chromium/build/android/gyp/apkbuilder.py +++ b/chromium/build/android/gyp/apkbuilder.py @@ -193,10 +193,6 @@ def _MergePakInfoFiles(pak_info_path, asset_list): if src.endswith('.pak'): with open(src + '.info', 'r') as src_info_file: lines.update(src_info_file.readlines()) - # Ensure that parent dirs exist before writing new files. - info_dir = os.path.dirname(pak_info_path) - if not os.path.exists(info_dir): - os.makedirs(info_dir) with open(pak_info_path, 'w') as merged_info_file: merged_info_file.writelines(sorted(lines)) diff --git a/chromium/build/android/gyp/create_dist_jar.py b/chromium/build/android/gyp/create_dist_jar.py index 2206cc64ae9..2e06478fae8 100755 --- a/chromium/build/android/gyp/create_dist_jar.py +++ b/chromium/build/android/gyp/create_dist_jar.py @@ -6,24 +6,21 @@ """Merges a list of jars into a single jar.""" -import optparse +import argparse import sys from util import build_utils + def main(args): args = build_utils.ExpandFileArgs(args) - parser = optparse.OptionParser() + parser = argparse.ArgumentParser() build_utils.AddDepfileOption(parser) - parser.add_option('--output', help='Path to output jar.') - parser.add_option('--inputs', action='append', help='List of jar inputs.') - options, _ = parser.parse_args(args) - build_utils.CheckOptions(options, parser, ['output', 'inputs']) - - input_jars = [] - for inputs_arg in options.inputs: - input_jars.extend(build_utils.ParseGnList(inputs_arg)) + parser.add_argument('--output', required=True, help='Path to output jar.') + parser.add_argument('--jars', required=True, help='GN list of jar inputs.') + options = parser.parse_args(args) + input_jars = build_utils.ParseGnList(options.jars) build_utils.MergeZips(options.output, input_jars) if options.depfile: @@ -31,4 +28,4 @@ def main(args): if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) + main(sys.argv[1:]) diff --git a/chromium/build/android/gyp/create_java_binary_script.py b/chromium/build/android/gyp/create_java_binary_script.py index 3bde1c334ed..141b1752c14 100755 --- a/chromium/build/android/gyp/create_java_binary_script.py +++ b/chromium/build/android/gyp/create_java_binary_script.py @@ -69,7 +69,6 @@ def main(argv): parser = optparse.OptionParser() build_utils.AddDepfileOption(parser) parser.add_option('--output', help='Output path for executable script.') - parser.add_option('--jar-path', help='Path to the main jar.') parser.add_option('--main-class', help='Name of the java class with the "main" entry point.') parser.add_option('--classpath', action='append', default=[], @@ -86,7 +85,7 @@ def main(argv): else: noverify_flag = '' - classpath = [options.jar_path] + classpath = [] for cp_arg in options.classpath: classpath += build_utils.ParseGnList(cp_arg) diff --git a/chromium/build/android/gyp/dist_aar.py b/chromium/build/android/gyp/dist_aar.py new file mode 100755 index 00000000000..51515fa6510 --- /dev/null +++ b/chromium/build/android/gyp/dist_aar.py @@ -0,0 +1,115 @@ +#!/usr/bin/env python +# +# Copyright 2017 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +"""Creates an Android .aar file.""" + +import argparse +import os +import posixpath +import shutil +import sys +import tempfile +import zipfile + +from util import build_utils + + +_ANDROID_BUILD_DIR = os.path.dirname(os.path.dirname(__file__)) + + +def _MergeRTxt(r_paths): + """Merging the given R.txt files and returns them as a string.""" + all_lines = set() + for r_path in r_paths: + with open(r_path) as f: + all_lines.update(f.readlines()) + return ''.join(sorted(all_lines)) + + +def _MergeProguardConfigs(proguard_configs): + """Merging the given proguard config files and returns them as a string.""" + ret = [] + for config in proguard_configs: + ret.append('# FROM: {}'.format(config)) + with open(config) as f: + ret.append(f.read()) + return '\n'.join(ret) + + +def _AddResources(aar_zip, resource_zips): + """Adds all resource zips to the given aar_zip. + + Ensures all res/values/* files have unique names by prefixing them. + """ + for i, path in enumerate(resource_zips): + with zipfile.ZipFile(path) as res_zip: + for info in res_zip.infolist(): + data = res_zip.read(info) + dirname, basename = posixpath.split(info.filename) + if 'values' in dirname: + basename = '{}_{}'.format(basename, i) + info.filename = posixpath.join(dirname, basename) + info.filename = posixpath.join('res', info.filename) + aar_zip.writestr(info, data) + + +def main(args): + args = build_utils.ExpandFileArgs(args) + parser = argparse.ArgumentParser() + build_utils.AddDepfileOption(parser) + parser.add_argument('--output', required=True, help='Path to output aar.') + parser.add_argument('--jars', required=True, help='GN list of jar inputs.') + parser.add_argument('--dependencies-res-zips', required=True, + help='GN list of resource zips') + parser.add_argument('--r-text-files', required=True, + help='GN list of R.txt files to merge') + parser.add_argument('--proguard-configs', required=True, + help='GN list of ProGuard flag files to merge.') + parser.add_argument( + '--android-manifest', + help='Path to AndroidManifest.xml to include.', + default=os.path.join(_ANDROID_BUILD_DIR, 'AndroidManifest.xml')) + + options = parser.parse_args(args) + options.jars = build_utils.ParseGnList(options.jars) + options.dependencies_res_zips = build_utils.ParseGnList( + options.dependencies_res_zips) + options.r_text_files = build_utils.ParseGnList(options.r_text_files) + options.proguard_configs = build_utils.ParseGnList(options.proguard_configs) + + with tempfile.NamedTemporaryFile(delete=False) as staging_file: + try: + with zipfile.ZipFile(staging_file.name, 'w') as z: + build_utils.AddToZipHermetic( + z, 'AndroidManifest.xml', src_path=options.android_manifest) + + with tempfile.NamedTemporaryFile() as jar_file: + build_utils.MergeZips(jar_file.name, options.jars) + build_utils.AddToZipHermetic(z, 'classes.jar', src_path=jar_file.name) + + build_utils.AddToZipHermetic( + z, 'R.txt', data=_MergeRTxt(options.r_text_files)) + build_utils.AddToZipHermetic(z, 'public.txt', data='') + + if options.proguard_configs: + build_utils.AddToZipHermetic( + z, 'proguard.txt', + data=_MergeProguardConfigs(options.proguard_configs)) + + _AddResources(z, options.dependencies_res_zips) + except: + os.unlink(staging_file.name) + raise + shutil.move(staging_file.name, options.output) + + if options.depfile: + all_inputs = (options.jars + options.dependencies_res_zips + + options.r_text_files + options.proguard_configs) + build_utils.WriteDepfile(options.depfile, options.output, all_inputs) + + +if __name__ == '__main__': + main(sys.argv[1:]) 
diff --git a/chromium/build/android/gyp/javac.py b/chromium/build/android/gyp/javac.py index 48512ab4860..4ad1d35ce3c 100755 --- a/chromium/build/android/gyp/javac.py +++ b/chromium/build/android/gyp/javac.py @@ -10,7 +10,6 @@ import os import shutil import re import sys -import textwrap from util import build_utils from util import md5_check @@ -21,6 +20,71 @@ sys.path.append(build_utils.COLORAMA_ROOT) import colorama +ERRORPRONE_WARNINGS_TO_TURN_OFF = [ + # TODO(crbug.com/801208): Follow steps in bug. + 'FloatingPointLiteralPrecision', + # TODO(crbug.com/801210): Follow steps in bug. + 'SynchronizeOnNonFinalField', + # TODO(crbug.com/801253): Follow steps in bug. + 'JavaLangClash', + # TODO(crbug.com/801256): Follow steps in bug. + 'ParameterName', + # TODO(crbug.com/801261): Follow steps in bug + 'ArgumentSelectionDefectChecker', + # TODO(crbug.com/801268): Follow steps in bug. + 'NarrowingCompoundAssignment', + # TODO(crbug.com/802073): Follow steps in bug. + 'TypeParameterUnusedInFormals', + # TODO(crbug.com/802075): Follow steps in bug + 'ReferenceEquality', + # Android platform default is always UTF-8. + # https://developer.android.com/reference/java/nio/charset/Charset.html#defaultCharset() + 'DefaultCharset', + # Low priority since the alternatives still work. + 'JdkObsolete', + # We don't use that many lambdas. + 'FunctionalInterfaceClash', + # There are lots of times when we just want to post a task. + 'FutureReturnValueIgnored', + # Nice to be explicit about operators, but not necessary. + 'OperatorPrecedence', + # Just false positives in our code. + 'ThreadJoinLoop', + # Alias of ParameterName warning. + 'NamedParameters', + # Low priority corner cases with String.split. + 'StringSplitter', + # Preferred to use another method since it propagates exceptions better. + 'ClassNewInstance', + # Nice to have static inner classes but not necessary. + 'ClassCanBeStatic', + # Explicit is better than implicit. + 'FloatCast', + # Results in false positives. + 'ThreadLocalUsage', + # Also just false positives. + 'Finally', + # False positives for Chromium. + 'FragmentNotInstantiable', + # Low priority to fix. + 'HidingField', + # Low priority. + 'IntLongMath', + # Low priority. + 'BadComparable', + # Low priority. + 'EqualsHashCode', + # Nice to fix but low priority. + 'TypeParameterShadowing', + # Good to have immutable enums, also low priority. + 'ImmutableEnumChecker', +] + +ERRORPRONE_WARNINGS_TO_ERROR = [ + # Add warnings to this after fixing/suppressing all instances in our codebase. 
+] + + def ColorJavacOutput(output): fileline_prefix = r'(?P<fileline>(?P<file>[-.\w/\\]+.java):(?P<line>[0-9]+):)' warning_re = re.compile( @@ -54,14 +118,6 @@ def ColorJavacOutput(output): return '\n'.join(map(ApplyColor, output.split('\n'))) -def _FilterJavaFiles(paths, filters): - return [f for f in paths - if not filters or build_utils.MatchesGlob(f, filters)] - - -_MAX_MANIFEST_LINE_LEN = 72 - - def _ExtractClassFiles(jar_path, dest_dir, java_files): """Extracts all .class files not corresponding to |java_files|.""" # Two challenges exist here: @@ -148,7 +204,8 @@ def _CheckPathMatchesClassName(java_file): (java_file, expected_path_suffix)) -def _OnStaleMd5(changes, options, javac_cmd, java_files, classpath_inputs): +def _OnStaleMd5(changes, options, javac_cmd, java_files, classpath_inputs, + classpath): incremental = options.incremental # Don't bother enabling incremental compilation for third_party code, since # _CheckPathMatchesClassName() fails on some of it, and it's not really much @@ -161,9 +218,6 @@ def _OnStaleMd5(changes, options, javac_cmd, java_files, classpath_inputs): with build_utils.TempDir() as temp_dir: srcjars = options.java_srcjars - # The .excluded.jar contains .class files excluded from the main jar. - # It is used for incremental compiles. - excluded_jar_path = options.jar_path.replace('.jar', '.excluded.jar') classes_dir = os.path.join(temp_dir, 'classes') os.makedirs(classes_dir) @@ -206,7 +260,6 @@ def _OnStaleMd5(changes, options, javac_cmd, java_files, classpath_inputs): for f in changes.IterChangedSubpaths(srcjar)) build_utils.ExtractAll(srcjar, path=java_dir, pattern='*.java') jar_srcs = build_utils.FindInDirectory(java_dir, '*.java') - jar_srcs = _FilterJavaFiles(jar_srcs, options.javac_includes) java_files.extend(jar_srcs) if changed_paths: # Set the mtime of all sources to 0 since we use the absence of .class @@ -219,13 +272,10 @@ def _OnStaleMd5(changes, options, javac_cmd, java_files, classpath_inputs): changed_java_files = [p for p in java_files if p in changed_paths] if os.path.exists(options.jar_path): _ExtractClassFiles(options.jar_path, classes_dir, changed_java_files) - if os.path.exists(excluded_jar_path): - _ExtractClassFiles(excluded_jar_path, classes_dir, changed_java_files) # Add the extracted files to the classpath. This is required because # when compiling only a subset of files, classes that haven't changed # need to be findable. - classpath_idx = javac_cmd.index('-classpath') - javac_cmd[classpath_idx + 1] += ':' + classes_dir + classpath.append(classes_dir) # Can happen when a target goes from having no sources, to having sources. # It's created by the call to build_utils.Touch() below. @@ -235,7 +285,20 @@ def _OnStaleMd5(changes, options, javac_cmd, java_files, classpath_inputs): # Don't include the output directory in the initial set of args since it # being in a temp dir makes it unstable (breaks md5 stamping). - cmd = javac_cmd + ['-d', classes_dir] + java_files + cmd = javac_cmd + ['-d', classes_dir] + + # Pass classpath and source paths as response files to avoid extremely + # long command lines that are tedius to debug. 
+ if classpath: + classpath_rsp_path = os.path.join(temp_dir, 'classpath.txt') + with open(classpath_rsp_path, 'w') as f: + f.write(':'.join(classpath)) + cmd += ['-classpath', '@' + classpath_rsp_path] + + java_files_rsp_path = os.path.join(temp_dir, 'files_list.txt') + with open(java_files_rsp_path, 'w') as f: + f.write(' '.join(java_files)) + cmd += ['@' + java_files_rsp_path] # JMake prints out some diagnostic logs that we want to ignore. # This assumes that all compiler output goes through stderr. @@ -263,30 +326,24 @@ def _OnStaleMd5(changes, options, javac_cmd, java_files, classpath_inputs): # Make sure output exists. build_utils.Touch(pdb_path) - glob = options.jar_excluded_classes - inclusion_predicate = lambda f: not build_utils.MatchesGlob(f, glob) - exclusion_predicate = lambda f: not inclusion_predicate(f) - jar.JarDirectory(classes_dir, options.jar_path, - predicate=inclusion_predicate, - provider_configurations=options.provider_configurations, - additional_files=options.additional_jar_files) - jar.JarDirectory(classes_dir, - excluded_jar_path, - predicate=exclusion_predicate, provider_configurations=options.provider_configurations, additional_files=options.additional_jar_files) +def _ParseAndFlattenGnLists(gn_lists): + ret = [] + for arg in gn_lists: + ret.extend(build_utils.ParseGnList(arg)) + return ret + + def _ParseOptions(argv): parser = optparse.OptionParser() build_utils.AddDepfileOption(parser) parser.add_option( - '--src-gendirs', - help='Directories containing generated java files.') - parser.add_option( '--java-srcjars', action='append', default=[], @@ -303,30 +360,25 @@ def _ParseOptions(argv): parser.add_option( '--classpath', action='append', - help='Classpath for javac. If this is specified multiple times, they ' - 'will all be appended to construct the classpath.') + help='Classpath to use when annotation processors are present.') + parser.add_option( + '--interface-classpath', + action='append', + help='Classpath to use when no annotation processors are present.') parser.add_option( '--incremental', action='store_true', help='Whether to re-use .class files rather than recompiling them ' '(when possible).') parser.add_option( - '--javac-includes', - default='', - help='A list of file patterns. 
If provided, only java files that match' - 'one of the patterns will be compiled.') - parser.add_option( - '--jar-excluded-classes', - default='', - help='List of .class file patterns to exclude from the jar.') - parser.add_option( - '--processor', - dest='processors', + '--processors', action='append', - help='Annotation processor to use.') + help='GN list of annotation processor main classes.') parser.add_option( '--processorpath', - help='Where javac should look for annotation processors.') + action='append', + help='GN list of jars that comprise the classpath used for Annotation ' + 'Processors.') parser.add_option( '--processor-arg', dest='processor_args', @@ -354,7 +406,6 @@ def _ParseOptions(argv): '--use-errorprone-path', help='Use the Errorprone compiler at this path.') parser.add_option('--jar-path', help='Jar output path.') - parser.add_option('--stamp', help='Path to touch on success.') parser.add_option( '--javac-arg', action='append', @@ -364,10 +415,14 @@ def _ParseOptions(argv): options, args = parser.parse_args(argv) build_utils.CheckOptions(options, parser, required=('jar_path',)) - bootclasspath = [] - for arg in options.bootclasspath: - bootclasspath += build_utils.ParseGnList(arg) - options.bootclasspath = bootclasspath + options.bootclasspath = _ParseAndFlattenGnLists(options.bootclasspath) + options.classpath = _ParseAndFlattenGnLists(options.classpath) + options.interface_classpath = _ParseAndFlattenGnLists( + options.interface_classpath) + options.processorpath = _ParseAndFlattenGnLists(options.processorpath) + options.processors = _ParseAndFlattenGnLists(options.processors) + options.java_srcjars = _ParseAndFlattenGnLists(options.java_srcjars) + if options.java_version == '1.8' and options.bootclasspath: # Android's boot jar doesn't contain all java 8 classes. # See: https://github.com/evant/gradle-retrolambda/issues/23. @@ -379,29 +434,12 @@ def _ParseOptions(argv): rt_jar = os.path.join(jdk_dir, 'jre', 'lib', 'rt.jar') options.bootclasspath.append(rt_jar) - classpath = [] - for arg in options.classpath: - classpath += build_utils.ParseGnList(arg) - options.classpath = classpath - - java_srcjars = [] - for arg in options.java_srcjars: - java_srcjars += build_utils.ParseGnList(arg) - options.java_srcjars = java_srcjars - additional_jar_files = [] for arg in options.additional_jar_files or []: filepath, jar_filepath = arg.split(':') additional_jar_files.append((filepath, jar_filepath)) options.additional_jar_files = additional_jar_files - if options.src_gendirs: - options.src_gendirs = build_utils.ParseGnList(options.src_gendirs) - - options.javac_includes = build_utils.ParseGnList(options.javac_includes) - options.jar_excluded_classes = ( - build_utils.ParseGnList(options.jar_excluded_classes)) - java_files = [] for arg in args: # Interpret a path prefixed with @ as a file containing a list of sources. @@ -419,11 +457,6 @@ def main(argv): argv = build_utils.ExpandFileArgs(argv) options, java_files = _ParseOptions(argv) - if options.src_gendirs: - java_files += build_utils.FindInDirectories(options.src_gendirs, '*.java') - - java_files = _FilterJavaFiles(java_files, options.javac_includes) - if options.use_errorprone_path: javac_path = options.use_errorprone_path else: @@ -431,21 +464,20 @@ def main(argv): javac_cmd = [javac_path] javac_cmd.extend(( - '-g', - # Chromium only allows UTF8 source files. Being explicit avoids - # javac pulling a default encoding from the user's environment. 
- '-encoding', 'UTF-8', - # Make sure we do not pass an empty string to -classpath and -sourcepath. - '-classpath', ':'.join(options.classpath) or ':', - # Prevent compiler from compiling .java files not listed as inputs. - # See: http://blog.ltgt.net/most-build-tools-misuse-javac/ - '-sourcepath', ':', + '-g', + # Chromium only allows UTF8 source files. Being explicit avoids + # javac pulling a default encoding from the user's environment. + '-encoding', 'UTF-8', + # Prevent compiler from compiling .java files not listed as inputs. + # See: http://blog.ltgt.net/most-build-tools-misuse-javac/ + '-sourcepath', ':', )) - if options.bootclasspath: - javac_cmd.extend([ - '-bootclasspath', ':'.join(options.bootclasspath) - ]) + if options.use_errorprone_path: + for warning in ERRORPRONE_WARNINGS_TO_TURN_OFF: + javac_cmd.append('-Xep:{}:OFF'.format(warning)) + for warning in ERRORPRONE_WARNINGS_TO_ERROR: + javac_cmd.append('-Xep:{}:ERROR'.format(warning)) if options.java_version: javac_cmd.extend([ @@ -463,26 +495,27 @@ def main(argv): if options.processors: javac_cmd.extend(['-processor', ','.join(options.processors)]) + + if options.bootclasspath: + javac_cmd.extend(['-bootclasspath', ':'.join(options.bootclasspath)]) + + # Annotation processors crash when given interface jars. + active_classpath = ( + options.classpath if options.processors else options.interface_classpath) + classpath = [] + if active_classpath: + classpath.extend(active_classpath) + if options.processorpath: - javac_cmd.extend(['-processorpath', options.processorpath]) + javac_cmd.extend(['-processorpath', ':'.join(options.processorpath)]) if options.processor_args: for arg in options.processor_args: javac_cmd.extend(['-A%s' % arg]) javac_cmd.extend(options.javac_arg) - classpath_inputs = options.bootclasspath - if options.classpath: - if options.classpath[0].endswith('.interface.jar'): - classpath_inputs.extend(options.classpath) - else: - # TODO(agrieve): Remove this .TOC heuristic once GYP is no more. - for path in options.classpath: - if os.path.exists(path + '.TOC'): - classpath_inputs.append(path + '.TOC') - else: - classpath_inputs.append(path) - + classpath_inputs = (options.bootclasspath + options.interface_classpath + + options.processorpath) # GN already knows of java_files, so listing them just make things worse when # they change. depfile_deps = [javac_path] + classpath_inputs + options.java_srcjars @@ -490,7 +523,6 @@ def main(argv): output_paths = [ options.jar_path, - options.jar_path.replace('.jar', '.excluded.jar'), ] if options.incremental: output_paths.append(options.jar_path + '.pdb') @@ -503,11 +535,11 @@ def main(argv): # of them does not change what gets written to the depsfile. 
build_utils.CallAndWriteDepfileIfStale( lambda changes: _OnStaleMd5(changes, options, javac_cmd, java_files, - classpath_inputs), + classpath_inputs, classpath), options, depfile_deps=depfile_deps, input_paths=input_paths, - input_strings=javac_cmd, + input_strings=javac_cmd + classpath, output_paths=output_paths, force=force, pass_changes=True) diff --git a/chromium/build/android/gyp/lint.py b/chromium/build/android/gyp/lint.py index 22d05061fba..2ad01c1be63 100755 --- a/chromium/build/android/gyp/lint.py +++ b/chromium/build/android/gyp/lint.py @@ -25,7 +25,7 @@ def _OnStaleMd5(lint_path, config_path, processed_config_path, manifest_path, result_path, product_dir, sources, jar_path, cache_dir, android_sdk_version, srcjars, resource_sources, disable=None, classpath=None, can_fail_build=False, - silent=False): + include_unexpected=False, silent=False): def _RebasePath(path): """Returns relative path to top-level src dir. @@ -173,9 +173,11 @@ def _OnStaleMd5(lint_path, config_path, processed_config_path, # Put the manifest in a temporary directory in order to avoid lint detecting # sibling res/ and src/ directories (which should be pass explicitly if they # are to be included). - if manifest_path: - os.symlink(os.path.abspath(manifest_path), - os.path.join(project_dir, 'AndroidManifest.xml')) + if not manifest_path: + manifest_path = os.path.join( + _SRC_ROOT, 'build', 'android', 'AndroidManifest.xml') + os.symlink(os.path.abspath(manifest_path), + os.path.join(project_dir, 'AndroidManifest.xml')) cmd.append(project_dir) if os.path.exists(result_path): @@ -191,9 +193,17 @@ def _OnStaleMd5(lint_path, config_path, processed_config_path, # We drop all lines that contain _JAVA_OPTIONS from the output stderr_filter = lambda l: re.sub(r'.*_JAVA_OPTIONS.*\n?', '', l) + def fail_func(returncode, stderr): + if returncode != 0: + return True + if (include_unexpected and + 'Unexpected failure during lint analysis' in stderr): + return True + return False + try: build_utils.CheckOutput(cmd, cwd=_SRC_ROOT, env=env or None, - stderr_filter=stderr_filter) + stderr_filter=stderr_filter, fail_func=fail_func) except build_utils.CalledProcessError: # There is a problem with lint usage if not os.path.exists(result_path): @@ -234,11 +244,14 @@ def _OnStaleMd5(lint_path, config_path, processed_config_path, raise _ProcessResultFile() - msg = ('\nLint found %d new issues.\n' - ' - For full explanation, please refer to %s\n' - ' - For more information about lint and how to fix lint issues,' - ' please refer to %s\n' % - (num_issues, _RebasePath(result_path), _LINT_MD_URL)) + if num_issues == 0 and include_unexpected: + msg = 'Please refer to output above for unexpected lint failures.\n' + else: + msg = ('\nLint found %d new issues.\n' + ' - For full explanation, please refer to %s\n' + ' - For more information about lint and how to fix lint issues,' + ' please refer to %s\n' % + (num_issues, _RebasePath(result_path), _LINT_MD_URL)) if not silent: print >> sys.stderr, msg if can_fail_build: @@ -269,6 +282,9 @@ def main(): parser.add_argument('--can-fail-build', action='store_true', help='If set, script will exit with nonzero exit status' ' if lint errors are present') + parser.add_argument('--include-unexpected-failures', action='store_true', + help='If set, script will exit with nonzero exit status' + ' if lint itself crashes with unexpected failures.') parser.add_argument('--config-path', help='Path to lint suppressions file.') parser.add_argument('--disable', @@ -345,6 +361,7 @@ def main(): input_strings = [ 
args.can_fail_build, + args.include_unexpected_failures, args.silent, ] if args.android_sdk_version: @@ -373,6 +390,7 @@ def main(): disable=disable, classpath=classpath, can_fail_build=args.can_fail_build, + include_unexpected=args.include_unexpected_failures, silent=args.silent), args, input_paths=input_paths, diff --git a/chromium/build/android/gyp/package_resources.py b/chromium/build/android/gyp/package_resources.py deleted file mode 100755 index 2be10bb6824..00000000000 --- a/chromium/build/android/gyp/package_resources.py +++ /dev/null @@ -1,449 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2014 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -# pylint: disable=C0301 -"""Package resources into an apk. - -See https://android.googlesource.com/platform/tools/base/+/master/legacy/ant-tasks/src/main/java/com/android/ant/AaptExecTask.java -and -https://android.googlesource.com/platform/sdk/+/master/files/ant/build.xml -""" -# pylint: enable=C0301 - -import multiprocessing.pool -import optparse -import os -import re -import shutil -import subprocess -import sys -import zipfile - -from util import build_utils - - -# A variation of this lists also exists in: -# //base/android/java/src/org/chromium/base/LocaleUtils.java -_CHROME_TO_ANDROID_LOCALE_MAP = { - 'en-GB': 'en-rGB', - 'en-US': 'en-rUS', - 'es-419': 'es-rUS', - 'fil': 'tl', - 'he': 'iw', - 'id': 'in', - 'pt-PT': 'pt-rPT', - 'pt-BR': 'pt-rBR', - 'yi': 'ji', - 'zh-CN': 'zh-rCN', - 'zh-TW': 'zh-rTW', -} - -# List is generated from the chrome_apk.apk_intermediates.ap_ via: -# unzip -l $FILE_AP_ | cut -c31- | grep res/draw | cut -d'/' -f 2 | sort \ -# | uniq | grep -- -tvdpi- | cut -c10- -# and then manually sorted. -# Note that we can't just do a cross-product of dimensions because the filenames -# become too big and aapt fails to create the files. -# This leaves all default drawables (mdpi) in the main apk. Android gets upset -# though if any drawables are missing from the default drawables/ directory. -DENSITY_SPLITS = { - 'hdpi': ( - 'hdpi-v4', # Order matters for output file names. - 'ldrtl-hdpi-v4', - 'sw600dp-hdpi-v13', - 'ldrtl-hdpi-v17', - 'ldrtl-sw600dp-hdpi-v17', - 'hdpi-v21', - ), - 'xhdpi': ( - 'xhdpi-v4', - 'ldrtl-xhdpi-v4', - 'sw600dp-xhdpi-v13', - 'ldrtl-xhdpi-v17', - 'ldrtl-sw600dp-xhdpi-v17', - 'xhdpi-v21', - ), - 'xxhdpi': ( - 'xxhdpi-v4', - 'ldrtl-xxhdpi-v4', - 'sw600dp-xxhdpi-v13', - 'ldrtl-xxhdpi-v17', - 'ldrtl-sw600dp-xxhdpi-v17', - 'xxhdpi-v21', - ), - 'xxxhdpi': ( - 'xxxhdpi-v4', - 'ldrtl-xxxhdpi-v4', - 'sw600dp-xxxhdpi-v13', - 'ldrtl-xxxhdpi-v17', - 'ldrtl-sw600dp-xxxhdpi-v17', - 'xxxhdpi-v21', - ), - 'tvdpi': ( - 'tvdpi-v4', - 'sw600dp-tvdpi-v13', - 'ldrtl-sw600dp-tvdpi-v17', - ), -} - - -_PNG_TO_WEBP_ARGS = [ - '-mt', '-quiet', '-m', '6', '-q', '100', '-lossless', '-o'] - - -def _ParseArgs(args): - """Parses command line options. 
- - Returns: - An options object as from optparse.OptionsParser.parse_args() - """ - parser = optparse.OptionParser() - build_utils.AddDepfileOption(parser) - parser.add_option('--android-sdk-jar', - help='path to the Android SDK jar.') - parser.add_option('--aapt-path', - help='path to the Android aapt tool') - parser.add_option('--debuggable', - action='store_true', - help='Whether to add android:debuggable="true"') - parser.add_option('--android-manifest', help='AndroidManifest.xml path') - parser.add_option('--version-code', help='Version code for apk.') - parser.add_option('--version-name', help='Version name for apk.') - parser.add_option( - '--shared-resources', - action='store_true', - help='Make a resource package that can be loaded by a different' - 'application at runtime to access the package\'s resources.') - parser.add_option( - '--app-as-shared-lib', - action='store_true', - help='Make a resource package that can be loaded as shared library') - parser.add_option('--resource-zips', - default='[]', - help='zip files containing resources to be packaged') - parser.add_option('--asset-dir', - help='directories containing assets to be packaged') - parser.add_option('--no-compress', help='disables compression for the ' - 'given comma separated list of extensions') - parser.add_option( - '--create-density-splits', - action='store_true', - help='Enables density splits') - parser.add_option('--language-splits', - default='[]', - help='GN list of languages to create splits for') - parser.add_option('--locale-whitelist', - default='[]', - help='GN list of languages to include. All other language ' - 'configs will be stripped out. List may include ' - 'a combination of Android locales or Chrome locales.') - parser.add_option('--apk-path', - help='Path to output (partial) apk.') - parser.add_option('--exclude-xxxhdpi', action='store_true', - help='Do not include xxxhdpi drawables.') - parser.add_option('--xxxhdpi-whitelist', - default='[]', - help='GN list of globs that say which xxxhdpi images to ' - 'include even when --exclude-xxxhdpi is set.') - parser.add_option('--png-to-webp', action='store_true', - help='Convert png files to webp format.') - parser.add_option('--webp-binary', default='', - help='Path to the cwebp binary.') - parser.add_option('--support-zh-hk', action='store_true', - help='Tell aapt to support zh-rHK.') - - options, positional_args = parser.parse_args(args) - - if positional_args: - parser.error('No positional arguments should be given.') - - # Check that required options have been provided. - required_options = ('android_sdk_jar', 'aapt_path', 'android_manifest', - 'version_code', 'version_name', 'apk_path') - - build_utils.CheckOptions(options, parser, required=required_options) - - options.resource_zips = build_utils.ParseGnList(options.resource_zips) - options.language_splits = build_utils.ParseGnList(options.language_splits) - options.locale_whitelist = build_utils.ParseGnList(options.locale_whitelist) - options.xxxhdpi_whitelist = build_utils.ParseGnList(options.xxxhdpi_whitelist) - return options - - -def _ToAaptLocales(locale_whitelist, support_zh_hk): - """Converts the list of Chrome locales to aapt config locales.""" - ret = set() - for locale in locale_whitelist: - locale = _CHROME_TO_ANDROID_LOCALE_MAP.get(locale, locale) - if locale is None or ('-' in locale and '-r' not in locale): - raise Exception('_CHROME_TO_ANDROID_LOCALE_MAP needs updating.' - ' Found: %s' % locale) - ret.add(locale) - # Always keep non-regional fall-backs. 
- language = locale.split('-')[0] - ret.add(language) - - # We don't actually support zh-HK in Chrome on Android, but we mimic the - # native side behavior where we use zh-TW resources when the locale is set to - # zh-HK. See https://crbug.com/780847. - if support_zh_hk: - assert not any('HK' in l for l in locale_whitelist), ( - 'Remove special logic if zh-HK is now supported (crbug.com/780847).') - ret.add('zh-rHK') - return sorted(ret) - - -def MoveImagesToNonMdpiFolders(res_root): - """Move images from drawable-*-mdpi-* folders to drawable-* folders. - - Why? http://crbug.com/289843 - """ - for src_dir_name in os.listdir(res_root): - src_components = src_dir_name.split('-') - if src_components[0] != 'drawable' or 'mdpi' not in src_components: - continue - src_dir = os.path.join(res_root, src_dir_name) - if not os.path.isdir(src_dir): - continue - dst_components = [c for c in src_components if c != 'mdpi'] - assert dst_components != src_components - dst_dir_name = '-'.join(dst_components) - dst_dir = os.path.join(res_root, dst_dir_name) - build_utils.MakeDirectory(dst_dir) - for src_file_name in os.listdir(src_dir): - if not src_file_name.endswith('.png'): - continue - src_file = os.path.join(src_dir, src_file_name) - dst_file = os.path.join(dst_dir, src_file_name) - assert not os.path.lexists(dst_file) - shutil.move(src_file, dst_file) - - -def PackageArgsForExtractedZip(d): - """Returns the aapt args for an extracted resources zip. - - A resources zip either contains the resources for a single target or for - multiple targets. If it is multiple targets merged into one, the actual - resource directories will be contained in the subdirectories 0, 1, 2, ... - """ - subdirs = [os.path.join(d, s) for s in os.listdir(d)] - subdirs = [s for s in subdirs if os.path.isdir(s)] - is_multi = any(os.path.basename(s).isdigit() for s in subdirs) - if is_multi: - res_dirs = sorted(subdirs, key=lambda p : int(os.path.basename(p))) - else: - res_dirs = [d] - package_command = [] - for d in res_dirs: - MoveImagesToNonMdpiFolders(d) - package_command += ['-S', d] - return package_command - - -def _GenerateDensitySplitPaths(apk_path): - for density, config in DENSITY_SPLITS.iteritems(): - src_path = '%s_%s' % (apk_path, '_'.join(config)) - dst_path = '%s_%s' % (apk_path, density) - yield src_path, dst_path - - -def _GenerateLanguageSplitOutputPaths(apk_path, languages): - for lang in languages: - yield '%s_%s' % (apk_path, lang) - - -def RenameDensitySplits(apk_path): - """Renames all density splits to have shorter / predictable names.""" - for src_path, dst_path in _GenerateDensitySplitPaths(apk_path): - shutil.move(src_path, dst_path) - - -def CheckForMissedConfigs(apk_path, check_density, languages): - """Raises an exception if apk_path contains any unexpected configs.""" - triggers = [] - if check_density: - triggers.extend(re.compile('-%s' % density) for density in DENSITY_SPLITS) - if languages: - triggers.extend(re.compile(r'-%s\b' % lang) for lang in languages) - with zipfile.ZipFile(apk_path) as main_apk_zip: - for name in main_apk_zip.namelist(): - for trigger in triggers: - if trigger.search(name) and not 'mipmap-' in name: - raise Exception(('Found config in main apk that should have been ' + - 'put into a split: %s\nYou need to update ' + - 'package_resources.py to include this new ' + - 'config (trigger=%s)') % (name, trigger.pattern)) - - -def _ConstructMostAaptArgs(options): - package_command = [ - options.aapt_path, - 'package', - '--version-code', options.version_code, - '--version-name', 
options.version_name, - '-M', options.android_manifest, - '--no-crunch', - '-f', - '--auto-add-overlay', - '--no-version-vectors', - '-I', options.android_sdk_jar, - '-F', options.apk_path, - '--ignore-assets', build_utils.AAPT_IGNORE_PATTERN, - ] - - if options.no_compress: - for ext in options.no_compress.split(','): - package_command += ['-0', ext] - - if options.shared_resources: - package_command.append('--shared-lib') - - if options.app_as_shared_lib: - package_command.append('--app-as-shared-lib') - - if options.asset_dir and os.path.exists(options.asset_dir): - package_command += ['-A', options.asset_dir] - - if options.create_density_splits: - for config in DENSITY_SPLITS.itervalues(): - package_command.extend(('--split', ','.join(config))) - - if options.language_splits: - for lang in options.language_splits: - package_command.extend(('--split', lang)) - - if options.debuggable: - package_command += ['--debug-mode'] - - if options.locale_whitelist: - aapt_locales = _ToAaptLocales( - options.locale_whitelist, options.support_zh_hk) - package_command += ['-c', ','.join(aapt_locales)] - - return package_command - - -def _ResourceNameFromPath(path): - return os.path.splitext(os.path.basename(path))[0] - - -def _CreateExtractPredicate(dep_zips, exclude_xxxhdpi, xxxhdpi_whitelist): - if not exclude_xxxhdpi: - # Do not extract dotfiles (e.g. ".gitkeep"). aapt ignores them anyways. - return lambda path: os.path.basename(path)[0] != '.' - - # Returns False only for xxxhdpi non-mipmap, non-whitelisted drawables. - naive_predicate = lambda path: ( - not re.search(r'[/-]xxxhdpi[/-]', path) or - re.search(r'[/-]mipmap[/-]', path) or - build_utils.MatchesGlob(path, xxxhdpi_whitelist)) - - # Build a set of all non-xxxhdpi drawables to ensure that we never exclude any - # xxxhdpi drawable that does not exist in other densities. - non_xxxhdpi_drawables = set() - for resource_zip_path in dep_zips: - with zipfile.ZipFile(resource_zip_path) as zip_file: - for path in zip_file.namelist(): - if re.search(r'[/-]drawable[/-]', path) and naive_predicate(path): - non_xxxhdpi_drawables.add(_ResourceNameFromPath(path)) - - return lambda path: (naive_predicate(path) or - _ResourceNameFromPath(path) not in non_xxxhdpi_drawables) - - -def _ConvertToWebP(webp_binary, png_files): - pool = multiprocessing.pool.ThreadPool(10) - def convert_image(png_path): - root = os.path.splitext(png_path)[0] - webp_path = root + '.webp' - args = [webp_binary, png_path] + _PNG_TO_WEBP_ARGS + [webp_path] - subprocess.check_call(args) - os.remove(png_path) - # Android requires pngs for 9-patch images. 
- pool.map(convert_image, [f for f in png_files if not f.endswith('.9.png')]) - pool.close() - pool.join() - - -def _OnStaleMd5(package_command, options): - with build_utils.TempDir() as temp_dir: - if options.resource_zips: - dep_zips = options.resource_zips - extract_predicate = _CreateExtractPredicate( - dep_zips, options.exclude_xxxhdpi, options.xxxhdpi_whitelist) - png_paths = [] - package_subdirs = [] - for z in dep_zips: - subdir = os.path.join(temp_dir, os.path.basename(z)) - if os.path.exists(subdir): - raise Exception('Resource zip name conflict: ' + os.path.basename(z)) - extracted_files = build_utils.ExtractAll( - z, path=subdir, predicate=extract_predicate) - if extracted_files: - package_subdirs.append(subdir) - png_paths.extend(f for f in extracted_files if f.endswith('.png')) - if png_paths and options.png_to_webp: - _ConvertToWebP(options.webp_binary, png_paths) - for subdir in package_subdirs: - package_command += PackageArgsForExtractedZip(subdir) - - build_utils.CheckOutput( - package_command, print_stdout=False, print_stderr=False) - - if options.create_density_splits or options.language_splits: - CheckForMissedConfigs(options.apk_path, options.create_density_splits, - options.language_splits) - - if options.create_density_splits: - RenameDensitySplits(options.apk_path) - - -def main(args): - args = build_utils.ExpandFileArgs(args) - options = _ParseArgs(args) - - package_command = _ConstructMostAaptArgs(options) - - output_paths = [options.apk_path] - - if options.create_density_splits: - for _, dst_path in _GenerateDensitySplitPaths(options.apk_path): - output_paths.append(dst_path) - output_paths.extend( - _GenerateLanguageSplitOutputPaths(options.apk_path, - options.language_splits)) - - input_paths = [options.android_manifest] + options.resource_zips - - input_strings = [options.exclude_xxxhdpi] + options.xxxhdpi_whitelist - input_strings.extend(package_command) - if options.png_to_webp: - # This is necessary to ensure conversion if the option is toggled. - input_strings.append('png_to_webp') - if options.support_zh_hk: - input_strings.append('support_zh_hk') - - # The md5_check.py doesn't count file path in md5 intentionally, - # in order to repackage resources when assets' name changed, we need - # to put assets into input_strings, as we know the assets path isn't - # changed among each build if there is no asset change. 
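A minimal sketch of the staleness check this comment relies on (illustrative only, not the real md5_check.py or build_utils helpers): only file contents and the explicit input strings feed the digest, so a renamed asset is invisible unless its path is also listed among the strings.

import hashlib
import os

def _digest(input_paths, input_strings):
    md5 = hashlib.md5()
    for path in sorted(input_paths):
        with open(path, 'rb') as f:
            md5.update(f.read())          # Contents only; the path itself is not hashed.
    for s in input_strings:
        md5.update(s.encode('utf-8'))     # Renamed assets show up here.
    return md5.hexdigest()

def call_if_stale(func, stamp_path, input_paths, input_strings):
    new = _digest(input_paths, input_strings)
    old = open(stamp_path).read() if os.path.exists(stamp_path) else None
    if new != old:
        func()
        with open(stamp_path, 'w') as f:
            f.write(new)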
- if options.asset_dir and os.path.exists(options.asset_dir): - asset_paths = [] - for root, _, filenames in os.walk(options.asset_dir): - asset_paths.extend(os.path.join(root, f) for f in filenames) - input_paths.extend(asset_paths) - input_strings.extend(sorted(asset_paths)) - - build_utils.CallAndWriteDepfileIfStale( - lambda: _OnStaleMd5(package_command, options), - options, - input_paths=input_paths, - input_strings=input_strings, - output_paths=output_paths) - - -if __name__ == '__main__': - main(sys.argv[1:]) diff --git a/chromium/build/android/gyp/process_resources.py b/chromium/build/android/gyp/process_resources.py index 0df462e9e0f..72c9728ffd2 100755 --- a/chromium/build/android/gyp/process_resources.py +++ b/chromium/build/android/gyp/process_resources.py @@ -12,29 +12,118 @@ This will crunch images and generate v14 compatible resources import codecs import collections +import multiprocessing.pool import optparse import os import re import shutil +import subprocess import sys -import xml.etree.ElementTree import zipfile +from xml.etree import ElementTree + import generate_v14_compatible_resources from util import build_utils +_SOURCE_ROOT = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname( + __file__)))) # Import jinja2 from third_party/jinja2 -sys.path.insert(1, - os.path.join(os.path.dirname(__file__), '../../../third_party')) +sys.path.insert(1, os.path.join(_SOURCE_ROOT, 'third_party')) from jinja2 import Template # pylint: disable=F0401 +_EMPTY_ANDROID_MANIFEST_PATH = os.path.join( + _SOURCE_ROOT, 'build', 'android', 'AndroidManifest.xml') + + # Represents a line from a R.txt file. TextSymbolsEntry = collections.namedtuple('RTextEntry', ('java_type', 'resource_type', 'name', 'value')) +# A variation of this lists also exists in: +# //base/android/java/src/org/chromium/base/LocaleUtils.java +_CHROME_TO_ANDROID_LOCALE_MAP = { + 'en-GB': 'en-rGB', + 'en-US': 'en-rUS', + 'es-419': 'es-rUS', + 'fil': 'tl', + 'he': 'iw', + 'id': 'in', + 'pt-PT': 'pt-rPT', + 'pt-BR': 'pt-rBR', + 'yi': 'ji', + 'zh-CN': 'zh-rCN', + 'zh-TW': 'zh-rTW', +} + +# List is generated from the chrome_apk.apk_intermediates.ap_ via: +# unzip -l $FILE_AP_ | cut -c31- | grep res/draw | cut -d'/' -f 2 | sort \ +# | uniq | grep -- -tvdpi- | cut -c10- +# and then manually sorted. +# Note that we can't just do a cross-product of dimensions because the filenames +# become too big and aapt fails to create the files. +# This leaves all default drawables (mdpi) in the main apk. Android gets upset +# though if any drawables are missing from the default drawables/ directory. +_DENSITY_SPLITS = { + 'hdpi': ( + 'hdpi-v4', # Order matters for output file names. 
+ 'ldrtl-hdpi-v4', + 'sw600dp-hdpi-v13', + 'ldrtl-hdpi-v17', + 'ldrtl-sw600dp-hdpi-v17', + 'hdpi-v21', + ), + 'xhdpi': ( + 'xhdpi-v4', + 'ldrtl-xhdpi-v4', + 'sw600dp-xhdpi-v13', + 'ldrtl-xhdpi-v17', + 'ldrtl-sw600dp-xhdpi-v17', + 'xhdpi-v21', + ), + 'xxhdpi': ( + 'xxhdpi-v4', + 'ldrtl-xxhdpi-v4', + 'sw600dp-xxhdpi-v13', + 'ldrtl-xxhdpi-v17', + 'ldrtl-sw600dp-xxhdpi-v17', + 'xxhdpi-v21', + ), + 'xxxhdpi': ( + 'xxxhdpi-v4', + 'ldrtl-xxxhdpi-v4', + 'sw600dp-xxxhdpi-v13', + 'ldrtl-xxxhdpi-v17', + 'ldrtl-sw600dp-xxxhdpi-v17', + 'xxxhdpi-v21', + ), + 'tvdpi': ( + 'tvdpi-v4', + 'sw600dp-tvdpi-v13', + 'ldrtl-sw600dp-tvdpi-v17', + ), +} + +class _ResourceWhitelist(object): + def __init__(self, entries=None): + self._entries = None + if entries: + self._entries = set(self._Key(x) for x in entries) + + def __contains__(self, entry): + return self._entries is None or self._Key(entry) in self._entries + + @staticmethod + def _Key(entry): + # Whitelists should only care about the name of the resource rather than the + # resource ID (since the whitelist is from another compilation unit, the + # resource IDs may not match). + return (entry.java_type, entry.resource_type, entry.name) + + def _ParseArgs(args): """Parses command line options. @@ -61,8 +150,14 @@ def _ParseArgs(args): '--app-as-shared-lib', action='store_true', help='Make a resource package that can be loaded as shared library.') + parser.add_option( + '--shared-resources-whitelist', + help='An R.txt file acting as a whitelist for resources that should be ' + 'non-final and have their package ID changed at runtime in R.java. If no ' + 'whitelist is provided, then everything is whitelisted.') parser.add_option('--resource-dirs', + default='[]', help='Directories containing resources of this target.') parser.add_option('--dependencies-res-zips', help='Resources from dependents.') @@ -97,21 +192,48 @@ def _ParseArgs(args): help='For each additional package, the R.txt file should contain a ' 'list of resources to be included in the R.java file in the format ' 'generated by aapt') - parser.add_option( - '--include-all-resources', - action='store_true', - help='Include every resource ID in every generated R.java file ' - '(ignoring R.txt).') - parser.add_option( - '--all-resources-zip-out', - help='Path for output of all resources. This includes resources in ' - 'dependencies.') parser.add_option('--support-zh-hk', action='store_true', help='Use zh-rTW resources for zh-rHK.') parser.add_option('--stamp', help='File to touch on success') + parser.add_option('--debuggable', + action='store_true', + help='Whether to add android:debuggable="true"') + parser.add_option('--version-code', help='Version code for apk.') + parser.add_option('--version-name', help='Version name for apk.') + parser.add_option('--no-compress', help='disables compression for the ' + 'given comma separated list of extensions') + parser.add_option( + '--create-density-splits', + action='store_true', + help='Enables density splits') + parser.add_option('--language-splits', + default='[]', + help='GN list of languages to create splits for') + parser.add_option('--locale-whitelist', + default='[]', + help='GN list of languages to include. All other language ' + 'configs will be stripped out. 
List may include ' + 'a combination of Android locales or Chrome locales.') + parser.add_option('--apk-path', + help='Path to output (partial) apk.') + parser.add_option('--exclude-xxxhdpi', action='store_true', + help='Do not include xxxhdpi drawables.') + parser.add_option('--xxxhdpi-whitelist', + default='[]', + help='GN list of globs that say which xxxhdpi images to ' + 'include even when --exclude-xxxhdpi is set.') + parser.add_option('--png-to-webp', action='store_true', + help='Convert png files to webp format.') + parser.add_option('--webp-binary', default='', + help='Path to the cwebp binary.') + parser.add_option('--no-xml-namespaces', + action='store_true', + help='Whether to strip xml namespaces from processed xml ' + 'resources') + options, positional_args = parser.parse_args(args) if positional_args: @@ -121,9 +243,7 @@ def _ParseArgs(args): required_options = ( 'android_sdk_jar', 'aapt_path', - 'android_manifest', 'dependencies_res_zips', - 'resource_dirs', ) build_utils.CheckOptions(options, parser, required=required_options) @@ -131,6 +251,10 @@ def _ParseArgs(args): options.dependencies_res_zips = ( build_utils.ParseGnList(options.dependencies_res_zips)) + options.language_splits = build_utils.ParseGnList(options.language_splits) + options.locale_whitelist = build_utils.ParseGnList(options.locale_whitelist) + options.xxxhdpi_whitelist = build_utils.ParseGnList(options.xxxhdpi_whitelist) + # Don't use [] as default value since some script explicitly pass "". if options.extra_res_packages: options.extra_res_packages = ( @@ -147,16 +271,23 @@ def _ParseArgs(args): return options -def CreateRJavaFiles(srcjar_dir, main_r_txt_file, packages, r_txt_files, - shared_resources, non_constant_id): +def _CreateRJavaFiles(srcjar_dir, main_r_txt_file, packages, r_txt_files, + shared_resources, non_constant_id, whitelist_r_txt_file, is_apk): assert len(packages) == len(r_txt_files), 'Need one R.txt file per package' # Map of (resource_type, name) -> Entry. # Contains the correct values for resources. all_resources = {} for entry in _ParseTextSymbolsFile(main_r_txt_file): + entry = entry._replace(value=_FixPackageIds(entry.value)) all_resources[(entry.resource_type, entry.name)] = entry + if whitelist_r_txt_file: + whitelisted_resources = _ResourceWhitelist( + _ParseTextSymbolsFile(whitelist_r_txt_file)) + else: + whitelisted_resources = _ResourceWhitelist() + # Map of package_name->resource_type->entry resources_by_package = ( collections.defaultdict(lambda: collections.defaultdict(list))) @@ -195,8 +326,8 @@ def CreateRJavaFiles(srcjar_dir, main_r_txt_file, packages, r_txt_files, package_r_java_dir = os.path.join(srcjar_dir, *package.split('.')) build_utils.MakeDirectory(package_r_java_dir) package_r_java_path = os.path.join(package_r_java_dir, 'R.java') - java_file_contents = _CreateRJavaFile( - package, resources_by_type, shared_resources, non_constant_id) + java_file_contents = _CreateRJavaFile(package, resources_by_type, + shared_resources, non_constant_id, whitelisted_resources, is_apk) with open(package_r_java_path, 'w') as f: f.write(java_file_contents) @@ -214,9 +345,43 @@ def _ParseTextSymbolsFile(path): return ret +def _FixPackageIds(resource_value): + # Resource IDs for resources belonging to regular APKs have their first byte + # as 0x7f (package id). However with webview, since it is not a regular apk + # but used as a shared library, aapt is passed the --shared-resources flag + # which changes some of the package ids to 0x02 and 0x00. 
This function just + # normalises all package ids to 0x7f, which the generated code in R.java + # changes to the correct package id at runtime. + # resource_value is a string with either, a single value '0x12345678', or an + # array of values like '{ 0xfedcba98, 0x01234567, 0x56789abc }' + return re.sub(r'0x(?!01)\d\d', r'0x7f', resource_value) + + def _CreateRJavaFile(package, resources_by_type, shared_resources, - non_constant_id): + non_constant_id, whitelisted_resources, is_apk): """Generates the contents of a R.java file.""" + final_resources_by_type = collections.defaultdict(list) + non_final_resources_by_type = collections.defaultdict(list) + if shared_resources or non_constant_id: + for res_type, resources in resources_by_type.iteritems(): + for entry in resources: + # Entries in stylable that are not int[] are not actually resource ids + # but constants. If we are creating an apk there is no reason for them + # to be non-final. However for libraries, they may be clobbered later on + # and thus should remain non-final. This is regardless of the + # whitelisting rules (since they are not actually resources). + if entry.resource_type == 'styleable' and entry.java_type != 'int[]': + if is_apk: + final_resources_by_type[res_type].append(entry) + else: + non_final_resources_by_type[res_type].append(entry) + elif entry in whitelisted_resources: + non_final_resources_by_type[res_type].append(entry) + else: + final_resources_by_type[res_type].append(entry) + else: + final_resources_by_type = resources_by_type + # Keep these assignments all on one line to make diffing against regular # aapt-generated files easier. create_id = ('{{ e.resource_type }}.{{ e.name }} ^= packageIdTransform;') @@ -234,8 +399,11 @@ public final class R { private static boolean sResourcesDidLoad; {% for resource_type in resource_types %} public static final class {{ resource_type }} { - {% for e in resources[resource_type] %} - public static {{ final }}{{ e.java_type }} {{ e.name }} = {{ e.value }}; + {% for e in final_resources[resource_type] %} + public static final {{ e.java_type }} {{ e.name }} = {{ e.value }}; + {% endfor %} + {% for e in non_final_resources[resource_type] %} + public static {{ e.java_type }} {{ e.name }} = {{ e.value }}; {% endfor %} } {% endfor %} @@ -246,7 +414,7 @@ public final class R { int packageIdTransform = (packageId ^ 0x7f) << 24; {% for resource_type in resource_types %} onResourcesLoaded{{ resource_type|title }}(packageIdTransform); - {% for e in resources[resource_type] %} + {% for e in non_final_resources[resource_type] %} {% if e.java_type == 'int[]' %} for(int i = 0; i < {{ e.resource_type }}.{{ e.name }}.length; ++i) { """ + create_id_arr + """ @@ -258,7 +426,7 @@ public final class R { {% for res_type in resource_types %} private static void onResourcesLoaded{{ res_type|title }} ( int packageIdTransform) { - {% for e in resources[res_type] %} + {% for e in non_final_resources[res_type] %} {% if res_type != 'styleable' and e.java_type != 'int[]' %} """ + create_id + """ {% endif %} @@ -269,15 +437,14 @@ public final class R { } """, trim_blocks=True, lstrip_blocks=True) - final = '' if shared_resources or non_constant_id else 'final ' return template.render(package=package, - resources=resources_by_type, resource_types=sorted(resources_by_type), shared_resources=shared_resources, - final=final) + final_resources=final_resources_by_type, + non_final_resources=non_final_resources_by_type) -def CrunchDirectory(aapt, input_dir, output_dir): +def _CrunchDirectory(aapt, input_dir, 
output_dir): """Crunches the images in input_dir and its subdirectories into output_dir. If an image is already optimized, crunching often increases image size. In @@ -288,8 +455,8 @@ def CrunchDirectory(aapt, input_dir, output_dir): '-C', output_dir, '-S', input_dir, '--ignore-assets', build_utils.AAPT_IGNORE_PATTERN] - build_utils.CheckOutput(aapt_cmd, stderr_filter=FilterCrunchStderr, - fail_func=DidCrunchFail) + build_utils.CheckOutput(aapt_cmd, stderr_filter=_FilterCrunchStderr, + fail_func=_DidCrunchFail) # Check for images whose size increased during crunching and replace them # with their originals (except for 9-patches, which must be crunched). @@ -307,7 +474,7 @@ def CrunchDirectory(aapt, input_dir, output_dir): shutil.copyfile(original, crunched) -def FilterCrunchStderr(stderr): +def _FilterCrunchStderr(stderr): """Filters out lines from aapt crunch's stderr that can safely be ignored.""" filtered_lines = [] for line in stderr.splitlines(True): @@ -320,7 +487,7 @@ def FilterCrunchStderr(stderr): return ''.join(filtered_lines) -def DidCrunchFail(returncode, stderr): +def _DidCrunchFail(returncode, stderr): """Determines whether aapt crunch failed from its return code and output. Because aapt's return code cannot be trusted, any output to stderr is @@ -329,11 +496,24 @@ def DidCrunchFail(returncode, stderr): return returncode != 0 or stderr -def ZipResources(resource_dirs, zip_path): +def _GenerateGlobs(pattern): + # This function processes the aapt ignore assets pattern into a list of globs + # to be used to exclude files on the python side. It removes the '!', which is + # used by aapt to mean 'not chatty' so it does not output if the file is + # ignored (we dont output anyways, so it is not required). This function does + # not handle the <dir> and <file> prefixes used by aapt and are assumed not to + # be included in the pattern string. + return pattern.replace('!', '').split(':') + + +def _ZipResources(resource_dirs, zip_path, ignore_pattern): # Python zipfile does not provide a way to replace a file (it just writes # another file with the same name). So, first collect all the files to put # in the zip (with proper overriding), and then zip them. + # ignore_pattern is a string of ':' delimited list of globs used to ignore + # files that should not be part of the final resource zip. files_to_zip = dict() + globs = _GenerateGlobs(ignore_pattern) for d in resource_dirs: for root, _, files in os.walk(d): for f in files: @@ -342,216 +522,442 @@ def ZipResources(resource_dirs, zip_path): if parent_dir != '.': archive_path = os.path.join(parent_dir, f) path = os.path.join(root, f) + if build_utils.MatchesGlob(archive_path, globs): + continue files_to_zip[archive_path] = path build_utils.DoZip(files_to_zip.iteritems(), zip_path) +def _SortZip(original_path, sorted_path): + with zipfile.ZipFile(sorted_path, 'w') as sorted_zip, \ + zipfile.ZipFile(original_path, 'r') as original_zip: + for info in sorted(original_zip.infolist(), key=lambda i: i.filename): + sorted_zip.writestr(info, original_zip.read(info)) -def CombineZips(zip_files, output_path, support_zh_hk): - # When packaging resources, if the top-level directories in the zip file are - # of the form 0, 1, ..., then each subdirectory will be passed to aapt as a - # resources directory. While some resources just clobber others (image files, - # etc), other resources (particularly .xml files) need to be more - # intelligently merged. That merging is left up to aapt. 
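Concretely, each input zip is remapped under a numeric prefix so that aapt later sees 0/, 1/, 2/, ... as independent resource directories. Stripped of the build_utils helpers, the merge amounts to roughly this sketch (standard library only):

import zipfile

def combine_resource_zips(zip_paths, output_path):
    with zipfile.ZipFile(output_path, 'w') as out_zip:
        for index, zip_path in enumerate(zip_paths):
            with zipfile.ZipFile(zip_path) as src_zip:
                for name in src_zip.namelist():
                    # 0/, 1/, 2/, ... later become separate -S directories for aapt.
                    out_zip.writestr('%d/%s' % (index, name), src_zip.read(name))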
- def path_transform(name, src_zip): - return '%d/%s' % (zip_files.index(src_zip), name) - - # We don't currently support zh-HK on Chrome for Android, but on the - # native side we resolve zh-HK resources to zh-TW. This logic is - # duplicated here by just copying the zh-TW res folders to zh-HK. - # See https://crbug.com/780847. - with build_utils.TempDir() as temp_dir: - if support_zh_hk: - zip_files = _DuplicateZhResources(zip_files, temp_dir) - build_utils.MergeZips(output_path, zip_files, path_transform=path_transform) - - -def _DuplicateZhResources(zip_files, temp_dir): - new_zip_files = [] - for i, zip_path in enumerate(zip_files): - # We use zh-TW resources for zh-HK (if we have zh-TW resources). If no - # zh-TW resources exists (ex. api specific resources), then just use the - # original zip. - if not _ZipContains(zip_path, r'zh-r(HK|TW)'): - new_zip_files.append(zip_path) - continue - resource_dir = os.path.join(temp_dir, str(i)) - new_zip_path = os.path.join(temp_dir, str(i) + '.zip') - # Exclude existing zh-HK resources so that we don't mess up any resource - # IDs. This can happen if the type IDs in the existing resources don't - # align with ours (since they've already been generated at this point). - build_utils.ExtractAll( - zip_path, path=resource_dir, predicate=lambda x: not 'zh-rHK' in x) +def _DuplicateZhResources(resource_dirs): + for resource_dir in resource_dirs: + # We use zh-TW resources for zh-HK (if we have zh-TW resources). for path in build_utils.IterFiles(resource_dir): if 'zh-rTW' in path: hk_path = path.replace('zh-rTW', 'zh-rHK') - build_utils.Touch(hk_path) + build_utils.MakeDirectory(os.path.dirname(hk_path)) shutil.copyfile(path, hk_path) - build_utils.ZipDir(new_zip_path, resource_dir) - new_zip_files.append(new_zip_path) - return new_zip_files +def _ExtractPackageFromManifest(manifest_path): + doc = ElementTree.parse(manifest_path) + return doc.getroot().get('package') + + +def _ToAaptLocales(locale_whitelist, support_zh_hk): + """Converts the list of Chrome locales to aapt config locales.""" + ret = set() + for locale in locale_whitelist: + locale = _CHROME_TO_ANDROID_LOCALE_MAP.get(locale, locale) + if locale is None or ('-' in locale and '-r' not in locale): + raise Exception('_CHROME_TO_ANDROID_LOCALE_MAP needs updating.' + ' Found: %s' % locale) + ret.add(locale) + # Always keep non-regional fall-backs. + language = locale.split('-')[0] + ret.add(language) + + # We don't actually support zh-HK in Chrome on Android, but we mimic the + # native side behavior where we use zh-TW resources when the locale is set to + # zh-HK. See https://crbug.com/780847. + if support_zh_hk: + assert not any('HK' in l for l in locale_whitelist), ( + 'Remove special logic if zh-HK is now supported (crbug.com/780847).') + ret.add('zh-rHK') + return sorted(ret) + + +def _MoveImagesToNonMdpiFolders(res_root): + """Move images from drawable-*-mdpi-* folders to drawable-* folders. + + Why? 
http://crbug.com/289843 + """ + for src_dir_name in os.listdir(res_root): + src_components = src_dir_name.split('-') + if src_components[0] != 'drawable' or 'mdpi' not in src_components: + continue + src_dir = os.path.join(res_root, src_dir_name) + if not os.path.isdir(src_dir): + continue + dst_components = [c for c in src_components if c != 'mdpi'] + assert dst_components != src_components + dst_dir_name = '-'.join(dst_components) + dst_dir = os.path.join(res_root, dst_dir_name) + build_utils.MakeDirectory(dst_dir) + for src_file_name in os.listdir(src_dir): + if not os.path.splitext(src_file_name)[1] in ('.png', '.webp'): + continue + src_file = os.path.join(src_dir, src_file_name) + dst_file = os.path.join(dst_dir, src_file_name) + assert not os.path.lexists(dst_file) + shutil.move(src_file, dst_file) + + +def _GenerateDensitySplitPaths(apk_path): + for density, config in _DENSITY_SPLITS.iteritems(): + src_path = '%s_%s' % (apk_path, '_'.join(config)) + dst_path = '%s_%s' % (apk_path, density) + yield src_path, dst_path + + +def _GenerateLanguageSplitOutputPaths(apk_path, languages): + for lang in languages: + yield '%s_%s' % (apk_path, lang) + + +def _RenameDensitySplits(apk_path): + """Renames all density splits to have shorter / predictable names.""" + for src_path, dst_path in _GenerateDensitySplitPaths(apk_path): + shutil.move(src_path, dst_path) + + +def _CheckForMissedConfigs(apk_path, check_density, languages): + """Raises an exception if apk_path contains any unexpected configs.""" + triggers = [] + if check_density: + triggers.extend(re.compile('-%s' % density) for density in _DENSITY_SPLITS) + if languages: + triggers.extend(re.compile(r'-%s\b' % lang) for lang in languages) + with zipfile.ZipFile(apk_path) as main_apk_zip: + for name in main_apk_zip.namelist(): + for trigger in triggers: + if trigger.search(name) and not 'mipmap-' in name: + raise Exception(('Found config in main apk that should have been ' + + 'put into a split: %s\nYou need to update ' + + 'package_resources.py to include this new ' + + 'config (trigger=%s)') % (name, trigger.pattern)) + + +def _CreateLinkApkArgs(options): + link_command = [ + options.aapt_path + '2', + 'link', + '--version-code', options.version_code, + '--version-name', options.version_name, + '--auto-add-overlay', + '--no-version-vectors', + '-I', options.android_sdk_jar, + '-o', options.apk_path, + ] + if options.proguard_file: + link_command += ['--proguard', options.proguard_file] + if options.proguard_file_main_dex: + link_command += ['--proguard-main-dex', options.proguard_file_main_dex] -def _ZipContains(path, pattern): - with zipfile.ZipFile(path, 'r') as z: - return any(re.search(pattern, f) for f in z.namelist()) + if options.no_compress: + for ext in options.no_compress.split(','): + link_command += ['-0', ext] + if options.shared_resources: + link_command.append('--shared-lib') -def _ExtractPackageFromManifest(manifest_path): - doc = xml.etree.ElementTree.parse(manifest_path) - return doc.getroot().get('package') + if options.create_density_splits: + for config in _DENSITY_SPLITS.itervalues(): + link_command.extend(('--split', ','.join(config))) + + if options.language_splits: + for lang in options.language_splits: + link_command.extend(('--split', lang)) + + if options.locale_whitelist: + aapt_locales = _ToAaptLocales( + options.locale_whitelist, options.support_zh_hk) + link_command += ['-c', ','.join(aapt_locales)] + + if options.no_xml_namespaces: + link_command.append('--no-xml-namespaces') + + return link_command + + 
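For orientation, a heavily trimmed invocation assembled in the same style as the command list above might be run like this (helper name and paths are invented, and only a subset of the flags is shown):

import subprocess

def run_aapt2_link(aapt_path, manifest, sdk_jar, apk_out, locales=None):
    # Abbreviated form of the aapt2 link command built above.
    cmd = [aapt_path + '2', 'link',
           '--auto-add-overlay',
           '-I', sdk_jar,
           '--manifest', manifest,
           '-o', apk_out]
    if locales:
        cmd += ['-c', ','.join(locales)]
    # aapt2 reports errors on stderr and via a non-zero exit code.
    return subprocess.check_output(cmd, stderr=subprocess.STDOUT)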
+def _ExtractVersionFromSdk(aapt_path, sdk_path): + output = subprocess.check_output([aapt_path, 'dump', 'badging', sdk_path]) + version_code = re.search(r"versionCode='(.*?)'", output).group(1) + version_name = re.search(r"versionName='(.*?)'", output).group(1) + return version_code, version_name, + + +def _FixManifest(options, temp_dir): + debug_manifest_path = os.path.join(temp_dir, 'AndroidManifest.xml') + _ANDROID_NAMESPACE = 'http://schemas.android.com/apk/res/android' + _TOOLS_NAMESPACE = 'http://schemas.android.com/tools' + ElementTree.register_namespace('android', _ANDROID_NAMESPACE) + ElementTree.register_namespace('tools', _TOOLS_NAMESPACE) + original_manifest = ElementTree.parse(options.android_manifest) + + version_code, version_name = _ExtractVersionFromSdk( + options.aapt_path, options.android_sdk_jar) + + # ElementTree.find does not work if the required tag is the root. + if original_manifest.getroot().tag == 'manifest': + manifest_node = original_manifest.getroot() + else: + manifest_node = original_manifest.find('manifest') + + manifest_node.set('platformBuildVersionCode', version_code) + manifest_node.set('platformBuildVersionName', version_name) + + if options.debuggable: + app_node = original_manifest.find('application') + app_node.set('{%s}%s' % (_ANDROID_NAMESPACE, 'debuggable'), 'true') + + with open(debug_manifest_path, 'w') as debug_manifest: + debug_manifest.write(ElementTree.tostring( + original_manifest.getroot(), encoding='UTF-8')) + + return debug_manifest_path + + +def _ResourceNameFromPath(path): + return os.path.splitext(os.path.basename(path))[0] + + +def _CreateKeepPredicate(resource_dirs, exclude_xxxhdpi, xxxhdpi_whitelist): + if not exclude_xxxhdpi: + # Do not extract dotfiles (e.g. ".gitkeep"). aapt ignores them anyways. + return lambda path: os.path.basename(path)[0] != '.' + + # Returns False only for xxxhdpi non-mipmap, non-whitelisted drawables. + naive_predicate = lambda path: ( + not re.search(r'[/-]xxxhdpi[/-]', path) or + re.search(r'[/-]mipmap[/-]', path) or + build_utils.MatchesGlob(path, xxxhdpi_whitelist)) + + # Build a set of all non-xxxhdpi drawables to ensure that we never exclude any + # xxxhdpi drawable that does not exist in other densities. + non_xxxhdpi_drawables = set() + for resource_dir in resource_dirs: + for path in build_utils.IterFiles(resource_dir): + if re.search(r'[/-]drawable[/-]', path) and naive_predicate(path): + non_xxxhdpi_drawables.add(_ResourceNameFromPath(path)) + + return lambda path: (naive_predicate(path) or + _ResourceNameFromPath(path) not in non_xxxhdpi_drawables) + + +def _ConvertToWebP(webp_binary, png_files): + pool = multiprocessing.pool.ThreadPool(10) + def convert_image(png_path): + root = os.path.splitext(png_path)[0] + webp_path = root + '.webp' + args = [webp_binary, png_path, '-mt', '-quiet', '-m', '6', '-q', '100', + '-lossless', '-o', webp_path] + subprocess.check_call(args) + os.remove(png_path) + # Android requires pngs for 9-patch images. 
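Pulled out of the build script, the conversion around this comment reads roughly as the standalone sketch below (the cwebp binary path is an assumption, the flag set is the one used above, and *.9.png files are left alone because the 9-patch format is PNG-only):

import os
import subprocess
from multiprocessing.pool import ThreadPool

def convert_pngs_to_webp(cwebp_binary, png_files, jobs=10):
    def convert(png_path):
        webp_path = os.path.splitext(png_path)[0] + '.webp'
        subprocess.check_call([cwebp_binary, png_path, '-mt', '-quiet',
                               '-m', '6', '-q', '100', '-lossless',
                               '-o', webp_path])
        os.remove(png_path)                       # The .webp replaces the .png.
    pool = ThreadPool(jobs)
    # 9-patch images must stay as .png, so *.9.png files are skipped.
    pool.map(convert, [p for p in png_files if not p.endswith('.9.png')])
    pool.close()
    pool.join()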
+ pool.map(convert_image, [f for f in png_files if not f.endswith('.9.png')]) + pool.close() + pool.join() + + +def _CompileDeps(aapt_path, dep_subdirs, temp_dir): + partials_dir = os.path.join(temp_dir, 'partials') + build_utils.MakeDirectory(partials_dir) + partial_compile_command = [ + aapt_path + '2', + 'compile', + '--no-crunch', + ] + pool = multiprocessing.pool.ThreadPool(10) + def compile_partial(directory): + dirname = os.path.basename(directory) + partial_path = os.path.join(partials_dir, dirname + '.zip') + compile_command = (partial_compile_command + + ['--dir', directory, '-o', partial_path]) + build_utils.CheckOutput(compile_command) + + # Sorting the files in the partial ensures deterministic output from the + # aapt2 link step which uses order of files in the partial. + sorted_partial_path = os.path.join(partials_dir, dirname + '.sorted.zip') + _SortZip(partial_path, sorted_partial_path) + + return sorted_partial_path + + partials = pool.map(compile_partial, dep_subdirs) + pool.close() + pool.join() + return partials + + +def _PackageApk(options, dep_subdirs, temp_dir, gen_dir, r_txt_path): + _DuplicateZhResources(dep_subdirs) + + keep_predicate = _CreateKeepPredicate( + dep_subdirs, options.exclude_xxxhdpi, options.xxxhdpi_whitelist) + png_paths = [] + for directory in dep_subdirs: + for f in build_utils.IterFiles(directory): + if not keep_predicate(f): + os.remove(f) + elif f.endswith('.png'): + png_paths.append(f) + if png_paths and options.png_to_webp: + _ConvertToWebP(options.webp_binary, png_paths) + for directory in dep_subdirs: + _MoveImagesToNonMdpiFolders(directory) + + link_command = _CreateLinkApkArgs(options) + link_command += ['--output-text-symbols', r_txt_path] + link_command += ['--java', gen_dir] + + fixed_manifest = _FixManifest(options, temp_dir) + link_command += ['--manifest', fixed_manifest] + + partials = _CompileDeps(options.aapt_path, dep_subdirs, temp_dir) + for partial in partials: + link_command += ['-R', partial] + + # Creates a .zip with AndroidManifest.xml, resources.arsc, res/* + # Also creates R.txt + build_utils.CheckOutput( + link_command, print_stdout=False, print_stderr=False) + + if options.create_density_splits or options.language_splits: + _CheckForMissedConfigs(options.apk_path, options.create_density_splits, + options.language_splits) + + if options.create_density_splits: + _RenameDensitySplits(options.apk_path) + + +# _PackageLibrary uses aapt rather than aapt2 because aapt2 compile does not +# support outputting an R.txt file. +def _PackageLibrary(options, dep_subdirs, temp_dir, gen_dir): + v14_dir = os.path.join(temp_dir, 'v14') + build_utils.MakeDirectory(v14_dir) + + # Generate R.java. This R.java contains non-final constants and is used only + # while compiling the library jar (e.g. chromium_content.jar). When building + # an apk, a new R.java file with the correct resource -> ID mappings will be + # generated by merging the resources from all libraries and the main apk + # project. + package_command = [options.aapt_path, + 'package', + '-m', + '-M', _EMPTY_ANDROID_MANIFEST_PATH, + '--no-crunch', + '--auto-add-overlay', + '--no-version-vectors', + '-I', options.android_sdk_jar, + '--output-text-symbols', gen_dir, + '-J', gen_dir, # Required for R.txt generation. + '--ignore-assets', build_utils.AAPT_IGNORE_PATTERN] + + # Adding all dependencies as sources is necessary for @type/foo references + # to symbols within dependencies to resolve. 
However, it has the side-effect + # that all Java symbols from dependencies are copied into the new R.java. + # E.g.: It enables an arguably incorrect usage of + # "mypackage.R.id.lib_symbol" where "libpackage.R.id.lib_symbol" would be + # more correct. This is just how Android works. + for d in dep_subdirs: + package_command += ['-S', d] + + input_resource_dirs = options.resource_dirs + + for d in input_resource_dirs: + package_command += ['-S', d] + + if not options.v14_skip: + for resource_dir in input_resource_dirs: + generate_v14_compatible_resources.GenerateV14Resources( + resource_dir, + v14_dir) + + # This is the list of directories with resources to put in the final .zip + # file. The order of these is important so that crunched/v14 resources + # override the normal ones. + zip_resource_dirs = input_resource_dirs + [v14_dir] + + base_crunch_dir = os.path.join(temp_dir, 'crunch') + # Crunch image resources. This shrinks png files and is necessary for + # 9-patch images to display correctly. 'aapt crunch' accepts only a single + # directory at a time and deletes everything in the output directory. + for idx, input_dir in enumerate(input_resource_dirs): + crunch_dir = os.path.join(base_crunch_dir, str(idx)) + build_utils.MakeDirectory(crunch_dir) + zip_resource_dirs.append(crunch_dir) + _CrunchDirectory(options.aapt_path, input_dir, crunch_dir) + + if options.resource_zip_out: + _ZipResources(zip_resource_dirs, options.resource_zip_out, + build_utils.AAPT_IGNORE_PATTERN) + + # Only creates an R.txt + build_utils.CheckOutput( + package_command, print_stdout=False, print_stderr=False) + + +def _CreateRTxtAndSrcJar(options, r_txt_path, srcjar_dir): + # When an empty res/ directory is passed, aapt does not write an R.txt. + if not os.path.exists(r_txt_path): + build_utils.Touch(r_txt_path) + + if options.r_text_in: + r_txt_path = options.r_text_in + + packages = list(options.extra_res_packages) + r_txt_files = list(options.extra_r_text_files) + + cur_package = options.custom_package + if not options.custom_package and options.android_manifest: + cur_package = _ExtractPackageFromManifest(options.android_manifest) + + # Don't create a .java file for the current resource target when: + # - no package name was provided (either by manifest or build rules), + # - there was already a dependent android_resources() with the same + # package (occurs mostly when an apk target and resources target share + # an AndroidManifest.xml) + if cur_package and cur_package not in packages: + packages.append(cur_package) + r_txt_files.append(r_txt_path) + + if packages: + shared_resources = options.shared_resources or options.app_as_shared_lib + _CreateRJavaFiles(srcjar_dir, r_txt_path, packages, r_txt_files, + shared_resources, options.non_constant_id, + options.shared_resources_whitelist, bool(options.apk_path)) + + if options.srcjar_out: + build_utils.ZipDir(options.srcjar_out, srcjar_dir) + + if options.r_text_out: + shutil.copyfile(r_txt_path, options.r_text_out) + + +def _ExtractDeps(dep_zips, deps_dir): + dep_subdirs = [] + for z in dep_zips: + subdir = os.path.join(deps_dir, os.path.basename(z)) + if os.path.exists(subdir): + raise Exception('Resource zip name conflict: ' + os.path.basename(z)) + build_utils.ExtractAll(z, path=subdir) + dep_subdirs.append(subdir) + return dep_subdirs def _OnStaleMd5(options): - aapt = options.aapt_path with build_utils.TempDir() as temp_dir: deps_dir = os.path.join(temp_dir, 'deps') build_utils.MakeDirectory(deps_dir) - v14_dir = os.path.join(temp_dir, 'v14') - 
build_utils.MakeDirectory(v14_dir) - gen_dir = os.path.join(temp_dir, 'gen') build_utils.MakeDirectory(gen_dir) r_txt_path = os.path.join(gen_dir, 'R.txt') srcjar_dir = os.path.join(temp_dir, 'java') - input_resource_dirs = options.resource_dirs - - if not options.v14_skip: - for resource_dir in input_resource_dirs: - generate_v14_compatible_resources.GenerateV14Resources( - resource_dir, - v14_dir) - - dep_zips = options.dependencies_res_zips - dep_subdirs = [] - for z in dep_zips: - subdir = os.path.join(deps_dir, os.path.basename(z)) - if os.path.exists(subdir): - raise Exception('Resource zip name conflict: ' + os.path.basename(z)) - build_utils.ExtractAll(z, path=subdir) - dep_subdirs.append(subdir) - - # Generate R.java. This R.java contains non-final constants and is used only - # while compiling the library jar (e.g. chromium_content.jar). When building - # an apk, a new R.java file with the correct resource -> ID mappings will be - # generated by merging the resources from all libraries and the main apk - # project. - package_command = [aapt, - 'package', - '-m', - '-M', options.android_manifest, - '--auto-add-overlay', - '--no-version-vectors', - '-I', options.android_sdk_jar, - '--output-text-symbols', gen_dir, - '-J', gen_dir, # Required for R.txt generation. - '--ignore-assets', build_utils.AAPT_IGNORE_PATTERN] - - # aapt supports only the "--include-all-resources" mode, where each R.java - # file ends up with all symbols, rather than only those that it had at the - # time it was originally generated. This subtle difference makes no - # difference when compiling, but can lead to increased unused symbols in the - # resulting R.class files. - # TODO(agrieve): See if proguard makes this difference actually translate - # into a size difference. If not, we can delete all of our custom R.java - # template code above (and make include_all_resources the default). - if options.include_all_resources: - srcjar_dir = gen_dir - if options.extra_res_packages: - colon_separated = ':'.join(options.extra_res_packages) - package_command += ['--extra-packages', colon_separated] - if options.non_constant_id: - package_command.append('--non-constant-id') - if options.custom_package: - package_command += ['--custom-package', options.custom_package] - if options.shared_resources: - package_command.append('--shared-lib') - if options.app_as_shared_lib: - package_command.append('--app-as-shared-lib') - - for d in input_resource_dirs: - package_command += ['-S', d] - - # Adding all dependencies as sources is necessary for @type/foo references - # to symbols within dependencies to resolve. However, it has the side-effect - # that all Java symbols from dependencies are copied into the new R.java. - # E.g.: It enables an arguably incorrect usage of - # "mypackage.R.id.lib_symbol" where "libpackage.R.id.lib_symbol" would be - # more correct. This is just how Android works. - for d in dep_subdirs: - package_command += ['-S', d] - - if options.proguard_file: - package_command += ['-G', options.proguard_file] - if options.proguard_file_main_dex: - package_command += ['-D', options.proguard_file_main_dex] - build_utils.CheckOutput(package_command, print_stderr=False) - - # When an empty res/ directory is passed, aapt does not write an R.txt. - if not os.path.exists(r_txt_path): - build_utils.Touch(r_txt_path) - - if not options.include_all_resources: - # --include-all-resources can only be specified for generating final R - # classes for APK. 
It makes no sense for APK to have pre-generated R.txt - # though, because aapt-generated already lists all available resources. - if options.r_text_in: - r_txt_path = options.r_text_in - - packages = list(options.extra_res_packages) - r_txt_files = list(options.extra_r_text_files) - - cur_package = options.custom_package - if not options.custom_package: - cur_package = _ExtractPackageFromManifest(options.android_manifest) - - # Don't create a .java file for the current resource target when: - # - no package name was provided (either by manifest or build rules), - # - there was already a dependent android_resources() with the same - # package (occurs mostly when an apk target and resources target share - # an AndroidManifest.xml) - if cur_package != 'org.dummy' and cur_package not in packages: - packages.append(cur_package) - r_txt_files.append(r_txt_path) - - if packages: - shared_resources = options.shared_resources or options.app_as_shared_lib - CreateRJavaFiles(srcjar_dir, r_txt_path, packages, r_txt_files, - shared_resources, options.non_constant_id) - - # This is the list of directories with resources to put in the final .zip - # file. The order of these is important so that crunched/v14 resources - # override the normal ones. - zip_resource_dirs = input_resource_dirs + [v14_dir] - - base_crunch_dir = os.path.join(temp_dir, 'crunch') - - # Crunch image resources. This shrinks png files and is necessary for - # 9-patch images to display correctly. 'aapt crunch' accepts only a single - # directory at a time and deletes everything in the output directory. - for idx, input_dir in enumerate(input_resource_dirs): - crunch_dir = os.path.join(base_crunch_dir, str(idx)) - build_utils.MakeDirectory(crunch_dir) - zip_resource_dirs.append(crunch_dir) - CrunchDirectory(aapt, input_dir, crunch_dir) - - if options.resource_zip_out: - ZipResources(zip_resource_dirs, options.resource_zip_out) - - if options.all_resources_zip_out: - all_zips = [options.resource_zip_out] if options.resource_zip_out else [] - all_zips += dep_zips - CombineZips(all_zips, - options.all_resources_zip_out, options.support_zh_hk) - - if options.srcjar_out: - build_utils.ZipDir(options.srcjar_out, srcjar_dir) - - if options.r_text_out: - shutil.copyfile(r_txt_path, options.r_text_out) + dep_subdirs = _ExtractDeps(options.dependencies_res_zips, deps_dir) + + if options.apk_path: + _PackageApk(options, dep_subdirs, temp_dir, gen_dir, r_txt_path) + else: + _PackageLibrary(options, dep_subdirs, temp_dir, gen_dir) + + _CreateRTxtAndSrcJar(options, r_txt_path, srcjar_dir) def main(args): @@ -561,8 +967,8 @@ def main(args): # Order of these must match order specified in GN so that the correct one # appears first in the depfile. possible_output_paths = [ + options.apk_path, options.resource_zip_out, - options.all_resources_zip_out, options.r_text_out, options.srcjar_out, options.proguard_file, @@ -570,24 +976,40 @@ def main(args): ] output_paths = [x for x in possible_output_paths if x] + if options.apk_path and options.create_density_splits: + for _, dst_path in _GenerateDensitySplitPaths(options.apk_path): + output_paths.append(dst_path) + if options.apk_path and options.language_splits: + output_paths.extend( + _GenerateLanguageSplitOutputPaths(options.apk_path, + options.language_splits)) + # List python deps in input_strings rather than input_paths since the contents # of them does not change what gets written to the depsfile. 
input_strings = options.extra_res_packages + [ options.app_as_shared_lib, options.custom_package, - options.include_all_resources, options.non_constant_id, options.shared_resources, options.v14_skip, + options.exclude_xxxhdpi, + options.xxxhdpi_whitelist, + str(options.debuggable), + str(options.png_to_webp), + str(options.support_zh_hk), + str(options.no_xml_namespaces), ] - if options.support_zh_hk: - input_strings.append('support_zh_hk') - input_paths = [ + if options.apk_path: + input_strings.extend(_CreateLinkApkArgs(options)) + + possible_input_paths = [ options.aapt_path, options.android_manifest, options.android_sdk_jar, + options.shared_resources_whitelist, ] + input_paths = [x for x in possible_input_paths if x] input_paths.extend(options.dependencies_res_zips) input_paths.extend(options.extra_r_text_files) diff --git a/chromium/build/android/gyp/util/build_utils.py b/chromium/build/android/gyp/util/build_utils.py index 5be45e5133e..dec818ee17e 100644 --- a/chromium/build/android/gyp/util/build_utils.py +++ b/chromium/build/android/gyp/util/build_utils.py @@ -30,10 +30,14 @@ import gn_helpers COLORAMA_ROOT = os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party', 'colorama', 'src') -# aapt should ignore OWNERS and python files in addition the default ignore -# pattern. -AAPT_IGNORE_PATTERN = ('!OWNERS:!*.py:!*.pyc:!.svn:!.git:!.ds_store:!*.scc:' + - '.*:<dir>_*:!CVS:!thumbs.db::!*~:!*.d.stamp') +AAPT_IGNORE_PATTERN = ':'.join([ + 'OWNERS', # Allow OWNERS files within res/ + '*.py', # PRESUBMIT.py sometimes exist. + '*.pyc', + '*~', # Some editors create these as temp files. + '.*', # Never makes sense to include dot(files/dirs). + '*.d.stamp', # Ignore stamp files + ]) HERMETIC_TIMESTAMP = (2001, 1, 1, 0, 0, 0) _HERMETIC_FILE_ATTR = (0644 << 16L) diff --git a/chromium/build/android/gyp/write_build_config.py b/chromium/build/android/gyp/write_build_config.py index 3df54532737..b66e879f7d3 100755 --- a/chromium/build/android/gyp/write_build_config.py +++ b/chromium/build/android/gyp/write_build_config.py @@ -33,12 +33,10 @@ import sys import xml.dom.minidom from util import build_utils -from util import md5_check - # Types that should never be used as a dependency of another build config. -_ROOT_TYPES = ('android_apk', 'deps_dex', 'java_binary', 'junit_binary', - 'resource_rewriter') +_ROOT_TYPES = ('android_apk', 'java_binary', + 'java_annotation_processor', 'junit_binary', 'resource_rewriter') # Types that should not allow code deps to pass through. _RESOURCE_TYPES = ('android_assets', 'android_resources') @@ -200,7 +198,7 @@ def _ResolveGroups(configs): ret[index:index + 1] = expanded_configs -def _FilterDepsPaths(dep_paths, target_type): +def _DepsFromPaths(dep_paths, target_type, filter_root_targets=True): """Resolves all groups and trims dependency branches that we never want. E.g. When a resource or asset depends on an apk target, the intent is to @@ -209,16 +207,13 @@ def _FilterDepsPaths(dep_paths, target_type): configs = [GetDepConfig(p) for p in dep_paths] configs = _ResolveGroups(configs) # Don't allow root targets to be considered as a dep. - configs = [c for c in configs if c['type'] not in _ROOT_TYPES] + if filter_root_targets: + configs = [c for c in configs if c['type'] not in _ROOT_TYPES] # Don't allow java libraries to cross through assets/resources. 
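As a worked example of the trimming described in these comments (toy data only; it mirrors the filtering implemented just below, and real configs carry many more fields):

_ROOT_TYPES = ('android_apk', 'java_binary',
               'java_annotation_processor', 'junit_binary', 'resource_rewriter')
_RESOURCE_TYPES = ('android_assets', 'android_resources')

def filter_dep_configs(configs, target_type, filter_root_targets=True):
    if filter_root_targets:
        configs = [c for c in configs if c['type'] not in _ROOT_TYPES]
    if target_type in _RESOURCE_TYPES:
        configs = [c for c in configs if c['type'] in _RESOURCE_TYPES]
    return configs

# A resources target depending on a java_library and an apk keeps neither:
# the apk is a root type and java code cannot cross through resources.
deps = [{'type': 'java_library'}, {'type': 'android_apk'},
        {'type': 'android_resources'}]
assert [c['type'] for c in filter_dep_configs(deps, 'android_resources')] == [
    'android_resources']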
if target_type in _RESOURCE_TYPES: configs = [c for c in configs if c['type'] in _RESOURCE_TYPES] - return [c['path'] for c in configs] - - -def _AsInterfaceJar(jar_path): - return jar_path[:-3] + 'interface.jar' + return Deps([c['path'] for c in configs]) def _ExtractSharedLibsFromRuntimeDeps(runtime_deps_files): @@ -259,10 +254,14 @@ def main(argv): help='Type of this target (e.g. android_library).') parser.add_option( '--deps-configs', - help='List of paths for dependency\'s build_config files. ') + help='GN-list of dependent build_config files.') + parser.add_option( + '--annotation-processor-configs', + help='GN-list of build_config files for annotation processors.') parser.add_option( '--classpath-deps-configs', - help='List of paths for classpath dependency\'s build_config files. ') + help='GN-list of build_config files for libraries to include as ' + 'build-time-only classpath.') # android_resources options parser.add_option('--srcjar', help='Path to target\'s resources srcjar.') @@ -287,6 +286,12 @@ def main(argv): # java library options parser.add_option('--jar-path', help='Path to target\'s jar output.') + parser.add_option('--unprocessed-jar-path', + help='Path to the .jar to use for javac classpath purposes.') + parser.add_option('--interface-jar-path', + help='Path to the .interface.jar to use for javac classpath purposes.') + parser.add_option('--is-prebuilt', action='store_true', + help='Whether the jar was compiled or pre-compiled.') parser.add_option('--java-sources-file', help='Path to .sources file') parser.add_option('--bundled-srcjars', help='GYP-list of .srcjars that have been included in this java_library.') @@ -302,7 +307,8 @@ def main(argv): parser.add_option('--gradle-treat-as-prebuilt', action='store_true', help='Whether this library should be treated as a prebuilt library by ' 'generate_gradle.py.') - parser.add_option('--main-class', help='Java class for java_binary targets.') + parser.add_option('--main-class', + help='Main class for java_binary or java_annotation_processor targets.') parser.add_option('--java-resources-jar-path', help='Path to JAR that contains java resources. 
Everything ' 'from this JAR except meta-inf/ content and .class files ' @@ -338,11 +344,11 @@ def main(argv): parser.add_option('--proguard-enabled', action='store_true', help='Whether proguard is enabled for this apk.') parser.add_option('--proguard-configs', - help='GYP-list of proguard flag files to use in final apk.') + help='GN-list of proguard flag files to use in final apk.') parser.add_option('--proguard-info', help='Path to the proguard .info output for this apk.') parser.add_option('--fail', - help='GYP-list of error message lines to fail with.') + help='GN-list of error message lines to fail with.') options, args = parser.parse_args(argv) @@ -351,16 +357,17 @@ def main(argv): if options.fail: parser.error('\n'.join(build_utils.ParseGnList(options.fail))) + jar_path_options = ['jar_path', 'unprocessed_jar_path', 'interface_jar_path'] required_options_map = { - 'java_binary': ['build_config', 'jar_path'], - 'junit_binary': ['build_config', 'jar_path'], - 'java_library': ['build_config', 'jar_path'], - 'java_prebuilt': ['build_config', 'jar_path'], + 'java_binary': ['build_config'], + 'java_annotation_processor': ['build_config', 'main_class'], + 'junit_binary': ['build_config'], + 'java_library': ['build_config'] + jar_path_options, 'android_assets': ['build_config'], 'android_resources': ['build_config', 'resources_zip'], - 'android_apk': ['build_config', 'jar_path', 'dex_path'], - 'deps_dex': ['build_config', 'dex_path'], + 'android_apk': ['build_config','dex_path'] + jar_path_options, 'dist_jar': ['build_config'], + 'dist_aar': ['build_config'], 'resource_rewriter': ['build_config'], 'group': ['build_config'], } @@ -370,34 +377,39 @@ def main(argv): build_utils.CheckOptions(options, parser, required_options) - # Java prebuilts are the same as libraries except for in gradle files. - is_java_prebuilt = options.type == 'java_prebuilt' - if is_java_prebuilt: - options.type = 'java_library' - - if options.type == 'java_library': - if options.supports_android and not options.dex_path: - raise Exception('java_library that supports Android requires a dex path.') - - if options.requires_android and not options.supports_android: - raise Exception( - '--supports-android is required when using --requires-android') - - direct_deps_config_paths = build_utils.ParseGnList(options.deps_configs) - direct_deps_config_paths = _FilterDepsPaths( - direct_deps_config_paths, options.type) - - deps = Deps(direct_deps_config_paths) - all_inputs = deps.AllConfigPaths() + if options.jar_path and options.supports_android and not options.dex_path: + raise Exception('java_library that supports Android requires a dex path.') + if any(getattr(options, x) for x in jar_path_options): + for attr in jar_path_options: + if not getattr(options, attr): + raise('Expected %s to be set.' 
% attr) + + if options.requires_android and not options.supports_android: + raise Exception( + '--supports-android is required when using --requires-android') + + is_java_target = options.type in ( + 'java_binary', 'junit_binary', 'java_annotation_processor', + 'java_library', 'android_apk', 'dist_aar', 'dist_jar') + + deps = _DepsFromPaths( + build_utils.ParseGnList(options.deps_configs), options.type) + processor_deps = _DepsFromPaths( + build_utils.ParseGnList(options.annotation_processor_configs or ''), + options.type, filter_root_targets=False) + classpath_deps = _DepsFromPaths( + build_utils.ParseGnList(options.classpath_deps_configs or ''), + options.type) + + all_inputs = sorted(set(deps.AllConfigPaths() + + processor_deps.AllConfigPaths() + + classpath_deps.AllConfigPaths())) direct_library_deps = deps.Direct('java_library') all_library_deps = deps.All('java_library') direct_resources_deps = deps.Direct('android_resources') all_resources_deps = deps.All('android_resources') - # Resources should be ordered with the highest-level dependency first so that - # overrides are done correctly. - all_resources_deps.reverse() # Initialize some common config. # Any value that needs to be queryable by dependents must go within deps_info. @@ -406,7 +418,7 @@ def main(argv): 'name': os.path.basename(options.build_config), 'path': options.build_config, 'type': options.type, - 'deps_configs': direct_deps_config_paths + 'deps_configs': deps.direct_deps_config_paths }, # Info needed only by generate_gradle.py. 'gradle': {} @@ -424,21 +436,18 @@ def main(argv): # Required for generating gradle files. if options.type == 'java_library': - deps_info['is_prebuilt'] = is_java_prebuilt + deps_info['is_prebuilt'] = bool(options.is_prebuilt) deps_info['gradle_treat_as_prebuilt'] = options.gradle_treat_as_prebuilt if options.android_manifest: deps_info['android_manifest'] = options.android_manifest - if options.type in ( - 'java_binary', 'junit_binary', 'java_library', 'android_apk'): + if is_java_target: if options.java_sources_file: deps_info['java_sources_file'] = options.java_sources_file if options.bundled_srcjars: gradle['bundled_srcjars'] = ( build_utils.ParseGnList(options.bundled_srcjars)) - else: - gradle['bundled_srcjars'] = [] gradle['dependent_android_projects'] = [] gradle['dependent_java_projects'] = [] @@ -447,7 +456,7 @@ def main(argv): if options.bootclasspath: gradle['bootclasspath'] = options.bootclasspath if options.main_class: - gradle['main_class'] = options.main_class + deps_info['main_class'] = options.main_class for c in deps.GradleLibraryProjectDeps(): if c['requires_android']: @@ -455,7 +464,6 @@ def main(argv): else: gradle['dependent_java_projects'].append(c['path']) - if options.type == 'android_apk': config['jni'] = {} all_java_sources = [c['java_sources_file'] for c in all_library_deps @@ -464,8 +472,7 @@ def main(argv): all_java_sources.append(options.java_sources_file) config['jni']['all_source'] = all_java_sources - if options.type in ( - 'java_binary', 'junit_binary', 'java_library', 'dist_jar'): + if is_java_target: deps_info['requires_android'] = options.requires_android deps_info['supports_android'] = options.supports_android @@ -483,10 +490,14 @@ def main(argv): raise Exception('Not all deps support the Android platform: ' + str(deps_not_support_android)) - if options.type in ( - 'java_binary', 'junit_binary', 'java_library', 'android_apk'): - deps_info['jar_path'] = options.jar_path - if options.type == 'android_apk' or options.supports_android: + if is_java_target: 
+ # Classpath values filled in below (after applying tested_apk_config). + config['javac'] = {} + if options.jar_path: + deps_info['jar_path'] = options.jar_path + deps_info['unprocessed_jar_path'] = options.unprocessed_jar_path + deps_info['interface_jar_path'] = options.interface_jar_path + if options.dex_path: deps_info['dex_path'] = options.dex_path if options.type == 'android_apk': deps_info['apk_path'] = options.apk_path @@ -496,33 +507,24 @@ def main(argv): deps_info['non_native_packed_relocations'] = str( options.non_native_packed_relocations) - requires_javac_classpath = options.type in ( - 'java_binary', 'junit_binary', 'java_library', 'android_apk', 'dist_jar') - requires_full_classpath = ( - options.type == 'java_prebuilt' or requires_javac_classpath) - - if requires_javac_classpath: - # Classpath values filled in below (after applying tested_apk_config). - config['javac'] = {} - - if options.type == 'java_library': - # android_resources targets use this srcjars field to expose R.java files. - # Since there is no java_library associated with an android_resources(), - # Each java_library recompiles the R.java files. - # junit_binary and android_apk create their own R.java srcjars, so should - # not pull them in from deps here. - config['javac']['srcjars'] = [ - c['srcjar'] for c in all_resources_deps if 'srcjar' in c] - - # Used to strip out R.class for android_prebuilt()s. - config['javac']['resource_packages'] = [ - c['package_name'] for c in all_resources_deps if 'package_name' in c] - elif options.type in ('android_apk', 'java_binary', 'junit_binary'): - # Apks will get their resources srcjar explicitly passed to the java step - config['javac']['srcjars'] = [] - # Gradle may need to generate resources for some apks. - gradle['srcjars'] = [ - c['srcjar'] for c in direct_resources_deps if 'srcjar' in c] + if options.type == 'java_library': + # android_resources targets use this srcjars field to expose R.java files. + # Since there is no java_library associated with an android_resources(), + # Each java_library recompiles the R.java files. + # junit_binary and android_apk create their own R.java srcjars, so should + # not pull them in from deps here. + config['javac']['srcjars'] = [ + c['srcjar'] for c in all_resources_deps if 'srcjar' in c] + + # Used to strip out R.class for android_prebuilt()s. + config['javac']['resource_packages'] = [ + c['package_name'] for c in all_resources_deps if 'package_name' in c] + else: + # Apks will get their resources srcjar explicitly passed to the java step + config['javac']['srcjars'] = [] + # Gradle may need to generate resources for some apks. 
+ gradle['srcjars'] = [ + c['srcjar'] for c in direct_resources_deps if 'srcjar' in c] if options.type == 'android_assets': all_asset_sources = [] @@ -578,11 +580,12 @@ def main(argv): if c['supports_android']: owned_resource_dirs.difference_update(c['owned_resources_dirs']) owned_resource_zips.difference_update(c['owned_resources_zips']) - deps_info['owned_resources_dirs'] = list(owned_resource_dirs) - deps_info['owned_resources_zips'] = list(owned_resource_zips) + deps_info['owned_resources_dirs'] = sorted(owned_resource_dirs) + deps_info['owned_resources_zips'] = sorted(owned_resource_zips) if options.type in ( - 'android_resources', 'android_apk', 'junit_binary', 'resource_rewriter'): + 'android_resources', 'android_apk', 'junit_binary', 'resource_rewriter', + 'dist_aar'): config['resources'] = {} config['resources']['dependency_zips'] = [ c['resources_zip'] for c in all_resources_deps] @@ -597,29 +600,47 @@ def main(argv): config['resources']['extra_package_names'] = extra_package_names config['resources']['extra_r_text_files'] = extra_r_text_files - if options.type in ['android_apk', 'deps_dex']: + if options.type == 'android_apk': deps_dex_files = [c['dex_path'] for c in all_library_deps] - if requires_javac_classpath: - extra_jars = [] - if options.extra_classpath_jars: - extra_jars += build_utils.ParseGnList(options.extra_classpath_jars) - - if options.classpath_deps_configs: - config_paths = build_utils.ParseGnList(options.classpath_deps_configs) - classpath_deps = Deps(_FilterDepsPaths(config_paths, options.type)) - extra_jars += [ - c['jar_path'] for c in classpath_deps.Direct('java_library')] + if is_java_target: + # The classpath used to compile this target when annotation processors are + # present. + javac_classpath = [c['unprocessed_jar_path'] for c in direct_library_deps] + # The classpath used to compile this target when annotation processors are + # not present. These are also always used to know when a target needs to be + # rebuilt. + javac_interface_classpath = [ + c['interface_jar_path'] for c in direct_library_deps] + # The classpath used for error prone. + javac_full_interface_classpath = [ + c['interface_jar_path'] for c in all_library_deps] + # The classpath used for bytecode-rewritting. + javac_full_classpath = [c['unprocessed_jar_path'] for c in all_library_deps] + # The classpath to use to run this target (or as an input to ProGuard). + java_full_classpath = [] + if options.jar_path: + java_full_classpath.append(options.jar_path) + java_full_classpath.extend(c['jar_path'] for c in all_library_deps) - javac_classpath = [c['jar_path'] for c in direct_library_deps] - if requires_full_classpath: - java_full_classpath = [c['jar_path'] for c in all_library_deps] + # Deps to add to the compile-time classpath (but not the runtime classpath). + # TODO(agrieve): Might be less confusing to fold these into bootclasspath. + extra_jars = [c['unprocessed_jar_path'] + for c in classpath_deps.Direct('java_library')] + if options.extra_classpath_jars: + # These are .jars to add to javac classpath but not to runtime classpath. 
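The surrounding lines repeatedly use the same order-preserving, duplicate-skipping append when building the various classpath lists; factored out (helper name invented), the idiom is simply:

def extend_unique(dest, items):
    # Order-preserving union, as used for the classpath lists in this hunk.
    for item in items:
        if item not in dest:
            dest.append(item)
    return dest

javac_classpath = ['a.jar', 'b.jar']
extend_unique(javac_classpath, ['b.jar', 'c.jar'])
assert javac_classpath == ['a.jar', 'b.jar', 'c.jar']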
+ extra_jars.extend(build_utils.ParseGnList(options.extra_classpath_jars)) + + extra_jars = [p for p in extra_jars if p not in javac_classpath] + javac_classpath.extend(extra_jars) + javac_interface_classpath.extend(extra_jars) + javac_full_interface_classpath.extend( + p for p in extra_jars if p not in javac_full_classpath) + javac_full_classpath.extend( + p for p in extra_jars if p not in javac_full_classpath) if extra_jars: deps_info['extra_classpath_jars'] = extra_jars - javac_classpath += [p for p in extra_jars if p not in javac_classpath] - java_full_classpath += [ - p for p in extra_jars if p not in java_full_classpath] # The java code for an instrumentation test apk is assembled differently for # ProGuard vs. non-ProGuard. @@ -638,24 +659,32 @@ def main(argv): if options.type == 'android_apk' and options.tested_apk_config: tested_apk_config = GetDepConfig(options.tested_apk_config) - expected_tested_package = tested_apk_config['package_name'] - AndroidManifest(options.android_manifest).CheckInstrumentationElements( - expected_tested_package) - if options.proguard_enabled: - # Add all tested classes to the test's classpath to ensure that the test's - # java code is a superset of the tested apk's java code - java_full_classpath += [ - jar for jar in tested_apk_config['java']['full_classpath'] - if jar not in java_full_classpath] - if tested_apk_config['proguard_enabled']: assert options.proguard_enabled, ('proguard must be enabled for ' 'instrumentation apks if it\'s enabled for the tested apk.') + expected_tested_package = tested_apk_config['package_name'] + AndroidManifest(options.android_manifest).CheckInstrumentationElements( + expected_tested_package) + + # Add all tested classes to the test's classpath to ensure that the test's + # java code is a superset of the tested apk's java code + java_full_classpath.extend( + p for p in tested_apk_config['java_runtime_classpath'] + if p not in java_full_classpath) # Include in the classpath classes that are added directly to the apk under # test (those that are not a part of a java_library). - javac_classpath.append(tested_apk_config['jar_path']) - java_full_classpath.append(tested_apk_config['jar_path']) + javac_classpath.append(tested_apk_config['unprocessed_jar_path']) + javac_full_classpath.append(tested_apk_config['unprocessed_jar_path']) + javac_interface_classpath.append(tested_apk_config['interface_jar_path']) + javac_full_interface_classpath.append( + tested_apk_config['interface_jar_path']) + javac_full_interface_classpath.extend( + p for p in tested_apk_config['javac_full_interface_classpath'] + if p not in javac_full_interface_classpath) + javac_full_classpath.extend( + p for p in tested_apk_config['javac_full_classpath'] + if p not in javac_full_classpath) # Exclude dex files from the test apk that exist within the apk under test. 
# TODO(agrieve): When proguard is enabled, this filtering logic happens @@ -667,53 +696,53 @@ def main(argv): p for p in deps_dex_files if not p in tested_apk_deps_dex_files] if options.proguard_configs: - assert options.type == 'java_library' deps_info['proguard_configs'] = ( build_utils.ParseGnList(options.proguard_configs)) - if options.type in ('android_apk', 'dist_jar'): + if options.type in ('android_apk', 'dist_aar', 'dist_jar'): deps_info['proguard_enabled'] = options.proguard_enabled deps_info['proguard_info'] = options.proguard_info config['proguard'] = {} proguard_config = config['proguard'] - proguard_config['input_paths'] = list(java_full_classpath) - if options.jar_path: - proguard_config['input_paths'].insert(0, options.jar_path) - extra_jars = set() - lib_configs = set() + extra_jars = [] + all_configs = deps_info.get('proguard_configs', []) for c in all_library_deps: - extra_jars.update(c.get('extra_classpath_jars', ())) - lib_configs.update(c.get('proguard_configs', ())) - proguard_config['lib_paths'] = list(extra_jars) - proguard_config['lib_configs'] = list(lib_configs) - - # Dependencies for the final dex file of an apk or a 'deps_dex'. - if options.type in ['android_apk', 'deps_dex']: + extra_jars.extend( + p for p in c.get('extra_classpath_jars', []) if p not in extra_jars) + all_configs.extend( + p for p in c.get('proguard_configs', []) if p not in all_configs) + proguard_config['extra_jars'] = extra_jars + proguard_config['all_configs'] = all_configs + + # Dependencies for the final dex file of an apk. + if options.type == 'android_apk': config['final_dex'] = {} dex_config = config['final_dex'] dex_config['dependency_dex_files'] = deps_dex_files - if requires_javac_classpath: + if is_java_target: config['javac']['classpath'] = javac_classpath - javac_interface_classpath = [ - _AsInterfaceJar(p) for p in javac_classpath - if p not in deps_info.get('extra_classpath_jars', [])] - javac_interface_classpath += deps_info.get('extra_classpath_jars', []) config['javac']['interface_classpath'] = javac_interface_classpath - - if requires_full_classpath: - deps_info['java'] = { - 'full_classpath': java_full_classpath, - } + # Direct() will be of type 'java_annotation_processor'. 
+ config['javac']['processor_classpath'] = [ + c['jar_path'] for c in processor_deps.Direct() if c.get('jar_path')] + [ + c['jar_path'] for c in processor_deps.All('java_library')] + config['javac']['processor_classes'] = [ + c['main_class'] for c in processor_deps.Direct()] + deps_info['javac_full_classpath'] = javac_full_classpath + deps_info['javac_full_interface_classpath'] = javac_full_interface_classpath + + if options.type in ( + 'android_apk', 'dist_jar', 'java_binary', 'junit_binary'): + deps_info['java_runtime_classpath'] = java_full_classpath if options.type in ('android_apk', 'dist_jar'): - dependency_jars = [c['jar_path'] for c in all_library_deps] - all_interface_jars = [_AsInterfaceJar(p) for p in dependency_jars] - if options.type == 'android_apk': - all_interface_jars.append(_AsInterfaceJar(options.jar_path)) + all_interface_jars = [] + if options.jar_path: + all_interface_jars.append(options.interface_jar_path) + all_interface_jars.extend(c['interface_jar_path'] for c in all_library_deps) config['dist_jar'] = { - 'dependency_jars': dependency_jars, 'all_interface_jars': all_interface_jars, } @@ -770,7 +799,7 @@ def main(argv): if jar not in tested_apk_resource_jars] config['java_resources_jars'] = java_resources_jars - if options.type == 'java_library' and options.java_resources_jar_path: + if options.java_resources_jar_path: deps_info['java_resources_jar'] = options.java_resources_jar_path build_utils.WriteJson(config, options.build_config, only_if_changed=True) diff --git a/chromium/build/android/gyp/write_ordered_libraries.py b/chromium/build/android/gyp/write_ordered_libraries.py index f656adfe30a..dda25b8d0bf 100755 --- a/chromium/build/android/gyp/write_ordered_libraries.py +++ b/chromium/build/android/gyp/write_ordered_libraries.py @@ -28,92 +28,65 @@ import sys from util import build_utils _readelf = None -_library_dirs = None _library_re = re.compile( '.*NEEDED.*Shared library: \[(?P<library_name>.+)\]') +_library_path_map = {} + def SetReadelfPath(path): global _readelf _readelf = path -def SetLibraryDirs(dirs): - global _library_dirs - _library_dirs = dirs - - -def FullLibraryPath(library_name): - assert _library_dirs is not None - for directory in _library_dirs: - path = '%s/%s' % (directory, library_name) - if os.path.exists(path): - return path - return library_name - - -def IsSystemLibrary(library_name): - # If the library doesn't exist in the libraries directory, assume that it is - # an Android system library. 
- return not os.path.exists(FullLibraryPath(library_name)) - - def CallReadElf(library_or_executable): assert _readelf is not None - readelf_cmd = [_readelf, - '-d', - FullLibraryPath(library_or_executable)] + readelf_cmd = [_readelf, '-d', library_or_executable] return build_utils.CheckOutput(readelf_cmd) def GetDependencies(library_or_executable): elf = CallReadElf(library_or_executable) - return set(_library_re.findall(elf)) - - -def GetNonSystemDependencies(library_name): - all_deps = GetDependencies(library_name) - return set((lib for lib in all_deps if not IsSystemLibrary(lib))) + deps = set() + for l in _library_re.findall(elf): + p = _library_path_map.get(l) + if p is not None: + deps.add(p) + return deps def GetSortedTransitiveDependencies(libraries): """Returns all transitive library dependencies in dependency order.""" return build_utils.GetSortedTransitiveDependencies( - libraries, GetNonSystemDependencies) - - -def GetSortedTransitiveDependenciesForBinaries(binaries): - if binaries[0].endswith('.so'): - libraries = [os.path.basename(lib) for lib in binaries] - else: - assert len(binaries) == 1 - all_deps = GetDependencies(binaries[0]) - libraries = [lib for lib in all_deps if not IsSystemLibrary(lib)] - - return GetSortedTransitiveDependencies(libraries) + libraries, GetDependencies) def main(): parser = optparse.OptionParser() build_utils.AddDepfileOption(parser) - parser.add_option('--input-libraries', - help='A list of top-level input libraries.') - parser.add_option('--libraries-dir', - help='The directory which contains shared libraries.') parser.add_option('--readelf', help='Path to the readelf binary.') + parser.add_option('--runtime-deps', + help='A file created for the target using write_runtime_deps.') parser.add_option('--output', help='Path to the generated .json file.') parser.add_option('--stamp', help='Path to touch on success.') options, _ = parser.parse_args(build_utils.ExpandFileArgs(sys.argv[1:])) SetReadelfPath(options.readelf) - SetLibraryDirs(options.libraries_dir.split(',')) - libraries = build_utils.ParseGnList(options.input_libraries) - if len(libraries): - libraries = GetSortedTransitiveDependenciesForBinaries(libraries) + unsorted_lib_paths = [] + for f in open(options.runtime_deps): + f = f[:-1] + if f.endswith('.so'): + p = f.replace('lib.unstripped/', '') + unsorted_lib_paths.append(p) + _library_path_map[os.path.basename(p)] = p + + lib_paths = GetSortedTransitiveDependencies(unsorted_lib_paths) + + libraries = [os.path.basename(l) for l in lib_paths] # Convert to "base" library names: e.g. libfoo.so -> foo java_libraries_list = ( @@ -121,7 +94,7 @@ def main(): out_json = { 'libraries': libraries, - 'lib_paths': [FullLibraryPath(l) for l in libraries], + 'lib_paths': lib_paths, 'java_libraries_list': java_libraries_list } build_utils.WriteJson( diff --git a/chromium/build/android/install_emulator_deps.py b/chromium/build/android/install_emulator_deps.py deleted file mode 100755 index 1690df33ec2..00000000000 --- a/chromium/build/android/install_emulator_deps.py +++ /dev/null @@ -1,318 +0,0 @@ -#!/usr/bin/env python -# Copyright (c) 2013 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Installs deps for using SDK emulator for testing. - -The script will download the SDK and system images, if they are not present, and -install and enable KVM, if virtualization has been enabled in the BIOS. 
-""" - - -import logging -import optparse -import os -import re -import sys - -import devil_chromium -from devil.utils import cmd_helper -from devil.utils import run_tests_helper -from pylib import constants -from pylib import pexpect - -# Android API level -DEFAULT_ANDROID_API_LEVEL = constants.ANDROID_SDK_VERSION -# Android ABI/Arch -DEFAULT_ABI = 'x86' - -# Default Time out for downloading SDK component -DOWNLOAD_SYSTEM_IMAGE_TIMEOUT = 300 -DOWNLOAD_SDK_PLATFORM_TIMEOUT = 300 - -def CheckSDK(): - """Check if SDK is already installed. - - Returns: - True if the emulator SDK directory (src/android_emulator_sdk/) exists. - """ - return os.path.exists(constants.ANDROID_SDK_ROOT) - - -def CheckSDKPlatform(api_level=DEFAULT_ANDROID_API_LEVEL, google=False): - """Check if the "SDK Platform" for the specified API level is installed. - This is necessary in order for the emulator to run when the target - is specified. - - Args: - abi: target abi, x86 or arm - api_level: the Android API level to check; defaults to the latest API. - google: use Google build system image instead of AOSP build - - Returns: - True if the platform is already installed. - """ - android_binary = os.path.join(constants.ANDROID_SDK_ROOT, 'tools', 'android') - if google: - pattern = re.compile('id: [0-9]+ or "Google Inc.:Google APIs:%s"' % - api_level) - else: - pattern = re.compile('id: [0-9]+ or "android-%d"' % api_level) - - try: - exit_code, stdout = cmd_helper.GetCmdStatusAndOutput( - [android_binary, 'list']) - if exit_code != 0: - raise Exception('\'android list\' command failed') - for line in stdout.split('\n'): - if pattern.match(line): - return True - return False - except OSError: - logging.exception('Unable to execute \'android list\'') - return False - - -def CheckSystemImage(abi, api_level=DEFAULT_ANDROID_API_LEVEL, google=False): - """Check if Android system images have been installed. - - Args: - abi: target abi, x86 or arm - api_level: the Android API level to check for; defaults to the latest API. - google: use Google build system image instead of AOSP build - - Returns: - True if x86 image has been previously downloaded. - """ - api_target = 'android-%d' % api_level - system_image_root = os.path.join(constants.ANDROID_SDK_ROOT, - 'system-images', api_target) - if abi == 'x86': - if google: - return os.path.exists(os.path.join(system_image_root, 'google_apis', - 'x86')) - else: - return os.path.exists(os.path.join(system_image_root, 'default', 'x86')) - elif abi == 'arm': - if google: - return os.path.exists(os.path.join(system_image_root, 'google_apis', - 'armeabi-v7a')) - else: - return os.path.exists(os.path.join(system_image_root, 'default', - 'armeabi-v7a')) - else: - raise Exception("abi option invalid") - -def CheckKVM(): - """Quickly check whether KVM is enabled. - - Returns: - True iff /dev/kvm exists (Linux only). - """ - return os.path.exists('/dev/kvm') - -def RunKvmOk(): - """Run kvm-ok as root to check that KVM is properly enabled after installation - of the required packages. - - Returns: - True iff KVM is enabled (/dev/kvm exists). On failure, returns False - but also print detailed information explaining why KVM isn't enabled - (e.g. CPU doesn't support it, or BIOS disabled it). - """ - try: - # Note: kvm-ok is in /usr/sbin, so always use 'sudo' to run it. 
- return not cmd_helper.RunCmd(['sudo', 'kvm-ok']) - except OSError: - logging.info('kvm-ok not installed') - return False - - -def InstallKVM(): - """Installs KVM packages.""" - rc = cmd_helper.RunCmd(['sudo', 'apt-get', 'install', 'kvm']) - if rc: - logging.critical('ERROR: Did not install KVM. Make sure hardware ' - 'virtualization is enabled in BIOS (i.e. Intel VT-x or ' - 'AMD SVM).') - # TODO(navabi): Use modprobe kvm-amd on AMD processors. - rc = cmd_helper.RunCmd(['sudo', 'modprobe', 'kvm-intel']) - if rc: - logging.critical('ERROR: Did not add KVM module to Linux Kernel. Make sure ' - 'hardware virtualization is enabled in BIOS.') - # Now check to ensure KVM acceleration can be used. - if not RunKvmOk(): - logging.critical('ERROR: Can not use KVM acceleration. Make sure hardware ' - 'virtualization is enabled in BIOS (i.e. Intel VT-x or ' - 'AMD SVM).') - - -def UpdateSDK(api_level, package_name, package_pattern, timeout): - """This function update SDK with a filter index. - - Args: - api_level: the Android API level to download for. - package_name: logging name of package that is being updated. - package_pattern: the pattern to match the filter index from. - timeout: the amount of time wait for update command. - """ - android_binary = os.path.join(constants.ANDROID_SDK_ROOT, 'tools', 'android') - - list_sdk_repo_command = [android_binary, 'list', 'sdk', '--all'] - - exit_code, stdout = cmd_helper.GetCmdStatusAndOutput(list_sdk_repo_command) - - if exit_code != 0: - raise Exception('\'android list sdk --all\' command return %d' % exit_code) - - for line in stdout.split('\n'): - match = package_pattern.match(line) - if match: - index = match.group(1) - logging.info('package %s corresponds to %s with api level %d', - index, package_name, api_level) - update_command = [android_binary, 'update', 'sdk', '--no-ui', '--all', - '--filter', index] - update_command_str = ' '.join(update_command) - logging.info('running update command: %s', update_command_str) - update_process = pexpect.spawn(update_command_str) - - if update_process.expect('Do you accept the license') != 0: - raise Exception('License agreement check failed') - update_process.sendline('y') - if update_process.expect( - 'Done. 1 package installed.', timeout=timeout) == 0: - logging.info('Successfully installed %s for API level %d', - package_name, api_level) - return - else: - raise Exception('Failed to install platform update') - raise Exception('Could not find android-%d update for the SDK!' % api_level) - -def GetSystemImage(abi, api_level=DEFAULT_ANDROID_API_LEVEL, google=False): - """Download system image files - - Args: - abi: target abi, x86 or arm - api_level: the Android API level to download for. - google: use Google build system image instead of AOSP build - """ - logging.info('Download x86 system image directory into sdk directory.') - - if abi == 'x86': - if google: - package_name = 'Google Intel x86 Atom System Image' - pattern = re.compile( - r'\s*([0-9]+)- Google APIs Intel x86 Atom System Image, Google Inc.' - ' API %d.*' % api_level) - else: - package_name = 'Intel x86 system image' - pattern = re.compile( - r'\s*([0-9]+)- Intel x86 Atom System Image, Android API %d.*' - % api_level) - elif abi == 'arm': - if google: - package_name = 'Google arm system image' - pattern = re.compile( - r'\s*([0-9]+)- Google APIs ARM EABI v7a System Image, Google Inc. 
API ' - '%d.*' % api_level) - else: - package_name = 'Android arm system image' - pattern = re.compile( - r'\s*([0-9]+)- ARM EABI v7a System Image, Android API %d.*' % api_level) - else: - raise Exception('abi option is invalid') - - UpdateSDK(api_level, package_name, pattern, DOWNLOAD_SYSTEM_IMAGE_TIMEOUT) - -def GetSDKPlatform(api_level=DEFAULT_ANDROID_API_LEVEL, google=False): - """Update the SDK to include the platform specified. - - Args: - api_level: the Android API level to download - google: use Google build system image instead of AOSP build - """ - logging.info('Download SDK Platform directory into sdk directory.') - - platform_package_pattern = re.compile( - r'\s*([0-9]+)- SDK Platform Android [\.,0-9]+, API %d.*' % api_level) - - UpdateSDK(api_level, 'SDK Platform', platform_package_pattern, - DOWNLOAD_SDK_PLATFORM_TIMEOUT) - - if google: - google_api_package_pattern = re.compile( - r'\s*([0-9]+)- Google APIs, Android API %d.*' % api_level) - UpdateSDK(api_level, 'Google APIs', google_api_package_pattern, - DOWNLOAD_SDK_PLATFORM_TIMEOUT) - - -def main(argv): - opt_parser = optparse.OptionParser( - description='Install dependencies for running the Android emulator') - opt_parser.add_option('--abi', - dest='abi', - help='The targeted abi for emulator system image', - type='string', - default=DEFAULT_ABI) - opt_parser.add_option('--api-level', - dest='api_level', - help=('The API level (e.g., 19 for Android 4.4) to ' - 'ensure is available'), - type='int', - default=DEFAULT_ANDROID_API_LEVEL) - opt_parser.add_option('-v', - dest='verbosity', - default=1, - action='count', - help='Verbose level (multiple times for more)') - opt_parser.add_option('--google', - dest='google', - action='store_true', - default=False, - help='Install Google System Image instead of AOSP') - - options, _ = opt_parser.parse_args(argv[1:]) - - run_tests_helper.SetLogLevel(verbose_count=options.verbosity) - - devil_chromium.Initialize() - - # Calls below will download emulator SDK and/or system images only if needed. - if CheckSDK(): - logging.info('android_emulator_sdk/ exists') - else: - logging.critical('ERROR: Emulator SDK not installed in %s' - , constants.ANDROID_SDK_ROOT) - return 1 - - # Check target. The target has to be installed in order to run the emulator. - if CheckSDKPlatform(options.api_level, options.google): - logging.info('SDK platform %s %s android-%d already present, skipping.', - 'Google' if options.google else 'AOSP', options.abi, - options.api_level) - else: - logging.info('SDK platform %s %s android-%d not present, installing.', - 'Google' if options.google else 'AOSP', options.abi, - options.api_level) - GetSDKPlatform(options.api_level, options.google) - - # Download the system image needed - if CheckSystemImage(options.abi, options.api_level, options.google): - logging.info('system image for %s %s android-%d already present, skipping.', - 'Google' if options.google else 'AOSP', options.abi, - options.api_level) - else: - GetSystemImage(options.abi, options.api_level, options.google) - - # Make sure KVM packages are installed and enabled. 
- if options.abi == 'x86': - if CheckKVM(): - logging.info('KVM already installed and enabled.') - else: - logging.warning('KVM is not installed or enabled.') - - -if __name__ == '__main__': - sys.exit(main(sys.argv)) diff --git a/chromium/build/android/lint/suppressions.xml b/chromium/build/android/lint/suppressions.xml index ef84286d79f..49d8aa0892f 100644 --- a/chromium/build/android/lint/suppressions.xml +++ b/chromium/build/android/lint/suppressions.xml @@ -25,6 +25,8 @@ Still reading? <issue id="AllowBackup"> <ignore path="AndroidManifest.xml"/> </issue> + <!-- TODO(crbug.com/799070): File bugs to fix this --> + <issue id="AppCompatResource" severity="ignore"/> <!-- We use asserts in Chromium. See https://chromium.googlesource.com/chromium/src/+/master/styleguide/java/java.md#Asserts --> <issue id="Assert" severity="ignore"/> <issue id="AuthLeak" severity="Error"> @@ -44,6 +46,7 @@ Still reading? <issue id="ByteOrderMark" severity="Error"> <ignore regexp="values-pt-rBR/android_chrome_strings.xml"/> </issue> + <issue id="ClickableViewAccessibility" severity="ignore"/> <issue id="CommitPrefEdits"> <ignore regexp="third_party/cacheinvalidation/src/java/com/google/ipc/invalidation/ticl/android2/channel/AndroidChannelPreferences.java"/> </issue> @@ -68,6 +71,12 @@ Still reading? <ignore regexp="content/public/android/java/src/org/chromium/content/browser/ContentViewRenderView.java"/> <ignore regexp="content/public/android/java/src/org/chromium/content/browser/PopupZoomer.java"/> </issue> + <!-- TODO(crbug.com/799070): File bugs to fix this --> + <issue id="EllipsizeMaxLines" severity="ignore"/> + <issue id="ExifInterface"> + <!-- TODO(crbug.com/799070): Update android.support.media.ExifInterface and use it instead --> + <ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/photo_picker/BitmapUtils.java"/> + </issue> <issue id="ExportedContentProvider"> <ignore path="AndroidManifest.xml"/> </issue> @@ -82,6 +91,10 @@ Still reading? <issue id="GoogleAppIndexingWarning" severity="Error"> <ignore regexp="AndroidManifest.xml"/> </issue> + <!-- TODO(crbug.com/799070): Fix for remoting --> + <issue id="AppLinkUrlError"> + <ignore regexp="AndroidManifest.xml"/> + </issue> <issue id="HandlerLeak"> <ignore regexp="android_webview/glue/java/src/com/android/webview/chromium/WebViewContentsClientAdapter.java" /> <ignore regexp="chromecast/internal" /> @@ -117,6 +130,8 @@ Still reading? </issue> <issue id="IconDipSize"> <ignore regexp="chromecast/internal"/> + <!-- TODO(crbug.com/799070): File bugs to fix these icons --> + <ignore regexp="chrome/android/java/res/.*tab_strip_fade"/> </issue> <issue id="IconDuplicates" severity="Error"> <ignore regexp="chromecast/internal"/> @@ -124,6 +139,10 @@ Still reading? <issue id="IconDuplicatesConfig" severity="Error"> <ignore regexp="chromecast/internal"/> </issue> + <issue id="IconLauncherFormat" severity="ignore"> + <!-- TODO(crbug.com/739746): Remove after lint version has been updated. --> + <ignore regexp="remoting/android/java/res/mipmap-anydpi-v26/ic_launcher.xml"/> + </issue> <issue id="IconLauncherShape" severity="Error"> <ignore regexp="chromecast/internal"/> <ignore regexp="chrome/android/webapk/shell_apk/res/mipmap-mdpi/ic_launcher_background.png"/> @@ -162,9 +181,12 @@ Still reading? 
<issue id="InvalidPackage" severity="Error"> <ignore regexp="espresso/espresso_core_java.interface.jar"/> </issue> + <issue id="InvalidVectorPath" severity="ignore"/> <issue id="LabelFor" severity="Error"> <ignore regexp="android_webview/tools/system_webview_shell/apk/res/layout/activity_webview_browser.xml"/> </issue> + <!-- TODO(crbug.com/799070): File bugs to fix this. --> + <issue id="KeyboardInaccessibleWidget" severity="ignore"/> <issue id="LintError" severity="Error"/> <issue id="LogConditional" severity="ignore"/> <issue id="LongLogTag" severity="ignore"/> @@ -202,6 +224,9 @@ Still reading? <ignore regexp="Call requires API level 19.*`java.lang.Throwable#addSuppressed`"/> <!-- We support default methods via desugar. --> <ignore regexp="Default method requires API level 24"/> + <!-- TODO(crbug.com/799070): Fix these after lint upgrade. --> + <ignore regexp="Static interface method requires API level 24"/> + <ignore regexp="(current min is 1)"/> <!-- Suppressions below this line need rationale :( --> <ignore regexp="Attribute `paddingStart` referenced here can result in a crash on some specific devices older than API 17"/> <ignore regexp="chrome/android/java/res/values-v17/styles.xml"/> @@ -227,6 +252,8 @@ Still reading? <!-- TODO(crbug.com/635567): Fix this properly. --> <ignore regexp="sdk/extras/chromium/support/src/org/chromium/android/support/PackageManagerWrapper.java"/> </issue> + <!-- TODO(crbug.com/799070): Re-enable this --> + <issue id="ObsoleteSdkInt" severity="ignore"/> <issue id="OldTargetApi"> <ignore path="AndroidManifest.xml"/> </issue> @@ -247,14 +274,16 @@ Still reading? </issue> <issue id="PackageManagerGetSignatures"> <ignore regexp="chrome/android/webapk/libs/client/src/org/chromium/webapk/lib/client/WebApkValidator.java"/> - <ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/customtabs/OriginVerifier.java"/> + <ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/browserservices/OriginVerifier.java"/> </issue> <issue id="PluralsCandidate" severity="Error"> <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values-en-rGB/android_chrome_strings.xml"/> <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values/android_chrome_strings.xml"/> </issue> + <issue id="PrivateApi" severity="ignore"/> <issue id="Recycle" severity="ignore"/> <issue id="Registered" severity="ignore"/> + <issue id="ResourceAsColor" severity="ignore"/> <issue id="ResourceType" severity="Error"> <ignore regexp="/javatests/"/> </issue> @@ -269,6 +298,10 @@ Still reading? <issue id="SpUsage" severity="Error"> <ignore regexp="chromecast/internal"/> </issue> + <issue id="StaticFieldLeak"> + <!-- TODO(crbug.com/799070): Fix after upgrading lint. --> + <ignore regexp="This AsyncTask class should be static or leaks might occur"/> + </issue> <issue id="StringFormatCount" severity="Error"> <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values-cs/android_chrome_strings.xml"/> <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values-pl/android_chrome_strings.xml"/> @@ -328,7 +361,7 @@ Still reading? <!-- 2 resources used by android webview glue layer, could be refactored --> <ignore regexp="android_webview/java/res/drawable-hdpi/ic_play_circle_outline_black_48dp.png"/> <ignore regexp="R.string.private_browsing_warning"/> - <!-- OMR1 SDK roll mystery. TODO(jbudorick): Remove this after rolling lint. --> + <!-- 9 OMR1 SDK roll mystery. TODO(jbudorick): Remove this after rolling lint. 
--> <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values/android_chrome_strings.xml"/> <ignore regexp="chrome/android/java/res/layout/location_bar.xml"/> <ignore regexp="chrome/android/java/res/values/colors.xml"/> @@ -338,6 +371,14 @@ Still reading? <ignore regexp="chrome/android/java/res/drawable-hdpi/omnibox_info.png"/> <ignore regexp="clank/java/clank_strings_grd.resources.zip/values/android_internal_strings.xml"/> <ignore regexp="tools/android/audio_focus_grabber/java/res/drawable-hdpi/notification_icon.png"/> + <!-- # TODO(crbug.com/799070): Fix these. --> + <ignore regexp="ui/android/java/res/values/colors.xml"/> + <ignore regexp="ui/android/java/res/values/dimens.xml"/> + <ignore regexp="ui/android/java/res/values-v17/styles.xml"/> + <ignore regexp="chrome/android/java/res/anim/fullscreen_notification_in.xml"/> + <ignore regexp="chrome/android/java/res/values-v17/styles.xml"/> + <ignore regexp="chrome/android/java/res/values-v21/styles.xml"/> + <!-- Endnote: Please specify number of resources when adding more suppressions --> </issue> <issue id="UseCompoundDrawables"> <!-- Upscaling 24dp to 48dp doesn't work as expected with a TextView compound drawable. --> @@ -352,6 +393,7 @@ Still reading? <issue id="UsesMinSdkAttributes" severity="Error"> <ignore regexp="AndroidManifest.xml"/> </issue> + <issue id="UseSparseArrays" severity="ignore"/> <issue id="ValidFragment" severity="Error"> <ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/media/router/BaseMediaRouteDialogManager.java"/> <ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/media/router/MediaRouteChooserDialogManager.java"/> @@ -359,6 +401,7 @@ Still reading? <ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/preferences/privacy/OtherFormsOfHistoryDialogFragment.java"/> <ignore regexp="media/capture/content/android/java/src/org/chromium/media/ScreenCapture.java"/> </issue> + <issue id="VectorPath" severity="ignore"/> <issue id="ViewConstructor" severity="ignore"/> <issue id="VisibleForTests" severity="Error"> <ignore regexp="/javatests/"/> @@ -372,12 +415,10 @@ Still reading? </issue> <issue id="WrongCall" severity="ignore"/> <issue id="WrongConstant"> + <ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/bookmarks/BookmarkItemsAdapter.java"/> + <ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/instantapps/InstantAppsHandler.java"/> <ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/SSLClientCertificateRequest.java"/> <ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/payments/ui/EditorDialog.java"/> - <ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/signin/SigninAndSyncView.java"/> - </issue> - <issue id="IconLauncherFormat" severity="ignore"> - <!-- TODO(crbug.com/739746): Remove after lint version has been updated. 
--> - <ignore regexp="remoting/android/java/res/mipmap-anydpi-v26/ic_launcher.xml"/> + <ignore regexp="third_party/android_data_chart/java/src/org/chromium/third_party/android/datausagechart/ChartDataUsageView.java"/> </issue> </lint> diff --git a/chromium/build/android/pylib/constants/__init__.py b/chromium/build/android/pylib/constants/__init__.py index d7547b22ab5..e844a1ad4cf 100644 --- a/chromium/build/android/pylib/constants/__init__.py +++ b/chromium/build/android/pylib/constants/__init__.py @@ -102,7 +102,7 @@ ANDROID_SDK_ROOT = os.path.join(DIR_SOURCE_ROOT, ANDROID_SDK_TOOLS = os.path.join(ANDROID_SDK_ROOT, 'build-tools', ANDROID_SDK_BUILD_TOOLS_VERSION) ANDROID_NDK_ROOT = os.path.join(DIR_SOURCE_ROOT, - 'third_party', 'android_tools', 'ndk') + 'third_party', 'android_ndk') PROGUARD_ROOT = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'proguard') @@ -115,6 +115,15 @@ UPSTREAM_FLAKINESS_SERVER = 'test-results.appspot.com' # TODO(jbudorick): Remove once unused. DEVICE_LOCAL_PROPERTIES_PATH = '/data/local.prop' +# Configure ubsan to print stack traces in the format understood by "stack" so +# that they will be symbolized, and disable signal handlers because they +# interfere with the breakpad and sandbox tests. +# This value is duplicated in +# base/android/java/src/org/chromium/base/library_loader/LibraryLoader.java +UBSAN_OPTIONS = ( + 'print_stacktrace=1 stack_trace_format=\'#%n pc %o %m\' ' + 'handle_segv=0 handle_sigbus=0 handle_sigfpe=0') + # TODO(jbudorick): Rework this into testing/buildbot/ PYTHON_UNIT_TEST_SUITES = { 'pylib_py_unittests': { diff --git a/chromium/build/android/pylib/device/commands/BUILD.gn b/chromium/build/android/pylib/device/commands/BUILD.gn index fe17276588c..480db1e88f8 100644 --- a/chromium/build/android/pylib/device/commands/BUILD.gn +++ b/chromium/build/android/pylib/device/commands/BUILD.gn @@ -11,6 +11,7 @@ group("commands") { } android_library("chromium_commands_java") { + emma_never_instrument = true java_files = [ "java/src/org/chromium/android/commands/unzip/Unzip.java" ] dex_path = "$root_build_dir/lib.java/chromium_commands.dex.jar" data = [ diff --git a/chromium/build/android/pylib/gtest/gtest_test_instance.py b/chromium/build/android/pylib/gtest/gtest_test_instance.py index 0313b15a1ef..c3e7fb6cdc4 100644 --- a/chromium/build/android/pylib/gtest/gtest_test_instance.py +++ b/chromium/build/android/pylib/gtest/gtest_test_instance.py @@ -194,6 +194,9 @@ def ParseGTestOutput(output, symbolizer, device_abi): log.append(l) if result_type and test_name: + # Don't bother symbolizing output if the test passed. + if result_type == base_test_result.ResultType.PASS: + stack = [] results.append(base_test_result.BaseTestResult( TestNameWithoutDisabledPrefix(test_name), result_type, duration, log=symbolize_stack_and_merge_with_log())) @@ -282,16 +285,17 @@ class GtestTestInstance(test_instance.TestInstance): # TODO(jbudorick): Support multiple test suites. 
if len(args.suite_name) > 1: raise ValueError('Platform mode currently supports only 1 gtest suite') + self._chartjson_result_file = args.chartjson_result_file self._exe_dist_dir = None self._external_shard_index = args.test_launcher_shard_index self._extract_test_list_from_filter = args.extract_test_list_from_filter self._filter_tests_lock = threading.Lock() + self._gs_test_artifacts_bucket = args.gs_test_artifacts_bucket self._shard_timeout = args.shard_timeout self._store_tombstones = args.store_tombstones - self._total_external_shards = args.test_launcher_total_shards self._suite = args.suite_name[0] self._symbolizer = stack_symbolizer.Symbolizer(None, False) - self._gs_test_artifacts_bucket = args.gs_test_artifacts_bucket + self._total_external_shards = args.test_launcher_total_shards self._wait_for_java_debugger = args.wait_for_java_debugger # GYP: @@ -430,6 +434,10 @@ class GtestTestInstance(test_instance.TestInstance): return self._gtest_filter @property + def chartjson_result_file(self): + return self._chartjson_result_file + + @property def package(self): return self._apk_helper and self._apk_helper.GetPackageName() diff --git a/chromium/build/android/pylib/instrumentation/instrumentation_test_instance.py b/chromium/build/android/pylib/instrumentation/instrumentation_test_instance.py index 5f7c196db8f..ddc80f75635 100644 --- a/chromium/build/android/pylib/instrumentation/instrumentation_test_instance.py +++ b/chromium/build/android/pylib/instrumentation/instrumentation_test_instance.py @@ -568,10 +568,10 @@ class InstrumentationTestInstance(test_instance.TestInstance): self._test_package = self._test_apk.GetPackageName() all_instrumentations = self._test_apk.GetAllInstrumentations() all_junit3_runner_classes = [ - x for x in all_instrumentations if ('true' not in x.get( + x for x in all_instrumentations if ('0xffffffff' not in x.get( 'chromium-junit4', ''))] all_junit4_test_runner_classes = [ - x for x in all_instrumentations if ('true' in x.get( + x for x in all_instrumentations if ('0xffffffff' in x.get( 'chromium-junit4', ''))] if len(all_junit3_runner_classes) > 1: diff --git a/chromium/build/android/pylib/linker/linker_test_instance.py b/chromium/build/android/pylib/linker/linker_test_instance.py index ce696f25d21..1de3e17f1df 100644 --- a/chromium/build/android/pylib/linker/linker_test_instance.py +++ b/chromium/build/android/pylib/linker/linker_test_instance.py @@ -9,7 +9,6 @@ from pylib.linker import test_case with host_paths.SysPath(host_paths.BUILD_COMMON_PATH): import unittest_util -_MODERN_LINKER_MINIMUM_SDK_INT = 23 class LinkerTestInstance(test_instance.TestInstance): @@ -26,15 +25,11 @@ class LinkerTestInstance(test_instance.TestInstance): def test_filter(self): return self._test_filter - def GetTests(self, min_device_sdk): + def GetTests(self): tests = [ - test_case.LinkerSharedRelroTest(is_modern_linker=False, - is_low_memory=False), - test_case.LinkerSharedRelroTest(is_modern_linker=False, - is_low_memory=True) + test_case.LinkerSharedRelroTest(is_low_memory=False), + test_case.LinkerSharedRelroTest(is_low_memory=True) ] - if min_device_sdk >= _MODERN_LINKER_MINIMUM_SDK_INT: - tests.append(test_case.LinkerSharedRelroTest(is_modern_linker=True)) if self._test_filter: filtered_names = unittest_util.FilterTestNames( diff --git a/chromium/build/android/pylib/linker/test_case.py b/chromium/build/android/pylib/linker/test_case.py index f4ae7c6562e..cf16a738047 100644 --- a/chromium/build/android/pylib/linker/test_case.py +++ 
b/chromium/build/android/pylib/linker/test_case.py @@ -124,17 +124,12 @@ class AddressList(list): class LinkerTestCaseBase(object): """Base class for linker test cases.""" - def __init__(self, is_modern_linker=False, is_low_memory=False): + def __init__(self, is_low_memory=False): """Create a test case. Args: - is_modern_linker: True to test ModernLinker, False to test LegacyLinker. is_low_memory: True to simulate a low-memory device, False otherwise. """ - self.is_modern_linker = is_modern_linker - if is_modern_linker: - test_suffix = 'ForModernLinker' - else: - test_suffix = 'ForLegacyLinker' + test_suffix = 'ForLegacyLinker' self.is_low_memory = is_low_memory if is_low_memory: test_suffix += 'LowMemoryDevice' @@ -166,11 +161,7 @@ class LinkerTestCaseBase(object): print '[ %-*s ] %s' % (margin, 'RUN', self.tagged_name) logging.info('Running linker test: %s', self.tagged_name) - # Create command-line file on device. - if self.is_modern_linker: - command_line_flags = '--use-linker=modern' - else: - command_line_flags = '--use-linker=legacy' + command_line_flags = '--use-linker=legacy' if self.is_low_memory: command_line_flags += ' --low-memory-device' device.WriteFile(_COMMAND_LINE_FILE, command_line_flags) diff --git a/chromium/build/android/pylib/local/device/local_device_environment.py b/chromium/build/android/pylib/local/device/local_device_environment.py index 0c85671057a..1403504e005 100644 --- a/chromium/build/android/pylib/local/device/local_device_environment.py +++ b/chromium/build/android/pylib/local/device/local_device_environment.py @@ -98,6 +98,7 @@ class LocalDeviceEnvironment(environment.Environment): self._logcat_output_dir = args.logcat_output_dir self._logcat_output_file = args.logcat_output_file self._max_tries = 1 + args.num_retries + self._recover_devices = args.recover_devices self._skip_clear_data = args.skip_clear_data self._tool_name = args.tool self._trace_output = None @@ -188,6 +189,10 @@ class LocalDeviceEnvironment(environment.Environment): return parallelizer.SyncParallelizer(self.devices) @property + def recover_devices(self): + return self._recover_devices + + @property def skip_clear_data(self): return self._skip_clear_data @@ -201,7 +206,9 @@ class LocalDeviceEnvironment(environment.Environment): #override def TearDown(self): - if self.trace_output: + if self.trace_output and self._trace_all: + instrumentation_tracing.stop_instrumenting() + elif self.trace_output: self.DisableTracing() if not self._devices: diff --git a/chromium/build/android/pylib/local/device/local_device_gtest_run.py b/chromium/build/android/pylib/local/device/local_device_gtest_run.py index 008f98715e1..3306a2245cb 100644 --- a/chromium/build/android/pylib/local/device/local_device_gtest_run.py +++ b/chromium/build/android/pylib/local/device/local_device_gtest_run.py @@ -108,7 +108,7 @@ def _ExtractTestsFromFilter(gtest_filter): class _ApkDelegate(object): - def __init__(self, test_instance): + def __init__(self, test_instance, tool): self._activity = test_instance.activity self._apk_helper = test_instance.apk_helper self._test_apk_incremental_install_json = ( @@ -120,6 +120,7 @@ class _ApkDelegate(object): self._component = '%s/%s' % (self._package, self._runner) self._extras = test_instance.extras self._wait_for_java_debugger = test_instance.wait_for_java_debugger + self._tool = tool def GetTestDataRoot(self, device): # pylint: disable=no-self-use @@ -206,12 +207,13 @@ class _ApkDelegate(object): class _ExeDelegate(object): - def __init__(self, tr, dist_dir): + def __init__(self, 
tr, dist_dir, tool): self._host_dist_dir = dist_dir self._exe_file_name = os.path.basename(dist_dir)[:-len('__dist')] self._device_dist_dir = posixpath.join( constants.TEST_EXECUTABLE_DIR, os.path.basename(dist_dir)) self._test_run = tr + self._tool = tool def GetTestDataRoot(self, device): # pylint: disable=no-self-use @@ -247,6 +249,10 @@ class _ExeDelegate(object): env = { 'LD_LIBRARY_PATH': self._device_dist_dir } + + if self._tool != 'asan': + env['UBSAN_OPTIONS'] = constants.UBSAN_OPTIONS + try: gcov_strip_depth = os.environ['NATIVE_COVERAGE_DEPTH_STRIP'] external = device.GetExternalStoragePath() @@ -276,9 +282,10 @@ class LocalDeviceGtestRun(local_device_test_run.LocalDeviceTestRun): super(LocalDeviceGtestRun, self).__init__(env, test_instance) if self._test_instance.apk: - self._delegate = _ApkDelegate(self._test_instance) + self._delegate = _ApkDelegate(self._test_instance, env.tool) elif self._test_instance.exe_dist_dir: - self._delegate = _ExeDelegate(self, self._test_instance.exe_dist_dir) + self._delegate = _ExeDelegate(self, self._test_instance.exe_dist_dir, + self._env.tool) self._crashes = set() self._servers = collections.defaultdict(list) @@ -457,54 +464,71 @@ class LocalDeviceGtestRun(local_device_test_run.LocalDeviceTestRun): device_temp_file.NamedDeviceTemporaryDirectory( adb=device.adb, dir='/sdcard/'), self._test_instance.gs_test_artifacts_bucket) as test_artifacts_dir: - - flags = list(self._test_instance.flags) - if self._test_instance.enable_xml_result_parsing: - flags.append('--gtest_output=xml:%s' % device_tmp_results_file.name) - - if self._test_instance.gs_test_artifacts_bucket: - flags.append('--test_artifacts_dir=%s' % test_artifacts_dir.name) - - logging.info('flags:') - for f in flags: - logging.info(' %s', f) - - stream_name = 'logcat_%s_%s_%s' % ( - hash(tuple(test)), - time.strftime('%Y%m%dT%H%M%S-UTC', time.gmtime()), - device.serial) - - with self._env.output_manager.ArchivedTempfile( - stream_name, 'logcat') as logcat_file: - with logcat_monitor.LogcatMonitor( - device.adb, - filter_specs=local_device_environment.LOGCAT_FILTERS, - output_file=logcat_file.name) as logmon: - with contextlib_ext.Optional( - trace_event.trace(str(test)), - self._env.trace_output): - output = self._delegate.Run( - test, device, flags=' '.join(flags), - timeout=timeout, retries=0) - logmon.Close() - - if logcat_file.Link(): - logging.info('Logcat saved to %s', logcat_file.Link()) - - if self._test_instance.enable_xml_result_parsing: - try: - gtest_xml = device.ReadFile( - device_tmp_results_file.name, - as_root=True) - except device_errors.CommandFailedError as e: - logging.warning( - 'Failed to pull gtest results XML file %s: %s', - device_tmp_results_file.name, - str(e)) - gtest_xml = None - - test_artifacts_url = self._UploadTestArtifacts(device, - test_artifacts_dir) + with contextlib_ext.Optional( + device_temp_file.DeviceTempFile( + adb=device.adb, dir=self._delegate.ResultsDirectory(device)), + self._test_instance.chartjson_result_file) as chartjson_result_file: + + flags = list(self._test_instance.flags) + if self._test_instance.enable_xml_result_parsing: + flags.append('--gtest_output=xml:%s' % device_tmp_results_file.name) + + if self._test_instance.gs_test_artifacts_bucket: + flags.append('--test_artifacts_dir=%s' % test_artifacts_dir.name) + + if self._test_instance.chartjson_result_file: + flags.append('--chartjson_result_file=%s' + % chartjson_result_file.name) + + logging.info('flags:') + for f in flags: + logging.info(' %s', f) + + stream_name = 
'logcat_%s_%s_%s' % ( + hash(tuple(test)), + time.strftime('%Y%m%dT%H%M%S-UTC', time.gmtime()), + device.serial) + + with self._env.output_manager.ArchivedTempfile( + stream_name, 'logcat') as logcat_file: + with logcat_monitor.LogcatMonitor( + device.adb, + filter_specs=local_device_environment.LOGCAT_FILTERS, + output_file=logcat_file.name) as logmon: + with contextlib_ext.Optional( + trace_event.trace(str(test)), + self._env.trace_output): + output = self._delegate.Run( + test, device, flags=' '.join(flags), + timeout=timeout, retries=0) + logmon.Close() + + if logcat_file.Link(): + logging.info('Logcat saved to %s', logcat_file.Link()) + + if self._test_instance.enable_xml_result_parsing: + try: + gtest_xml = device.ReadFile( + device_tmp_results_file.name, + as_root=True) + except device_errors.CommandFailedError as e: + logging.warning( + 'Failed to pull gtest results XML file %s: %s', + device_tmp_results_file.name, + str(e)) + gtest_xml = None + + if self._test_instance.chartjson_result_file: + try: + device.PullFile(chartjson_result_file.name, + self._test_instance.chartjson_result_file) + except device_errors.CommandFailedError as e: + logging.warning( + 'Failed to pull chartjson results %s: %s', + chartjson_result_file.name, str(e)) + + test_artifacts_url = self._UploadTestArtifacts(device, + test_artifacts_dir) for s in self._servers[str(device)]: s.Reset() diff --git a/chromium/build/android/pylib/local/device/local_device_instrumentation_test_run.py b/chromium/build/android/pylib/local/device/local_device_instrumentation_test_run.py index 67b8fd0a9da..664bb8d2c2d 100644 --- a/chromium/build/android/pylib/local/device/local_device_instrumentation_test_run.py +++ b/chromium/build/android/pylib/local/device/local_device_instrumentation_test_run.py @@ -29,6 +29,7 @@ from pylib.constants import host_paths from pylib.instrumentation import instrumentation_test_instance from pylib.local.device import local_device_environment from pylib.local.device import local_device_test_run +from pylib.output import remote_output_manager from pylib.utils import instrumentation_tracing from pylib.utils import shared_preference_utils @@ -203,16 +204,19 @@ class LocalDeviceInstrumentationTestRun( def set_debug_app(dev): # Set debug app in order to enable reading command line flags on user # builds - if not self._test_instance.package_info: - logging.error("Couldn't set debug app: no package info") - elif not self._test_instance.package_info.package: - logging.error("Couldn't set debug app: no package defined") + package_name = None + if self._test_instance.apk_under_test: + package_name = self._test_instance.apk_under_test.GetPackageName() + elif self._test_instance.test_apk: + package_name = self._test_instance.test_apk.GetPackageName() else: - cmd = ['am', 'set-debug-app', '--persistent'] - if self._test_instance.wait_for_java_debugger: - cmd.append('-w') - cmd.append(self._test_instance.package_info.package) - dev.RunShellCommand(cmd, check_return=True) + logging.error("Couldn't set debug app: no package name found") + return + cmd = ['am', 'set-debug-app', '--persistent'] + if self._test_instance.wait_for_java_debugger: + cmd.append('-w') + cmd.append(package_name) + dev.RunShellCommand(cmd, check_return=True) @trace_event.traced def edit_shared_prefs(dev): @@ -241,15 +245,10 @@ class LocalDeviceInstrumentationTestRun( @trace_event.traced def create_flag_changer(dev): if self._test_instance.flags: - if not self._test_instance.package_info: - logging.error("Couldn't set flags: no package info") - 
elif not self._test_instance.package_info.cmdline_file: - logging.error("Couldn't set flags: no cmdline_file") - else: - self._CreateFlagChangerIfNeeded(dev) - logging.debug('Attempting to set flags: %r', - self._test_instance.flags) - self._flag_changers[str(dev)].AddFlags(self._test_instance.flags) + self._CreateFlagChangerIfNeeded(dev) + logging.debug('Attempting to set flags: %r', + self._test_instance.flags) + self._flag_changers[str(dev)].AddFlags(self._test_instance.flags) valgrind_tools.SetChromeTimeoutScale( dev, self._test_instance.timeout_scale) @@ -275,13 +274,29 @@ class LocalDeviceInstrumentationTestRun( steps = [bind_crash_handler(s, device) for s in steps] - if self._env.concurrent_adb: - reraiser_thread.RunAsync(steps) - else: - for step in steps: - step() - if self._test_instance.store_tombstones: - tombstones.ClearAllTombstones(device) + try: + if self._env.concurrent_adb: + reraiser_thread.RunAsync(steps) + else: + for step in steps: + step() + if self._test_instance.store_tombstones: + tombstones.ClearAllTombstones(device) + except device_errors.CommandFailedError: + # A bugreport can be large and take a while to generate, so only capture + # one if we're using a remote manager. + if isinstance( + self._env.output_manager, + remote_output_manager.RemoteOutputManager): + logging.error( + 'Error when setting up device for tests. Taking a bugreport for ' + 'investigation. This may take a while...') + report_name = '%s.bugreport' % device.serial + with self._env.output_manager.ArchivedTempfile( + report_name, 'bug_reports') as report_file: + device.TakeBugReport(report_file.name) + logging.error('Bug report saved to %s', report_file.Link()) + raise self._env.parallel_devices.pMap( individual_device_set_up, @@ -289,7 +304,7 @@ class LocalDeviceInstrumentationTestRun( if self._test_instance.wait_for_java_debugger: logging.warning('*' * 80) logging.warning('Waiting for debugger to attach to process: %s', - self._test_instance.package_info.package) + self._test_instance.apk_under_test.GetPackageName()) logging.warning('*' * 80) #override @@ -328,7 +343,7 @@ class LocalDeviceInstrumentationTestRun( def _CreateFlagChangerIfNeeded(self, device): if not str(device) in self._flag_changers: self._flag_changers[str(device)] = flag_changer.FlagChanger( - device, self._test_instance.package_info.cmdline_file) + device, "test-cmdline-file") #override def _CreateShards(self, tests): @@ -638,11 +653,11 @@ class LocalDeviceInstrumentationTestRun( extras['log'] = 'true' extras[_EXTRA_TEST_LIST] = dev_test_list_json.name target = '%s/%s' % (test_package, junit4_runner_class) - kwargs = {} + timeout = 120 if self._test_instance.wait_for_java_debugger: - kwargs['timeout'] = None + timeout = None test_list_run_output = dev.StartInstrumentation( - target, extras=extras, retries=0, **kwargs) + target, extras=extras, retries=0, timeout=timeout) if any(test_list_run_output): logging.error('Unexpected output while listing tests:') for line in test_list_run_output: @@ -812,18 +827,11 @@ class LocalDeviceInstrumentationTestRun( #override def _ShouldRetry(self, test, result): - def not_run(res): - if isinstance(res, list): - return any(not_run(r) for r in res) - return res.GetType() == base_test_result.ResultType.NOTRUN - - if 'RetryOnFailure' in test.get('annotations', {}) or not_run(result): - return True - - # TODO(jbudorick): Remove this log message once @RetryOnFailure has been - # enabled for a while. See crbug.com/619055 for more details. - logging.error('Default retries are being phased out. 
crbug.com/619055') - return False + # We've tried to disable retries in the past with mixed results. + # See crbug.com/619055 for historical context and crbug.com/797002 + # for ongoing efforts. + del test, result + return True #override def _ShouldShard(self): diff --git a/chromium/build/android/pylib/local/device/local_device_instrumentation_test_run_test.py b/chromium/build/android/pylib/local/device/local_device_instrumentation_test_run_test.py index d7cb3727d12..82db02e8eb4 100755 --- a/chromium/build/android/pylib/local/device/local_device_instrumentation_test_run_test.py +++ b/chromium/build/android/pylib/local/device/local_device_instrumentation_test_run_test.py @@ -16,6 +16,9 @@ from pylib.local.device import local_device_instrumentation_test_run class LocalDeviceInstrumentationTestRunTest(unittest.TestCase): + # TODO(crbug.com/797002): Decide whether the _ShouldRetry hook is worth + # retaining and remove these tests if not. + def testShouldRetry_failure(self): env = mock_environment.MockEnvironment() ti = mock_test_instance.MockTestInstance() @@ -29,7 +32,7 @@ class LocalDeviceInstrumentationTestRunTest(unittest.TestCase): } result = base_test_result.BaseTestResult( 'SadTest.testFailure', base_test_result.ResultType.FAIL) - self.assertFalse(obj._ShouldRetry(test, result)) + self.assertTrue(obj._ShouldRetry(test, result)) def testShouldRetry_retryOnFailure(self): env = mock_environment.MockEnvironment() diff --git a/chromium/build/android/pylib/local/device/local_device_test_run.py b/chromium/build/android/pylib/local/device/local_device_test_run.py index 21ab0465412..90e6b674cb0 100644 --- a/chromium/build/android/pylib/local/device/local_device_test_run.py +++ b/chromium/build/android/pylib/local/device/local_device_test_run.py @@ -12,6 +12,7 @@ import threading from devil import base_error from devil.android import crash_handler from devil.android import device_errors +from devil.android.sdk import version_codes from devil.android.tools import device_recovery from devil.utils import signal_handler from pylib import valgrind_tools @@ -117,10 +118,19 @@ class LocalDeviceTestRun(test_run.TestRun): results = [] while tries < self._env.max_tries and tests: logging.info('STARTING TRY #%d/%d', tries + 1, self._env.max_tries) - if tries > 0 and tries + 1 == self._env.max_tries: - logging.info( - 'Attempting to recover devices prior to last test attempt.') - self._env.parallel_devices.pMap(device_recovery.RecoverDevice, None) + if tries > 0 and self._env.recover_devices: + if any(d.build_version_sdk == version_codes.LOLLIPOP_MR1 + for d in self._env.devices): + logging.info( + 'Attempting to recover devices due to known issue on L MR1. 
' + 'See crbug.com/787056 for details.') + self._env.parallel_devices.pMap( + device_recovery.RecoverDevice, None) + elif tries + 1 == self._env.max_tries: + logging.info( + 'Attempting to recover devices prior to last test attempt.') + self._env.parallel_devices.pMap( + device_recovery.RecoverDevice, None) logging.info('Will run %d tests on %d devices: %s', len(tests), len(self._env.devices), ', '.join(str(d) for d in self._env.devices)) diff --git a/chromium/build/android/pylib/results/json_results.py b/chromium/build/android/pylib/results/json_results.py index d9f441f1a03..92b01b71fd3 100644 --- a/chromium/build/android/pylib/results/json_results.py +++ b/chromium/build/android/pylib/results/json_results.py @@ -104,7 +104,7 @@ def GenerateResultsDict(test_run_results, global_tags=None): result_dict = { 'status': status_as_string(r.GetType()), 'elapsed_time_ms': r.GetDuration(), - 'output_snippet': r.GetLog(), + 'output_snippet': unicode(r.GetLog(), errors='replace'), 'losless_snippet': '', 'output_snippet_base64': '', 'links': r.GetLinks(), diff --git a/chromium/build/android/pylib/results/presentation/test_results_presentation.py b/chromium/build/android/pylib/results/presentation/test_results_presentation.py index 552ccd76844..21137feb14c 100755 --- a/chromium/build/android/pylib/results/presentation/test_results_presentation.py +++ b/chromium/build/android/pylib/results/presentation/test_results_presentation.py @@ -386,6 +386,10 @@ def main(): '(Output of the swarming.py collect ' '--task-summary-json=XXX command.)') parser.add_argument( + '--task-output-dir', + help='(Swarming Merge Script API) ' + 'Directory containing all swarming task results.') + parser.add_argument( 'positional', nargs='*', help='output.json from shards.') @@ -454,7 +458,9 @@ def main(): if args.output_json: with open(json_file) as original_json_file: json_object = json.load(original_json_file) - json_object['links'] = {'result_details': result_details_link} + json_object['links'] = { + 'result_details (logcats, flakiness links)': result_details_link + } with open(args.output_json, 'w') as f: json.dump(json_object, f) else: diff --git a/chromium/build/android/pylib/symbols/stack_symbolizer.py b/chromium/build/android/pylib/symbols/stack_symbolizer.py index c1c9afbb0ac..05e40657dfb 100644 --- a/chromium/build/android/pylib/symbols/stack_symbolizer.py +++ b/chromium/build/android/pylib/symbols/stack_symbolizer.py @@ -7,6 +7,7 @@ import os import re import shutil import tempfile +import time import zipfile from devil.utils import cmd_helper @@ -38,6 +39,7 @@ class Symbolizer(object): self._libs_dir = None self._apk_libs = [] self._has_unzipped = False + self._time_spent_symbolizing = 0 def __del__(self): @@ -52,6 +54,9 @@ class Symbolizer(object): if self._libs_dir: shutil.rmtree(self._libs_dir) self._libs_dir = None + if self._time_spent_symbolizing > 0: + logging.info( + 'Total time spent symbolizing: %.2fs', self._time_spent_symbolizing) def UnzipAPKIfNecessary(self): @@ -97,7 +102,11 @@ class Symbolizer(object): with tempfile.NamedTemporaryFile() as f: f.write('\n'.join(data_to_symbolize)) f.flush() - _, output = cmd_helper.GetCmdStatusAndOutput(cmd + [f.name], env=env) + start = time.time() + try: + _, output = cmd_helper.GetCmdStatusAndOutput(cmd + [f.name], env=env) + finally: + self._time_spent_symbolizing += time.time() - start for line in output.splitlines(): if not include_stack and 'Stack Data:' in line: break diff --git a/chromium/build/android/pylib/utils/emulator.py 
b/chromium/build/android/pylib/utils/emulator.py deleted file mode 100644 index c41cdd22c07..00000000000 --- a/chromium/build/android/pylib/utils/emulator.py +++ /dev/null @@ -1,519 +0,0 @@ -# Copyright (c) 2012 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Provides an interface to start and stop Android emulator. - - Emulator: The class provides the methods to launch/shutdown the emulator with - the android virtual device named 'avd_armeabi' . -""" - -import logging -import os -import signal -import subprocess -import time - -from devil.android import device_errors -from devil.android import device_utils -from devil.android.sdk import adb_wrapper -from devil.utils import cmd_helper -from pylib import constants -from pylib import pexpect -from pylib.utils import time_profile - -# Default sdcard size in the format of [amount][unit] -DEFAULT_SDCARD_SIZE = '512M' -# Default internal storage (MB) of emulator image -DEFAULT_STORAGE_SIZE = '1024M' - -# Each emulator has 300 secs of wait time for launching -_BOOT_WAIT_INTERVALS = 30 -_BOOT_WAIT_INTERVAL_TIME = 10 - -# Path for avd files and avd dir -_BASE_AVD_DIR = os.path.expanduser(os.path.join('~', '.android', 'avd')) -_TOOLS_ANDROID_PATH = os.path.join(constants.ANDROID_SDK_ROOT, - 'tools', 'android') - -# Template used to generate config.ini files for the emulator -CONFIG_TEMPLATE = """avd.ini.encoding=ISO-8859-1 -hw.dPad=no -hw.lcd.density=320 -sdcard.size={sdcard.size} -hw.cpu.arch={hw.cpu.arch} -hw.device.hash=-708107041 -hw.camera.back=none -disk.dataPartition.size=800M -hw.gpu.enabled={gpu} -skin.path=720x1280 -skin.dynamic=yes -hw.keyboard=yes -hw.ramSize=1024 -hw.device.manufacturer=Google -hw.sdCard=yes -hw.mainKeys=no -hw.accelerometer=yes -skin.name=720x1280 -abi.type={abi.type} -hw.trackBall=no -hw.device.name=Galaxy Nexus -hw.battery=yes -hw.sensors.proximity=yes -image.sysdir.1=system-images/android-{api.level}/default/{abi.type}/ -hw.sensors.orientation=yes -hw.audioInput=yes -hw.camera.front=none -hw.gps=yes -vm.heapSize=128 -{extras}""" - -CONFIG_REPLACEMENTS = { - 'x86': { - '{hw.cpu.arch}': 'x86', - '{abi.type}': 'x86', - '{extras}': '' - }, - 'arm': { - '{hw.cpu.arch}': 'arm', - '{abi.type}': 'armeabi-v7a', - '{extras}': 'hw.cpu.model=cortex-a8\n' - }, - 'mips': { - '{hw.cpu.arch}': 'mips', - '{abi.type}': 'mips', - '{extras}': '' - } -} - -class EmulatorLaunchException(Exception): - """Emulator failed to launch.""" - pass - -def WaitForEmulatorLaunch(num): - """Wait for emulators to finish booting - - Emulators on bots are launch with a separate background process, to avoid - running tests before the emulators are fully booted, this function waits for - a number of emulators to finish booting - - Arg: - num: the amount of emulators to wait. - """ - for _ in range(num*_BOOT_WAIT_INTERVALS): - emulators = [device_utils.DeviceUtils(a) - for a in adb_wrapper.AdbWrapper.Devices() - if a.is_emulator] - if len(emulators) >= num: - logging.info('All %d emulators launched', num) - return - logging.info( - 'Waiting for %d emulators, %d of them already launched', num, - len(emulators)) - time.sleep(_BOOT_WAIT_INTERVAL_TIME) - raise Exception("Expected %d emulators, %d launched within time limit" % - (num, len(emulators))) - -def KillAllEmulators(): - """Kill all running emulators that look like ones we started. - - There are odd 'sticky' cases where there can be no emulator process - running but a device slot is taken. 
A little bot trouble and we're out of - room forever. - """ - logging.info('Killing all existing emulators and existing the program') - emulators = [device_utils.DeviceUtils(a) - for a in adb_wrapper.AdbWrapper.Devices() - if a.is_emulator] - if not emulators: - return - for e in emulators: - e.adb.Emu(['kill']) - logging.info('Emulator killing is async; give a few seconds for all to die.') - for _ in range(10): - if not any(a.is_emulator for a in adb_wrapper.AdbWrapper.Devices()): - return - time.sleep(1) - - -def DeleteAllTempAVDs(): - """Delete all temporary AVDs which are created for tests. - - If the test exits abnormally and some temporary AVDs created when testing may - be left in the system. Clean these AVDs. - """ - logging.info('Deleting all the avd files') - avds = device_utils.GetAVDs() - if not avds: - return - for avd_name in avds: - if 'run_tests_avd' in avd_name: - cmd = [_TOOLS_ANDROID_PATH, '-s', 'delete', 'avd', '--name', avd_name] - cmd_helper.RunCmd(cmd) - logging.info('Delete AVD %s', avd_name) - - -class PortPool(object): - """Pool for emulator port starting position that changes over time.""" - _port_min = 5554 - _port_max = 5585 - _port_current_index = 0 - - @classmethod - def port_range(cls): - """Return a range of valid ports for emulator use. - - The port must be an even number between 5554 and 5584. Sometimes - a killed emulator "hangs on" to a port long enough to prevent - relaunch. This is especially true on slow machines (like a bot). - Cycling through a port start position helps make us resilient.""" - ports = range(cls._port_min, cls._port_max, 2) - n = cls._port_current_index - cls._port_current_index = (n + 1) % len(ports) - return ports[n:] + ports[:n] - - -def _GetAvailablePort(): - """Returns an available TCP port for the console.""" - used_ports = [] - emulators = [device_utils.DeviceUtils(a) - for a in adb_wrapper.AdbWrapper.Devices() - if a.is_emulator] - for emulator in emulators: - used_ports.append(emulator.adb.GetDeviceSerial().split('-')[1]) - for port in PortPool.port_range(): - if str(port) not in used_ports: - return port - - -def LaunchTempEmulators(emulator_count, abi, api_level, enable_kvm=False, - kill_and_launch=True, sdcard_size=DEFAULT_SDCARD_SIZE, - storage_size=DEFAULT_STORAGE_SIZE, wait_for_boot=True, - headless=False): - """Create and launch temporary emulators and wait for them to boot. - - Args: - emulator_count: number of emulators to launch. - abi: the emulator target platform - api_level: the api level (e.g., 19 for Android v4.4 - KitKat release) - wait_for_boot: whether or not to wait for emulators to boot up - headless: running emulator with no ui - - Returns: - List of emulators. - """ - emulators = [] - for n in xrange(emulator_count): - t = time_profile.TimeProfile('Emulator launch %d' % n) - # Creates a temporary AVD. - avd_name = 'run_tests_avd_%d' % n - logging.info('Emulator launch %d with avd_name=%s and api=%d', - n, avd_name, api_level) - emulator = Emulator(avd_name, abi, enable_kvm=enable_kvm, - sdcard_size=sdcard_size, storage_size=storage_size, - headless=headless) - emulator.CreateAVD(api_level) - emulator.Launch(kill_all_emulators=(n == 0 and kill_and_launch)) - t.Stop() - emulators.append(emulator) - # Wait for all emulators to boot completed. 
- if wait_for_boot: - for emulator in emulators: - emulator.ConfirmLaunch(True) - logging.info('All emulators are fully booted') - return emulators - - -def LaunchEmulator(avd_name, abi, kill_and_launch=True, enable_kvm=False, - sdcard_size=DEFAULT_SDCARD_SIZE, - storage_size=DEFAULT_STORAGE_SIZE, headless=False): - """Launch an existing emulator with name avd_name. - - Args: - avd_name: name of existing emulator - abi: the emulator target platform - headless: running emulator with no ui - - Returns: - emulator object. - """ - logging.info('Specified emulator named avd_name=%s launched', avd_name) - emulator = Emulator(avd_name, abi, enable_kvm=enable_kvm, - sdcard_size=sdcard_size, storage_size=storage_size, - headless=headless) - emulator.Launch(kill_all_emulators=kill_and_launch) - emulator.ConfirmLaunch(True) - return emulator - - -class Emulator(object): - """Provides the methods to launch/shutdown the emulator. - - The emulator has the android virtual device named 'avd_armeabi'. - - The emulator could use any even TCP port between 5554 and 5584 for the - console communication, and this port will be part of the device name like - 'emulator-5554'. Assume it is always True, as the device name is the id of - emulator managed in this class. - - Attributes: - emulator: Path of Android's emulator tool. - popen: Popen object of the running emulator process. - device: Device name of this emulator. - """ - - # Signals we listen for to kill the emulator on - _SIGNALS = (signal.SIGINT, signal.SIGHUP) - - # Time to wait for an emulator launch, in seconds. This includes - # the time to launch the emulator and a wait-for-device command. - _LAUNCH_TIMEOUT = 120 - - # Timeout interval of wait-for-device command before bouncing to a a - # process life check. - _WAITFORDEVICE_TIMEOUT = 5 - - # Time to wait for a 'wait for boot complete' (property set on device). - _WAITFORBOOT_TIMEOUT = 300 - - def __init__(self, avd_name, abi, enable_kvm=False, - sdcard_size=DEFAULT_SDCARD_SIZE, - storage_size=DEFAULT_STORAGE_SIZE, headless=False): - """Init an Emulator. - - Args: - avd_name: name of the AVD to create - abi: target platform for emulator being created, defaults to x86 - """ - android_sdk_root = constants.ANDROID_SDK_ROOT - self.emulator = os.path.join(android_sdk_root, 'tools', 'emulator') - self.android = _TOOLS_ANDROID_PATH - self.popen = None - self.device_serial = None - self.abi = abi - self.avd_name = avd_name - self.sdcard_size = sdcard_size - self.storage_size = storage_size - self.enable_kvm = enable_kvm - self.headless = headless - - @staticmethod - def _DeviceName(): - """Return our device name.""" - port = _GetAvailablePort() - return ('emulator-%d' % port, port) - - def CreateAVD(self, api_level): - """Creates an AVD with the given name. - - Args: - api_level: the api level of the image - - Return avd_name. - """ - - if self.abi == 'arm': - abi_option = 'armeabi-v7a' - elif self.abi == 'mips': - abi_option = 'mips' - else: - abi_option = 'x86' - - api_target = 'android-%s' % api_level - - avd_command = [ - self.android, - '--silent', - 'create', 'avd', - '--name', self.avd_name, - '--abi', abi_option, - '--target', api_target, - '--sdcard', self.sdcard_size, - '--force', - ] - avd_cmd_str = ' '.join(avd_command) - logging.info('Create AVD command: %s', avd_cmd_str) - avd_process = pexpect.spawn(avd_cmd_str) - - # Instead of creating a custom profile, we overwrite config files. 
- avd_process.expect('Do you wish to create a custom hardware profile') - avd_process.sendline('no\n') - avd_process.expect('Created AVD \'%s\'' % self.avd_name) - - # Replace current configuration with default Galaxy Nexus config. - ini_file = os.path.join(_BASE_AVD_DIR, '%s.ini' % self.avd_name) - new_config_ini = os.path.join(_BASE_AVD_DIR, '%s.avd' % self.avd_name, - 'config.ini') - - # Remove config files with defaults to replace with Google's GN settings. - os.unlink(ini_file) - os.unlink(new_config_ini) - - # Create new configuration files with Galaxy Nexus by Google settings. - with open(ini_file, 'w') as new_ini: - new_ini.write('avd.ini.encoding=ISO-8859-1\n') - new_ini.write('target=%s\n' % api_target) - new_ini.write('path=%s/%s.avd\n' % (_BASE_AVD_DIR, self.avd_name)) - new_ini.write('path.rel=avd/%s.avd\n' % self.avd_name) - - custom_config = CONFIG_TEMPLATE - replacements = CONFIG_REPLACEMENTS[self.abi] - for key in replacements: - custom_config = custom_config.replace(key, replacements[key]) - custom_config = custom_config.replace('{api.level}', str(api_level)) - custom_config = custom_config.replace('{sdcard.size}', self.sdcard_size) - custom_config.replace('{gpu}', 'no' if self.headless else 'yes') - - with open(new_config_ini, 'w') as new_config_ini: - new_config_ini.write(custom_config) - - return self.avd_name - - - def _DeleteAVD(self): - """Delete the AVD of this emulator.""" - avd_command = [ - self.android, - '--silent', - 'delete', - 'avd', - '--name', self.avd_name, - ] - logging.info('Delete AVD command: %s', ' '.join(avd_command)) - cmd_helper.RunCmd(avd_command) - - def ResizeAndWipeAvd(self, storage_size): - """Wipes old AVD and creates new AVD of size |storage_size|. - - This serves as a work around for '-partition-size' and '-wipe-data' - """ - userdata_img = os.path.join(_BASE_AVD_DIR, '%s.avd' % self.avd_name, - 'userdata.img') - userdata_qemu_img = os.path.join(_BASE_AVD_DIR, '%s.avd' % self.avd_name, - 'userdata-qemu.img') - resize_cmd = ['resize2fs', userdata_img, '%s' % storage_size] - logging.info('Resizing userdata.img to ideal size') - cmd_helper.RunCmd(resize_cmd) - wipe_cmd = ['cp', userdata_img, userdata_qemu_img] - logging.info('Replacing userdata-qemu.img with the new userdata.img') - cmd_helper.RunCmd(wipe_cmd) - - def Launch(self, kill_all_emulators): - """Launches the emulator asynchronously. Call ConfirmLaunch() to ensure the - emulator is ready for use. - - If fails, an exception will be raised. - """ - if kill_all_emulators: - KillAllEmulators() # just to be sure - self._AggressiveImageCleanup() - (self.device_serial, port) = self._DeviceName() - self.ResizeAndWipeAvd(storage_size=self.storage_size) - emulator_command = [ - self.emulator, - # Speed up emulator launch by 40%. Really. - '-no-boot-anim', - ] - if self.headless: - emulator_command.extend([ - '-no-skin', - '-no-window' - ]) - else: - emulator_command.extend([ - '-gpu', 'on' - ]) - emulator_command.extend([ - # Use a familiar name and port. - '-avd', self.avd_name, - '-port', str(port), - # all the argument after qemu are sub arguments for qemu - '-qemu', '-m', '1024', - ]) - if self.abi == 'x86' and self.enable_kvm: - emulator_command.extend([ - # For x86 emulator --enable-kvm will fail early, avoiding accidental - # runs in a slow mode (i.e. without hardware virtualization support). 
- '--enable-kvm', - ]) - - logging.info('Emulator launch command: %s', ' '.join(emulator_command)) - self.popen = subprocess.Popen(args=emulator_command, - stderr=subprocess.STDOUT) - self._InstallKillHandler() - - @staticmethod - def _AggressiveImageCleanup(): - """Aggressive cleanup of emulator images. - - Experimentally it looks like our current emulator use on the bot - leaves image files around in /tmp/android-$USER. If a "random" - name gets reused, we choke with a 'File exists' error. - TODO(jrg): is there a less hacky way to accomplish the same goal? - """ - logging.info('Aggressive Image Cleanup') - emulator_imagedir = '/tmp/android-%s' % os.environ['USER'] - if not os.path.exists(emulator_imagedir): - return - for image in os.listdir(emulator_imagedir): - full_name = os.path.join(emulator_imagedir, image) - if 'emulator' in full_name: - logging.info('Deleting emulator image %s', full_name) - os.unlink(full_name) - - def ConfirmLaunch(self, wait_for_boot=False): - """Confirm the emulator launched properly. - - Loop on a wait-for-device with a very small timeout. On each - timeout, check the emulator process is still alive. - After confirming a wait-for-device can be successful, make sure - it returns the right answer. - """ - seconds_waited = 0 - number_of_waits = 2 # Make sure we can wfd twice - - device = device_utils.DeviceUtils(self.device_serial) - while seconds_waited < self._LAUNCH_TIMEOUT: - try: - device.adb.WaitForDevice( - timeout=self._WAITFORDEVICE_TIMEOUT, retries=1) - number_of_waits -= 1 - if not number_of_waits: - break - except device_errors.CommandTimeoutError: - seconds_waited += self._WAITFORDEVICE_TIMEOUT - device.adb.KillServer() - self.popen.poll() - if self.popen.returncode != None: - raise EmulatorLaunchException('EMULATOR DIED') - - if seconds_waited >= self._LAUNCH_TIMEOUT: - raise EmulatorLaunchException('TIMEOUT with wait-for-device') - - logging.info('Seconds waited on wait-for-device: %d', seconds_waited) - if wait_for_boot: - # Now that we checked for obvious problems, wait for a boot complete. - # Waiting for the package manager is sometimes problematic. - device.WaitUntilFullyBooted(timeout=self._WAITFORBOOT_TIMEOUT) - logging.info('%s is now fully booted', self.avd_name) - - def Shutdown(self): - """Shuts down the process started by launch.""" - self._DeleteAVD() - if self.popen: - self.popen.poll() - if self.popen.returncode == None: - self.popen.kill() - self.popen = None - - def _ShutdownOnSignal(self, _signum, _frame): - logging.critical('emulator _ShutdownOnSignal') - for sig in self._SIGNALS: - signal.signal(sig, signal.SIG_DFL) - self.Shutdown() - raise KeyboardInterrupt # print a stack - - def _InstallKillHandler(self): - """Install a handler to kill the emulator when we exit unexpectedly.""" - for sig in self._SIGNALS: - signal.signal(sig, self._ShutdownOnSignal) diff --git a/chromium/build/android/pylib/utils/google_storage_helper.py b/chromium/build/android/pylib/utils/google_storage_helper.py index c48543fcdfe..55e4882c56c 100644 --- a/chromium/build/android/pylib/utils/google_storage_helper.py +++ b/chromium/build/android/pylib/utils/google_storage_helper.py @@ -29,7 +29,8 @@ _AUTHENTICATED_URL = 'https://storage.cloud.google.com/%s/' @decorators.NoRaiseException(default_return_value='') -def upload(name, filepath, bucket, content_type=None, authenticated_link=True): +def upload(name, filepath, bucket, gs_args=None, command_args=None, + content_type=None, authenticated_link=True): """Uploads data to Google Storage. 
Args: @@ -51,9 +52,10 @@ def upload(name, filepath, bucket, content_type=None, authenticated_link=True): logging.info('Uploading %s to %s', filepath, gs_path) cmd = [_GSUTIL_PATH, '-q'] + cmd.extend(gs_args or []) if content_type: cmd.extend(['-h', 'Content-Type:%s' % content_type]) - cmd.extend(['cp', filepath, gs_path]) + cmd.extend(['cp'] + (command_args or []) + [filepath, gs_path]) cmd_helper.RunCmd(cmd) diff --git a/chromium/build/android/pylib/utils/instrumentation_tracing.py b/chromium/build/android/pylib/utils/instrumentation_tracing.py index 7e00c58be7d..f1d03a0dcf8 100644 --- a/chromium/build/android/pylib/utils/instrumentation_tracing.py +++ b/chromium/build/android/pylib/utils/instrumentation_tracing.py @@ -17,6 +17,7 @@ function from such a module will be added to the trace. import contextlib import functools import inspect +import os import re import sys import threading @@ -108,7 +109,16 @@ def _generate_trace_function(to_include, to_exclude): included = set() excluded = set() + tracing_pid = os.getpid() + def traceFunction(frame, event, arg): + del arg + + # Don't try to trace in subprocesses. + if os.getpid() != tracing_pid: + sys.settrace(None) + return None + # pylint: disable=unused-argument if event not in ("call", "return"): return None diff --git a/chromium/build/android/test_runner.py b/chromium/build/android/test_runner.py index 3236356f3f5..2d34b30cb2e 100755 --- a/chromium/build/android/test_runner.py +++ b/chromium/build/android/test_runner.py @@ -258,6 +258,11 @@ def AddDeviceOptions(parser): 'speed up local development and never on bots ' '(increases flakiness)') parser.add_argument( + '--recover-devices', + action='store_true', + help='Attempt to recover devices prior to the final retry. Warning: ' + 'this will cause all devices to reboot.') + parser.add_argument( '--tool', dest='tool', help='Run the test under a tool ' @@ -295,6 +300,9 @@ def AddGTestOptions(parser): help='Host directory to which app data files will be' ' saved. Used with --app-data-file.') parser.add_argument( + '--chartjson-result-file', + help='If present, store chartjson results on this path.') + parser.add_argument( '--delete-stale-data', dest='delete_stale_data', action='store_true', help='Delete stale test data on the device.') @@ -315,6 +323,10 @@ def AddGTestOptions(parser): 'development, but is not safe to use on bots (' 'http://crbug.com/549214') parser.add_argument( + '--gs-test-artifacts-bucket', + help=('If present, test artifacts will be uploaded to this Google ' + 'Storage bucket.')) + parser.add_argument( '--gtest_also_run_disabled_tests', '--gtest-also-run-disabled-tests', dest='run_disabled', action='store_true', help='Also run disabled tests if applicable.') @@ -347,18 +359,17 @@ def AddGTestOptions(parser): filter_group.add_argument( '-f', '--gtest_filter', '--gtest-filter', dest='test_filter', - help='googletest-style filter string.') + help='googletest-style filter string.', + default=os.environ.get('GTEST_FILTER')) filter_group.add_argument( + # Deprecated argument. '--gtest-filter-file', + # New argument. + '--test-launcher-filter-file', dest='test_filter_file', type=os.path.realpath, help='Path to file that contains googletest-style filter strings. 
' 'See also //testing/buildbot/filters/README.md.') - parser.add_argument( - '--gs-test-artifacts-bucket', - help=('If present, test artifacts will be uploaded to this Google ' - 'Storage bucket.')) - def AddInstrumentationTestOptions(parser): """Adds Instrumentation test options to |parser|.""" @@ -408,7 +419,8 @@ def AddInstrumentationTestOptions(parser): parser.add_argument( '-f', '--test-filter', '--gtest_filter', '--gtest-filter', dest='test_filter', - help='Test filter (if not fully qualified, will run all matches).') + help='Test filter (if not fully qualified, will run all matches).', + default=os.environ.get('GTEST_FILTER')) parser.add_argument( '--gtest_also_run_disabled_tests', '--gtest-also-run-disabled-tests', dest='run_disabled', action='store_true', @@ -554,7 +566,8 @@ def AddLinkerTestOptions(parser): parser.add_argument( '-f', '--gtest-filter', dest='test_filter', - help='googletest-style filter string.') + help='googletest-style filter string.', + default=os.environ.get('GTEST_FILTER')) parser.add_argument( '--test-apk', type=os.path.realpath, |
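The test_runner.py hunks above add a GTEST_FILTER environment-variable fallback to the filter flags and keep the deprecated --gtest-filter-file spelling as an alias of --test-launcher-filter-file. Below is a minimal, self-contained sketch of that argparse pattern, assuming plain argparse outside the Chromium tree; the parse_args helper and the final print are illustrative only and not part of the patch.

import argparse
import os


def parse_args(argv=None):
  # Sketch of the flag handling added in test_runner.py (names here are
  # illustrative; only the option strings and the defaults mirror the patch).
  parser = argparse.ArgumentParser()
  parser.add_argument(
      '-f', '--gtest_filter', '--gtest-filter',
      dest='test_filter',
      # Falls back to the GTEST_FILTER environment variable when the flag
      # is not passed on the command line.
      default=os.environ.get('GTEST_FILTER'),
      help='googletest-style filter string.')
  parser.add_argument(
      '--gtest-filter-file',          # deprecated spelling
      '--test-launcher-filter-file',  # preferred spelling, same destination
      dest='test_filter_file',
      type=os.path.realpath,
      help='Path to a file of googletest-style filter strings.')
  return parser.parse_args(argv)


if __name__ == '__main__':
  args = parse_args()
  print(args.test_filter, args.test_filter_file)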