summaryrefslogtreecommitdiff
path: root/chromium/build/android
diff options
context:
space:
mode:
Diffstat (limited to 'chromium/build/android')
-rw-r--r--chromium/build/android/BUILD.gn1
-rw-r--r--chromium/build/android/OWNERS2
-rw-r--r--chromium/build/android/PRESUBMIT.py6
-rwxr-xr-xchromium/build/android/adb_gdb24
-rwxr-xr-xchromium/build/android/apk_operations.py15
-rw-r--r--chromium/build/android/bytecode/BUILD.gn8
-rw-r--r--chromium/build/android/bytecode/java/org/chromium/bytecode/ByteCodeProcessor.java167
-rw-r--r--chromium/build/android/bytecode/java/org/chromium/bytecode/ClassPathValidator.java196
-rw-r--r--chromium/build/android/bytecode/java/org/chromium/bytecode/ThreadAssertionClassAdapter.java83
-rw-r--r--chromium/build/android/devil_chromium.json19
-rw-r--r--chromium/build/android/devil_chromium.py1
-rw-r--r--chromium/build/android/docs/android_app_bundles.md4
-rw-r--r--chromium/build/android/docs/java_toolchain.md66
-rwxr-xr-xchromium/build/android/emma_coverage_stats_test.py4
-rw-r--r--chromium/build/android/gradle/OWNERS2
-rwxr-xr-xchromium/build/android/gradle/generate_gradle.py91
-rw-r--r--chromium/build/android/gradle/root.jinja12
-rw-r--r--chromium/build/android/gyp/OWNERS2
-rwxr-xr-xchromium/build/android/gyp/aar.py79
-rw-r--r--chromium/build/android/gyp/aar.pydeps1
-rwxr-xr-xchromium/build/android/gyp/apkbuilder.py3
-rwxr-xr-xchromium/build/android/gyp/bytecode_processor.py56
-rw-r--r--chromium/build/android/gyp/compile_java.pydeps1
-rwxr-xr-xchromium/build/android/gyp/compile_resources.py157
-rw-r--r--chromium/build/android/gyp/compile_resources.pydeps2
-rwxr-xr-xchromium/build/android/gyp/copy_ex.py3
-rw-r--r--chromium/build/android/gyp/create_app_bundle_apks.pydeps1
-rw-r--r--chromium/build/android/gyp/create_bundle_wrapper_script.pydeps3
-rwxr-xr-xchromium/build/android/gyp/create_size_info_files.py8
-rwxr-xr-xchromium/build/android/gyp/create_ui_locale_resources.py4
-rw-r--r--chromium/build/android/gyp/create_ui_locale_resources.pydeps28
-rwxr-xr-xchromium/build/android/gyp/desugar.py8
-rwxr-xr-xchromium/build/android/gyp/dex.py39
-rw-r--r--chromium/build/android/gyp/dex.pydeps1
-rwxr-xr-xchromium/build/android/gyp/dex_jdk_libs.py42
-rwxr-xr-xchromium/build/android/gyp/dexsplitter.py4
-rwxr-xr-xchromium/build/android/gyp/dist_aar.py9
-rwxr-xr-xchromium/build/android/gyp/filter_zip.py33
-rwxr-xr-xchromium/build/android/gyp/gcc_preprocess.py2
-rwxr-xr-xchromium/build/android/gyp/java_cpp_enum.py17
-rwxr-xr-xchromium/build/android/gyp/java_cpp_enum_tests.py36
-rwxr-xr-xchromium/build/android/gyp/java_cpp_strings.py33
-rwxr-xr-xchromium/build/android/gyp/java_cpp_strings_tests.py25
-rwxr-xr-xchromium/build/android/gyp/lint.py511
-rw-r--r--chromium/build/android/gyp/lint.pydeps22
-rwxr-xr-xchromium/build/android/gyp/main_dex_list.py11
-rwxr-xr-xchromium/build/android/gyp/merge_manifest.py7
-rw-r--r--chromium/build/android/gyp/prepare_resources.pydeps1
-rwxr-xr-xchromium/build/android/gyp/proguard.py130
-rw-r--r--chromium/build/android/gyp/proguard.pydeps1
-rw-r--r--chromium/build/android/gyp/turbine.pydeps1
-rw-r--r--chromium/build/android/gyp/util/build_utils.py47
-rw-r--r--chromium/build/android/gyp/util/md5_check.py34
-rwxr-xr-xchromium/build/android/gyp/util/md5_check_test.py17
-rw-r--r--chromium/build/android/gyp/util/parallel.py214
-rw-r--r--chromium/build/android/gyp/util/resource_utils.py1
-rwxr-xr-xchromium/build/android/gyp/write_build_config.py188
-rwxr-xr-xchromium/build/android/gyp/write_native_libraries_java.py7
-rwxr-xr-xchromium/build/android/gyp/zip.py5
-rw-r--r--chromium/build/android/incremental_install/BUILD.gn7
-rw-r--r--chromium/build/android/lint/suppressions.xml163
-rw-r--r--chromium/build/android/list_class_verification_failures_test.py11
-rw-r--r--chromium/build/android/pylib/base/mock_environment.py4
-rw-r--r--chromium/build/android/pylib/base/mock_test_instance.py4
-rw-r--r--chromium/build/android/pylib/constants/host_paths.py3
-rw-r--r--chromium/build/android/pylib/device/commands/BUILD.gn11
-rw-r--r--chromium/build/android/pylib/gtest/filter/unit_tests_disabled3
-rw-r--r--chromium/build/android/pylib/instrumentation/instrumentation_parser.py2
-rw-r--r--chromium/build/android/pylib/instrumentation/instrumentation_test_instance.py79
-rwxr-xr-xchromium/build/android/pylib/instrumentation/instrumentation_test_instance_test.py92
-rw-r--r--chromium/build/android/pylib/instrumentation/test_result.py8
-rw-r--r--chromium/build/android/pylib/local/device/local_device_instrumentation_test_run.py270
-rwxr-xr-xchromium/build/android/pylib/local/device/local_device_instrumentation_test_run_test.py82
-rw-r--r--chromium/build/android/pylib/local/device/local_device_test_run.py26
-rwxr-xr-xchromium/build/android/pylib/local/device/local_device_test_run_test.py4
-rwxr-xr-xchromium/build/android/pylib/output/remote_output_manager_test.py4
-rw-r--r--chromium/build/android/pylib/symbols/deobfuscator.py2
-rw-r--r--chromium/build/android/pylib/utils/app_bundle_utils.py14
-rw-r--r--chromium/build/android/pylib/utils/chrome_proxy_utils.py171
-rwxr-xr-xchromium/build/android/pylib/utils/chrome_proxy_utils_test.py235
-rw-r--r--chromium/build/android/pylib/utils/gold_utils.py597
-rwxr-xr-xchromium/build/android/pylib/utils/gold_utils_test.py921
-rwxr-xr-xchromium/build/android/resource_sizes.py2
-rwxr-xr-xchromium/build/android/test_runner.py7
-rw-r--r--chromium/build/android/test_runner.pydeps12
85 files changed, 2283 insertions, 2946 deletions
diff --git a/chromium/build/android/BUILD.gn b/chromium/build/android/BUILD.gn
index d5632d064d2..e9eccf26d25 100644
--- a/chromium/build/android/BUILD.gn
+++ b/chromium/build/android/BUILD.gn
@@ -34,6 +34,7 @@ if (enable_java_templates) {
"android_tool_prefix=" + rebase_path(android_tool_prefix, root_build_dir),
"android_configuration_failure_dir=" +
rebase_path(android_configuration_failure_dir, root_build_dir),
+ "final_android_sdk=$final_android_sdk"
]
if (defined(android_secondary_abi_cpu)) {
_secondary_label_info =
diff --git a/chromium/build/android/OWNERS b/chromium/build/android/OWNERS
index 654c83ec4af..a10904e8e54 100644
--- a/chromium/build/android/OWNERS
+++ b/chromium/build/android/OWNERS
@@ -4,5 +4,3 @@ pasko@chromium.org
skyostil@chromium.org
tiborg@chromium.org
wnwen@chromium.org
-
-# COMPONENT: Build
diff --git a/chromium/build/android/PRESUBMIT.py b/chromium/build/android/PRESUBMIT.py
index 91f2c886a8d..d6d3a442bc8 100644
--- a/chromium/build/android/PRESUBMIT.py
+++ b/chromium/build/android/PRESUBMIT.py
@@ -21,6 +21,8 @@ def CommonChecks(input_api, output_api):
r'gyp/.*\.py$',
]
tests = []
+ # yapf likes formatting the extra_paths_list to be less readable.
+ # yapf: disable
tests.extend(
input_api.canned_checks.GetPylint(
input_api,
@@ -41,7 +43,7 @@ def CommonChecks(input_api, output_api):
J('..', '..', 'third_party', 'catapult', 'tracing'),
J('..', '..', 'third_party', 'depot_tools'),
J('..', '..', 'third_party', 'colorama', 'src'),
- J('..', '..', 'third_party', 'pymock'),
+ J('..', '..', 'build'),
]))
tests.extend(
input_api.canned_checks.GetPylint(
@@ -52,6 +54,7 @@ def CommonChecks(input_api, output_api):
r'.*_pb2\.py',
],
extra_paths_list=[J('gyp'), J('gn')]))
+ # yapf: enable
# Disabled due to http://crbug.com/410936
#output.extend(input_api.canned_checks.RunUnitTestsInDirectory(
@@ -86,6 +89,7 @@ def CommonChecks(input_api, output_api):
J('pylib', 'symbols', 'apk_native_libs_unittest.py'),
J('pylib', 'symbols', 'elf_symbolizer_unittest.py'),
J('pylib', 'symbols', 'symbol_utils_unittest.py'),
+ J('pylib', 'utils', 'chrome_proxy_utils_test.py'),
J('pylib', 'utils', 'decorators_test.py'),
J('pylib', 'utils', 'device_dependencies_test.py'),
J('pylib', 'utils', 'dexdump_test.py'),
diff --git a/chromium/build/android/adb_gdb b/chromium/build/android/adb_gdb
index 1dc3ce5f3bc..bd0f1f315ca 100755
--- a/chromium/build/android/adb_gdb
+++ b/chromium/build/android/adb_gdb
@@ -581,32 +581,32 @@ get_ndk_toolchain_prebuilt () {
get_ndk_toolchain_fullprefix () {
local NDK_DIR="$1"
local ARCH="$2"
- local TARGET NAME HOST_OS HOST_ARCH GCC CONFIG
+ local TARGET NAME HOST_OS HOST_ARCH LD CONFIG
# NOTE: This will need to be updated if the NDK changes the names or moves
# the location of its prebuilt toolchains.
#
- GCC=
+ LD=
HOST_OS=$(get_ndk_host_system)
HOST_ARCH=$(get_ndk_host_arch)
CONFIG=$(get_arch_gnu_config $ARCH)
- GCC=$(get_ndk_toolchain_prebuilt \
- "$NDK_DIR" "$ARCH" "$HOST_OS-$HOST_ARCH/bin/$CONFIG-gcc")
- if [ -z "$GCC" -a "$HOST_ARCH" = "x86_64" ]; then
- GCC=$(get_ndk_toolchain_prebuilt \
- "$NDK_DIR" "$ARCH" "$HOST_OS-x86/bin/$CONFIG-gcc")
+ LD=$(get_ndk_toolchain_prebuilt \
+ "$NDK_DIR" "$ARCH" "$HOST_OS-$HOST_ARCH/bin/$CONFIG-ld")
+ if [ -z "$LD" -a "$HOST_ARCH" = "x86_64" ]; then
+ LD=$(get_ndk_toolchain_prebuilt \
+ "$NDK_DIR" "$ARCH" "$HOST_OS-x86/bin/$CONFIG-ld")
fi
- if [ ! -f "$GCC" -a "$ARCH" = "x86" ]; then
+ if [ ! -f "$LD" -a "$ARCH" = "x86" ]; then
# Special case, the x86 toolchain used to be incorrectly
# named i686-android-linux-gcc!
- GCC=$(get_ndk_toolchain_prebuilt \
- "$NDK_DIR" "$ARCH" "$HOST_OS-x86/bin/i686-android-linux-gcc")
+ LD=$(get_ndk_toolchain_prebuilt \
+ "$NDK_DIR" "$ARCH" "$HOST_OS-x86/bin/i686-android-linux-ld")
fi
- if [ -z "$GCC" ]; then
+ if [ -z "$LD" ]; then
panic "Cannot find Android NDK toolchain for '$ARCH' architecture. \
Please verify your NDK installation!"
fi
- echo "${GCC%%gcc}"
+ echo "${LD%%ld}"
}
# $1: NDK install path
diff --git a/chromium/build/android/apk_operations.py b/chromium/build/android/apk_operations.py
index a09fae9e787..d2798147a0b 100755
--- a/chromium/build/android/apk_operations.py
+++ b/chromium/build/android/apk_operations.py
@@ -96,7 +96,8 @@ def _GenerateBundleApks(info,
output_path=None,
minimal=False,
minimal_sdk_version=None,
- mode=None):
+ mode=None,
+ optimize_for=None):
"""Generate an .apks archive from a bundle on demand.
Args:
@@ -105,6 +106,8 @@ def _GenerateBundleApks(info,
minimal: Create the minimal set of apks possible (english-only).
minimal_sdk_version: When minimal=True, use this sdkVersion.
mode: Build mode, either None, or one of app_bundle_utils.BUILD_APKS_MODES.
+ optimize_for: Override split config, either None, or one of
+ app_bundle_utils.OPTIMIZE_FOR_OPTIONS.
"""
logging.info('Generating .apks file')
app_bundle_utils.GenerateBundleApks(
@@ -118,7 +121,8 @@ def _GenerateBundleApks(info,
system_image_locales=info.system_image_locales,
mode=mode,
minimal=minimal,
- minimal_sdk_version=minimal_sdk_version)
+ minimal_sdk_version=minimal_sdk_version,
+ optimize_for=optimize_for)
def _InstallBundle(devices, apk_helper_instance, package_name,
@@ -1732,6 +1736,10 @@ class _BuildBundleApks(_Command):
'single universal APK, "system" generates an archive with a system '
'image APK, while "system_compressed" generates a compressed system '
'APK, with an additional stub APK for the system image.')
+ group.add_argument(
+ '--optimize-for',
+ choices=app_bundle_utils.OPTIMIZE_FOR_OPTIONS,
+ help='Override split configuration.')
def Run(self):
_GenerateBundleApks(
@@ -1739,7 +1747,8 @@ class _BuildBundleApks(_Command):
output_path=self.args.output_apks,
minimal=self.args.minimal,
minimal_sdk_version=self.args.sdk_version,
- mode=self.args.build_mode)
+ mode=self.args.build_mode,
+ optimize_for=self.args.optimize_for)
class _ManifestCommand(_Command):
diff --git a/chromium/build/android/bytecode/BUILD.gn b/chromium/build/android/bytecode/BUILD.gn
index f9e1baf2604..4d29aca9dbc 100644
--- a/chromium/build/android/bytecode/BUILD.gn
+++ b/chromium/build/android/bytecode/BUILD.gn
@@ -4,13 +4,10 @@
import("//build/config/android/rules.gni")
-assert(current_toolchain == default_toolchain)
-
-java_binary("java_bytecode_rewriter") {
+java_binary("bytecode_processor") {
sources = [
"java/org/chromium/bytecode/ByteCodeProcessor.java",
"java/org/chromium/bytecode/ClassPathValidator.java",
- "java/org/chromium/bytecode/ThreadAssertionClassAdapter.java",
"java/org/chromium/bytecode/TypeUtils.java",
]
main_class = "org.chromium.bytecode.ByteCodeProcessor"
@@ -18,5 +15,6 @@ java_binary("java_bytecode_rewriter") {
"//third_party/android_deps:org_ow2_asm_asm_java",
"//third_party/android_deps:org_ow2_asm_asm_util_java",
]
- wrapper_script_name = "helper/java_bytecode_rewriter"
+ wrapper_script_name = "helper/bytecode_processor"
+ enable_bytecode_checks = false
}
diff --git a/chromium/build/android/bytecode/java/org/chromium/bytecode/ByteCodeProcessor.java b/chromium/build/android/bytecode/java/org/chromium/bytecode/ByteCodeProcessor.java
index 636e094cf02..b767f4f0890 100644
--- a/chromium/build/android/bytecode/java/org/chromium/bytecode/ByteCodeProcessor.java
+++ b/chromium/build/android/bytecode/java/org/chromium/bytecode/ByteCodeProcessor.java
@@ -5,40 +5,29 @@
package org.chromium.bytecode;
import org.objectweb.asm.ClassReader;
-import org.objectweb.asm.ClassVisitor;
-import org.objectweb.asm.ClassWriter;
import java.io.BufferedInputStream;
-import java.io.BufferedOutputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
-import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
-import java.io.PrintStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.nio.file.StandardCopyOption;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
+import java.util.HashMap;
import java.util.HashSet;
-import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
-import java.util.concurrent.Future;
-import java.util.zip.CRC32;
+import java.util.concurrent.TimeUnit;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
-import java.util.zip.ZipOutputStream;
/**
* Java application that takes in an input jar, performs a series of bytecode
@@ -46,154 +35,55 @@ import java.util.zip.ZipOutputStream;
*/
class ByteCodeProcessor {
private static final String CLASS_FILE_SUFFIX = ".class";
- private static final String TEMPORARY_FILE_SUFFIX = ".temp";
private static final int BUFFER_SIZE = 16384;
private static boolean sVerbose;
private static boolean sIsPrebuilt;
- private static boolean sShouldUseThreadAnnotations;
- private static boolean sShouldCheckClassPath;
private static ClassLoader sDirectClassPathClassLoader;
private static ClassLoader sFullClassPathClassLoader;
private static Set<String> sFullClassPathJarPaths;
private static Set<String> sMissingClassesAllowlist;
+ private static Map<String, String> sJarToGnTarget;
private static ClassPathValidator sValidator;
- private static class EntryDataPair {
- private final ZipEntry mEntry;
- private final byte[] mData;
-
- private EntryDataPair(ZipEntry mEntry, byte[] mData) {
- this.mEntry = mEntry;
- this.mData = mData;
- }
-
- private static EntryDataPair create(String zipPath, byte[] data) {
- ZipEntry entry = new ZipEntry(zipPath);
- entry.setMethod(ZipEntry.STORED);
- entry.setTime(0);
- entry.setSize(data.length);
- CRC32 crc = new CRC32();
- crc.update(data);
- entry.setCrc(crc.getValue());
- return new EntryDataPair(entry, data);
- }
- }
-
- private static EntryDataPair processEntry(ZipEntry entry, byte[] data)
- throws ClassPathValidator.ClassNotLoadedException {
- // Copy all non-.class files to the output jar.
- if (entry.isDirectory() || !entry.getName().endsWith(CLASS_FILE_SUFFIX)) {
- return new EntryDataPair(entry, data);
- }
-
+ private static Void processEntry(ZipEntry entry, byte[] data) {
ClassReader reader = new ClassReader(data);
- if (sShouldCheckClassPath) {
- sValidator.validateClassPathsAndOutput(reader, sDirectClassPathClassLoader,
- sFullClassPathClassLoader, sFullClassPathJarPaths, sIsPrebuilt, sVerbose,
- sMissingClassesAllowlist);
- }
-
- ClassWriter writer = new ClassWriter(reader, 0);
- ClassVisitor chain = writer;
- /* DEBUGGING:
- To see objectweb.asm code that will generate bytecode for a given class:
-
- java -cp
- "third_party/android_deps/libs/org_ow2_asm_asm/asm-7.0.jar:third_party/android_deps/libs/org_ow2_asm_asm_util/asm-util-7.0.jar:out/Debug/lib.java/jar_containing_yourclass.jar"
- org.objectweb.asm.util.ASMifier org.package.YourClassName
-
- See this pdf for more details: https://asm.ow2.io/asm4-guide.pdf
-
- To see the bytecode for a specific class, uncomment this code with your class name:
-
- if (entry.getName().contains("YOUR_CLASS_NAME")) {
- chain = new TraceClassVisitor(chain, new PrintWriter(System.out));
+ if (sIsPrebuilt) {
+ sValidator.validateFullClassPath(
+ reader, sFullClassPathClassLoader, sMissingClassesAllowlist);
+ } else {
+ sValidator.validateDirectClassPath(reader, sDirectClassPathClassLoader,
+ sFullClassPathClassLoader, sFullClassPathJarPaths, sMissingClassesAllowlist,
+ sVerbose);
}
- */
- if (sShouldUseThreadAnnotations) {
- chain = new ThreadAssertionClassAdapter(chain);
- }
- reader.accept(chain, 0);
- byte[] patchedByteCode = writer.toByteArray();
- return EntryDataPair.create(entry.getName(), patchedByteCode);
+ return null;
}
- private static void process(String inputJarPath, String outputJarPath)
- throws ClassPathValidator.ClassNotLoadedException, ExecutionException,
- InterruptedException {
- String tempJarPath = outputJarPath + TEMPORARY_FILE_SUFFIX;
+ private static void process(String gnTarget, String inputJarPath)
+ throws ExecutionException, InterruptedException {
ExecutorService executorService =
Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
try (ZipInputStream inputStream = new ZipInputStream(
- new BufferedInputStream(new FileInputStream(inputJarPath)));
- ZipOutputStream tempStream = new ZipOutputStream(
- new BufferedOutputStream(new FileOutputStream(tempJarPath)))) {
- List<Future<EntryDataPair>> list = new ArrayList<>();
+ new BufferedInputStream(new FileInputStream(inputJarPath)))) {
while (true) {
ZipEntry entry = inputStream.getNextEntry();
if (entry == null) {
break;
}
byte[] data = readAllBytes(inputStream);
- list.add(executorService.submit(() -> processEntry(entry, data)));
+ executorService.submit(() -> processEntry(entry, data));
}
executorService.shutdown(); // This is essential in order to avoid waiting infinitely.
- // Write the zip file entries in order to preserve determinism.
- for (Future<EntryDataPair> futurePair : list) {
- EntryDataPair pair = futurePair.get();
- tempStream.putNextEntry(pair.mEntry);
- tempStream.write(pair.mData);
- tempStream.closeEntry();
- }
+ executorService.awaitTermination(1, TimeUnit.HOURS);
} catch (IOException e) {
throw new RuntimeException(e);
}
- try {
- Path src = Paths.get(tempJarPath);
- Path dest = Paths.get(outputJarPath);
- Files.move(src, dest, StandardCopyOption.REPLACE_EXISTING);
- } catch (IOException ioException) {
- throw new RuntimeException(ioException);
- }
if (sValidator.hasErrors()) {
- System.err.println("Direct classpath is incomplete. To fix, add deps on the "
- + "GN target(s) that provide:");
- for (Map.Entry<String, Map<String, Set<String>>> entry :
- sValidator.getErrors().entrySet()) {
- printValidationError(System.err, entry.getKey(), entry.getValue());
- }
+ sValidator.printAll(gnTarget, sJarToGnTarget);
System.exit(1);
}
}
- private static void printValidationError(
- PrintStream out, String jarName, Map<String, Set<String>> missingClasses) {
- out.print(" * ");
- out.println(jarName);
- int i = 0;
- final int numErrorsPerJar = 2;
- // The list of missing classes is non-exhaustive because each class that fails to validate
- // reports only the first missing class.
- for (Map.Entry<String, Set<String>> entry : missingClasses.entrySet()) {
- String missingClass = entry.getKey();
- Set<String> filesThatNeededIt = entry.getValue();
- out.print(" * ");
- if (i == numErrorsPerJar) {
- out.print(String.format("And %d more...", missingClasses.size() - numErrorsPerJar));
- break;
- }
- out.print(missingClass.replace('/', '.'));
- out.print(" (needed by ");
- out.print(filesThatNeededIt.iterator().next().replace('/', '.'));
- if (filesThatNeededIt.size() > 1) {
- out.print(String.format(" and %d more", filesThatNeededIt.size() - 1));
- }
- out.println(")");
- i++;
- }
- }
-
private static byte[] readAllBytes(InputStream inputStream) throws IOException {
ByteArrayOutputStream buffer = new ByteArrayOutputStream();
int numRead = 0;
@@ -235,12 +125,10 @@ class ByteCodeProcessor {
ExecutionException, InterruptedException {
// Invoke this script using //build/android/gyp/bytecode_processor.py
int currIndex = 0;
+ String gnTarget = args[currIndex++];
String inputJarPath = args[currIndex++];
- String outputJarPath = args[currIndex++];
sVerbose = args[currIndex++].equals("--verbose");
sIsPrebuilt = args[currIndex++].equals("--is-prebuilt");
- sShouldUseThreadAnnotations = args[currIndex++].equals("--enable-thread-annotations");
- sShouldCheckClassPath = args[currIndex++].equals("--enable-check-class-path");
sMissingClassesAllowlist = new HashSet<>();
currIndex = parseListArgument(args, currIndex, sMissingClassesAllowlist);
@@ -254,19 +142,26 @@ class ByteCodeProcessor {
currIndex = parseListArgument(args, currIndex, directClassPathJarPaths);
sDirectClassPathClassLoader = loadJars(directClassPathJarPaths);
+ ArrayList<String> fullClassPathJarPaths = new ArrayList<>();
+ currIndex = parseListArgument(args, currIndex, fullClassPathJarPaths);
+ ArrayList<String> gnTargets = new ArrayList<>();
+ parseListArgument(args, currIndex, gnTargets);
+ sJarToGnTarget = new HashMap<>();
+ assert fullClassPathJarPaths.size() == gnTargets.size();
+ for (int i = 0; i < fullClassPathJarPaths.size(); ++i) {
+ sJarToGnTarget.put(fullClassPathJarPaths.get(i), gnTargets.get(i));
+ }
+
// Load all jars that are on the classpath for the input jar for analyzing class
// hierarchy.
sFullClassPathJarPaths = new HashSet<>();
- sFullClassPathJarPaths.clear();
sFullClassPathJarPaths.add(inputJarPath);
sFullClassPathJarPaths.addAll(sdkJarPaths);
- sFullClassPathJarPaths.addAll(
- Arrays.asList(Arrays.copyOfRange(args, currIndex, args.length)));
-
+ sFullClassPathJarPaths.addAll(fullClassPathJarPaths);
sFullClassPathClassLoader = loadJars(sFullClassPathJarPaths);
sFullClassPathJarPaths.removeAll(directClassPathJarPaths);
sValidator = new ClassPathValidator();
- process(inputJarPath, outputJarPath);
+ process(gnTarget, inputJarPath);
}
}
diff --git a/chromium/build/android/bytecode/java/org/chromium/bytecode/ClassPathValidator.java b/chromium/build/android/bytecode/java/org/chromium/bytecode/ClassPathValidator.java
index f3ed501873d..ce1803fca8a 100644
--- a/chromium/build/android/bytecode/java/org/chromium/bytecode/ClassPathValidator.java
+++ b/chromium/build/android/bytecode/java/org/chromium/bytecode/ClassPathValidator.java
@@ -6,12 +6,14 @@ package org.chromium.bytecode;
import org.objectweb.asm.ClassReader;
+import java.io.PrintStream;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
+import java.util.function.Consumer;
/**
* Checks classpaths (given as ClassLoaders) by reading the constant pool of the class file and
@@ -20,9 +22,18 @@ import java.util.TreeSet;
* can't find the class with any given classpath.
*/
public class ClassPathValidator {
+ // Number of warnings to print.
+ private static final int MAX_MISSING_CLASS_WARNINGS = 4;
+ // Number of missing classes to show per missing jar.
+ private static final int MAX_ERRORS_PER_JAR = 2;
// Map of missing .jar -> Missing class -> Classes that failed.
// TreeMap so that error messages have sorted list of jars.
- private final Map<String, Map<String, Set<String>>> mErrors = new TreeMap<>();
+ private final Map<String, Map<String, Set<String>>> mDirectErrors =
+ Collections.synchronizedMap(new TreeMap<>());
+ // Missing classes we only track the first one for each jar.
+ // Map of missingClass -> srcClass.
+ private final Map<String, String> mMissingClasses =
+ Collections.synchronizedMap(new TreeMap<>());
static class ClassNotLoadedException extends ClassNotFoundException {
private final String mClassName;
@@ -37,17 +48,6 @@ public class ClassPathValidator {
}
}
- private static void printAndQuit(ClassNotLoadedException e, ClassReader classReader,
- boolean verbose) throws ClassNotLoadedException {
- System.err.println("Class \"" + e.getClassName()
- + "\" not found on any classpath. Used by class \"" + classReader.getClassName()
- + "\"");
- if (verbose) {
- throw e;
- }
- System.exit(1);
- }
-
private static void validateClass(ClassLoader classLoader, String className)
throws ClassNotLoadedException {
if (className.startsWith("[")) {
@@ -87,10 +87,10 @@ public class ClassPathValidator {
*
* @param classReader .class file interface for reading the constant pool.
* @param classLoader classpath you wish to validate.
- * @throws ClassNotLoadedException thrown if it can't load a certain class.
+ * @param errorConsumer Called for each missing class.
*/
- private static void validateClassPath(ClassReader classReader, ClassLoader classLoader)
- throws ClassNotLoadedException {
+ private static void validateClassPath(ClassReader classReader, ClassLoader classLoader,
+ Consumer<ClassNotLoadedException> errorConsumer) {
char[] charBuffer = new char[classReader.getMaxStringLength()];
// According to the Java spec, the constant pool is indexed from 1 to constant_pool_count -
// 1. See https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.4
@@ -99,73 +99,135 @@ public class ClassPathValidator {
// Class entries correspond to 7 in the constant pool
// https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.4
if (offset > 0 && classReader.readByte(offset - 1) == 7) {
- validateClass(classLoader, classReader.readUTF8(offset, charBuffer));
+ try {
+ validateClass(classLoader, classReader.readUTF8(offset, charBuffer));
+ } catch (ClassNotLoadedException e) {
+ errorConsumer.accept(e);
+ }
}
}
}
- public void validateClassPathsAndOutput(ClassReader classReader,
- ClassLoader directClassPathClassLoader, ClassLoader fullClassPathClassLoader,
- Collection<String> jarsOnlyInFullClassPath, boolean isPrebuilt, boolean verbose,
- Set<String> missingClassAllowlist) throws ClassNotLoadedException {
- if (isPrebuilt) {
- // Prebuilts only need transitive dependencies checked, not direct dependencies.
+ public void validateFullClassPath(ClassReader classReader, ClassLoader fullClassLoader,
+ Set<String> missingClassAllowlist) {
+ // Prebuilts only need transitive dependencies checked, not direct dependencies.
+ validateClassPath(classReader, fullClassLoader, (e) -> {
+ if (!missingClassAllowlist.contains(e.getClassName())) {
+ addMissingError(classReader.getClassName(), e.getClassName());
+ }
+ });
+ }
+
+ public void validateDirectClassPath(ClassReader classReader, ClassLoader directClassLoader,
+ ClassLoader fullClassLoader, Collection<String> jarsOnlyInFullClassPath,
+ Set<String> missingClassAllowlist, boolean verbose) {
+ validateClassPath(classReader, directClassLoader, (e) -> {
try {
- validateClassPath(classReader, fullClassPathClassLoader);
- } catch (ClassNotLoadedException e) {
+ validateClass(fullClassLoader, e.getClassName());
+ } catch (ClassNotLoadedException d) {
if (!missingClassAllowlist.contains(e.getClassName())) {
- printAndQuit(e, classReader, verbose);
+ addMissingError(classReader.getClassName(), e.getClassName());
}
+ return;
}
- } else {
- try {
- validateClassPath(classReader, directClassPathClassLoader);
- } catch (ClassNotLoadedException e) {
+ if (verbose) {
+ System.err.println("Class \"" + e.getClassName()
+ + "\" not found in direct dependencies,"
+ + " but found in indirect dependiences.");
+ }
+ // Iterating through all jars that are in the full classpath but not the direct
+ // classpath to find which one provides the class we are looking for.
+ for (String jarPath : jarsOnlyInFullClassPath) {
try {
- validateClass(fullClassPathClassLoader, e.getClassName());
- } catch (ClassNotLoadedException d) {
- if (!missingClassAllowlist.contains(d.getClassName())) {
- printAndQuit(d, classReader, verbose);
- }
- }
- if (verbose) {
- System.err.println("Class \"" + e.getClassName()
- + "\" not found in direct dependencies,"
- + " but found in indirect dependiences.");
- }
- // Iterating through all jars that are in the full classpath but not the direct
- // classpath to find which one provides the class we are looking for.
- for (String jarPath : jarsOnlyInFullClassPath) {
- try {
- ClassLoader smallLoader =
- ByteCodeProcessor.loadJars(Collections.singletonList(jarPath));
- validateClass(smallLoader, e.getClassName());
- Map<String, Set<String>> failedClassesByMissingClass = mErrors.get(jarPath);
- if (failedClassesByMissingClass == null) {
- // TreeMap so that error messages have sorted list of classes.
- failedClassesByMissingClass = new TreeMap<>();
- mErrors.put(jarPath, failedClassesByMissingClass);
- }
- Set<String> failedClasses =
- failedClassesByMissingClass.get(e.getClassName());
- if (failedClasses == null) {
- failedClasses = new TreeSet<>();
- failedClassesByMissingClass.put(e.getClassName(), failedClasses);
- }
- failedClasses.add(classReader.getClassName());
- break;
- } catch (ClassNotLoadedException f) {
- }
+ ClassLoader smallLoader =
+ ByteCodeProcessor.loadJars(Collections.singletonList(jarPath));
+ validateClass(smallLoader, e.getClassName());
+ addDirectError(jarPath, classReader.getClassName(), e.getClassName());
+ break;
+ } catch (ClassNotLoadedException f) {
}
}
- }
+ });
+ }
+
+ private void addMissingError(String srcClass, String missingClass) {
+ mMissingClasses.put(missingClass, srcClass);
}
- public Map<String, Map<String, Set<String>>> getErrors() {
- return mErrors;
+ private void addDirectError(String jarPath, String srcClass, String missingClass) {
+ synchronized (mDirectErrors) {
+ Map<String, Set<String>> failedClassesByMissingClass = mDirectErrors.get(jarPath);
+ if (failedClassesByMissingClass == null) {
+ // TreeMap so that error messages have sorted list of classes.
+ failedClassesByMissingClass = new TreeMap<>();
+ mDirectErrors.put(jarPath, failedClassesByMissingClass);
+ }
+ Set<String> failedClasses = failedClassesByMissingClass.get(missingClass);
+ if (failedClasses == null) {
+ failedClasses = new TreeSet<>();
+ failedClassesByMissingClass.put(missingClass, failedClasses);
+ }
+ failedClasses.add(srcClass);
+ }
}
public boolean hasErrors() {
- return !mErrors.isEmpty();
+ return !mDirectErrors.isEmpty() || !mMissingClasses.isEmpty();
+ }
+
+ private static void printValidationError(
+ PrintStream out, String gnTarget, Map<String, Set<String>> missingClasses) {
+ out.print(" * ");
+ out.println(gnTarget);
+ int i = 0;
+ // The list of missing classes is non-exhaustive because each class that fails to validate
+ // reports only the first missing class.
+ for (Map.Entry<String, Set<String>> entry : missingClasses.entrySet()) {
+ String missingClass = entry.getKey();
+ Set<String> filesThatNeededIt = entry.getValue();
+ out.print(" * ");
+ if (i == MAX_ERRORS_PER_JAR) {
+ out.print(String.format(
+ "And %d more...", missingClasses.size() - MAX_ERRORS_PER_JAR));
+ break;
+ }
+ out.print(missingClass.replace('/', '.'));
+ out.print(" (needed by ");
+ out.print(filesThatNeededIt.iterator().next().replace('/', '.'));
+ if (filesThatNeededIt.size() > 1) {
+ out.print(String.format(" and %d more", filesThatNeededIt.size() - 1));
+ }
+ out.println(")");
+ i++;
+ }
+ }
+
+ public void printAll(String gnTarget, Map<String, String> jarToGnTarget) {
+ String streamer = "=============================";
+ System.err.println();
+ System.err.println(streamer + " Dependency Checks Failed " + streamer);
+ System.err.println("Target: " + gnTarget);
+ if (!mMissingClasses.isEmpty()) {
+ int i = 0;
+ for (Map.Entry<String, String> entry : mMissingClasses.entrySet()) {
+ if (++i > MAX_MISSING_CLASS_WARNINGS) {
+ System.err.println(String.format("... and %d more.",
+ mMissingClasses.size() - MAX_MISSING_CLASS_WARNINGS));
+ break;
+ }
+ System.err.println(String.format(
+ "Class \"%s\" not found on any classpath. Used by class \"%s\"",
+ entry.getKey(), entry.getValue()));
+ }
+ System.err.println();
+ }
+ if (!mDirectErrors.isEmpty()) {
+ System.err.println("Direct classpath is incomplete. To fix, add deps on:");
+ for (Map.Entry<String, Map<String, Set<String>>> entry : mDirectErrors.entrySet()) {
+ printValidationError(
+ System.err, jarToGnTarget.get(entry.getKey()), entry.getValue());
+ }
+ System.err.println();
+ }
}
}
diff --git a/chromium/build/android/bytecode/java/org/chromium/bytecode/ThreadAssertionClassAdapter.java b/chromium/build/android/bytecode/java/org/chromium/bytecode/ThreadAssertionClassAdapter.java
deleted file mode 100644
index 0feae07aabf..00000000000
--- a/chromium/build/android/bytecode/java/org/chromium/bytecode/ThreadAssertionClassAdapter.java
+++ /dev/null
@@ -1,83 +0,0 @@
-// Copyright 2018 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package org.chromium.bytecode;
-
-import static org.objectweb.asm.Opcodes.ASM7;
-import static org.objectweb.asm.Opcodes.INVOKESTATIC;
-
-import org.objectweb.asm.AnnotationVisitor;
-import org.objectweb.asm.ClassVisitor;
-import org.objectweb.asm.MethodVisitor;
-
-/**
- * A ClassVisitor which adds calls to
- * {@link org.chromium.base.ThreadUtils}'s assertOnUiThread/assertOnBackgroundThread when the
- * corresponding {@link androidx.annotation.UiThread} or
- * {@link androidx.annotation.WorkerThread} annotations are present. The function calls
- * are placed at the start of the method.
- */
-class ThreadAssertionClassAdapter extends ClassVisitor {
- private static final String THREAD_UTILS_DESCRIPTOR = "org/chromium/base/ThreadUtils";
- private static final String THREAD_UTILS_SIGNATURE = "()V";
- private static final String UI_THREAD_ANNOTATION_DESCRIPTOR =
- "Landroid/support/annotation/UiThread;";
- private static final String WORKER_THREAD_ANNOTATION_DESCRIPTOR =
- "Landroid/support/annotation/WorkerThread;";
-
- ThreadAssertionClassAdapter(ClassVisitor visitor) {
- super(ASM7, visitor);
- }
-
- @Override
- public MethodVisitor visitMethod(final int access, final String name, String desc,
- String signature, String[] exceptions) {
- return new AddAssertMethodVisitor(
- super.visitMethod(access, name, desc, signature, exceptions));
- }
-
- private static class AddAssertMethodVisitor extends MethodVisitor {
- String mAssertMethodName = "";
-
- AddAssertMethodVisitor(MethodVisitor mv) {
- super(ASM7, mv);
- }
-
- /**
- * Call for annotations on the method. Checks if the annotation is @UiThread
- * or @WorkerThread, and if so will set the mAssertMethodName property to the name of the
- * method to call in order to assert that a method is running on the intented thread.
- *
- * @param descriptor Annotation descriptor containing its name and package.
- */
- @Override
- public AnnotationVisitor visitAnnotation(String descriptor, boolean visible) {
- switch (descriptor) {
- case UI_THREAD_ANNOTATION_DESCRIPTOR:
- mAssertMethodName = "assertOnUiThread";
- break;
- case WORKER_THREAD_ANNOTATION_DESCRIPTOR:
- mAssertMethodName = "assertOnBackgroundThread";
- break;
- default:
- break;
- }
-
- return super.visitAnnotation(descriptor, visible);
- }
-
- /**
- * Called to start visiting code. Will also insert the assertOnXThread methods at the start
- * of the method if needed.
- */
- @Override
- public void visitCode() {
- super.visitCode();
- if (!mAssertMethodName.equals("")) {
- visitMethodInsn(INVOKESTATIC, THREAD_UTILS_DESCRIPTOR, mAssertMethodName,
- THREAD_UTILS_SIGNATURE, false);
- }
- }
- }
-} \ No newline at end of file
diff --git a/chromium/build/android/devil_chromium.json b/chromium/build/android/devil_chromium.json
index 6727072c8d4..5d66730ae8b 100644
--- a/chromium/build/android/devil_chromium.json
+++ b/chromium/build/android/devil_chromium.json
@@ -55,25 +55,6 @@
}
}
},
- "pymock": {
- "file_info": {
- "darwin_x86_64": {
- "local_paths": [
- "../../third_party/pymock"
- ]
- },
- "linux2_x86_64": {
- "local_paths": [
- "../../third_party/pymock"
- ]
- },
- "win32_AMD64": {
- "local_paths": [
- "../../third_party/pymock"
- ]
- }
- }
- },
"simpleperf": {
"file_info": {
"android_armeabi-v7a": {
diff --git a/chromium/build/android/devil_chromium.py b/chromium/build/android/devil_chromium.py
index 6a6def6afc9..1cd5a87154d 100644
--- a/chromium/build/android/devil_chromium.py
+++ b/chromium/build/android/devil_chromium.py
@@ -113,7 +113,6 @@ def Initialize(output_directory=None, custom_deps=None, adb_path=None):
This includes:
- Libraries:
- the android SDK ("android_sdk")
- - pymock ("pymock")
- Build products:
- host & device forwarder binaries
("forwarder_device" and "forwarder_host")
diff --git a/chromium/build/android/docs/android_app_bundles.md b/chromium/build/android/docs/android_app_bundles.md
index 8934477195a..1edcba4a94a 100644
--- a/chromium/build/android/docs/android_app_bundles.md
+++ b/chromium/build/android/docs/android_app_bundles.md
@@ -167,8 +167,8 @@ performed, which consists of the following steps:
This synchronized proguarding step is added by the `android_app_bundle()` GN
template. In practice this means the following:
- - If `proguard_enabled` and `proguard_jar_path` must be passed to
- `android_app_bundle` targets, but not to `android_app_bundle_module` ones.
+ - `proguard_enabled` must be passed to `android_app_bundle` targets, but not
+ to `android_app_bundle_module` ones.
- `proguard_configs` can be still passed to individual modules, just
like regular APKs. All proguard configs will be merged during the
diff --git a/chromium/build/android/docs/java_toolchain.md b/chromium/build/android/docs/java_toolchain.md
index f19c9b383fc..9829f8d395c 100644
--- a/chromium/build/android/docs/java_toolchain.md
+++ b/chromium/build/android/docs/java_toolchain.md
@@ -20,6 +20,12 @@ also have a default `jar_excluded_patterns` set (more on that later):
All target names must end with "_java" so that the build system can distinguish
them from non-java targets (or [other variations](https://cs.chromium.org/chromium/src/build/config/android/internal_rules.gni?rcl=ec2c17d7b4e424e060c3c7972842af87343526a1&l=20)).
+Most targets produce two separate `.jar` files:
+* Device `.jar`: Used to produce `.dex.jar`, which is used on-device.
+* Host `.jar`: For use on the host machine (`junit_binary` / `java_binary`).
+ * Host `.jar` files live in `lib.java/` so that they are archived in
+ builder/tester bots (which do not archive `obj/`).
+
## From Source to Final Dex
### Step 1: Create interface .jar with turbine or ijar
@@ -37,11 +43,6 @@ What are interface jars?:
removed.
* Dependant targets use interface `.jar` files to skip having to be rebuilt
when only private implementation details change.
- * To accomplish this behavior, library targets list only their
- interface `.jar` files as outputs. Ninja's `restat=1` feature then causes
- dependent targets to be rebuilt only when the interface `.jar` changes.
- Final dex targets are always rebuilt because they depend on the
- non-interface `.jar` through a `depfile`.
[//third_party/ijar]: /third_party/ijar/README.chromium
[//third_party/turbine]: /third_party/turbine/README.chromium
@@ -77,20 +78,23 @@ This step can be disabled via GN arg: `use_errorprone_java_compiler = false`
[ErrorProne]: https://errorprone.info/
[ep_plugins]: /tools/android/errorprone_plugin/
-### Step 3: Bytecode Processing
-
-* `//build/android/bytecode` runs on the compiled `.jar` in order to:
- * Enable Java assertions (when dcheck is enabled).
- * Assert that libraries have properly declared `deps`.
-
-### Step 4: Desugaring
+### Step 3: Desugaring (Device .jar Only)
-This step happens only when targets have `supports_android = true`.
+This step happens only when targets have `supports_android = true`. It is not
+applied to `.jar` files used by `junit_binary`.
* `//third_party/bazel/desugar` converts certain Java 8 constructs, such as
lambdas and default interface methods, into constructs that are compatible
with Java 7.
+### Step 4: Instrumenting (Device .jar Only)
+
+This step happens only when this GN arg is set: `use_jacoco_coverage = true`
+
+* [Jacoco] adds instrumentation hooks to methods.
+
+[Jacoco]: https://www.eclemma.org/jacoco/
+
### Step 5: Filtering
This step happens only when targets that have `jar_excluded_patterns` or
@@ -108,27 +112,12 @@ This step happens only when targets that have `jar_excluded_patterns` or
[Android Resources]: life_of_a_resource.md
[apphooks]: /chrome/android/java/src/org/chromium/chrome/browser/AppHooksImpl.java
-### Step 6: Instrumentation
-
-This step happens only when this GN arg is set: `use_jacoco_coverage = true`
-
-* [Jacoco] adds instrumentation hooks to methods.
-
-[Jacoco]: https://www.eclemma.org/jacoco/
-
-### Step 7: Copy to lib.java
-
-* The `.jar` is copied into `$root_build_dir/lib.java` (under target-specific
- subdirectories) so that it will be included by bot archive steps.
- * These `.jar` files are the ones used when running `java_binary` and
- `junit_binary` targets.
-
-### Step 8: Per-Library Dexing
+### Step 6: Per-Library Dexing
This step happens only when targets have `supports_android = true`.
* [d8] converts `.jar` files containing `.class` files into `.dex.jar` files
- containing `.dex` files.
+ containing `classes.dex` files.
* Dexing is incremental - it will reuse dex'ed classes from a previous build if
the corresponding `.class` file is unchanged.
* These per-library `.dex.jar` files are used directly by [incremental install],
@@ -139,7 +128,7 @@ This step happens only when targets have `supports_android = true`.
[d8]: https://developer.android.com/studio/command-line/d8
[incremental install]: /build/android/incremental_install/README.md
-### Step 9: Apk / Bundle Module Compile
+### Step 7: Apk / Bundle Module Compile
* Each `android_apk` and `android_bundle_module` template has a nested
`java_library` target. The nested library includes final copies of files
@@ -150,7 +139,7 @@ This step happens only when targets have `supports_android = true`.
[JNI glue]: /base/android/jni_generator/README.md
-### Step 10: Final Dexing
+### Step 8: Final Dexing
This step is skipped when building using [Incremental Install].
@@ -160,19 +149,11 @@ When `is_java_debug = true`:
When `is_java_debug = false`:
* [R8] performs whole-program optimization on all library `lib.java` `.jar`
files and outputs a final `.r8dex.jar`.
- * For App Bundles, R8 creates a single `.r8dex.jar` with the code from all
- modules.
+ * For App Bundles, R8 creates a `.r8dex.jar` for each module.
[Incremental Install]: /build/android/incremental_install/README.md
[R8]: https://r8.googlesource.com/r8
-### Step 11: Bundle Module Dex Splitting
-
-This step happens only when `is_java_debug = false`.
-
-* [dexsplitter.py] splits the single `*dex.jar` into per-module `*dex.jar`
- files.
-
## Test APKs with apk_under_test
Test APKs are normal APKs that contain an `<instrumentation>` tag within their
@@ -266,8 +247,7 @@ We use several tools for static analysis.
[lint_plugins]: http://tools.android.com/tips/lint-custom-rules
-### [Bytecode Rewriter](/build/android/bytecode/)
-* Runs as part of normal compilation.
+### [Bytecode Processor](/build/android/bytecode/)
* Performs a single check:
* That target `deps` are not missing any entries.
* In other words: Enforces that targets do not rely on indirect dependencies
diff --git a/chromium/build/android/emma_coverage_stats_test.py b/chromium/build/android/emma_coverage_stats_test.py
index 44f6dc3586a..d67f6be2180 100755
--- a/chromium/build/android/emma_coverage_stats_test.py
+++ b/chromium/build/android/emma_coverage_stats_test.py
@@ -9,10 +9,8 @@ import unittest
from xml.etree import ElementTree
import emma_coverage_stats
-from pylib.constants import host_paths
-with host_paths.SysPath(host_paths.PYMOCK_PATH):
- import mock # pylint: disable=import-error
+import mock # pylint: disable=import-error
EMPTY_COVERAGE_STATS_DICT = {
'files': {},
diff --git a/chromium/build/android/gradle/OWNERS b/chromium/build/android/gradle/OWNERS
index d1f94845f4d..a0e08269724 100644
--- a/chromium/build/android/gradle/OWNERS
+++ b/chromium/build/android/gradle/OWNERS
@@ -1,4 +1,2 @@
agrieve@chromium.org
wnwen@chromium.org
-
-# COMPONENT: Build
diff --git a/chromium/build/android/gradle/generate_gradle.py b/chromium/build/android/gradle/generate_gradle.py
index 5501aa984d7..85b24410eaa 100755
--- a/chromium/build/android/gradle/generate_gradle.py
+++ b/chromium/build/android/gradle/generate_gradle.py
@@ -16,7 +16,6 @@ import re
import shutil
import subprocess
import sys
-import zipfile
_BUILD_ANDROID = os.path.join(os.path.dirname(__file__), os.pardir)
sys.path.append(_BUILD_ANDROID)
@@ -28,6 +27,7 @@ from pylib.constants import host_paths
sys.path.append(os.path.join(_BUILD_ANDROID, 'gyp'))
import jinja_template
from util import build_utils
+from util import resource_utils
_DEPOT_TOOLS_PATH = os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party',
'depot_tools')
@@ -38,7 +38,6 @@ _FILE_DIR = os.path.dirname(__file__)
_GENERATED_JAVA_SUBDIR = 'generated_java'
_JNI_LIBS_SUBDIR = 'symlinked-libs'
_ARMEABI_SUBDIR = 'armeabi'
-_RES_SUBDIR = 'extracted-res'
_GRADLE_BUILD_FILE = 'build.gradle'
_CMAKE_FILE = 'CMakeLists.txt'
# This needs to come first alphabetically among all modules.
@@ -255,11 +254,8 @@ class _ProjectEntry(object):
'junit_binary',
)
- def ResZips(self):
- return self.DepsInfo().get('owned_resources_zips', [])
-
- def ResDirs(self):
- return self.DepsInfo().get('owned_resources_dirs', [])
+ def ResSources(self):
+ return self.DepsInfo().get('lint_resource_sources', [])
def JavaFiles(self):
if self._java_files is None:
@@ -360,24 +356,12 @@ class _ProjectContextGenerator(object):
def EntryOutputDir(self, entry):
return os.path.join(self.project_dir, entry.GradleSubdir())
- def AllResZips(self, root_entry):
- res_zips = []
- for entry in self._GetEntries(root_entry):
- res_zips += entry.ResZips()
- return set(_RebasePath(res_zips))
-
def GeneratedInputs(self, root_entry):
generated_inputs = set()
- generated_inputs.update(self.AllResZips(root_entry))
for entry in self._GetEntries(root_entry):
generated_inputs.update(entry.PrebuiltJars())
return generated_inputs
- def GeneratedZips(self, root_entry):
- entry_output_dir = self.EntryOutputDir(root_entry)
- return [(s, os.path.join(entry_output_dir, _RES_SUBDIR))
- for s in self.AllResZips(root_entry)]
-
def GenerateManifest(self, root_entry):
android_manifest = root_entry.DepsInfo().get('android_manifest')
if not android_manifest:
@@ -401,13 +385,15 @@ class _ProjectContextGenerator(object):
p for e in self._GetEntries(root_entry) for p in e.PrebuiltJars())
self.processed_prebuilts.update(prebuilts)
variables['prebuilts'] = self._Relativize(root_entry, prebuilts)
- res_dirs = set(
- p for e in self._GetEntries(root_entry) for p in e.ResDirs())
+ res_sources_files = _RebasePath(
+ set(p for e in self._GetEntries(root_entry) for p in e.ResSources()))
+ res_sources = []
+ for res_sources_file in res_sources_files:
+ res_sources.extend(build_utils.ReadSourcesList(res_sources_file))
+ res_dirs = resource_utils.DeduceResourceDirsFromFileList(res_sources)
# Do not add generated resources for the all module since it creates many
# duplicates, and currently resources are only used for editing.
self.processed_res_dirs.update(res_dirs)
- res_dirs.add(
- os.path.join(self.EntryOutputDir(root_entry), _RES_SUBDIR))
variables['res_dirs'] = self._Relativize(root_entry, res_dirs)
if self.split_projects:
deps = [_ProjectEntry.FromBuildConfigPath(p)
@@ -527,11 +513,35 @@ def _CreateJniLibsDir(output_dir, entry_output_dir, so_files):
def _GenerateLocalProperties(sdk_dir):
- """Returns the data for project.properties as a string."""
+ """Returns the data for local.properties as a string."""
return '\n'.join([
'# Generated by //build/android/gradle/generate_gradle.py',
'sdk.dir=%s' % sdk_dir,
- ''])
+ '',
+ ])
+
+
+def _GenerateGradleWrapperPropertiesCanary():
+ """Returns the data for gradle-wrapper.properties as a string."""
+ # Before May 2020, this wasn't necessary. Might not be necessary at some point
+ # in the future?
+ return '\n'.join([
+ '# Generated by //build/android/gradle/generate_gradle.py',
+ ('distributionUrl=https\\://services.gradle.org/distributions/'
+ 'gradle-6.5-rc-1-all.zip\n'),
+ '',
+ ])
+
+
+def _GenerateGradleProperties():
+ """Returns the data for gradle.properties as a string."""
+ return '\n'.join([
+ '# Generated by //build/android/gradle/generate_gradle.py',
+ '',
+ '# Tells Gradle to show warnings during project sync.',
+ 'org.gradle.warning.mode=all',
+ '',
+ ])
def _GenerateBaseVars(generator, build_vars):
@@ -692,23 +702,6 @@ def _GenerateSettingsGradle(project_entries):
return '\n'.join(lines)
-def _ExtractFile(zip_path, extracted_path):
- logging.debug('Extracting %s to %s', zip_path, extracted_path)
- with zipfile.ZipFile(zip_path) as z:
- z.extractall(extracted_path)
-
-
-def _ExtractZips(entry_output_dir, zip_tuples):
- """Extracts all zips to the directory given in the tuples."""
- extracted_paths = set(s[1] for s in zip_tuples)
- for extracted_path in extracted_paths:
- assert _IsSubpathOf(extracted_path, entry_output_dir)
- shutil.rmtree(extracted_path, True)
-
- for zip_path, extracted_path in zip_tuples:
- _ExtractFile(zip_path, extracted_path)
-
-
def _FindAllProjectEntries(main_entries):
"""Returns the list of all _ProjectEntry instances given the root project."""
found = set()
@@ -930,8 +923,16 @@ def main():
_WriteFile(
os.path.join(generator.project_dir, 'local.properties'),
_GenerateLocalProperties(args.sdk_path))
+ _WriteFile(os.path.join(generator.project_dir, 'gradle.properties'),
+ _GenerateGradleProperties())
+
+ wrapper_properties = os.path.join(generator.project_dir, 'gradle', 'wrapper',
+ 'gradle-wrapper.properties')
+ if os.path.exists(wrapper_properties):
+ os.unlink(wrapper_properties)
+ if args.canary:
+ _WriteFile(wrapper_properties, _GenerateGradleWrapperPropertiesCanary())
- zip_tuples = []
generated_inputs = set()
for entry in entries:
entries_to_gen = [entry]
@@ -939,13 +940,9 @@ def main():
for entry_to_gen in entries_to_gen:
# Build all paths references by .gradle that exist within output_dir.
generated_inputs.update(generator.GeneratedInputs(entry_to_gen))
- zip_tuples.extend(generator.GeneratedZips(entry_to_gen))
if generated_inputs:
targets = _RebasePath(generated_inputs, output_dir)
_RunNinja(output_dir, targets)
- if zip_tuples:
- # This extracts generated xml files (e.g. strings).
- _ExtractZips(generator.project_dir, zip_tuples)
logging.warning('Generated files will only appear once you\'ve built them.')
logging.warning('Generated projects for Android Studio %s', channel)
diff --git a/chromium/build/android/gradle/root.jinja b/chromium/build/android/gradle/root.jinja
index c80292d1226..549dc245593 100644
--- a/chromium/build/android/gradle/root.jinja
+++ b/chromium/build/android/gradle/root.jinja
@@ -9,18 +9,18 @@ buildscript {
jcenter()
{% if channel == 'canary' %}
// Workaround for http://b/144885480.
- maven() {
- url "http://dl.bintray.com/kotlin/kotlin-eap"
- }
+ //maven() {
+ // url "http://dl.bintray.com/kotlin/kotlin-eap"
+ //}
{% endif %}
}
dependencies {
{% if channel == 'canary' %}
- classpath "com.android.tools.build:gradle:4.0.0-alpha04"
+ classpath "com.android.tools.build:gradle:4.1.0-beta01"
{% elif channel == 'beta' %}
- classpath "com.android.tools.build:gradle:3.1.0-beta4"
+ classpath "com.android.tools.build:gradle:4.0.0-rc01"
{% else %}
- classpath "com.android.tools.build:gradle:3.0.1"
+ classpath "com.android.tools.build:gradle:3.6.3"
{% endif %}
}
}
diff --git a/chromium/build/android/gyp/OWNERS b/chromium/build/android/gyp/OWNERS
index 7defba6b1ae..25557e1fc55 100644
--- a/chromium/build/android/gyp/OWNERS
+++ b/chromium/build/android/gyp/OWNERS
@@ -2,5 +2,3 @@ agrieve@chromium.org
digit@chromium.org
smaier@chromium.org
wnwen@chromium.org
-
-# COMPONENT: Build
diff --git a/chromium/build/android/gyp/aar.py b/chromium/build/android/gyp/aar.py
index 87f189014a1..ffd6cf8fa12 100755
--- a/chromium/build/android/gyp/aar.py
+++ b/chromium/build/android/gyp/aar.py
@@ -23,15 +23,30 @@ sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__),
import gn_helpers
+# Regular expression to extract -checkdiscard / -check* lines.
+# Does not support nested comments with "}" in them (oh well).
+_CHECKDISCARD_PATTERN = re.compile(r'^\s*?-check.*?}\s*',
+ re.DOTALL | re.MULTILINE)
+
+_PROGUARD_TXT = 'proguard.txt'
+_PROGUARD_CHECKS_TXT = 'proguard-checks.txt'
+
+
def _IsManifestEmpty(manifest_str):
- """Returns whether the given manifest has merge-worthy elements.
+ """Decides whether the given manifest has merge-worthy elements.
E.g.: <activity>, <service>, etc.
+
+ Args:
+ manifest_str: Content of a manifest XML.
+
+ Returns:
+ Whether the manifest has merge-worthy elements.
"""
doc = ElementTree.fromstring(manifest_str)
for node in doc:
if node.tag == 'application':
- if len(node):
+ if node.getchildren():
return False
elif node.tag != 'uses-sdk':
return False
@@ -40,6 +55,14 @@ def _IsManifestEmpty(manifest_str):
def _CreateInfo(aar_file):
+ """Extracts and returns .info data from an .aar file.
+
+ Args:
+ aar_file: Path to an input .aar file.
+
+ Returns:
+ A dict containing .info data.
+ """
data = {}
data['aidl'] = []
data['assets'] = []
@@ -76,16 +99,40 @@ def _CreateInfo(aar_file):
data['native_libraries'] = [name]
elif name == 'classes.jar':
data['has_classes_jar'] = True
- elif name == 'proguard.txt':
+ elif name == _PROGUARD_TXT:
data['has_proguard_flags'] = True
elif name == 'R.txt':
# Some AARs, e.g. gvr_controller_java, have empty R.txt. Such AARs
# have no resources as well. We treat empty R.txt as having no R.txt.
- data['has_r_text_file'] = (z.read('R.txt').strip() != '')
+ data['has_r_text_file'] = bool(z.read('R.txt').strip())
+
+ if data['has_proguard_flags']:
+ config = z.read(_PROGUARD_TXT)
+ if _CHECKDISCARD_PATTERN.search(config):
+ data['has_proguard_check_flags'] = True
+
return data
-def _PerformExtract(aar_file, output_dir, name_allowlist):
+def _SplitProguardConfig(tmp_dir):
+ # Put -checkdiscard (and friends) into a separate proguard config.
+ # https://crbug.com/1093831
+ main_flag_path = os.path.join(tmp_dir, _PROGUARD_TXT)
+ check_flag_path = os.path.join(tmp_dir, _PROGUARD_CHECKS_TXT)
+ with open(main_flag_path) as f:
+ config_data = f.read()
+ with open(main_flag_path, 'w') as f:
+ MSG = ('# Check flag moved to proguard-checks.txt by '
+ '//build/android/gyp/aar.py\n')
+ f.write(_CHECKDISCARD_PATTERN.sub(MSG, config_data))
+ with open(check_flag_path, 'w') as f:
+ f.write('# Check flags extracted by //build/android/gyp/aar.py\n\n')
+ for m in _CHECKDISCARD_PATTERN.finditer(config_data):
+ f.write(m.group(0))
+
+
+def _PerformExtract(aar_file, output_dir, name_allowlist,
+ has_proguard_check_flags):
with build_utils.TempDir() as tmp_dir:
tmp_dir = os.path.join(tmp_dir, 'staging')
os.mkdir(tmp_dir)
@@ -94,6 +141,10 @@ def _PerformExtract(aar_file, output_dir, name_allowlist):
# Write a breadcrumb so that SuperSize can attribute files back to the .aar.
with open(os.path.join(tmp_dir, 'source.info'), 'w') as f:
f.write('source={}\n'.format(aar_file))
+
+ if has_proguard_check_flags:
+ _SplitProguardConfig(tmp_dir)
+
shutil.rmtree(output_dir, ignore_errors=True)
shutil.move(tmp_dir, output_dir)
@@ -135,7 +186,7 @@ def main():
# Generated by //build/android/gyp/aar.py
# To regenerate, use "update_android_aar_prebuilts = true" and run "gn gen".
-""" + gn_helpers.ToGNString(aar_info)
+""" + gn_helpers.ToGNString(aar_info, pretty=True)
if args.command == 'extract':
if args.assert_info_file:
@@ -150,12 +201,20 @@ def main():
if args.ignore_resources:
names = [n for n in names if not n.startswith('res')]
+ has_proguard_check_flags = aar_info.get('has_proguard_check_flags')
output_paths = [os.path.join(args.output_dir, n) for n in names]
output_paths.append(os.path.join(args.output_dir, 'source.info'))
- md5_check.CallAndRecordIfStale(
- lambda: _PerformExtract(args.aar_file, args.output_dir, set(names)),
- input_paths=[args.aar_file],
- output_paths=output_paths)
+ if has_proguard_check_flags:
+ output_paths.append(os.path.join(args.output_dir, _PROGUARD_CHECKS_TXT))
+
+ def on_stale_md5():
+ _PerformExtract(args.aar_file, args.output_dir, set(names),
+ has_proguard_check_flags)
+
+ md5_check.CallAndRecordIfStale(on_stale_md5,
+ input_strings=[aar_info],
+ input_paths=[args.aar_file],
+ output_paths=output_paths)
elif args.command == 'list':
aar_output_present = args.output != '-' and os.path.isfile(args.output)
diff --git a/chromium/build/android/gyp/aar.pydeps b/chromium/build/android/gyp/aar.pydeps
index e08c5475e3d..edb351d2fc8 100644
--- a/chromium/build/android/gyp/aar.pydeps
+++ b/chromium/build/android/gyp/aar.pydeps
@@ -1,6 +1,7 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/aar.pydeps build/android/gyp/aar.py
../../gn_helpers.py
+../../print_python_deps.py
aar.py
util/__init__.py
util/build_utils.py
diff --git a/chromium/build/android/gyp/apkbuilder.py b/chromium/build/android/gyp/apkbuilder.py
index 7f8403919ca..dd2175bbe9c 100755
--- a/chromium/build/android/gyp/apkbuilder.py
+++ b/chromium/build/android/gyp/apkbuilder.py
@@ -359,8 +359,7 @@ def _MaybeWriteDepAndStampFiles(options, depfile_deps):
output = options.stamp
else:
output = options.output_apk
- build_utils.WriteDepfile(
- options.depfile, output, inputs=depfile_deps, add_pydeps=False)
+ build_utils.WriteDepfile(options.depfile, output, inputs=depfile_deps)
def main(args):
diff --git a/chromium/build/android/gyp/bytecode_processor.py b/chromium/build/android/gyp/bytecode_processor.py
index 3d78347998d..850a809d5d2 100755
--- a/chromium/build/android/gyp/bytecode_processor.py
+++ b/chromium/build/android/gyp/bytecode_processor.py
@@ -3,7 +3,7 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-"""Wraps bin/helper/java_bytecode_rewriter and expands @FileArgs."""
+"""Wraps bin/helper/bytecode_processor and expands @FileArgs."""
import argparse
import os
@@ -23,45 +23,45 @@ def main(argv):
parser = argparse.ArgumentParser()
parser.add_argument('--script', required=True,
help='Path to the java binary wrapper script.')
+ parser.add_argument('--gn-target', required=True)
parser.add_argument('--input-jar', required=True)
- parser.add_argument('--output-jar', required=True)
- parser.add_argument('--direct-classpath-jars', required=True)
- parser.add_argument('--sdk-classpath-jars', required=True)
- parser.add_argument('--extra-classpath-jars', dest='extra_jars',
- action='append', default=[],
- help='Extra inputs, passed last to the binary script.')
+ parser.add_argument('--direct-classpath-jars')
+ parser.add_argument('--sdk-classpath-jars')
+ parser.add_argument('--full-classpath-jars')
+ parser.add_argument('--full-classpath-gn-targets')
+ parser.add_argument('--stamp')
parser.add_argument('-v', '--verbose', action='store_true')
parser.add_argument('--missing-classes-allowlist')
_AddSwitch(parser, '--is-prebuilt')
- _AddSwitch(parser, '--enable-thread-annotations')
- _AddSwitch(parser, '--enable-check-class-path')
args = parser.parse_args(argv)
- sdk_jars = build_utils.ParseGnList(args.sdk_classpath_jars)
- assert len(sdk_jars) > 0
-
- direct_jars = build_utils.ParseGnList(args.direct_classpath_jars)
- assert len(direct_jars) > 0
-
- extra_classpath_jars = []
- for a in args.extra_jars:
- extra_classpath_jars.extend(build_utils.ParseGnList(a))
+ args.sdk_classpath_jars = build_utils.ParseGnList(args.sdk_classpath_jars)
+ args.direct_classpath_jars = build_utils.ParseGnList(
+ args.direct_classpath_jars)
+ args.full_classpath_jars = build_utils.ParseGnList(args.full_classpath_jars)
+ args.full_classpath_gn_targets = build_utils.ParseGnList(
+ args.full_classpath_gn_targets)
args.missing_classes_allowlist = build_utils.ParseGnList(
args.missing_classes_allowlist)
- if args.verbose:
- verbose = '--verbose'
- else:
- verbose = '--not-verbose'
+ verbose = '--verbose' if args.verbose else '--not-verbose'
- cmd = ([
- args.script, args.input_jar, args.output_jar, verbose, args.is_prebuilt,
- args.enable_thread_annotations, args.enable_check_class_path
- ] + [str(len(args.missing_classes_allowlist))] +
- args.missing_classes_allowlist + [str(len(sdk_jars))] + sdk_jars +
- [str(len(direct_jars))] + direct_jars + extra_classpath_jars)
+ cmd = [args.script, args.gn_target, args.input_jar, verbose, args.is_prebuilt]
+ cmd += [str(len(args.missing_classes_allowlist))]
+ cmd += args.missing_classes_allowlist
+ cmd += [str(len(args.sdk_classpath_jars))]
+ cmd += args.sdk_classpath_jars
+ cmd += [str(len(args.direct_classpath_jars))]
+ cmd += args.direct_classpath_jars
+ cmd += [str(len(args.full_classpath_jars))]
+ cmd += args.full_classpath_jars
+ cmd += [str(len(args.full_classpath_gn_targets))]
+ cmd += args.full_classpath_gn_targets
subprocess.check_call(cmd)
+ if args.stamp:
+ build_utils.Touch(args.stamp)
+
if __name__ == '__main__':
sys.exit(main(sys.argv))
diff --git a/chromium/build/android/gyp/compile_java.pydeps b/chromium/build/android/gyp/compile_java.pydeps
index a128f47280a..f24bdcbb879 100644
--- a/chromium/build/android/gyp/compile_java.pydeps
+++ b/chromium/build/android/gyp/compile_java.pydeps
@@ -7,6 +7,7 @@
../../../third_party/colorama/src/colorama/win32.py
../../../third_party/colorama/src/colorama/winterm.py
../../gn_helpers.py
+../../print_python_deps.py
compile_java.py
util/__init__.py
util/build_utils.py
diff --git a/chromium/build/android/gyp/compile_resources.py b/chromium/build/android/gyp/compile_resources.py
index 2ca4ec781c8..eece2eb3fea 100755
--- a/chromium/build/android/gyp/compile_resources.py
+++ b/chromium/build/android/gyp/compile_resources.py
@@ -18,7 +18,6 @@ import contextlib
import filecmp
import hashlib
import logging
-import multiprocessing.dummy
import os
import re
import shutil
@@ -26,7 +25,6 @@ import subprocess
import sys
import tempfile
import textwrap
-import time
import zipfile
from xml.etree import ElementTree
@@ -34,9 +32,11 @@ from util import build_utils
from util import diff_utils
from util import manifest_utils
from util import md5_check
+from util import parallel
from util import protoresources
from util import resource_utils
+
# Pngs that we shouldn't convert to webp. Please add rationale when updating.
_PNG_WEBP_EXCLUSION_PATTERN = re.compile('|'.join([
# Crashes on Galaxy S5 running L (https://crbug.com/807059).
@@ -546,68 +546,64 @@ def _CreateKeepPredicate(resource_exclusion_regex,
build_utils.MatchesGlob(path, resource_exclusion_exceptions))
-def _ConvertToWebP(webp_binary, png_paths, path_info, webp_cache_dir):
- pool = multiprocessing.dummy.Pool(10)
+def _ComputeSha1(path):
+ with open(path, 'rb') as f:
+ data = f.read()
+ return hashlib.sha1(data).hexdigest()
- build_utils.MakeDirectory(webp_cache_dir)
- cwebp_version = subprocess.check_output([webp_binary, '-version']).rstrip()
- cwebp_arguments = ['-mt', '-quiet', '-m', '6', '-q', '100', '-lossless']
+def _ConvertToWebPSingle(png_path, cwebp_binary, cwebp_version, webp_cache_dir):
+ sha1_hash = _ComputeSha1(png_path)
- sha1_time = [0]
- cwebp_time = [0]
- cache_hits = [0]
+ # The set of arguments that will appear in the cache key.
+ quality_args = ['-m', '6', '-q', '100', '-lossless']
- def cal_sha1(png_path):
- start = time.time()
- with open(png_path, 'rb') as f:
- png_content = f.read()
+ webp_cache_path = os.path.join(
+ webp_cache_dir, '{}-{}-{}'.format(sha1_hash, cwebp_version,
+ ''.join(quality_args)))
+ # No need to add .webp. Android can load images fine without them.
+ webp_path = os.path.splitext(png_path)[0]
- sha1_hex = hashlib.sha1(png_content).hexdigest()
- sha1_time[0] += time.time() - start
- return sha1_hex
+ cache_hit = os.path.exists(webp_cache_path)
+ if cache_hit:
+ os.link(webp_cache_path, webp_path)
+ else:
+    # We place the generated webp image at webp_path, instead of in the
+    # webp_cache_dir, to avoid concurrency issues.
+ args = [cwebp_binary, png_path, '-o', webp_path, '-quiet'] + quality_args
+ subprocess.check_call(args)
- def get_converted_image(png_path):
- sha1_hash = cal_sha1(png_path)
+ try:
+ os.link(webp_path, webp_cache_path)
+ except OSError:
+      # Because of a concurrent run, a webp image may already exist in
+      # webp_cache_path.
+ pass
- webp_cache_path = os.path.join(
- webp_cache_dir, '{}-{}-{}'.format(sha1_hash, cwebp_version,
- ''.join(cwebp_arguments)))
- # No need to add an extension, android can load images fine without them.
- webp_path = os.path.splitext(png_path)[0]
+ os.remove(png_path)
+ original_dir = os.path.dirname(os.path.dirname(png_path))
+ rename_tuple = (os.path.relpath(png_path, original_dir),
+ os.path.relpath(webp_path, original_dir))
+ return rename_tuple, cache_hit
- if os.path.exists(webp_cache_path):
- cache_hits[0] += 1
- os.link(webp_cache_path, webp_path)
- else:
- # We place the generated webp image to webp_path, instead of in the
- # webp_cache_dir to avoid concurrency issues.
- start = time.time()
- args = [webp_binary, png_path] + cwebp_arguments + ['-o', webp_path]
- subprocess.check_call(args)
- cwebp_time[0] += time.time() - start
-
- try:
- os.link(webp_path, webp_cache_path)
- except OSError:
- # Because of concurrent run, a webp image may already exists in
- # webp_cache_path.
- pass
-
- os.remove(png_path)
- original_dir = os.path.dirname(os.path.dirname(png_path))
- path_info.RegisterRename(
- os.path.relpath(png_path, original_dir),
- os.path.relpath(webp_path, original_dir))
-
- png_paths = [f for f in png_paths if not _PNG_WEBP_EXCLUSION_PATTERN.match(f)]
- try:
- pool.map(get_converted_image, png_paths)
- finally:
- pool.close()
- pool.join()
- logging.debug('png->webp: cache: %d/%d sha1 time: %.1fms cwebp time: %.1fms',
- cache_hits[0], len(png_paths), sha1_time[0], cwebp_time[0])
+
+def _ConvertToWebP(cwebp_binary, png_paths, path_info, webp_cache_dir):
+ cwebp_version = subprocess.check_output([cwebp_binary, '-version']).rstrip()
+ shard_args = [(f, ) for f in png_paths
+ if not _PNG_WEBP_EXCLUSION_PATTERN.match(f)]
+
+ build_utils.MakeDirectory(webp_cache_dir)
+ results = parallel.BulkForkAndCall(_ConvertToWebPSingle,
+ shard_args,
+ cwebp_binary=cwebp_binary,
+ cwebp_version=cwebp_version,
+ webp_cache_dir=webp_cache_dir)
+ total_cache_hits = 0
+ for rename_tuple, cache_hit in results:
+ path_info.RegisterRename(*rename_tuple)
+ total_cache_hits += int(cache_hit)
+
+ logging.debug('png->webp cache: %d/%d', total_cache_hits, len(shard_args))
def _RemoveImageExtensions(directory, path_info):
@@ -627,10 +623,9 @@ def _RemoveImageExtensions(directory, path_info):
os.path.relpath(path_no_extension, directory))
-def _CompileSingleDep(args):
- index, dep_path, aapt2_path, partials_dir, exclusion_rules = args
- basename = os.path.basename(dep_path)
- unique_name = '{}_{}'.format(index, basename)
+def _CompileSingleDep(index, dep_subdir, keep_predicate, aapt2_path,
+ partials_dir):
+ unique_name = '{}_{}'.format(index, os.path.basename(dep_subdir))
partial_path = os.path.join(partials_dir, '{}.zip'.format(unique_name))
compile_command = [
@@ -639,7 +634,7 @@ def _CompileSingleDep(args):
# TODO(wnwen): Turn this on once aapt2 forces 9-patch to be crunched.
# '--no-crunch',
'--dir',
- dep_path,
+ dep_subdir,
'-o',
partial_path
]
@@ -654,33 +649,16 @@ def _CompileSingleDep(args):
# Filtering these files is expensive, so only apply filters to the partials
# that have been explicitly targeted.
- keep_predicate = _CreateValuesKeepPredicate(exclusion_rules, dep_path)
if keep_predicate:
- logging.debug('Applying .arsc filtering to %s', dep_path)
+ logging.debug('Applying .arsc filtering to %s', dep_subdir)
protoresources.StripUnwantedResources(partial_path, keep_predicate)
return partial_path
-def _CompileDeps(aapt2_path, dep_subdirs, temp_dir, exclusion_rules):
- partials_dir = os.path.join(temp_dir, 'partials')
- build_utils.MakeDirectory(partials_dir)
-
- def iter_params():
- for i, dep_path in enumerate(dep_subdirs):
- yield i, dep_path, aapt2_path, partials_dir, exclusion_rules
-
- pool = multiprocessing.dummy.Pool(10)
- try:
- return pool.map(_CompileSingleDep, iter_params())
- finally:
- pool.close()
- pool.join()
-
-
-def _CreateValuesKeepPredicate(exclusion_rules, dep_path):
+def _CreateValuesKeepPredicate(exclusion_rules, dep_subdir):
patterns = [
x[1] for x in exclusion_rules
- if build_utils.MatchesGlob(dep_path, [x[0]])
+ if build_utils.MatchesGlob(dep_subdir, [x[0]])
]
if not patterns:
return None
@@ -689,6 +667,23 @@ def _CreateValuesKeepPredicate(exclusion_rules, dep_path):
return lambda x: not any(r.search(x) for r in regexes)
+def _CompileDeps(aapt2_path, dep_subdirs, temp_dir, exclusion_rules):
+ partials_dir = os.path.join(temp_dir, 'partials')
+ build_utils.MakeDirectory(partials_dir)
+
+ job_params = [(i, dep_subdir,
+ _CreateValuesKeepPredicate(exclusion_rules, dep_subdir))
+ for i, dep_subdir in enumerate(dep_subdirs)]
+
+ # Filtering is slow, so ensure jobs with keep_predicate are started first.
+ job_params.sort(key=lambda x: not x[2])
+ return list(
+ parallel.BulkForkAndCall(_CompileSingleDep,
+ job_params,
+ aapt2_path=aapt2_path,
+ partials_dir=partials_dir))
+
+
def _CreateResourceInfoFile(path_info, info_path, dependencies_res_zips):
for zip_file in dependencies_res_zips:
zip_info_file_path = zip_file + '.info'
diff --git a/chromium/build/android/gyp/compile_resources.pydeps b/chromium/build/android/gyp/compile_resources.pydeps
index f34926c185e..cb1d7621cd7 100644
--- a/chromium/build/android/gyp/compile_resources.pydeps
+++ b/chromium/build/android/gyp/compile_resources.pydeps
@@ -46,6 +46,7 @@
../../../third_party/protobuf/python/google/protobuf/text_format.py
../../../third_party/six/src/six.py
../../gn_helpers.py
+../../print_python_deps.py
compile_resources.py
proto/Configuration_pb2.py
proto/Resources_pb2.py
@@ -55,5 +56,6 @@ util/build_utils.py
util/diff_utils.py
util/manifest_utils.py
util/md5_check.py
+util/parallel.py
util/protoresources.py
util/resource_utils.py
diff --git a/chromium/build/android/gyp/copy_ex.py b/chromium/build/android/gyp/copy_ex.py
index 8451555ee57..f93597f973f 100755
--- a/chromium/build/android/gyp/copy_ex.py
+++ b/chromium/build/android/gyp/copy_ex.py
@@ -119,8 +119,7 @@ def main(args):
DoRenaming(options, deps)
if options.depfile:
- build_utils.WriteDepfile(
- options.depfile, options.stamp, deps, add_pydeps=False)
+ build_utils.WriteDepfile(options.depfile, options.stamp, deps)
if options.stamp:
build_utils.Touch(options.stamp)
diff --git a/chromium/build/android/gyp/create_app_bundle_apks.pydeps b/chromium/build/android/gyp/create_app_bundle_apks.pydeps
index bdee0af2c2d..064ab48f0f6 100644
--- a/chromium/build/android/gyp/create_app_bundle_apks.pydeps
+++ b/chromium/build/android/gyp/create_app_bundle_apks.pydeps
@@ -22,6 +22,7 @@
../../../third_party/markupsafe/_compat.py
../../../third_party/markupsafe/_native.py
../../gn_helpers.py
+../../print_python_deps.py
../pylib/__init__.py
../pylib/utils/__init__.py
../pylib/utils/app_bundle_utils.py
diff --git a/chromium/build/android/gyp/create_bundle_wrapper_script.pydeps b/chromium/build/android/gyp/create_bundle_wrapper_script.pydeps
index d8825145a19..65222c6976d 100644
--- a/chromium/build/android/gyp/create_bundle_wrapper_script.pydeps
+++ b/chromium/build/android/gyp/create_bundle_wrapper_script.pydeps
@@ -22,7 +22,7 @@
../../../third_party/catapult/devil/devil/android/constants/chrome.py
../../../third_party/catapult/devil/devil/android/constants/file_system.py
../../../third_party/catapult/devil/devil/android/decorators.py
-../../../third_party/catapult/devil/devil/android/device_blacklist.py
+../../../third_party/catapult/devil/devil/android/device_denylist.py
../../../third_party/catapult/devil/devil/android/device_errors.py
../../../third_party/catapult/devil/devil/android/device_signal.py
../../../third_party/catapult/devil/devil/android/device_temp_file.py
@@ -85,6 +85,7 @@
../../../third_party/markupsafe/_compat.py
../../../third_party/markupsafe/_native.py
../../gn_helpers.py
+../../print_python_deps.py
../adb_command_line.py
../apk_operations.py
../convert_dex_profile.py
diff --git a/chromium/build/android/gyp/create_size_info_files.py b/chromium/build/android/gyp/create_size_info_files.py
index 27046db1150..b446b7f5dd4 100755
--- a/chromium/build/android/gyp/create_size_info_files.py
+++ b/chromium/build/android/gyp/create_size_info_files.py
@@ -179,11 +179,9 @@ def main(args):
_MergeResInfoFiles(options.res_info_path, res_inputs)
all_inputs = jar_inputs + pak_inputs + res_inputs
- build_utils.WriteDepfile(
- options.depfile,
- options.jar_info_path,
- inputs=all_inputs,
- add_pydeps=False)
+ build_utils.WriteDepfile(options.depfile,
+ options.jar_info_path,
+ inputs=all_inputs)
if __name__ == '__main__':
diff --git a/chromium/build/android/gyp/create_ui_locale_resources.py b/chromium/build/android/gyp/create_ui_locale_resources.py
index 97868cbfde8..007afb37ec5 100755
--- a/chromium/build/android/gyp/create_ui_locale_resources.py
+++ b/chromium/build/android/gyp/create_ui_locale_resources.py
@@ -60,7 +60,6 @@ def main():
parser = argparse.ArgumentParser(
description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
- build_utils.AddDepfileOption(parser)
parser.add_argument(
'--locale-list',
required=True,
@@ -83,9 +82,6 @@ def main():
android_locale = resource_utils.ToAndroidLocaleName(locale)
_AddLocaleResourceFileToZip(out_zip, android_locale, locale)
- if args.depfile:
- build_utils.WriteDepfile(args.depfile, args.output_zip)
-
if __name__ == '__main__':
main()
diff --git a/chromium/build/android/gyp/create_ui_locale_resources.pydeps b/chromium/build/android/gyp/create_ui_locale_resources.pydeps
new file mode 100644
index 00000000000..663ed03619d
--- /dev/null
+++ b/chromium/build/android/gyp/create_ui_locale_resources.pydeps
@@ -0,0 +1,28 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_ui_locale_resources.pydeps build/android/gyp/create_ui_locale_resources.py
+../../../third_party/jinja2/__init__.py
+../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/bccache.py
+../../../third_party/jinja2/compiler.py
+../../../third_party/jinja2/defaults.py
+../../../third_party/jinja2/environment.py
+../../../third_party/jinja2/exceptions.py
+../../../third_party/jinja2/filters.py
+../../../third_party/jinja2/idtracking.py
+../../../third_party/jinja2/lexer.py
+../../../third_party/jinja2/loaders.py
+../../../third_party/jinja2/nodes.py
+../../../third_party/jinja2/optimizer.py
+../../../third_party/jinja2/parser.py
+../../../third_party/jinja2/runtime.py
+../../../third_party/jinja2/tests.py
+../../../third_party/jinja2/utils.py
+../../../third_party/jinja2/visitor.py
+../../../third_party/markupsafe/__init__.py
+../../../third_party/markupsafe/_compat.py
+../../../third_party/markupsafe/_native.py
+../../gn_helpers.py
+create_ui_locale_resources.py
+util/__init__.py
+util/build_utils.py
+util/resource_utils.py
diff --git a/chromium/build/android/gyp/desugar.py b/chromium/build/android/gyp/desugar.py
index 1e1c15678ee..f12aafbe74e 100755
--- a/chromium/build/android/gyp/desugar.py
+++ b/chromium/build/android/gyp/desugar.py
@@ -53,11 +53,9 @@ def main():
stderr_filter=build_utils.FilterReflectiveAccessJavaWarnings)
if options.depfile:
- build_utils.WriteDepfile(
- options.depfile,
- options.output_jar,
- inputs=options.bootclasspath + options.classpath,
- add_pydeps=False)
+ build_utils.WriteDepfile(options.depfile,
+ options.output_jar,
+ inputs=options.bootclasspath + options.classpath)
if __name__ == '__main__':
diff --git a/chromium/build/android/gyp/dex.py b/chromium/build/android/gyp/dex.py
index 0b3dcbd28b9..6fd0ab35dd8 100755
--- a/chromium/build/android/gyp/dex.py
+++ b/chromium/build/android/gyp/dex.py
@@ -62,6 +62,9 @@ def _ParseArgs(args):
'--multi-dex',
action='store_true',
help='Allow multiple dex files within output.')
+ parser.add_argument('--library',
+ action='store_true',
+ help='Allow numerous dex files within output.')
parser.add_argument('--r8-jar-path', required=True, help='Path to R8 jar.')
parser.add_argument('--desugar', action='store_true')
parser.add_argument(
@@ -159,9 +162,18 @@ def _RunD8(dex_cmd, input_paths, output_path):
output = re.sub(r'^Warning in .*?:\n(?! )', '', output, flags=re.MULTILINE)
return output
- # stdout sometimes spams with things like:
- # Stripped invalid locals information from 1 method.
- build_utils.CheckOutput(dex_cmd, stderr_filter=stderr_filter)
+ with tempfile.NamedTemporaryFile() as flag_file:
+ # Chosen arbitrarily. Needed to avoid command-line length limits.
+ MAX_ARGS = 50
+ if len(dex_cmd) > MAX_ARGS:
+ flag_file.write('\n'.join(dex_cmd[MAX_ARGS:]))
+ flag_file.flush()
+ dex_cmd = dex_cmd[:MAX_ARGS]
+ dex_cmd.append('@' + flag_file.name)
+
+ # stdout sometimes spams with things like:
+ # Stripped invalid locals information from 1 method.
+ build_utils.CheckOutput(dex_cmd, stderr_filter=stderr_filter)
def _EnvWithArtLibPath(binary_path):
@@ -325,13 +337,15 @@ def _PerformDexlayout(tmp_dir, tmp_dex_output, options):
def _CreateFinalDex(d8_inputs, output, tmp_dir, dex_cmd, options=None):
tmp_dex_output = os.path.join(tmp_dir, 'tmp_dex_output.zip')
- if (output.endswith('.dex')
- or not all(f.endswith('.dex') for f in d8_inputs)):
+ needs_dexing = not all(f.endswith('.dex') for f in d8_inputs)
+ needs_dexmerge = output.endswith('.dex') or not (options and options.library)
+ if needs_dexing or needs_dexmerge:
if options:
if options.main_dex_list_path:
dex_cmd = dex_cmd + ['--main-dex-list', options.main_dex_list_path]
- elif options.multi_dex and int(options.min_api or 1) < 21:
- # When dexing library targets, it doesn't matter what's in the main dex.
+ elif options.library and int(options.min_api or 1) < 21:
+        # When dexing, D8 requires a main dex list pre-21. For library targets,
+ # it doesn't matter what's in the main dex, so just use a dummy one.
tmp_main_dex_list_path = os.path.join(tmp_dir, 'main_list.txt')
with open(tmp_main_dex_list_path, 'w') as f:
f.write('Foo.class\n')
@@ -420,7 +434,7 @@ def _CreateIntermediateDexFiles(changes, options, tmp_dir, dex_cmd):
# If the only change is deleting a file, class_files will be empty.
if class_files:
# Dex necessary classes into intermediate dex files.
- dex_cmd = dex_cmd + ['--intermediate', '--file-per-class']
+ dex_cmd = dex_cmd + ['--intermediate', '--file-per-class-file']
_RunD8(dex_cmd, class_files, options.incremental_dir)
logging.debug('Dexed class files.')
@@ -444,9 +458,9 @@ def _OnStaleMd5(changes, options, final_dex_inputs, dex_cmd):
def MergeDexForIncrementalInstall(r8_jar_path, src_paths, dest_dex_jar):
dex_cmd = [
build_utils.JAVA_PATH,
- '-jar',
+ '-cp',
r8_jar_path,
- 'd8',
+ 'com.android.tools.r8.D8',
]
with build_utils.TempDir() as tmp_dir:
_CreateFinalDex(src_paths, dest_dex_jar, tmp_dir, dex_cmd)
@@ -479,7 +493,10 @@ def main(args):
final_dex_inputs += options.dex_inputs
dex_cmd = [
- build_utils.JAVA_PATH, '-jar', options.r8_jar_path, 'd8',
+ build_utils.JAVA_PATH,
+ '-cp',
+ options.r8_jar_path,
+ 'com.android.tools.r8.D8',
]
if options.release:
dex_cmd += ['--release']
diff --git a/chromium/build/android/gyp/dex.pydeps b/chromium/build/android/gyp/dex.pydeps
index 5fe5b2b99c1..23856f3c847 100644
--- a/chromium/build/android/gyp/dex.pydeps
+++ b/chromium/build/android/gyp/dex.pydeps
@@ -1,6 +1,7 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/dex.pydeps build/android/gyp/dex.py
../../gn_helpers.py
+../../print_python_deps.py
../convert_dex_profile.py
dex.py
util/__init__.py
diff --git a/chromium/build/android/gyp/dex_jdk_libs.py b/chromium/build/android/gyp/dex_jdk_libs.py
index 0cda991a4c9..01dc3c93091 100755
--- a/chromium/build/android/gyp/dex_jdk_libs.py
+++ b/chromium/build/android/gyp/dex_jdk_libs.py
@@ -29,30 +29,44 @@ def _ParseArgs(args):
return options
-def main(args):
- options = _ParseArgs(args)
-
+def DexJdkLibJar(r8_path, min_api, desugar_jdk_libs_json, desugar_jdk_libs_jar,
+ keep_rule_file, output):
# TODO(agrieve): Spews a lot of stderr about missing classes.
with build_utils.TempDir() as tmp_dir:
cmd = [
build_utils.JAVA_PATH,
- '-jar',
- options.r8_path,
- 'l8',
+ '-cp',
+ r8_path,
+ 'com.android.tools.r8.L8',
'--min-api',
- options.min_api,
- #'--lib', build_utils.JAVA_HOME,
+ min_api,
+ '--lib',
+ build_utils.JAVA_HOME,
'--desugared-lib',
- options.desugar_jdk_libs_json,
- '--output',
- tmp_dir,
- options.desugar_jdk_libs_jar
+ desugar_jdk_libs_json,
]
- subprocess.check_output(cmd, stderr=subprocess.STDOUT)
+ if keep_rule_file:
+ cmd += ['--pg-conf', keep_rule_file]
+
+ cmd += ['--output', tmp_dir, desugar_jdk_libs_jar]
+
+ subprocess.check_output(cmd, stderr=subprocess.STDOUT)
if os.path.exists(os.path.join(tmp_dir, 'classes2.dex')):
raise Exception('Achievement unlocked: desugar_jdk_libs is multidex!')
- shutil.move(os.path.join(tmp_dir, 'classes.dex'), options.output)
+
+  # classes.dex might not exist if the "desugar_jdk_libs_jar" is not used
+ # at all.
+ if os.path.exists(os.path.join(tmp_dir, 'classes.dex')):
+ shutil.move(os.path.join(tmp_dir, 'classes.dex'), output)
+ return True
+ return False
+
+
+def main(args):
+ options = _ParseArgs(args)
+ DexJdkLibJar(options.r8_path, options.min_api, options.desugar_jdk_libs_json,
+ options.desugar_jdk_libs_jar, None, options.output)
if __name__ == '__main__':
diff --git a/chromium/build/android/gyp/dexsplitter.py b/chromium/build/android/gyp/dexsplitter.py
index 8e8230b97bc..47bea7ee80c 100755
--- a/chromium/build/android/gyp/dexsplitter.py
+++ b/chromium/build/android/gyp/dexsplitter.py
@@ -50,9 +50,9 @@ def _ParseOptions(args):
def _RunDexsplitter(options, output_dir):
cmd = [
build_utils.JAVA_PATH,
- '-jar',
+ '-cp',
options.r8_path,
- 'dexsplitter',
+ 'com.android.tools.r8.dexsplitter.DexSplitter',
'--output',
output_dir,
'--proguard-map',
diff --git a/chromium/build/android/gyp/dist_aar.py b/chromium/build/android/gyp/dist_aar.py
index a74037af07a..fed1983b9ca 100755
--- a/chromium/build/android/gyp/dist_aar.py
+++ b/chromium/build/android/gyp/dist_aar.py
@@ -14,7 +14,7 @@ import sys
import tempfile
import zipfile
-from filter_zip import CreatePathTransform
+import filter_zip
from util import build_utils
@@ -117,8 +117,8 @@ def main(args):
build_utils.AddToZipHermetic(
z, 'AndroidManifest.xml', src_path=options.android_manifest)
- path_transform = CreatePathTransform(options.jar_excluded_globs,
- options.jar_included_globs, [])
+ path_transform = filter_zip.CreatePathTransform(
+ options.jar_excluded_globs, options.jar_included_globs, [])
with tempfile.NamedTemporaryFile() as jar_file:
build_utils.MergeZips(
jar_file.name, options.jars, path_transform=path_transform)
@@ -152,8 +152,7 @@ def main(args):
if options.depfile:
all_inputs = (options.jars + options.dependencies_res_zips +
options.r_text_files + options.proguard_configs)
- build_utils.WriteDepfile(options.depfile, options.output, all_inputs,
- add_pydeps=False)
+ build_utils.WriteDepfile(options.depfile, options.output, all_inputs)
if __name__ == '__main__':
diff --git a/chromium/build/android/gyp/filter_zip.py b/chromium/build/android/gyp/filter_zip.py
index 6f854191254..9b52288c7a9 100755
--- a/chromium/build/android/gyp/filter_zip.py
+++ b/chromium/build/android/gyp/filter_zip.py
@@ -5,6 +5,7 @@
# found in the LICENSE file.
import argparse
+import shutil
import sys
from util import build_utils
@@ -20,6 +21,21 @@ _RESOURCE_CLASSES = [
def CreatePathTransform(exclude_globs, include_globs,
strip_resource_classes_for):
+ """Returns a function to strip paths for the given patterns.
+
+ Args:
+ exclude_globs: List of globs that if matched should be excluded.
+ include_globs: List of globs that if not matched should be excluded.
+ strip_resource_classes_for: List of Java packages for which to strip
+ R.java classes from.
+
+ Returns:
+ * None if no filters are needed.
+ * A function "(path) -> path" that returns None when |path| should be
+ stripped, or |path| otherwise.
+ """
+ if not (exclude_globs or include_globs or strip_resource_classes_for):
+ return None
exclude_globs = list(exclude_globs or [])
if strip_resource_classes_for:
exclude_globs.extend(p.replace('.', '/') + '/' + f
@@ -52,19 +68,18 @@ def main():
argv = build_utils.ExpandFileArgs(sys.argv[1:])
args = parser.parse_args(argv)
- if args.exclude_globs:
- args.exclude_globs = build_utils.ParseGnList(args.exclude_globs)
- if args.include_globs:
- args.include_globs= build_utils.ParseGnList(args.include_globs)
- if args.strip_resource_classes_for:
- args.strip_resource_classes_for = build_utils.ParseGnList(
- args.strip_resource_classes_for)
+ args.exclude_globs = build_utils.ParseGnList(args.exclude_globs)
+ args.include_globs = build_utils.ParseGnList(args.include_globs)
+ args.strip_resource_classes_for = build_utils.ParseGnList(
+ args.strip_resource_classes_for)
path_transform = CreatePathTransform(args.exclude_globs, args.include_globs,
args.strip_resource_classes_for)
with build_utils.AtomicOutput(args.output) as f:
- build_utils.MergeZips(
- f.name, [args.input], path_transform=path_transform)
+ if path_transform:
+ build_utils.MergeZips(f.name, [args.input], path_transform=path_transform)
+ else:
+ shutil.copy(args.input, f.name)
if __name__ == '__main__':
diff --git a/chromium/build/android/gyp/gcc_preprocess.py b/chromium/build/android/gyp/gcc_preprocess.py
index 8b3444c2b01..8c5c404c744 100755
--- a/chromium/build/android/gyp/gcc_preprocess.py
+++ b/chromium/build/android/gyp/gcc_preprocess.py
@@ -47,7 +47,7 @@ def main(args):
DoGcc(options)
if options.depfile:
- build_utils.WriteDepfile(options.depfile, options.output, add_pydeps=False)
+ build_utils.WriteDepfile(options.depfile, options.output)
if __name__ == '__main__':
diff --git a/chromium/build/android/gyp/java_cpp_enum.py b/chromium/build/android/gyp/java_cpp_enum.py
index 39cba3d785d..0b9ee541e4e 100755
--- a/chromium/build/android/gyp/java_cpp_enum.py
+++ b/chromium/build/android/gyp/java_cpp_enum.py
@@ -124,15 +124,20 @@ class EnumDefinition(object):
def _TransformKeys(d, func):
"""Normalize keys in |d| and update references to old keys in |d| values."""
- normal_keys = {k: func(k) for k in d}
+ keys_map = {k: func(k) for k in d}
ret = collections.OrderedDict()
for k, v in d.items():
# Need to transform values as well when the entry value was explicitly set
# (since it could contain references to other enum entry values).
if isinstance(v, str):
- for normal_key in normal_keys:
- v = v.replace(normal_key, normal_keys[normal_key])
- ret[normal_keys[k]] = v
+ # First check if a full replacement is available. This avoids issues when
+ # one key is a substring of another.
+ if v in d:
+ v = keys_map[v]
+ else:
+ for old_key, new_key in keys_map.items():
+ v = v.replace(old_key, new_key)
+ ret[keys_map[k]] = v
return ret
@@ -412,7 +417,6 @@ ${ENUM_ENTRIES}
def DoMain(argv):
usage = 'usage: %prog [options] [output_dir] input_file(s)...'
parser = optparse.OptionParser(usage=usage)
- build_utils.AddDepfileOption(parser)
parser.add_option('--srcjar',
help='When specified, a .srcjar at the given path is '
@@ -429,9 +433,6 @@ def DoMain(argv):
for output_path, data in DoGenerate(input_paths):
build_utils.AddToZipHermetic(srcjar, output_path, data=data)
- if options.depfile:
- build_utils.WriteDepfile(options.depfile, options.srcjar, add_pydeps=False)
-
if __name__ == '__main__':
DoMain(sys.argv[1:])
diff --git a/chromium/build/android/gyp/java_cpp_enum_tests.py b/chromium/build/android/gyp/java_cpp_enum_tests.py
index 088c450aeb6..1acb57f82c4 100755
--- a/chromium/build/android/gyp/java_cpp_enum_tests.py
+++ b/chromium/build/android/gyp/java_cpp_enum_tests.py
@@ -484,6 +484,42 @@ public @interface ClassName {
self.assertEqual(collections.OrderedDict([('A', 0)]),
definition.entries)
+ def testParseEnumClassOneValueSubstringOfAnother(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum class SafeBrowsingStatus {
+ kChecking = 0,
+ kEnabled = 1,
+ kDisabled = 2,
+ kDisabledByAdmin = 3,
+ kDisabledByExtension = 4,
+ kEnabledStandard = 5,
+ kEnabledEnhanced = 6,
+ // New enum values must go above here.
+ kMaxValue = kEnabledEnhanced,
+ };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ self.assertEqual(1, len(definitions))
+ definition = definitions[0]
+ self.assertEqual('SafeBrowsingStatus', definition.class_name)
+ self.assertEqual('test.namespace', definition.enum_package)
+ self.assertEqual(
+ collections.OrderedDict([
+ ('CHECKING', '0'),
+ ('ENABLED', '1'),
+ ('DISABLED', '2'),
+ ('DISABLED_BY_ADMIN', '3'),
+ ('DISABLED_BY_EXTENSION', '4'),
+ ('ENABLED_STANDARD', '5'),
+ ('ENABLED_ENHANCED', '6'),
+ ('MAX_VALUE', 'ENABLED_ENHANCED'),
+ ]), definition.entries)
+ self.assertEqual(
+ collections.OrderedDict([
+ ('MAX_VALUE', 'New enum values must go above here.')
+ ]), definition.comments)
+
def testParseEnumStruct(self):
test_data = """
// GENERATED_JAVA_ENUM_PACKAGE: test.namespace
diff --git a/chromium/build/android/gyp/java_cpp_strings.py b/chromium/build/android/gyp/java_cpp_strings.py
index acaaf223efb..498e05e3540 100755
--- a/chromium/build/android/gyp/java_cpp_strings.py
+++ b/chromium/build/android/gyp/java_cpp_strings.py
@@ -114,6 +114,8 @@ class StringFileParser(object):
if string_line.groups()[1]:
self._current_value = string_line.groups()[1]
self._AppendString()
+ else:
+ self._in_string = True
return True
else:
self._in_string = False
@@ -141,19 +143,19 @@ class StringFileParser(object):
return self._strings
-def _GenerateOutput(template, source_path, template_path, strings):
+def _GenerateOutput(template, source_paths, template_path, strings):
description_template = """
// This following string constants were inserted by
// {SCRIPT_NAME}
// From
- // {SOURCE_PATH}
+ // {SOURCE_PATHS}
// Into
// {TEMPLATE_PATH}
"""
values = {
'SCRIPT_NAME': java_cpp_utils.GetScriptName(),
- 'SOURCE_PATH': source_path,
+ 'SOURCE_PATHS': ',\n // '.join(source_paths),
'TEMPLATE_PATH': template_path,
}
description = description_template.format(**values)
@@ -173,15 +175,18 @@ def _ParseStringFile(path):
def _Generate(source_paths, template_path):
with open(template_path) as f:
lines = f.readlines()
- template = ''.join(lines)
- for source_path in source_paths:
- strings = _ParseStringFile(source_path)
- package, class_name = ParseTemplateFile(lines)
- package_path = package.replace('.', os.path.sep)
- file_name = class_name + '.java'
- output_path = os.path.join(package_path, file_name)
- output = _GenerateOutput(template, source_path, template_path, strings)
- yield output, output_path
+
+ template = ''.join(lines)
+ package, class_name = ParseTemplateFile(lines)
+ package_path = package.replace('.', os.path.sep)
+ file_name = class_name + '.java'
+ output_path = os.path.join(package_path, file_name)
+ strings = []
+ for source_path in source_paths:
+ strings.extend(_ParseStringFile(source_path))
+
+ output = _GenerateOutput(template, source_paths, template_path, strings)
+ return output, output_path
def _Main(argv):
@@ -205,8 +210,8 @@ def _Main(argv):
with build_utils.AtomicOutput(args.srcjar) as f:
with zipfile.ZipFile(f, 'w', zipfile.ZIP_STORED) as srcjar:
- for data, path in _Generate(args.inputs, args.template):
- build_utils.AddToZipHermetic(srcjar, path, data=data)
+ data, path = _Generate(args.inputs, args.template)
+ build_utils.AddToZipHermetic(srcjar, path, data=data)
if __name__ == '__main__':
diff --git a/chromium/build/android/gyp/java_cpp_strings_tests.py b/chromium/build/android/gyp/java_cpp_strings_tests.py
index acf51e428e0..3b7d5ca8f67 100755
--- a/chromium/build/android/gyp/java_cpp_strings_tests.py
+++ b/chromium/build/android/gyp/java_cpp_strings_tests.py
@@ -51,6 +51,10 @@ const char kAnotherSwitch[] = "another-value";
const char kAString[] = "a-value";
const char kNoComment[] = "no-comment";
+namespace myfeature {
+const char kMyFeatureNoComment[] = "myfeature.no-comment";
+}
+
// Single line switch with a big space.
const char kAStringWithSpace[] = "a-value";
@@ -58,23 +62,34 @@ const char kAStringWithSpace[] = "a-value";
const char kAStringWithAVeryLongNameThatWillHaveToWrap[] =
"a-string-with-a-very-long-name-that-will-have-to-wrap";
+// This one has no comment before it.
+
+const char kAStringWithAVeryLongNameThatWillHaveToWrap2[] =
+ "a-string-with-a-very-long-name-that-will-have-to-wrap2";
+
// This is erroneous and should be ignored.
const char kInvalidLineBreak[] =
"invalid-line-break";
""".split('\n')
strings = java_cpp_strings.StringFileParser(test_data).Parse()
- self.assertEqual(4, len(strings))
+ self.assertEqual(5, len(strings))
self.assertEqual('A_STRING', strings[0].name)
self.assertEqual('"a-value"', strings[0].value)
self.assertEqual('NO_COMMENT', strings[1].name)
self.assertEqual('"no-comment"', strings[1].value)
- self.assertEqual('A_STRING_WITH_SPACE', strings[2].name)
- self.assertEqual('"a-value"', strings[2].value)
+ self.assertEqual('MY_FEATURE_NO_COMMENT', strings[2].name)
+ self.assertEqual('"myfeature.no-comment"', strings[2].value)
+ self.assertEqual('A_STRING_WITH_SPACE', strings[3].name)
+ self.assertEqual('"a-value"', strings[3].value)
self.assertEqual('A_STRING_WITH_A_VERY_LONG_NAME_THAT_WILL_HAVE_TO_WRAP',
- strings[3].name)
+ strings[4].name)
self.assertEqual('"a-string-with-a-very-long-name-that-will-have-to-wrap"',
- strings[3].value)
+ strings[4].value)
+ self.assertEqual('A_STRING_WITH_A_VERY_LONG_NAME_THAT_WILL_HAVE_TO_WRAP2',
+ strings[5].name)
+ self.assertEqual('"a-string-with-a-very-long-name-that-will-have-to-wrap2"',
+ strings[5].value)
def testTemplateParsing(self):
test_data = """
diff --git a/chromium/build/android/gyp/lint.py b/chromium/build/android/gyp/lint.py
index fb751bd6ed6..fa526e6df88 100755
--- a/chromium/build/android/gyp/lint.py
+++ b/chromium/build/android/gyp/lint.py
@@ -3,10 +3,8 @@
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-
"""Runs Android's lint tool."""
-
from __future__ import print_function
import argparse
@@ -22,9 +20,8 @@ from xml.etree import ElementTree
from util import build_utils
from util import manifest_utils
-from util import resource_utils
-_LINT_MD_URL = 'https://chromium.googlesource.com/chromium/src/+/master/build/android/docs/lint.md' # pylint: disable=line-too-long
+_LINT_MD_URL = 'https://chromium.googlesource.com/chromium/src/+/master/build/android/docs/lint.md' # pylint: disable=line-too-long
# These checks are not useful for test targets and adds an unnecessary burden
# to suppress them.
@@ -41,12 +38,83 @@ _DISABLED_FOR_TESTS = [
"UnusedResources",
]
-
-def _RunLint(lint_path,
+_RES_ZIP_DIR = 'RESZIPS'
+_SRCJAR_DIR = 'SRCJARS'
+
+
+def _SrcRelative(path):
+ """Returns relative path to top-level src dir."""
+ return os.path.relpath(path, build_utils.DIR_SOURCE_ROOT)
+
+
+def _GenerateProjectFile(android_manifest,
+ android_sdk_root,
+ cache_dir,
+ sources=None,
+ srcjar_sources=None,
+ resource_sources=None,
+ android_sdk_version=None):
+ project = ElementTree.Element('project')
+ root = ElementTree.SubElement(project, 'root')
+ # An absolute path helps error paths to be shorter.
+ root.set('dir', os.path.abspath(build_utils.DIR_SOURCE_ROOT))
+ sdk = ElementTree.SubElement(project, 'sdk')
+ # Lint requires that the sdk path be an absolute path.
+ sdk.set('dir', os.path.abspath(android_sdk_root))
+ cache = ElementTree.SubElement(project, 'cache')
+ cache.set('dir', _SrcRelative(cache_dir))
+ main_module = ElementTree.SubElement(project, 'module')
+ main_module.set('name', 'main')
+ main_module.set('android', 'true')
+ main_module.set('library', 'false')
+ if android_sdk_version:
+ main_module.set('compile_sdk_version', android_sdk_version)
+ manifest = ElementTree.SubElement(main_module, 'manifest')
+ manifest.set('file', _SrcRelative(android_manifest))
+ if srcjar_sources:
+ for srcjar_file in srcjar_sources:
+ src = ElementTree.SubElement(main_module, 'src')
+ src.set('file', _SrcRelative(srcjar_file))
+ if sources:
+ for source in sources:
+ src = ElementTree.SubElement(main_module, 'src')
+ src.set('file', _SrcRelative(source))
+ if resource_sources:
+ for resource_file in resource_sources:
+ resource = ElementTree.SubElement(main_module, 'resource')
+ resource.set('file', _SrcRelative(resource_file))
+ return project
+
+
+def _GenerateAndroidManifest(original_manifest_path,
+ min_sdk_version,
+ manifest_package=None):
+ # Set minSdkVersion and package in the manifest to the correct values.
+ doc, manifest, _ = manifest_utils.ParseManifest(original_manifest_path)
+ uses_sdk = manifest.find('./uses-sdk')
+ if uses_sdk is None:
+ uses_sdk = ElementTree.Element('uses-sdk')
+ manifest.insert(0, uses_sdk)
+ uses_sdk.set('{%s}minSdkVersion' % manifest_utils.ANDROID_NAMESPACE,
+ min_sdk_version)
+ if manifest_package:
+ manifest.set('package', manifest_package)
+ return doc
+
+
+def _WriteXmlFile(root, path):
+ build_utils.MakeDirectory(os.path.dirname(path))
+ with build_utils.AtomicOutput(path) as f:
+ # Although we can write it just with ElementTree.tostring, using minidom
+ # makes it a lot easier to read as a human (also on code search).
+ f.write(
+ minidom.parseString(ElementTree.tostring(
+ root, encoding='utf-8')).toprettyxml(indent=' '))
+
+
+def _RunLint(lint_binary_path,
config_path,
manifest_path,
- result_path,
- product_dir,
sources,
cache_dir,
android_sdk_version,
@@ -56,268 +124,139 @@ def _RunLint(lint_path,
resource_sources,
resource_zips,
android_sdk_root,
+ lint_gen_dir,
testonly_target=False,
can_fail_build=False,
- include_unexpected=False,
silent=False):
logging.info('Lint starting')
- def _RebasePath(path):
- """Returns relative path to top-level src dir.
-
- Args:
- path: A path relative to cwd.
- """
- ret = os.path.relpath(os.path.abspath(path), build_utils.DIR_SOURCE_ROOT)
- # If it's outside of src/, just use abspath.
- if ret.startswith('..'):
- ret = os.path.abspath(path)
- return ret
-
- def _ProcessResultFile():
- with open(result_path, 'rb') as f:
- content = f.read().replace(
- _RebasePath(product_dir), 'PRODUCT_DIR')
-
- with open(result_path, 'wb') as f:
- f.write(content)
-
- def _ParseAndShowResultFile():
- dom = minidom.parse(result_path)
- issues = dom.getElementsByTagName('issue')
+ cmd = [
+ _SrcRelative(lint_binary_path),
+ # Consider all lint warnings as errors. Warnings should either always be
+ # fixed or completely suppressed in suppressions.xml. They should not
+ # bloat build output if they are not important enough to be fixed.
+ '-Werror',
+ '--exitcode', # Sets error code if there are errors.
+ '--quiet', # Silences lint's "." progress updates.
+ ]
+ if config_path:
+ cmd.extend(['--config', _SrcRelative(config_path)])
+ if testonly_target:
+ cmd.extend(['--disable', ','.join(_DISABLED_FOR_TESTS)])
+
+ if not manifest_path:
+ manifest_path = os.path.join(build_utils.DIR_SOURCE_ROOT, 'build',
+ 'android', 'AndroidManifest.xml')
+
+ logging.info('Generating Android manifest file')
+ android_manifest_tree = _GenerateAndroidManifest(manifest_path,
+ min_sdk_version,
+ manifest_package)
+ # Include the rebased manifest_path in the lint generated path so that it is
+ # clear in error messages where the original AndroidManifest.xml came from.
+ lint_android_manifest_path = os.path.join(lint_gen_dir,
+ _SrcRelative(manifest_path))
+ logging.info('Writing xml file %s', lint_android_manifest_path)
+ _WriteXmlFile(android_manifest_tree.getroot(), lint_android_manifest_path)
+
+ resource_root_dir = os.path.join(lint_gen_dir, _RES_ZIP_DIR)
+ # These are zip files with generated resources (e.g. strings from GRD).
+ logging.info('Extracting resource zips')
+ for resource_zip in resource_zips:
+ # Use a consistent root and name rather than a temporary file so that
+ # suppressions can be local to the lint target and the resource target.
+ resource_dir = os.path.join(resource_root_dir, resource_zip)
+ shutil.rmtree(resource_dir, True)
+ os.makedirs(resource_dir)
+ resource_sources.extend(
+ build_utils.ExtractAll(resource_zip, path=resource_dir))
+
+ logging.info('Extracting srcjars')
+ srcjar_root_dir = os.path.join(lint_gen_dir, _SRCJAR_DIR)
+ srcjar_sources = []
+ if srcjars:
+ for srcjar in srcjars:
+ # Use path without extensions since otherwise the file name includes
+ # .srcjar and lint treats it as a srcjar.
+ srcjar_dir = os.path.join(srcjar_root_dir, os.path.splitext(srcjar)[0])
+ shutil.rmtree(srcjar_dir, True)
+ os.makedirs(srcjar_dir)
+ # Sadly lint's srcjar support is broken since it only considers the first
+ # srcjar. Until we roll a lint version with that fixed, we need to extract
+ # it ourselves.
+ srcjar_sources.extend(build_utils.ExtractAll(srcjar, path=srcjar_dir))
+
+ logging.info('Generating project file')
+ project_file_root = _GenerateProjectFile(lint_android_manifest_path,
+ android_sdk_root, cache_dir, sources,
+ srcjar_sources, resource_sources,
+ android_sdk_version)
+
+ project_xml_path = os.path.join(lint_gen_dir, 'project.xml')
+ logging.info('Writing xml file %s', project_xml_path)
+ _WriteXmlFile(project_file_root, project_xml_path)
+ cmd += ['--project', _SrcRelative(project_xml_path)]
+
+ logging.info('Preparing environment variables')
+ env = os.environ.copy()
+ # It is important that lint uses the checked-in JDK11 as it is almost 50%
+ # faster than JDK8.
+ env['JAVA_HOME'] = os.path.relpath(build_utils.JAVA_HOME,
+ build_utils.DIR_SOURCE_ROOT)
+ # This filter is necessary for JDK11.
+ stderr_filter = build_utils.FilterReflectiveAccessJavaWarnings
+
+ try:
+ logging.debug('Lint command %s', cmd)
+ start = time.time()
+ # Lint outputs "No issues found" if it succeeds, and uses stderr when it
+ # fails, so we can safely ignore stdout.
+ build_utils.CheckOutput(cmd,
+ cwd=build_utils.DIR_SOURCE_ROOT,
+ env=env,
+ stderr_filter=stderr_filter)
+ end = time.time() - start
+ logging.info('Lint command took %ss', end)
+ except build_utils.CalledProcessError as e:
if not silent:
- print(file=sys.stderr)
- for issue in issues:
- issue_id = issue.attributes['id'].value
- message = issue.attributes['message'].value
- location_elem = issue.getElementsByTagName('location')[0]
- path = location_elem.attributes['file'].value
- line = location_elem.getAttribute('line')
- error = '%s:%s %s: %s [warning]' % (path, line, message, issue_id)
- print(error.encode('utf-8'), file=sys.stderr)
- for attr in ['errorLine1', 'errorLine2']:
- error_line = issue.getAttribute(attr)
- if error_line:
- print(error_line.encode('utf-8'), file=sys.stderr)
- return len(issues)
-
- with build_utils.TempDir() as temp_dir:
- cmd = [
- _RebasePath(lint_path),
- '-Werror',
- '--exitcode',
- '--showall',
- '--xml',
- _RebasePath(result_path),
- # An explicit sdk root needs to be specified since we have an extra
- # intermediate 'lastest' directory under cmdline-tools which prevents
- # lint from automatically deducing the location of the sdk. The sdk is
- # required for many checks (e.g. NewApi). Lint also requires absolute
- # paths.
- '--sdk-home',
- os.path.abspath(android_sdk_root),
- ]
- if config_path:
- cmd.extend(['--config', _RebasePath(config_path)])
- if testonly_target:
- cmd.extend(['--disable', ','.join(_DISABLED_FOR_TESTS)])
-
- tmp_dir_counter = [0]
- def _NewTempSubdir(prefix, append_digit=True):
- # Helper function to create a new sub directory based on the number of
- # subdirs created earlier.
- if append_digit:
- tmp_dir_counter[0] += 1
- prefix += str(tmp_dir_counter[0])
- new_dir = os.path.join(temp_dir, prefix)
- os.makedirs(new_dir)
- return new_dir
-
- resource_dirs = resource_utils.DeduceResourceDirsFromFileList(
- resource_sources)
- # These are zip files with generated resources (e. g. strings from GRD).
- for resource_zip in resource_zips:
- resource_dir = _NewTempSubdir(resource_zip, append_digit=False)
- resource_dirs.append(resource_dir)
- build_utils.ExtractAll(resource_zip, path=resource_dir)
-
- for resource_dir in resource_dirs:
- cmd.extend(['--resources', _RebasePath(resource_dir)])
-
- # There may be multiple source files with the same basename (but in
- # different directories). It is difficult to determine what part of the path
- # corresponds to the java package, and so instead just link the source files
- # into temporary directories (creating a new one whenever there is a name
- # conflict).
- def PathInDir(d, src):
- subpath = os.path.join(d, _RebasePath(src))
- subdir = os.path.dirname(subpath)
- if not os.path.exists(subdir):
- os.makedirs(subdir)
- return subpath
-
- src_dirs = []
- for src in sources:
- src_dir = None
- for d in src_dirs:
- if not os.path.exists(PathInDir(d, src)):
- src_dir = d
- break
- if not src_dir:
- src_dir = _NewTempSubdir('SRC_ROOT')
- src_dirs.append(src_dir)
- cmd.extend(['--sources', _RebasePath(src_dir)])
- # In cases where the build dir is outside of the src dir, this can
- # result in trying to symlink a file to itself for this file:
- # gen/components/version_info/android/java/org/chromium/
- # components/version_info/VersionConstants.java
- src = os.path.abspath(src)
- dst = PathInDir(src_dir, src)
- if src == dst:
- continue
- os.symlink(src, dst)
-
- if srcjars:
- srcjar_dir = _NewTempSubdir('GENERATED_SRC_ROOT', append_digit=False)
- cmd.extend(['--sources', _RebasePath(srcjar_dir)])
- for srcjar in srcjars:
- # We choose to allow srcjars that contain java files which have the
- # same package and name to clobber each other. This happens for
- # generated files like BuildConfig.java. It is generated for
- # targets like base_build_config_gen as well as targets like
- # chrome_modern_public_base_bundle_module__build_config_srcjar.
- # Although we could extract each srcjar to a separate folder, that
- # slows down some invocations of lint by 20 seconds or more.
- # TODO(wnwen): Switch lint.py to generate a project.xml file which
- # supports srcjar inputs by default.
- build_utils.ExtractAll(srcjar, path=srcjar_dir, no_clobber=False)
-
- project_dir = _NewTempSubdir('PROJECT_ROOT', append_digit=False)
- if android_sdk_version:
- # Create dummy project.properies file in a temporary "project" directory.
- # It is the only way to add Android SDK to the Lint's classpath. Proper
- # classpath is necessary for most source-level checks.
- with open(os.path.join(project_dir, 'project.properties'), 'w') \
- as propfile:
- print('target=android-{}'.format(android_sdk_version), file=propfile)
-
- # Put the manifest in a temporary directory in order to avoid lint detecting
- # sibling res/ and src/ directories (which should be pass explicitly if they
- # are to be included).
- if not manifest_path:
- manifest_path = os.path.join(
- build_utils.DIR_SOURCE_ROOT, 'build', 'android',
- 'AndroidManifest.xml')
- lint_manifest_path = os.path.join(project_dir, 'AndroidManifest.xml')
- shutil.copyfile(os.path.abspath(manifest_path), lint_manifest_path)
-
- # Check that minSdkVersion and package is correct and add it to the manifest
- # in case it does not exist.
- doc, manifest, _ = manifest_utils.ParseManifest(lint_manifest_path)
- manifest_utils.AssertUsesSdk(manifest, min_sdk_version)
- manifest_utils.AssertPackage(manifest, manifest_package)
- uses_sdk = manifest.find('./uses-sdk')
- if uses_sdk is None:
- uses_sdk = ElementTree.Element('uses-sdk')
- manifest.insert(0, uses_sdk)
- uses_sdk.set('{%s}minSdkVersion' % manifest_utils.ANDROID_NAMESPACE,
- min_sdk_version)
- if manifest_package:
- manifest.set('package', manifest_package)
- manifest_utils.SaveManifest(doc, lint_manifest_path)
-
- cmd.append(project_dir)
-
- if os.path.exists(result_path):
- os.remove(result_path)
-
- env = os.environ.copy()
- stderr_filter = build_utils.FilterReflectiveAccessJavaWarnings
- if cache_dir:
- env['_JAVA_OPTIONS'] = '-Duser.home=%s' % _RebasePath(cache_dir)
- # When _JAVA_OPTIONS is set, java prints to stderr:
- # Picked up _JAVA_OPTIONS: ...
- #
- # We drop all lines that contain _JAVA_OPTIONS from the output
- stderr_filter = lambda l: re.sub(
- r'.*_JAVA_OPTIONS.*\n?',
- '',
- build_utils.FilterReflectiveAccessJavaWarnings(l))
-
- def fail_func(returncode, stderr):
- if returncode != 0:
- return True
- if (include_unexpected and
- 'Unexpected failure during lint analysis' in stderr):
- return True
- return False
-
- try:
- env['JAVA_HOME'] = os.path.relpath(build_utils.JAVA_HOME,
- build_utils.DIR_SOURCE_ROOT)
- logging.debug('Lint command %s', cmd)
- start = time.time()
- build_utils.CheckOutput(cmd, cwd=build_utils.DIR_SOURCE_ROOT,
- env=env or None, stderr_filter=stderr_filter,
- fail_func=fail_func)
- end = time.time() - start
- logging.info('Lint command took %ss', end)
- except build_utils.CalledProcessError:
- # There is a problem with lint usage
- if not os.path.exists(result_path):
- raise
-
- # Sometimes produces empty (almost) files:
- if os.path.getsize(result_path) < 10:
- if can_fail_build:
- raise
- elif not silent:
- traceback.print_exc()
- return
-
- # There are actual lint issues
- try:
- num_issues = _ParseAndShowResultFile()
- except Exception: # pylint: disable=broad-except
- if not silent:
- print('Lint created unparseable xml file...')
- print('File contents:')
- with open(result_path) as f:
- print(f.read())
- if can_fail_build:
- traceback.print_exc()
- if can_fail_build:
- raise
- else:
- return
-
- _ProcessResultFile()
- if num_issues == 0 and include_unexpected:
- msg = 'Please refer to output above for unexpected lint failures.\n'
- else:
- msg = ('\nLint found %d new issues.\n'
- ' - For full explanation, please refer to %s\n'
- ' - For more information about lint and how to fix lint issues,'
- ' please refer to %s\n' %
- (num_issues, _RebasePath(result_path), _LINT_MD_URL))
- if not silent:
- print(msg, file=sys.stderr)
- if can_fail_build:
- raise Exception('Lint failed.')
+ print('Lint found new issues.\n'
+ ' - Here is the project.xml file passed to lint: {}\n'
+ ' - For more information about lint and how to fix lint issues,'
+ ' please refer to {}\n'.format(_SrcRelative(project_xml_path),
+ _LINT_MD_URL))
+ if can_fail_build:
+ raise
+ else:
+ print(e)
+ else:
+ # Lint succeeded, no need to keep generated files for debugging purposes.
+ shutil.rmtree(resource_root_dir, ignore_errors=True)
+ shutil.rmtree(srcjar_root_dir, ignore_errors=True)
logging.info('Lint completed')
-def _FindInDirectories(directories, filename_filter):
- all_files = []
- for directory in directories:
- all_files.extend(build_utils.FindInDirectory(directory, filename_filter))
- return all_files
-
-
def _ParseArgs(argv):
parser = argparse.ArgumentParser()
build_utils.AddDepfileOption(parser)
+ parser.add_argument('--lint-binary-path',
+ required=True,
+ help='Path to lint executable.')
+ parser.add_argument('--cache-dir',
+ required=True,
+ help='Path to the directory in which the android cache '
+ 'directory tree should be stored.')
+ parser.add_argument('--config-path', help='Path to lint suppressions file.')
+ parser.add_argument('--lint-gen-dir',
+ required=True,
+ help='Path to store generated xml files.')
+ parser.add_argument('--stamp', help='Path to stamp upon success.')
+ parser.add_argument('--android-sdk-version',
+ help='Version (API level) of the Android SDK used for '
+ 'building.')
+ parser.add_argument('--min-sdk-version',
+ required=True,
+ help='Minimal SDK version to lint against.')
parser.add_argument('--android-sdk-root',
required=True,
help='Lint needs an explicit path to the android sdk.')
@@ -326,32 +265,20 @@ def _ParseArgs(argv):
help='If set, some checks like UnusedResources will be '
'disabled since they are not helpful for test '
'targets.')
- parser.add_argument('--lint-path', required=True,
- help='Path to lint executable.')
- parser.add_argument('--product-dir', required=True,
- help='Path to product dir.')
- parser.add_argument('--result-path', required=True,
- help='Path to XML lint result file.')
- parser.add_argument('--cache-dir', required=True,
- help='Path to the directory in which the android cache '
- 'directory tree should be stored.')
- parser.add_argument('--platform-xml-path', required=True,
- help='Path to api-platforms.xml')
- parser.add_argument('--android-sdk-version',
- help='Version (API level) of the Android SDK used for '
- 'building.')
- parser.add_argument('--can-fail-build', action='store_true',
- help='If set, script will exit with nonzero exit status'
- ' if lint errors are present')
- parser.add_argument('--include-unexpected-failures', action='store_true',
+ parser.add_argument('--manifest-package',
+ help='Package name of the AndroidManifest.xml.')
+ parser.add_argument('--can-fail-build',
+ action='store_true',
help='If set, script will exit with nonzero exit status'
- ' if lint itself crashes with unexpected failures.')
- parser.add_argument('--config-path',
- help='Path to lint suppressions file.')
+ ' if lint errors are present')
+ parser.add_argument('--silent',
+ action='store_true',
+ help='If set, script will not log anything.')
parser.add_argument('--java-sources',
help='File containing a list of java sources files.')
+ parser.add_argument('--srcjars', help='GN list of included srcjars.')
parser.add_argument('--manifest-path',
- help='Path to AndroidManifest.xml')
+ help='Path to original AndroidManifest.xml')
parser.add_argument('--resource-sources',
default=[],
action='append',
@@ -362,25 +289,12 @@ def _ParseArgs(argv):
action='append',
help='GYP-list of resource zips, zip files of generated '
'resource files.')
- parser.add_argument('--silent', action='store_true',
- help='If set, script will not log anything.')
- parser.add_argument('--srcjars',
- help='GN list of included srcjars.')
- parser.add_argument('--stamp', help='Path to stamp upon success.')
- parser.add_argument(
- '--min-sdk-version',
- required=True,
- help='Minimal SDK version to lint against.')
- parser.add_argument(
- '--manifest-package', help='Package name of the AndroidManifest.xml.')
args = parser.parse_args(build_utils.ExpandFileArgs(argv))
-
args.java_sources = build_utils.ParseGnList(args.java_sources)
args.srcjars = build_utils.ParseGnList(args.srcjars)
args.resource_sources = build_utils.ParseGnList(args.resource_sources)
args.resource_zips = build_utils.ParseGnList(args.resource_zips)
-
return args
@@ -391,7 +305,6 @@ def main():
sources = []
for java_sources_file in args.java_sources:
sources.extend(build_utils.ReadSourcesList(java_sources_file))
-
resource_sources = []
for resource_sources_file in args.resource_sources:
resource_sources.extend(build_utils.ReadSourcesList(resource_sources_file))
@@ -400,14 +313,11 @@ def main():
resource_sources + [
args.manifest_path,
])
-
depfile_deps = [p for p in possible_depfile_deps if p]
- _RunLint(args.lint_path,
+ _RunLint(args.lint_binary_path,
args.config_path,
args.manifest_path,
- args.result_path,
- args.product_dir,
sources,
args.cache_dir,
args.android_sdk_version,
@@ -417,18 +327,15 @@ def main():
resource_sources,
args.resource_zips,
args.android_sdk_root,
+ args.lint_gen_dir,
testonly_target=args.testonly,
can_fail_build=args.can_fail_build,
- include_unexpected=args.include_unexpected_failures,
silent=args.silent)
logging.info('Creating stamp file')
build_utils.Touch(args.stamp)
if args.depfile:
- build_utils.WriteDepfile(args.depfile,
- args.stamp,
- depfile_deps,
- add_pydeps=False) # pydeps listed in GN.
+ build_utils.WriteDepfile(args.depfile, args.stamp, depfile_deps)
if __name__ == '__main__':
diff --git a/chromium/build/android/gyp/lint.pydeps b/chromium/build/android/gyp/lint.pydeps
index d9a96c70194..68a62f6bf94 100644
--- a/chromium/build/android/gyp/lint.pydeps
+++ b/chromium/build/android/gyp/lint.pydeps
@@ -1,29 +1,7 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/lint.pydeps build/android/gyp/lint.py
-../../../third_party/jinja2/__init__.py
-../../../third_party/jinja2/_compat.py
-../../../third_party/jinja2/bccache.py
-../../../third_party/jinja2/compiler.py
-../../../third_party/jinja2/defaults.py
-../../../third_party/jinja2/environment.py
-../../../third_party/jinja2/exceptions.py
-../../../third_party/jinja2/filters.py
-../../../third_party/jinja2/idtracking.py
-../../../third_party/jinja2/lexer.py
-../../../third_party/jinja2/loaders.py
-../../../third_party/jinja2/nodes.py
-../../../third_party/jinja2/optimizer.py
-../../../third_party/jinja2/parser.py
-../../../third_party/jinja2/runtime.py
-../../../third_party/jinja2/tests.py
-../../../third_party/jinja2/utils.py
-../../../third_party/jinja2/visitor.py
-../../../third_party/markupsafe/__init__.py
-../../../third_party/markupsafe/_compat.py
-../../../third_party/markupsafe/_native.py
../../gn_helpers.py
lint.py
util/__init__.py
util/build_utils.py
util/manifest_utils.py
-util/resource_utils.py
diff --git a/chromium/build/android/gyp/main_dex_list.py b/chromium/build/android/gyp/main_dex_list.py
index 9c36063468a..75ca886b833 100755
--- a/chromium/build/android/gyp/main_dex_list.py
+++ b/chromium/build/android/gyp/main_dex_list.py
@@ -56,8 +56,9 @@ def main():
args = _ParseArgs()
proguard_cmd = [
build_utils.JAVA_PATH,
- '-jar',
+ '-cp',
args.r8_path,
+ 'com.android.tools.r8.R8',
'--classfile',
'--no-desugaring',
'--lib',
@@ -131,11 +132,9 @@ def main():
f.write(main_dex_list)
if args.depfile:
- build_utils.WriteDepfile(
- args.depfile,
- args.main_dex_list_path,
- inputs=args.class_inputs_filearg,
- add_pydeps=False)
+ build_utils.WriteDepfile(args.depfile,
+ args.main_dex_list_path,
+ inputs=args.class_inputs_filearg)
if __name__ == '__main__':
diff --git a/chromium/build/android/gyp/merge_manifest.py b/chromium/build/android/gyp/merge_manifest.py
index 3f784588206..f205aa42b48 100755
--- a/chromium/build/android/gyp/merge_manifest.py
+++ b/chromium/build/android/gyp/merge_manifest.py
@@ -22,8 +22,8 @@ _MANIFEST_MERGER_JARS = [
os.path.join('common', 'common.jar'),
os.path.join('sdk-common', 'sdk-common.jar'),
os.path.join('sdklib', 'sdklib.jar'),
- os.path.join('external', 'com', 'google', 'guava', 'guava', '27.1-jre',
- 'guava-27.1-jre.jar'),
+ os.path.join('external', 'com', 'google', 'guava', 'guava', '28.1-jre',
+ 'guava-28.1-jre.jar'),
os.path.join('external', 'kotlin-plugin-ij', 'Kotlin', 'kotlinc', 'lib',
'kotlin-stdlib.jar'),
os.path.join('external', 'com', 'google', 'code', 'gson', 'gson', '2.8.5',
@@ -137,8 +137,7 @@ def main(argv):
if args.depfile:
inputs = extras + classpath.split(':')
- build_utils.WriteDepfile(args.depfile, args.output, inputs=inputs,
- add_pydeps=False)
+ build_utils.WriteDepfile(args.depfile, args.output, inputs=inputs)
if __name__ == '__main__':
diff --git a/chromium/build/android/gyp/prepare_resources.pydeps b/chromium/build/android/gyp/prepare_resources.pydeps
index 1066a5ff1ec..c0d225db2c6 100644
--- a/chromium/build/android/gyp/prepare_resources.pydeps
+++ b/chromium/build/android/gyp/prepare_resources.pydeps
@@ -22,6 +22,7 @@
../../../third_party/markupsafe/_compat.py
../../../third_party/markupsafe/_native.py
../../gn_helpers.py
+../../print_python_deps.py
prepare_resources.py
util/__init__.py
util/build_utils.py
diff --git a/chromium/build/android/gyp/proguard.py b/chromium/build/android/gyp/proguard.py
index 18919589382..c151be70aef 100755
--- a/chromium/build/android/gyp/proguard.py
+++ b/chromium/build/android/gyp/proguard.py
@@ -12,6 +12,7 @@ import sys
import tempfile
import zipfile
+import dex_jdk_libs
from util import build_utils
from util import diff_utils
@@ -107,15 +108,17 @@ def _ParseOptions():
args = build_utils.ExpandFileArgs(sys.argv[1:])
parser = argparse.ArgumentParser()
build_utils.AddDepfileOption(parser)
- group = parser.add_mutually_exclusive_group(required=True)
- group.add_argument('--proguard-path', help='Path to the proguard.jar to use.')
- group.add_argument('--r8-path', help='Path to the R8.jar to use.')
+ parser.add_argument('--r8-path',
+ required=True,
+ help='Path to the R8.jar to use.')
parser.add_argument(
'--desugar-jdk-libs-json', help='Path to desugar_jdk_libs.json.')
parser.add_argument('--input-paths',
action='append',
required=True,
help='GN-list of .jar files to optimize.')
+ parser.add_argument('--desugar-jdk-libs-jar',
+ help='Path to desugar_jdk_libs.jar.')
parser.add_argument('--output-path', help='Path to the generated .jar file.')
parser.add_argument(
'--proguard-configs',
@@ -196,6 +199,8 @@ def _ParseOptions():
parser.add_argument(
'--stamp',
help='File to touch upon success. Mutually exclusive with --output-path')
+ parser.add_argument('--desugared-library-keep-rule-output',
+ help='Path to desugared library keep rule output file.')
options = parser.parse_args(args)
@@ -213,9 +218,6 @@ def _ParseOptions():
if options.expected_configs_file and not options.output_config:
parser.error('--expected-configs-file requires --output-config')
- if options.proguard_path and options.disable_outlining:
- parser.error('--disable-outlining requires --r8-path')
-
if options.only_verify_expectations and not options.stamp:
parser.error('--only-verify-expectations requires --stamp')
@@ -268,12 +270,18 @@ class _DexPathContext(object):
self.staging_dir = os.path.join(work_dir, name)
os.mkdir(self.staging_dir)
- def CreateOutput(self):
+ def CreateOutput(self, has_imported_lib=False, keep_rule_output=None):
found_files = build_utils.FindInDirectory(self.staging_dir)
if not found_files:
raise Exception('Missing dex outputs in {}'.format(self.staging_dir))
if self._final_output_path.endswith('.dex'):
+ if has_imported_lib:
+ raise Exception(
+ 'Trying to create a single .dex file, but a dependency requires '
+ 'JDK Library Desugaring (which necessitates a second file).'
+ 'Refer to %s to see what desugaring was required' %
+ keep_rule_output)
if len(found_files) != 1:
raise Exception('Expected exactly 1 dex file output, found: {}'.format(
'\t'.join(found_files)))
@@ -323,8 +331,9 @@ def _OptimizeWithR8(options,
cmd = [
build_utils.JAVA_PATH,
- '-jar',
+ '-cp',
options.r8_path,
+ 'com.android.tools.r8.R8',
'--no-data-resources',
'--output',
base_dex_context.staging_dir,
@@ -333,7 +342,12 @@ def _OptimizeWithR8(options,
]
if options.desugar_jdk_libs_json:
- cmd += ['--desugared-lib', options.desugar_jdk_libs_json]
+ cmd += [
+ '--desugared-lib',
+ options.desugar_jdk_libs_json,
+ '--desugared-lib-pg-conf-output',
+ options.desugared_library_keep_rule_output,
+ ]
if options.min_api:
cmd += ['--min-api', options.min_api]
@@ -357,10 +371,8 @@ def _OptimizeWithR8(options,
p for p in feature.input_paths if p not in module_input_jars
]
module_input_jars.update(feature_input_jars)
- cmd += [
- '--feature-jar',
- feature.staging_dir + ':' + ':'.join(feature_input_jars)
- ]
+ for in_jar in feature_input_jars:
+ cmd += ['--feature', in_jar, feature.staging_dir]
cmd += base_dex_context.input_paths
# Add any extra input jars to the base module (e.g. desugar runtime).
@@ -382,7 +394,18 @@ def _OptimizeWithR8(options,
'android/docs/java_optimization.md#Debugging-common-failures\n'))
raise ProguardProcessError(err, debugging_link)
- base_dex_context.CreateOutput()
+ base_has_imported_lib = False
+ if options.desugar_jdk_libs_json:
+ existing_files = build_utils.FindInDirectory(base_dex_context.staging_dir)
+ base_has_imported_lib = dex_jdk_libs.DexJdkLibJar(
+ options.r8_path, options.min_api, options.desugar_jdk_libs_json,
+ options.desugar_jdk_libs_jar,
+ options.desugared_library_keep_rule_output,
+ os.path.join(base_dex_context.staging_dir,
+ 'classes%d.dex' % (len(existing_files) + 1)))
+
+ base_dex_context.CreateOutput(base_has_imported_lib,
+ options.desugared_library_keep_rule_output)
for feature in feature_contexts:
feature.CreateOutput()
@@ -393,65 +416,6 @@ def _OptimizeWithR8(options,
out_file.writelines(l for l in in_file if not l.startswith('#'))
-def _OptimizeWithProguard(options,
- config_paths,
- libraries,
- dynamic_config_data,
- print_stdout=False):
- with build_utils.TempDir() as tmp_dir:
- combined_injars_path = os.path.join(tmp_dir, 'injars.jar')
- combined_libjars_path = os.path.join(tmp_dir, 'libjars.jar')
- combined_proguard_configs_path = os.path.join(tmp_dir, 'includes.txt')
- tmp_mapping_path = os.path.join(tmp_dir, 'mapping.txt')
- tmp_output_jar = os.path.join(tmp_dir, 'output.jar')
-
- build_utils.MergeZips(combined_injars_path, options.input_paths)
- build_utils.MergeZips(combined_libjars_path, libraries)
- with open(combined_proguard_configs_path, 'w') as f:
- f.write(_CombineConfigs(config_paths, dynamic_config_data))
-
- if options.proguard_path.endswith('.jar'):
- cmd = [
- build_utils.JAVA_PATH, '-jar', options.proguard_path, '-include',
- combined_proguard_configs_path
- ]
- else:
- cmd = [options.proguard_path, '@' + combined_proguard_configs_path]
-
- cmd += [
- '-forceprocessing',
- '-libraryjars',
- combined_libjars_path,
- '-injars',
- combined_injars_path,
- '-outjars',
- tmp_output_jar,
- '-printmapping',
- tmp_mapping_path,
- ]
-
- # Warning: and Error: are sent to stderr, but messages and Note: are sent
- # to stdout.
- stdout_filter = None
- stderr_filter = None
- if print_stdout:
- stdout_filter = _ProguardOutputFilter()
- stderr_filter = _ProguardOutputFilter()
- build_utils.CheckOutput(
- cmd,
- print_stdout=True,
- print_stderr=True,
- stdout_filter=stdout_filter,
- stderr_filter=stderr_filter)
-
- # ProGuard will skip writing if the file would be empty.
- build_utils.Touch(tmp_mapping_path)
-
- # Copy output files to correct locations.
- shutil.move(tmp_output_jar, options.output_path)
- shutil.move(tmp_mapping_path, options.mapping_output)
-
-
def _CombineConfigs(configs, dynamic_config_data, exclude_generated=False):
ret = []
@@ -508,11 +472,14 @@ def _CreateDynamicConfig(options):
if api_level > _min_api:
ret.append('-keep @interface %s' % annotation_name)
ret.append("""\
--keep,allowobfuscation,allowoptimization @%s class ** {
- <methods>;
+-if @%s class * {
+ *** *(...);
+}
+-keep,allowobfuscation class <1> {
+ *** <2>(...);
}""" % annotation_name)
ret.append("""\
--keepclassmembers,allowobfuscation,allowoptimization class ** {
+-keepclassmembers,allowobfuscation class ** {
@%s <methods>;
}""" % annotation_name)
return '\n'.join(ret)
@@ -545,8 +512,7 @@ def _MaybeWriteStampAndDepFile(options, inputs):
build_utils.Touch(options.stamp)
output = options.stamp
if options.depfile:
- build_utils.WriteDepfile(
- options.depfile, output, inputs=inputs, add_pydeps=False)
+ build_utils.WriteDepfile(options.depfile, output, inputs=inputs)
def main():
@@ -597,12 +563,8 @@ def main():
with open(options.output_config, 'w') as f:
f.write(merged_configs)
- if options.r8_path:
- _OptimizeWithR8(options, proguard_configs, libraries, dynamic_config_data,
- print_stdout)
- else:
- _OptimizeWithProguard(options, proguard_configs, libraries,
- dynamic_config_data, print_stdout)
+ _OptimizeWithR8(options, proguard_configs, libraries, dynamic_config_data,
+ print_stdout)
# After ProGuard / R8 has run:
for output in options.extra_mapping_output_paths:
diff --git a/chromium/build/android/gyp/proguard.pydeps b/chromium/build/android/gyp/proguard.pydeps
index 98934d7aae2..11f51cc0f0d 100644
--- a/chromium/build/android/gyp/proguard.pydeps
+++ b/chromium/build/android/gyp/proguard.pydeps
@@ -1,6 +1,7 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/proguard.pydeps build/android/gyp/proguard.py
../../gn_helpers.py
+dex_jdk_libs.py
proguard.py
util/__init__.py
util/build_utils.py
diff --git a/chromium/build/android/gyp/turbine.pydeps b/chromium/build/android/gyp/turbine.pydeps
index 19396459519..45b0d27d3f6 100644
--- a/chromium/build/android/gyp/turbine.pydeps
+++ b/chromium/build/android/gyp/turbine.pydeps
@@ -1,6 +1,7 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/turbine.pydeps build/android/gyp/turbine.py
../../gn_helpers.py
+../../print_python_deps.py
turbine.py
util/__init__.py
util/build_utils.py
diff --git a/chromium/build/android/gyp/util/build_utils.py b/chromium/build/android/gyp/util/build_utils.py
index bc15fbb61f2..067f62e4b9b 100644
--- a/chromium/build/android/gyp/util/build_utils.py
+++ b/chromium/build/android/gyp/util/build_utils.py
@@ -541,49 +541,6 @@ def GetSortedTransitiveDependencies(top, deps_func):
return list(deps_map)
-def ComputePythonDependencies():
- """Gets the paths of imported non-system python modules.
-
- A path is assumed to be a "system" import if it is outside of chromium's
- src/. The paths will be relative to the current directory.
- """
- _ForceLazyModulesToLoad()
- module_paths = (m.__file__ for m in sys.modules.values()
- if m is not None and hasattr(m, '__file__'))
- abs_module_paths = map(os.path.abspath, module_paths)
-
- abs_dir_source_root = os.path.abspath(DIR_SOURCE_ROOT)
- non_system_module_paths = [
- p for p in abs_module_paths if p.startswith(abs_dir_source_root)
- ]
-
- def ConvertPycToPy(s):
- if s.endswith('.pyc'):
- return s[:-1]
- return s
-
- non_system_module_paths = map(ConvertPycToPy, non_system_module_paths)
- non_system_module_paths = map(os.path.relpath, non_system_module_paths)
- return sorted(set(non_system_module_paths))
-
-
-def _ForceLazyModulesToLoad():
- """Forces any lazily imported modules to fully load themselves.
-
- Inspecting the modules' __file__ attribute causes lazily imported modules
- (e.g. from email) to get fully imported and update sys.modules. Iterate
- over the values until sys.modules stabilizes so that no modules are missed.
- """
- while True:
- num_modules_before = len(sys.modules.keys())
- for m in sys.modules.values():
- if m is not None and hasattr(m, '__file__'):
- _ = m.__file__
- num_modules_after = len(sys.modules.keys())
- if num_modules_before == num_modules_after:
- break
-
-
def InitLogging(enabling_env):
logging.basicConfig(
level=logging.DEBUG if os.environ.get(enabling_env) else logging.WARNING,
@@ -611,12 +568,10 @@ def AddDepfileOption(parser):
help='Path to depfile (refer to `gn help depfile`)')
-def WriteDepfile(depfile_path, first_gn_output, inputs=None, add_pydeps=True):
+def WriteDepfile(depfile_path, first_gn_output, inputs=None):
assert depfile_path != first_gn_output # http://crbug.com/646165
assert not isinstance(inputs, string_types) # Easy mistake to make
inputs = inputs or []
- if add_pydeps:
- inputs = ComputePythonDependencies() + inputs
MakeDirectory(os.path.dirname(depfile_path))
# Ninja does not support multiple outputs in depfiles.
with open(depfile_path, 'w') as depfile:
diff --git a/chromium/build/android/gyp/util/md5_check.py b/chromium/build/android/gyp/util/md5_check.py
index a8a815e7e4f..2830d25c969 100644
--- a/chromium/build/android/gyp/util/md5_check.py
+++ b/chromium/build/android/gyp/util/md5_check.py
@@ -14,6 +14,9 @@ import zipfile
from util import build_utils
+sys.path.insert(1, os.path.join(build_utils.DIR_SOURCE_ROOT, 'build'))
+import print_python_deps
+
# When set and a difference is detected, a diff of what changed is printed.
PRINT_EXPLANATIONS = int(os.environ.get('PRINT_BUILD_EXPLANATIONS', 0))
@@ -48,7 +51,7 @@ def CallAndWriteDepfileIfStale(on_stale_md5,
input_strings = list(input_strings or [])
output_paths = list(output_paths or [])
- input_paths += build_utils.ComputePythonDependencies()
+ input_paths += print_python_deps.ComputePythonDependencies()
CallAndRecordIfStale(
on_stale_md5,
@@ -64,8 +67,7 @@ def CallAndWriteDepfileIfStale(on_stale_md5,
# on bots that build with & without patch, and the patch changes the depfile
# location.
if hasattr(options, 'depfile') and options.depfile:
- build_utils.WriteDepfile(
- options.depfile, output_paths[0], depfile_deps, add_pydeps=False)
+ build_utils.WriteDepfile(options.depfile, output_paths[0], depfile_deps)
def CallAndRecordIfStale(function,
@@ -125,15 +127,21 @@ def CallAndRecordIfStale(function,
old_metadata = None
force = force or _FORCE_REBUILD
missing_outputs = [x for x in output_paths if force or not os.path.exists(x)]
+ too_new = []
# When outputs are missing, don't bother gathering change information.
if not missing_outputs and os.path.exists(record_path):
- with open(record_path, 'r') as jsonfile:
- try:
- old_metadata = _Metadata.FromFile(jsonfile)
- except: # pylint: disable=bare-except
- pass # Not yet using new file format.
-
- changes = Changes(old_metadata, new_metadata, force, missing_outputs)
+ record_mtime = os.path.getmtime(record_path)
+ # Outputs newer than the change information must have been modified outside
+ # of the build, and should be considered stale.
+ too_new = [x for x in output_paths if os.path.getmtime(x) > record_mtime]
+ if not too_new:
+ with open(record_path, 'r') as jsonfile:
+ try:
+ old_metadata = _Metadata.FromFile(jsonfile)
+ except: # pylint: disable=bare-except
+ pass # Not yet using new file format.
+
+ changes = Changes(old_metadata, new_metadata, force, missing_outputs, too_new)
if not changes.HasChanges():
return
@@ -153,11 +161,13 @@ def CallAndRecordIfStale(function,
class Changes(object):
"""Provides and API for querying what changed between runs."""
- def __init__(self, old_metadata, new_metadata, force, missing_outputs):
+ def __init__(self, old_metadata, new_metadata, force, missing_outputs,
+ too_new):
self.old_metadata = old_metadata
self.new_metadata = new_metadata
self.force = force
self.missing_outputs = missing_outputs
+ self.too_new = too_new
def _GetOldTag(self, path, subpath=None):
return self.old_metadata and self.old_metadata.GetTag(path, subpath)
@@ -254,6 +264,8 @@ class Changes(object):
return 'force=True'
elif self.missing_outputs:
return 'Outputs do not exist:\n ' + '\n '.join(self.missing_outputs)
+ elif self.too_new:
+ return 'Outputs newer than stamp file:\n ' + '\n '.join(self.too_new)
elif self.old_metadata is None:
return 'Previous stamp file not found.'
diff --git a/chromium/build/android/gyp/util/md5_check_test.py b/chromium/build/android/gyp/util/md5_check_test.py
index 9b3b9039f39..2169320ee54 100755
--- a/chromium/build/android/gyp/util/md5_check_test.py
+++ b/chromium/build/android/gyp/util/md5_check_test.py
@@ -47,13 +47,21 @@ class TestMd5Check(unittest.TestCase):
outputs_missing=False,
expected_changes=None,
added_or_modified_only=None,
- track_subentries=False):
+ track_subentries=False,
+ output_newer_than_record=False):
output_paths = None
if outputs_specified:
output_file1 = tempfile.NamedTemporaryFile()
if outputs_missing:
output_file1.close() # Gets deleted on close().
output_paths = [output_file1.name]
+ if output_newer_than_record:
+ output_mtime = os.path.getmtime(output_file1.name)
+ os.utime(record_path.name, (output_mtime - 1, output_mtime - 1))
+ else:
+ # touch the record file so it doesn't look like it's older that
+ # the output we've just created
+ os.utime(record_path.name, None)
self.called = False
self.changes = None
@@ -97,6 +105,13 @@ class TestMd5Check(unittest.TestCase):
outputs_specified=True, outputs_missing=True,
expected_changes='Outputs do not exist:*',
added_or_modified_only=False)
+ CheckCallAndRecord(True,
+ 'should call when output is newer than record',
+ expected_changes='Outputs newer than stamp file:*',
+ outputs_specified=True,
+ outputs_missing=False,
+ added_or_modified_only=False,
+ output_newer_than_record=True)
CheckCallAndRecord(True, force=True, message='should call when forced',
expected_changes='force=True',
added_or_modified_only=False)
diff --git a/chromium/build/android/gyp/util/parallel.py b/chromium/build/android/gyp/util/parallel.py
new file mode 100644
index 00000000000..082ad97225e
--- /dev/null
+++ b/chromium/build/android/gyp/util/parallel.py
@@ -0,0 +1,214 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Helpers related to multiprocessing.
+
+Based on: //tools/binary_size/libsupersize/parallel.py
+"""
+
+import atexit
+import logging
+import multiprocessing
+import os
+import sys
+import threading
+import traceback
+
+DISABLE_ASYNC = os.environ.get('DISABLE_ASYNC') == '1'
+if DISABLE_ASYNC:
+ logging.warning('Running in synchronous mode.')
+
+_all_pools = None
+_is_child_process = False
+_silence_exceptions = False
+
+# Used to pass parameters to forked processes without pickling.
+_fork_params = None
+_fork_kwargs = None
+
+
+class _ImmediateResult(object):
+ def __init__(self, value):
+ self._value = value
+
+ def get(self):
+ return self._value
+
+ def wait(self):
+ pass
+
+ def ready(self):
+ return True
+
+ def successful(self):
+ return True
+
+
+class _ExceptionWrapper(object):
+ """Used to marshal exception messages back to main process."""
+
+ def __init__(self, msg, exception_type=None):
+ self.msg = msg
+ self.exception_type = exception_type
+
+ def MaybeThrow(self):
+ if self.exception_type:
+ raise getattr(__builtins__,
+ self.exception_type)('Originally caused by: ' + self.msg)
+
+
+class _FuncWrapper(object):
+ """Runs on the fork()'ed side to catch exceptions and spread *args."""
+
+ def __init__(self, func):
+ global _is_child_process
+ _is_child_process = True
+ self._func = func
+
+ def __call__(self, index, _=None):
+ try:
+ return self._func(*_fork_params[index], **_fork_kwargs)
+ except Exception as e:
+ # Only keep the exception type for builtin exception types or else risk
+ # further marshalling exceptions.
+ exception_type = None
+ if hasattr(__builtins__, type(e).__name__):
+ exception_type = type(e).__name__
+ # multiprocessing is supposed to catch and return exceptions automatically
+ # but it doesn't seem to work properly :(.
+ return _ExceptionWrapper(traceback.format_exc(), exception_type)
+ except: # pylint: disable=bare-except
+ return _ExceptionWrapper(traceback.format_exc())
+
+
+class _WrappedResult(object):
+ """Allows for host-side logic to be run after child process has terminated.
+
+ * Unregisters associated pool _all_pools.
+ * Raises exception caught by _FuncWrapper.
+ """
+
+ def __init__(self, result, pool=None):
+ self._result = result
+ self._pool = pool
+
+ def get(self):
+ self.wait()
+ value = self._result.get()
+ _CheckForException(value)
+ return value
+
+ def wait(self):
+ self._result.wait()
+ if self._pool:
+ _all_pools.remove(self._pool)
+ self._pool = None
+
+ def ready(self):
+ return self._result.ready()
+
+ def successful(self):
+ return self._result.successful()
+
+
+def _TerminatePools():
+ """Calls .terminate() on all active process pools.
+
+ Not supposed to be necessary according to the docs, but seems to be required
+ when child process throws an exception or Ctrl-C is hit.
+ """
+ global _silence_exceptions
+ _silence_exceptions = True
+ # Child processes cannot have pools, but atexit runs this function because
+ # it was registered before fork()ing.
+ if _is_child_process:
+ return
+
+ def close_pool(pool):
+ try:
+ pool.terminate()
+ except: # pylint: disable=bare-except
+ pass
+
+ for i, pool in enumerate(_all_pools):
+ # Without calling terminate() on a separate thread, the call can block
+ # forever.
+ thread = threading.Thread(name='Pool-Terminate-{}'.format(i),
+ target=close_pool,
+ args=(pool, ))
+ thread.daemon = True
+ thread.start()
+
+
+def _CheckForException(value):
+ if isinstance(value, _ExceptionWrapper):
+ global _silence_exceptions
+ if not _silence_exceptions:
+ value.MaybeThrow()
+ _silence_exceptions = True
+ logging.error('Subprocess raised an exception:\n%s', value.msg)
+ sys.exit(1)
+
+
+def _MakeProcessPool(job_params, **job_kwargs):
+ global _all_pools
+ global _fork_params
+ global _fork_kwargs
+ assert _fork_params is None
+ assert _fork_kwargs is None
+ pool_size = min(len(job_params), multiprocessing.cpu_count())
+ _fork_params = job_params
+ _fork_kwargs = job_kwargs
+ ret = multiprocessing.Pool(pool_size)
+ _fork_params = None
+ _fork_kwargs = None
+ if _all_pools is None:
+ _all_pools = []
+ atexit.register(_TerminatePools)
+ _all_pools.append(ret)
+ return ret
+
+
+def ForkAndCall(func, args):
+ """Runs |func| in a fork'ed process.
+
+ Returns:
+ A Result object (call .get() to get the return value)
+ """
+ if DISABLE_ASYNC:
+ pool = None
+ result = _ImmediateResult(func(*args))
+ else:
+ pool = _MakeProcessPool([args]) # Omit |kwargs|.
+ result = pool.apply_async(_FuncWrapper(func), (0, ))
+ pool.close()
+ return _WrappedResult(result, pool=pool)
+
+
+def BulkForkAndCall(func, arg_tuples, **kwargs):
+ """Calls |func| in a fork'ed process for each set of args within |arg_tuples|.
+
+ Args:
+ kwargs: Common keyword arguments to be passed to |func|.
+
+ Yields the return values in order.
+ """
+ arg_tuples = list(arg_tuples)
+ if not arg_tuples:
+ return
+
+ if DISABLE_ASYNC:
+ for args in arg_tuples:
+ yield func(*args, **kwargs)
+ return
+
+ pool = _MakeProcessPool(arg_tuples, **kwargs)
+ wrapped_func = _FuncWrapper(func)
+ try:
+ for result in pool.imap(wrapped_func, xrange(len(arg_tuples))):
+ _CheckForException(result)
+ yield result
+ finally:
+ pool.close()
+ pool.join()
+ _all_pools.remove(pool)
diff --git a/chromium/build/android/gyp/util/resource_utils.py b/chromium/build/android/gyp/util/resource_utils.py
index 1b92c4fb49e..7b16949f9d3 100644
--- a/chromium/build/android/gyp/util/resource_utils.py
+++ b/chromium/build/android/gyp/util/resource_utils.py
@@ -57,6 +57,7 @@ AAPT_IGNORE_PATTERN = ':'.join([
'*~', # Some editors create these as temp files.
'.*', # Never makes sense to include dot(files/dirs).
'*.d.stamp', # Ignore stamp files
+ '*.backup', # Some tools create temporary backup files.
])
MULTIPLE_RES_MAGIC_STRING = b'magic'
diff --git a/chromium/build/android/gyp/write_build_config.py b/chromium/build/android/gyp/write_build_config.py
index 02b02fcd538..5e3897c4913 100755
--- a/chromium/build/android/gyp/write_build_config.py
+++ b/chromium/build/android/gyp/write_build_config.py
@@ -242,18 +242,22 @@ through Proguard or other tools. For most targets this is generated
from sources, with a name like `$target_name.javac.jar`. However, when using
a prebuilt jar, this will point to the source archive directly.
-* `deps_info['jar_path']`:
+* `deps_info['device_jar_path']`:
Path to a file that is the result of processing
-`deps_info['unprocessed_jar_path']` with various tools.
+`deps_info['unprocessed_jar_path']` with various tools (ready to be dexed).
+
+* `deps_info['host_jar_path']`:
+Path to a file that is the result of processing
+`deps_info['unprocessed_jar_path']` with various tools (used by java_binary).
* `deps_info['interface_jar_path']:
Path to the interface jar generated for this library. This corresponds to
a jar file that only contains declarations. Generated by running the `ijar` on
-`deps_info['jar_path']` or the `turbine` tool on source files.
+`deps_info['unprocessed_jar_path']` or the `turbine` tool on source files.
* `deps_info['dex_path']`:
-Path to the `.dex` file generated for this target, from `deps_info['jar_path']`
-unless this comes from a prebuilt `.aar` archive.
+Path to the `.dex` file generated for this target, from
+`deps_info['device_jar_path']` unless this comes from a prebuilt `.aar` archive.
* `deps_info['is_prebuilt']`:
True to indicate that this target corresponds to a prebuilt `.jar` file.
@@ -323,10 +327,10 @@ all entries from the `java_library` type, and adds:
* `deps_info['main_class']`:
Name of the main Java class that serves as an entry point for the binary.
-* `deps_info['java_runtime_classpath']`:
+* `deps_info['device_classpath']`:
The classpath used when running a Java or Android binary. Essentially the
-collection of all `deps_info['jar_path']` entries for the target and all its
-dependencies.
+collection of all `deps_info['device_jar_path']` entries for the target and all
+its dependencies.
## <a name="target_junit_binary">Target type `junit_binary`</a>:
@@ -701,8 +705,8 @@ class Deps(object):
def helper(cur):
for config in cur.Direct('java_library'):
if config['is_prebuilt'] or config['gradle_treat_as_prebuilt']:
- if config['jar_path'] not in ret:
- ret.append(config['jar_path'])
+ if config['unprocessed_jar_path'] not in ret:
+ ret.append(config['unprocessed_jar_path'])
helper(self)
return ret
@@ -837,6 +841,15 @@ def _CreateJavaLocaleListFromAssets(assets, locale_paks):
return '{%s}' % ','.join(['"%s"' % l for l in sorted(locales)])
+def _AddJarMapping(jar_to_target, configs):
+ for config in configs:
+ jar = config.get('unprocessed_jar_path')
+ if jar:
+ jar_to_target[jar] = config['gn_target']
+ for jar in config.get('extra_classpath_jars', []):
+ jar_to_target[jar] = config['gn_target']
+
+
def main(argv):
parser = optparse.OptionParser()
build_utils.AddDepfileOption(parser)
@@ -844,6 +857,7 @@ def main(argv):
parser.add_option(
'--type',
help='Type of this target (e.g. android_library).')
+ parser.add_option('--gn-target', help='GN label for this target')
parser.add_option(
'--deps-configs',
help='GN-list of dependent build_config files.')
@@ -875,7 +889,8 @@ def main(argv):
help='Consider the assets as locale paks in BuildConfig.java')
# java library options
- parser.add_option('--jar-path', help='Path to target\'s jar output.')
+ parser.add_option('--device-jar-path', help='Path to .jar for dexing.')
+ parser.add_option('--host-jar-path', help='Path to .jar for java_binary.')
parser.add_option('--unprocessed-jar-path',
help='Path to the .jar to use for javac classpath purposes.')
parser.add_option(
@@ -884,10 +899,6 @@ def main(argv):
parser.add_option(
'--jetified-jar-path',
help='Path to the jetified.jar to use for javac classpath purposes.')
- parser.add_option(
- '--skip-jetify',
- action='store_true',
- help='Whether to use jetified or non-jetified classpath.')
parser.add_option('--is-prebuilt', action='store_true',
help='Whether the jar was compiled or pre-compiled.')
parser.add_option('--java-sources-file', help='Path to .sources file')
@@ -1039,11 +1050,13 @@ def main(argv):
if options.fail:
parser.error('\n'.join(build_utils.ParseGnList(options.fail)))
- jar_path_options = ['jar_path', 'unprocessed_jar_path', 'interface_jar_path']
+ lib_options = ['unprocessed_jar_path', 'interface_jar_path']
+ device_lib_options = ['device_jar_path', 'dex_path']
required_options_map = {
- 'android_apk': ['build_config', 'dex_path'] + jar_path_options,
- 'android_app_bundle_module': ['build_config', 'dex_path',
- 'final_dex_path', 'res_size_info'] + jar_path_options,
+ 'android_apk': ['build_config'] + lib_options + device_lib_options,
+ 'android_app_bundle_module':
+ ['build_config', 'final_dex_path', 'res_size_info'] + lib_options +
+ device_lib_options,
'android_assets': ['build_config'],
'android_resources': ['build_config', 'resources_zip'],
'dist_aar': ['build_config'],
@@ -1051,9 +1064,9 @@ def main(argv):
'group': ['build_config'],
'java_annotation_processor': ['build_config', 'main_class'],
'java_binary': ['build_config'],
- 'java_library': ['build_config'] + jar_path_options,
+ 'java_library': ['build_config', 'host_jar_path'] + lib_options,
'junit_binary': ['build_config'],
- 'system_java_library': ['build_config'],
+ 'system_java_library': ['build_config', 'unprocessed_jar_path'],
'android_app_bundle': ['build_config', 'module_build_configs'],
}
required_options = required_options_map.get(options.type)
@@ -1093,10 +1106,10 @@ def main(argv):
'--library-renames can only be used with --type=android_apk or '
'--type=android_app_bundle_module')
- if options.jar_path and options.supports_android and not options.dex_path:
+ if options.device_jar_path and not options.dex_path:
raise Exception('java_library that supports Android requires a dex path.')
- if any(getattr(options, x) for x in jar_path_options):
- for attr in jar_path_options:
+ if any(getattr(options, x) for x in lib_options):
+ for attr in lib_options:
if not getattr(options, attr):
raise('Expected %s to be set.' % attr)
@@ -1152,6 +1165,7 @@ def main(argv):
'name': os.path.basename(options.build_config),
'path': options.build_config,
'type': options.type,
+ 'gn_target': options.gn_target,
'deps_configs': deps.direct_deps_config_paths,
'chromium_code': not options.non_chromium_code,
},
@@ -1254,20 +1268,21 @@ def main(argv):
raise Exception('Not all deps support the Android platform: '
+ str(deps_not_support_android))
- if is_apk_or_module_target:
+ if is_apk_or_module_target or options.type == 'dist_jar':
all_dex_files = [c['dex_path'] for c in all_library_deps]
if is_java_target:
# Classpath values filled in below (after applying tested_apk_config).
config['javac'] = {}
- if options.jar_path:
- deps_info['jar_path'] = options.jar_path
+ if options.unprocessed_jar_path:
deps_info['unprocessed_jar_path'] = options.unprocessed_jar_path
deps_info['interface_jar_path'] = options.interface_jar_path
- if options.skip_jetify:
- deps_info['jetified_jar_path'] = options.interface_jar_path
- else:
- deps_info['jetified_jar_path'] = options.jetified_jar_path
+ if options.device_jar_path:
+ deps_info['device_jar_path'] = options.device_jar_path
+ if options.host_jar_path:
+ deps_info['host_jar_path'] = options.host_jar_path
+ deps_info['jetified_jar_path'] = (options.jetified_jar_path
+ or options.interface_jar_path)
if options.dex_path:
deps_info['dex_path'] = options.dex_path
if is_apk_or_module_target:
@@ -1403,7 +1418,7 @@ def main(argv):
# Adding base module to classpath to compile against its R.java file
if base_module_build_config:
javac_full_classpath.append(
- base_module_build_config['deps_info']['jar_path'])
+ base_module_build_config['deps_info']['unprocessed_jar_path'])
javac_full_interface_classpath.append(
base_module_build_config['deps_info']['interface_jar_path'])
jetified_full_jar_classpath.append(
@@ -1459,15 +1474,24 @@ def main(argv):
if is_java_target or options.type == 'android_app_bundle':
# The classpath to use to run this target (or as an input to ProGuard).
- java_full_classpath = []
- if is_java_target and options.jar_path:
- java_full_classpath.append(options.jar_path)
- java_full_classpath.extend(c['jar_path'] for c in all_library_deps)
+ device_classpath = []
+ if is_java_target and options.device_jar_path:
+ device_classpath.append(options.device_jar_path)
+ device_classpath.extend(
+ c.get('device_jar_path') for c in all_library_deps
+ if c.get('device_jar_path'))
if options.type == 'android_app_bundle':
for d in deps.Direct('android_app_bundle_module'):
- java_full_classpath.extend(
- c for c in d.get('java_runtime_classpath', [])
- if c not in java_full_classpath)
+ device_classpath.extend(c for c in d.get('device_classpath', [])
+ if c not in device_classpath)
+
+ if options.type in ('dist_jar', 'java_binary', 'junit_binary'):
+ # The classpath to use to run this target.
+ host_classpath = []
+ if options.host_jar_path:
+ host_classpath.append(options.host_jar_path)
+ host_classpath.extend(c['host_jar_path'] for c in all_library_deps)
+ deps_info['host_classpath'] = host_classpath
all_configs = build_utils.ParseGnList(options.proguard_configs)
deps_info['proguard_configs'] = list(all_configs)
@@ -1563,7 +1587,7 @@ def main(argv):
if dep_config['type'] == 'android_app_bundle':
base_config = GetDepConfig(dep_config['base_module_config'])
extra_main_r_text_files.append(base_config['r_text_path'])
- static_lib_jar_paths[config_path] = base_config['jar_path']
+ static_lib_jar_paths[config_path] = base_config['device_jar_path']
all_configs.extend(dep_config['proguard_all_configs'])
extra_proguard_classpath_jars.extend(
dep_config['proguard_classpath_jars'])
@@ -1578,19 +1602,19 @@ def main(argv):
for package in base_config['extra_package_names']:
if package not in extra_package_names:
extra_package_names.append(package)
- for cp_entry in dep_config['java_runtime_classpath']:
+ for cp_entry in dep_config['device_classpath']:
configs_by_classpath_entry[cp_entry].append(config_path)
- for cp_entry in java_full_classpath:
+ for cp_entry in device_classpath:
configs_by_classpath_entry[cp_entry].append(options.build_config)
for cp_entry, candidate_configs in configs_by_classpath_entry.iteritems():
config_path = (candidate_configs[0]
if len(candidate_configs) == 1 else options.build_config)
classpath_entries_by_owning_config[config_path].append(cp_entry)
- java_full_classpath.append(cp_entry)
+ device_classpath.append(cp_entry)
- java_full_classpath = sorted(set(java_full_classpath))
+ device_classpath = sorted(set(device_classpath))
deps_info['static_library_proguard_mapping_output_paths'] = sorted([
d['proguard_mapping_path']
@@ -1606,7 +1630,7 @@ def main(argv):
'junit_binary'):
deps_info['jni']['all_source'] = sorted(set(all_java_sources))
- system_jars = [c['jar_path'] for c in system_library_deps]
+ system_jars = [c['unprocessed_jar_path'] for c in system_library_deps]
system_interface_jars = [c['interface_jar_path'] for c in system_library_deps]
if system_library_deps:
config['android'] = {}
@@ -1635,7 +1659,7 @@ def main(argv):
deps_proguard_enabled = []
deps_proguard_disabled = []
for d in deps.Direct('android_app_bundle_module'):
- if not d['java_runtime_classpath']:
+ if not d['device_classpath']:
# We don't care about modules that have no Java code for proguarding.
continue
if d['proguard_enabled']:
@@ -1685,9 +1709,10 @@ def main(argv):
# Add all tested classes to the test's classpath to ensure that the test's
# java code is a superset of the tested apk's java code
- java_full_classpath.extend(
- p for p in tested_apk_config['java_runtime_classpath']
- if p not in java_full_classpath)
+ device_classpath_extended = list(device_classpath)
+ device_classpath_extended.extend(
+ p for p in tested_apk_config['device_classpath']
+ if p not in device_classpath)
# Include in the classpath classes that are added directly to the apk under
# test (those that are not a part of a java_library).
javac_classpath.append(tested_apk_config['unprocessed_jar_path'])
@@ -1706,13 +1731,13 @@ def main(argv):
p for p in tested_apk_config['javac_full_classpath']
if p not in javac_full_classpath)
- # Exclude dex files from the test apk that exist within the apk under test.
- # TODO(agrieve): When proguard is enabled, this filtering logic happens
- # within proguard.py. Move the logic for the proguard case to here.
+ # Exclude .jar files from the test apk that exist within the apk under test.
tested_apk_library_deps = tested_apk_deps.All('java_library')
- tested_apk_deps_dex_files = [c['dex_path'] for c in tested_apk_library_deps]
- all_dex_files = [
- p for p in all_dex_files if not p in tested_apk_deps_dex_files
+ tested_apk_dex_files = {c['dex_path'] for c in tested_apk_library_deps}
+ all_dex_files = [p for p in all_dex_files if p not in tested_apk_dex_files]
+ tested_apk_jar_files = set(tested_apk_config['device_classpath'])
+ device_classpath = [
+ p for p in device_classpath if p not in tested_apk_jar_files
]
if options.type in ('android_apk', 'dist_aar', 'dist_jar',
@@ -1722,20 +1747,27 @@ def main(argv):
set(extra_proguard_classpath_jars))
# Dependencies for the final dex file of an apk.
- if is_apk_or_module_target or options.final_dex_path:
+ if (is_apk_or_module_target or options.final_dex_path
+ or options.type == 'dist_jar'):
config['final_dex'] = {}
dex_config = config['final_dex']
dex_config['path'] = options.final_dex_path
- if is_apk_or_module_target:
+ if is_apk_or_module_target or options.type == 'dist_jar':
dex_config['all_dex_files'] = all_dex_files
if is_java_target:
config['javac']['classpath'] = javac_classpath
config['javac']['interface_classpath'] = javac_interface_classpath
- # Direct() will be of type 'java_annotation_processor'.
+ # Direct() will be of type 'java_annotation_processor', and so not included
+ # in All('java_library').
+ # Annotation processors run as part of the build, so need host_jar_path.
config['javac']['processor_classpath'] = [
- c['jar_path'] for c in processor_deps.Direct() if c.get('jar_path')] + [
- c['jar_path'] for c in processor_deps.All('java_library')]
+ c['host_jar_path'] for c in processor_deps.Direct()
+ if c.get('host_jar_path')
+ ]
+ config['javac']['processor_classpath'] += [
+ c['host_jar_path'] for c in processor_deps.All('java_library')
+ ]
config['javac']['processor_classes'] = [
c['main_class'] for c in processor_deps.Direct()]
deps_info['javac_full_classpath'] = javac_full_classpath
@@ -1746,16 +1778,18 @@ def main(argv):
javac_full_classpath = set()
for d in deps.Direct('android_app_bundle_module'):
javac_full_classpath.update(p for p in d['javac_full_classpath'])
- javac_full_classpath.add(d['jar_path'])
+ javac_full_classpath.add(d['unprocessed_jar_path'])
deps_info['javac_full_classpath'] = sorted(javac_full_classpath)
- if options.type in ('android_apk', 'dist_jar', 'java_binary', 'junit_binary',
- 'android_app_bundle_module', 'android_app_bundle'):
- deps_info['java_runtime_classpath'] = java_full_classpath
+ if options.type in ('android_apk', 'dist_jar', 'android_app_bundle_module',
+ 'android_app_bundle'):
+ deps_info['device_classpath'] = device_classpath
+ if options.tested_apk_config:
+ deps_info['java_runtime_classpath_extended'] = (device_classpath_extended)
if options.type in ('android_apk', 'dist_jar'):
all_interface_jars = []
- if options.jar_path:
+ if options.interface_jar_path:
all_interface_jars.append(options.interface_jar_path)
all_interface_jars.extend(c['interface_jar_path'] for c in all_library_deps)
@@ -1847,7 +1881,7 @@ def main(argv):
# are not duplicated on the feature module.
if base_module_build_config:
base = base_module_build_config
- RemoveObjDups(config, base, 'deps_info', 'java_runtime_classpath')
+ RemoveObjDups(config, base, 'deps_info', 'device_classpath')
RemoveObjDups(config, base, 'deps_info', 'javac_full_classpath')
RemoveObjDups(config, base, 'deps_info', 'javac_full_interface_classpath')
RemoveObjDups(config, base, 'deps_info', 'jetified_full_jar_classpath')
@@ -1855,11 +1889,29 @@ def main(argv):
RemoveObjDups(config, base, 'final_dex', 'all_dex_files')
RemoveObjDups(config, base, 'extra_android_manifests')
+ if is_java_target:
+ jar_to_target = {}
+ _AddJarMapping(jar_to_target, [deps_info])
+ _AddJarMapping(jar_to_target, deps.all_deps_configs)
+ if base_module_build_config:
+ _AddJarMapping(jar_to_target, [base_module_build_config['deps_info']])
+ if options.tested_apk_config:
+ _AddJarMapping(jar_to_target, [tested_apk_config])
+ for jar, target in itertools.izip(
+ tested_apk_config['javac_full_classpath'],
+ tested_apk_config['javac_full_classpath_targets']):
+ jar_to_target[jar] = target
+
+ # Used by bytecode_processor to give better error message when missing
+ # deps are found.
+ config['deps_info']['javac_full_classpath_targets'] = [
+ jar_to_target[x] for x in deps_info['javac_full_classpath']
+ ]
+
build_utils.WriteJson(config, options.build_config, only_if_changed=True)
if options.depfile:
- build_utils.WriteDepfile(options.depfile, options.build_config, all_inputs,
- add_pydeps=False) # pydeps listed in GN.
+ build_utils.WriteDepfile(options.depfile, options.build_config, all_inputs)
if __name__ == '__main__':
diff --git a/chromium/build/android/gyp/write_native_libraries_java.py b/chromium/build/android/gyp/write_native_libraries_java.py
index 65688b9fd05..cb0c5d398ec 100755
--- a/chromium/build/android/gyp/write_native_libraries_java.py
+++ b/chromium/build/android/gyp/write_native_libraries_java.py
@@ -26,6 +26,7 @@ def _FormatLibraryName(library_name):
def main():
parser = argparse.ArgumentParser()
+ build_utils.AddDepfileOption(parser)
parser.add_argument('--final', action='store_true', help='Use final fields.')
parser.add_argument(
'--enable-chromium-linker',
@@ -97,6 +98,12 @@ def main():
zip_path='org/chromium/base/library_loader/NativeLibraries.java',
data=NATIVE_LIBRARIES_TEMPLATE.format(**format_dict))
+ if options.depfile:
+ assert options.native_libraries_list
+ build_utils.WriteDepfile(options.depfile,
+ options.output,
+ inputs=[options.native_libraries_list])
+
if __name__ == '__main__':
sys.exit(main())
diff --git a/chromium/build/android/gyp/zip.py b/chromium/build/android/gyp/zip.py
index b9503960fa3..ed8f61a9c9e 100755
--- a/chromium/build/android/gyp/zip.py
+++ b/chromium/build/android/gyp/zip.py
@@ -63,8 +63,9 @@ def main(args):
# Depfile used only by dist_jar().
if options.depfile:
- build_utils.WriteDepfile(
- options.depfile, options.output, inputs=depfile_deps, add_pydeps=False)
+ build_utils.WriteDepfile(options.depfile,
+ options.output,
+ inputs=depfile_deps)
if __name__ == '__main__':
diff --git a/chromium/build/android/incremental_install/BUILD.gn b/chromium/build/android/incremental_install/BUILD.gn
index 9cef85696da..8d26e9622b0 100644
--- a/chromium/build/android/incremental_install/BUILD.gn
+++ b/chromium/build/android/incremental_install/BUILD.gn
@@ -5,8 +5,6 @@
import("//build/config/android/rules.gni")
android_library("bootstrap_java") {
- # Use .dex rather than .dex.jar to be usable by package_apk().
- dex_path = "$target_out_dir/bootstrap.dex"
sources = [
"java/org/chromium/incrementalinstall/BootstrapApplication.java",
"java/org/chromium/incrementalinstall/BootstrapInstrumentation.java",
@@ -18,3 +16,8 @@ android_library("bootstrap_java") {
jacoco_never_instrument = true
no_build_hooks = true
}
+
+dist_dex("apk_dex") {
+ output = "$target_out_dir/apk.dex"
+ deps = [ ":bootstrap_java" ]
+}
diff --git a/chromium/build/android/lint/suppressions.xml b/chromium/build/android/lint/suppressions.xml
index 638b6c82c29..ac9f8e7aa27 100644
--- a/chromium/build/android/lint/suppressions.xml
+++ b/chromium/build/android/lint/suppressions.xml
@@ -1,4 +1,4 @@
-<?xml version="1.0" encoding="utf-8"?>
+<?xml version="1.0" encoding="utf-8" ?>
<lint>
<!--
STOP! It looks like you want to suppress some lint errors:
@@ -6,19 +6,12 @@ STOP! It looks like you want to suppress some lint errors:
Ask the author for a fix and/or revert the patch.
- It is preferred to add suppressions in the code instead of
sweeping it under the rug here. See:
-
http://developer.android.com/tools/debugging/improving-w-lint.html
Still reading?
-- You can edit this file manually to suppress an issue
- globally if it is not applicable to the project. When inserting new tags,
- keep this file in sorted order.
-- You can also automatically add issues found so for in the
- build process by running:
-
- build/android/lint/suppress.py
-
- which will generate this file (Comments are not preserved).
+- Edit this file manually to suppress an issue. Please make the suppression as
+ local as possible, i.e. by warning message or by file.
+- When adding new issues, please keep the issue ids in sorted order.
-->
<issue id="AcceptsUserCertificates">
<!-- See https://crbug.com/827265 and comment in the file for context. -->
@@ -26,10 +19,7 @@ Still reading?
<ignore regexp="android_webview/tools/system_webview_shell/apk/res/xml/network_security_config.xml"/>
<ignore regexp="test"/>
</issue>
- <!-- AllowBackup defaults to true, and causes a lint warning if not explicitly set. -->
- <issue id="AllowBackup">
- <ignore path="AndroidManifest.xml"/>
- </issue>
+ <issue id="AllowBackup" severity="ignore"/>
<!-- TODO(crbug.com/804427): Remove this suppression or add rationale. -->
<issue id="AppCompatResource" severity="ignore"/>
<!-- We use asserts in Chromium. See https://chromium.googlesource.com/chromium/src/+/master/styleguide/java/java.md#Asserts -->
@@ -72,7 +62,6 @@ Still reading?
<issue id="DefaultLocale">
<ignore regexp="clank"/>
<ignore regexp="com/android/tv"/>
- <ignore regexp="org/chromium/chrome/browser/payments/PaymentRequestMetricsTest.class"/>
<ignore regexp="third_party/cacheinvalidation/src/java/com/google/ipc/invalidation/external/client/contrib/AndroidListenerState.java"/>
<!-- TODO(crbug.com/1081240): Fix -->
<ignore regexp="chrome/android/feed/core/java/src/org/chromium/chrome/browser/feed/FeedSurfaceMediator.java"/>
@@ -101,38 +90,24 @@ Still reading?
<!-- TODO(crbug.com/804438): Cannot update until android.media.ExifInterface supports file descriptors -->
<ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/photo_picker/BitmapUtils.java"/>
</issue>
- <issue id="ExportedContentProvider">
- <ignore path="AndroidManifest.xml"/>
- </issue>
- <issue id="ExportedService" severity="Error">
- <ignore regexp="AndroidManifest.xml"/>
- </issue>
+ <issue id="ExportedContentProvider" severity="ignore"/>
+ <issue id="ExportedService" severity="ignore"/>
<!-- TODO(crbug.com/635567): Fix this properly. -->
- <issue id="GoogleAppIndexingUrlError" severity="Error">
- <ignore regexp="AndroidManifest.xml"/>
- </issue>
+ <issue id="GoogleAppIndexingUrlError" severity="ignore"/>
<!-- TODO(crbug.com/635567): Fix this properly. -->
- <issue id="GoogleAppIndexingWarning" severity="Error">
- <ignore regexp="AndroidManifest.xml"/>
- </issue>
+ <issue id="GoogleAppIndexingWarning" severity="ignore"/>
<issue id="HandlerLeak">
<ignore regexp="android_webview/glue/java/src/com/android/webview/chromium/WebViewContentsClientAdapter.java"/>
<ignore regexp="chromecast/internal"/>
<ignore regexp="remoting/android/java/src/org/chromium/chromoting/TapGestureDetector.java"/>
</issue>
- <issue id="HardcodedDebugMode" severity="Fatal">
- <ignore path="AndroidManifest.xml"/>
- </issue>
+ <issue id="HardcodedDebugMode" severity="ignore"/>
<issue id="HardcodedText" severity="Error">
<ignore regexp="chromecast/internal"/>
<ignore regexp="remoting/android/host/res/layout/main.xml"/>
</issue>
<issue id="IconColors" severity="Error">
- <ignore regexp="tools/android/audio_focus_grabber/java/res/drawable-hdpi/notification_icon.png"/>
- <ignore regexp="tools/android/audio_focus_grabber/java/res/drawable-mdpi/notification_icon.png"/>
- <ignore regexp="tools/android/audio_focus_grabber/java/res/drawable-xhdpi/notification_icon.png"/>
- <ignore regexp="tools/android/audio_focus_grabber/java/res/drawable-xxhdpi/notification_icon.png"/>
- <ignore regexp="tools/android/audio_focus_grabber/java/res/drawable-xxxhdpi/notification_icon.png"/>
+ <ignore regexp="tools/android/audio_focus_grabber/java/res/drawable-.*/notification_icon.png"/>
</issue>
<issue id="IconDensities">
<!-- This is intentional to reduce APK size. See: http://crrev/c/1352161 -->
@@ -207,6 +182,7 @@ Still reading?
<ignore regexp="android_webview/tools/system_webview_shell/apk/res/xml/network_security_config.xml"/>
<ignore regexp="test"/>
</issue>
+ <issue id="Instantiatable" severity="ignore"/>
<issue id="InconsistentArrays" severity="Error">
<ignore regexp="android_webview/locale_paks.resources.zip/values/locale-paks.xml"/>
<ignore regexp="chrome/android/chrome_locale_paks.resources.zip/values/locale-paks.xml"/>
@@ -236,6 +212,7 @@ Still reading?
<ignore regexp="chrome/android/feed/core/java/res/layout/feed_more_button.xml"/>
</issue>
<issue id="MissingApplicationIcon" severity="ignore"/>
+ <issue id="MissingClass" severity="ignore"/>
<issue id="MissingDefaultResource">
<!-- Only used by ToolbarControlContainer guarded by tablet form-factor. -->
<ignore regexp="toolbar_background.9.png"/>
@@ -247,12 +224,7 @@ Still reading?
<issue id="MissingPermission" severity="ignore"/>
<!-- TODO(yolandyan) remove this once all tests are converted to junit4 -->
<issue id="MissingPrefix" severity="ignore"/>
- <issue id="MissingQuantity">
- <ignore regexp="android_chrome_strings.xml"/>
- <ignore regexp="android_chrome_tab_ui_strings.xml"/>
- <ignore regexp="components/browser_ui/strings/android/browser_ui_strings_grd"/>
- <ignore regexp="clank/third_party/chime/chime_systemtray_strings_grd.resources.zip"/>
- </issue>
+ <issue id="MissingQuantity" severity="ignore"/>
<issue id="MissingRegistered" severity="ignore"/>
<issue id="MissingSuperCall" severity="Error">
<ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/widget/selection/SelectionToolbar.java"/>
@@ -263,9 +235,7 @@ Still reading?
<ignore regexp="restriction_values.xml.*"/>
<ignore regexp="remoting/resources/strings_java.resources.zip"/>
</issue>
- <issue id="MissingVersion">
- <ignore path="AndroidManifest.xml"/>
- </issue>
+ <issue id="MissingVersion" severity="ignore"/>
<issue id="NewApi">
<!-- Do not add new suppressions without rationale. -->
<!-- 2: We support these via desugar. -->
@@ -275,21 +245,17 @@ Still reading?
<ignore regexp="Field requires API level .*`android.app.TaskInfo"/>
<!-- 1: This is for testonly target android_support_chromium_java in android_sdk. -->
<ignore regexp="third_party/android_sdk/public/extras/chromium/support/src/org/chromium/android/support/PackageManagerWrapper.java"/>
- <!-- 1: TODO(crbug.com/1081242): Fix -->
- <ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/multiwindow/MultiWindowUtils.java"/>
- <!-- 1: TODO(crbug.com/1081243): Fix -->
- <ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/photo_picker/PickerVideoPlayer.java"/>
- <!-- 1: TODO(crbug.com/1081280): Fix -->
- <ignore regexp="chrome/android/features/tab_ui/javatests/src/org/chromium/chrome/browser/tasks/tab_management/ConditionalTabStripTest.java"/>
<!-- 1: TODO(crbug.com/1082222): Fix -->
<ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/omnibox/suggestions/header/HeaderView.java"/>
+ <!-- 1: TODO(crbug.com/1085410): Fix -->
+ <ignore regexp="components/content_capture/android/java/src/org/chromium/components/content_capture"/>
+ <!-- 1: TODO(crbug.com/1085487): Fix -->
+ <ignore regexp="chrome/android/javatests/src/org/chromium/chrome/browser/directactions/DirectActionTestRule.java"/>
<!-- Endnote: Please specify number of suppressions when adding more -->
</issue>
<!-- This warning just adds a lot of false positives. -->
<issue id="ObsoleteSdkInt" severity="ignore"/>
- <issue id="OldTargetApi">
- <ignore path="AndroidManifest.xml"/>
- </issue>
+ <issue id="OldTargetApi" severity="ignore"/>
<issue id="OnClick">
<!-- False positive, see: http://issuetracker.google.com/148523770 for similar issue. -->
<ignore regexp="tools/android/audio_focus_grabber/java/res/layout/audio_focus_grabber_activity.xml"/>
@@ -322,6 +288,10 @@ Still reading?
<ignore regexp="chrome/android/java/res/layout/sheet_tab_toolbar.xml"/>
</issue>
<issue id="RtlSymmetry" severity="ignore"/>
+ <issue id="SetTextI18n">
+ <!-- Tests can use setText without translating. -->
+ <ignore regexp="/javatests/"/>
+ </issue>
<issue id="SetJavaScriptEnabled" severity="ignore"/>
<issue id="SignatureOrSystemPermissions" severity="ignore"/>
<issue id="SpUsage" severity="Error">
@@ -336,23 +306,14 @@ Still reading?
<ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-cs/android_chrome_strings.xml"/>
<ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-fr/android_chrome_strings.xml"/>
<ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-pl/android_chrome_strings.xml"/>
+ <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-ur/android_chrome_strings.xml"/>
<ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values/android_chrome_strings.xml"/>
+ <!-- This string has a % in it. -->
+ <ignore regexp="data_reduction_promo_infobar_title"/>
</issue>
- <!-- Most .xtb files in this group have a % that is not part of a formatted string. https://crbug.com/941164 -->
- <issue id="StringFormatInvalid" severity="Error">
- <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-cs/android_chrome_strings.xml"/>
- <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-da/android_chrome_strings.xml"/>
- <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-et/android_chrome_strings.xml"/>
- <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-is/android_chrome_strings.xml"/>
- <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-in/android_chrome_strings.xml"/>
- <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-pl/android_chrome_strings.xml"/>
- <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-pt-rBR/android_chrome_strings.xml"/>
- <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-sq/android_chrome_strings.xml"/>
- <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-sv/android_chrome_strings.xml"/>
- <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-tl/android_chrome_strings.xml"/>
- <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-uz/android_chrome_strings.xml"/>
- <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-zu/android_chrome_strings.xml"/>
- </issue>
+ <!-- Many .xtb files have a % that is not part of a formatted string. https://crbug.com/941164 -->
+ <issue id="StringFormatInvalid" severity="ignore"/>
+ <issue id="StringFormatMatches" severity="ignore"/>
<!-- We have many C++ enums that we don't care about in java -->
<issue id="SwitchIntDef" severity="ignore"/>
<issue id="TextFields" severity="Error">
@@ -367,63 +328,27 @@ Still reading?
<issue id="UniqueConstants" severity="ignore"/>
<issue id="UnusedAttribute" severity="ignore"/>
<issue id="UnusedIds" severity="ignore"/>
- <issue id="UnusedQuantity" severity="Error">
- <!-- This is needed for suppressing warnings on upstream and downstream build bots -->
- <ignore regexp="chrome/android/features/tab_ui/java_strings_grd.resources.zip/values-cs/android_chrome_tab_ui_strings.xml"/>
- <ignore regexp="chrome/android/features/tab_ui/java_strings_grd.resources.zip/values-in/android_chrome_tab_ui_strings.xml"/>
- <ignore regexp="chrome/android/features/tab_ui/java_strings_grd.resources.zip/values-ja/android_chrome_tab_ui_strings.xml"/>
- <ignore regexp="chrome/android/features/tab_ui/java_strings_grd.resources.zip/values-km/android_chrome_tab_ui_strings.xml"/>
- <ignore regexp="chrome/android/features/tab_ui/java_strings_grd.resources.zip/values-ko/android_chrome_tab_ui_strings.xml"/>
- <ignore regexp="chrome/android/features/tab_ui/java_strings_grd.resources.zip/values-lo/android_chrome_tab_ui_strings.xml"/>
- <ignore regexp="chrome/android/features/tab_ui/java_strings_grd.resources.zip/values-lt/android_chrome_tab_ui_strings.xml"/>
- <ignore regexp="chrome/android/features/tab_ui/java_strings_grd.resources.zip/values-ms/android_chrome_tab_ui_strings.xml"/>
- <ignore regexp="chrome/android/features/tab_ui/java_strings_grd.resources.zip/values-my/android_chrome_tab_ui_strings.xml"/>
- <ignore regexp="chrome/android/features/tab_ui/java_strings_grd.resources.zip/values-sk/android_chrome_tab_ui_strings.xml"/>
- <ignore regexp="chrome/android/features/tab_ui/java_strings_grd.resources.zip/values-th/android_chrome_tab_ui_strings.xml"/>
- <ignore regexp="chrome/android/features/tab_ui/java_strings_grd.resources.zip/values-vi/android_chrome_tab_ui_strings.xml"/>
- <ignore regexp="chrome/android/features/tab_ui/java_strings_grd.resources.zip/values-zh-rCN/android_chrome_tab_ui_strings.xml"/>
- <ignore regexp="chrome/android/features/tab_ui/java_strings_grd.resources.zip/values-zh-rHK/android_chrome_tab_ui_strings.xml"/>
- <ignore regexp="chrome/android/features/tab_ui/java_strings_grd.resources.zip/values-zh-rTW/android_chrome_tab_ui_strings.xml"/>
- <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-cs/android_chrome_strings.xml"/>
- <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-in/android_chrome_strings.xml"/>
- <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-ja/android_chrome_strings.xml"/>
- <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-km/android_chrome_strings.xml"/>
- <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-ko/android_chrome_strings.xml"/>
- <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-lo/android_chrome_strings.xml"/>
- <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-lt/android_chrome_strings.xml"/>
- <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-ms/android_chrome_strings.xml"/>
- <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-my/android_chrome_strings.xml"/>
- <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-sk/android_chrome_strings.xml"/>
- <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-th/android_chrome_strings.xml"/>
- <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-vi/android_chrome_strings.xml"/>
- <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-zh-rCN/android_chrome_strings.xml"/>
- <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-zh-rHK/android_chrome_strings.xml"/>
- <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-zh-rTW/android_chrome_strings.xml"/>
- <ignore regexp="clank/third_party/chime/chime_systemtray_strings_grd.resources.zip"/>
- <ignore regexp="components/browser_ui/strings/android/browser_ui_strings_grd"/>
- </issue>
+ <issue id="UnusedQuantity" severity="ignore"/>
<issue id="UnusedResources">
<!-- Do not add new suppressions without rationale. -->
- <!-- 3 raw resources are accessed by URL in various places -->
- <ignore regexp="gen/remoting/android/.*/res/raw/credits.html"/>
- <ignore regexp="gen/remoting/android/.*/res/raw/credits_css.css"/>
- <ignore regexp="gen/remoting/android/.*/res/raw/credits_js.js"/>
- <!-- 1 all resources in remoting internal -->
+ <!-- 1: raw resources are accessed by URL in various places -->
+ <ignore regexp="gen/remoting/android/.*/res/raw/credits.*"/>
+ <!-- 1: all resources in remoting internal -->
<ignore regexp="remoting/android/internal"/>
- <!-- 1 string test only, used in CronetSmokeTestCase dynamically -->
+ <!-- 1: string test only, used in CronetSmokeTestCase dynamically -->
<ignore regexp="R.string.TestSupportImplClass"/>
- <!-- 1 resource used by android webview glue layer, could be refactored -->
+ <!-- 1: resource used by android webview glue layer, could be refactored -->
<ignore regexp="R.string.private_browsing_warning"/>
- <!-- 4 The WAM server currently has 2 codes paths for minting a WebAPK, and
+ <!-- 4: The WAM server currently has 2 codes paths for minting a WebAPK, and
it needs these "unused" resources.
TODO(crbug.com/1001115): Remove suppression once 2 code paths are merged -->
<ignore regexp="The resource `R.mipmap.ic_launcher_background` appears to be unused"/>
<ignore regexp="The resource `R.mipmap.ic_launcher_foreground` appears to be unused"/>
<ignore regexp="The resource `R.mipmap.maskable_splash_icon_xxhdpi` appears to be unused"/>
<ignore regexp="The resource `R.mipmap.maskable_splash_icon_xxxhdpi` appears to be unused"/>
- <!-- 1 Module titles may only be used by the Play Store. -->
+ <!-- 1: Module titles may only be used by the Play Store. -->
<ignore regexp="The resource `R.string.*_module_title` appears to be unused"/>
- <!-- 2 resource sets used by clank widgets for each channel -->
+ <!-- 2: resource sets used by clank widgets for each channel -->
<ignore regexp="The resource `R.string.bookmark_widget_title.*` appears to be unused"/>
<ignore regexp="The resource `R.string.search_widget_title.*` appears to be unused"/>
<!-- crbug.com/1004570 remove this line and the following seven lines after the bug resolved -->
@@ -502,6 +427,8 @@ Still reading?
<ignore regexp="The resource `R.plurals.public_notification_text` appears to be unused"/>
<ignore regexp="The resource `R.mipmap.app_shortcut_icon` appears to be unused"/>
<ignore regexp="The resource `R.mipmap.app_single_page_icon` appears to be unused"/>
+ <!-- 1: Some strings in components_strings_grd are not used in other targets. -->
+ <ignore regexp="webview_.*__lint.*components_strings_grd"/>
<!-- Endnote: Please specify number of suppressions when adding more -->
</issue>
<issue id="UsableSpace">
@@ -525,9 +452,7 @@ Still reading?
<ignore regexp="chromecast/internal"/>
<ignore regexp="tools/android/kerberos/SpnegoAuthenticator/res/layout/activity_account_authenticator.xml"/>
</issue>
- <issue id="UsesMinSdkAttributes" severity="Error">
- <ignore regexp="AndroidManifest.xml"/>
- </issue>
+ <issue id="UsesMinSdkAttributes" severity="ignore"/>
<issue id="ValidFragment" severity="Error">
<ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/media/router/BaseMediaRouteDialogManager.java"/>
<ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/media/router/MediaRouteChooserDialogManager.java"/>
@@ -550,11 +475,13 @@ Still reading?
<issue id="WebViewApiAvailability" severity="ignore"/>
<issue id="WrongCall" severity="ignore"/>
<issue id="WrongConstant">
- <ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/SSLClientCertificateRequest.java"/>
<ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/bookmarks/BookmarkItemsAdapter.java"/>
<ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/instantapps/InstantAppsHandler.java"/>
<ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/widget/prefeditor/EditorDialog.java"/>
+ <ignore regexp="components/browser_ui/client_certificate/android/java/src/org/chromium/components/browser_ui/client_certificate/SSLClientCertificateRequest.java"/>
<ignore regexp="third_party/android_data_chart/java/src/org/chromium/third_party/android/datausagechart/ChartDataUsageView.java"/>
+ <!-- 1: TODO(crbug.com/1085411): Fix -->
+ <ignore regexp="media/base/android/java/src/org/chromium/media/MediaCodecEncoder.java"/>
<!-- Discussed in crbug.com/1069204, ignoring this class of errors since these are Q+ constants. -->
<ignore regexp="Must be one of: LineBreaker.BREAK_STRATEGY_SIMPLE, LineBreaker.BREAK_STRATEGY_HIGH_QUALITY, LineBreaker.BREAK_STRATEGY_BALANCED"/>
</issue>
diff --git a/chromium/build/android/list_class_verification_failures_test.py b/chromium/build/android/list_class_verification_failures_test.py
index a3da0fd6d7e..4248064c9da 100644
--- a/chromium/build/android/list_class_verification_failures_test.py
+++ b/chromium/build/android/list_class_verification_failures_test.py
@@ -6,16 +6,13 @@ import unittest
import list_class_verification_failures as list_verification
-from pylib.constants import host_paths
-
import devil_chromium # pylint: disable=unused-import
from devil.android import device_errors
from devil.android import device_utils
from devil.android.ndk import abis
from devil.android.sdk import version_codes
-with host_paths.SysPath(host_paths.PYMOCK_PATH):
- import mock # pylint: disable=import-error
+import mock # pylint: disable=import-error
def _CreateOdexLine(java_class_name, type_idx, verification_status):
@@ -40,7 +37,9 @@ class _DetermineDeviceToUseTest(unittest.TestCase):
return_value=fake_attached_devices)
result = list_verification.DetermineDeviceToUse(user_specified_devices)
self.assertEqual(result, fake_attached_devices[0])
+ # pylint: disable=no-member
device_utils.DeviceUtils.HealthyDevices.assert_called_with(device_arg=None)
+ # pylint: enable=no-member
def testDetermineDeviceToUse_emptyListWithNoAttachedDevices(self):
user_specified_devices = []
@@ -48,7 +47,9 @@ class _DetermineDeviceToUseTest(unittest.TestCase):
side_effect=device_errors.NoDevicesError())
with self.assertRaises(device_errors.NoDevicesError) as _:
list_verification.DetermineDeviceToUse(user_specified_devices)
+ # pylint: disable=no-member
device_utils.DeviceUtils.HealthyDevices.assert_called_with(device_arg=None)
+ # pylint: enable=no-member
def testDetermineDeviceToUse_oneElementListWithOneAttachedDevice(self):
user_specified_devices = ['123']
@@ -57,8 +58,10 @@ class _DetermineDeviceToUseTest(unittest.TestCase):
return_value=fake_attached_devices)
result = list_verification.DetermineDeviceToUse(user_specified_devices)
self.assertEqual(result, fake_attached_devices[0])
+ # pylint: disable=no-member
device_utils.DeviceUtils.HealthyDevices.assert_called_with(
device_arg=user_specified_devices)
+ # pylint: enable=no-member
class _ListClassVerificationFailuresTest(unittest.TestCase):
diff --git a/chromium/build/android/pylib/base/mock_environment.py b/chromium/build/android/pylib/base/mock_environment.py
index 9ebb083a086..5bdefd0a0d9 100644
--- a/chromium/build/android/pylib/base/mock_environment.py
+++ b/chromium/build/android/pylib/base/mock_environment.py
@@ -3,10 +3,8 @@
# found in the LICENSE file.
from pylib.base import environment
-from pylib.constants import host_paths
-with host_paths.SysPath(host_paths.PYMOCK_PATH):
- import mock # pylint: disable=import-error
+import mock # pylint: disable=import-error
MockEnvironment = mock.MagicMock(environment.Environment)
diff --git a/chromium/build/android/pylib/base/mock_test_instance.py b/chromium/build/android/pylib/base/mock_test_instance.py
index 18def019903..8ef723bf050 100644
--- a/chromium/build/android/pylib/base/mock_test_instance.py
+++ b/chromium/build/android/pylib/base/mock_test_instance.py
@@ -3,10 +3,8 @@
# found in the LICENSE file.
from pylib.base import test_instance
-from pylib.constants import host_paths
-with host_paths.SysPath(host_paths.PYMOCK_PATH):
- import mock # pylint: disable=import-error
+import mock # pylint: disable=import-error
MockTestInstance = mock.MagicMock(test_instance.TestInstance)
diff --git a/chromium/build/android/pylib/constants/host_paths.py b/chromium/build/android/pylib/constants/host_paths.py
index b249d3c2919..e00e0e79eb8 100644
--- a/chromium/build/android/pylib/constants/host_paths.py
+++ b/chromium/build/android/pylib/constants/host_paths.py
@@ -20,10 +20,9 @@ BUILD_COMMON_PATH = os.path.join(
ANDROID_PLATFORM_DEVELOPMENT_SCRIPTS_PATH = os.path.join(
DIR_SOURCE_ROOT, 'third_party', 'android_platform', 'development',
'scripts')
+BUILD_PATH = os.path.join(DIR_SOURCE_ROOT, 'build')
DEVIL_PATH = os.path.join(
DIR_SOURCE_ROOT, 'third_party', 'catapult', 'devil')
-PYMOCK_PATH = os.path.join(
- DIR_SOURCE_ROOT, 'third_party', 'pymock')
TRACING_PATH = os.path.join(
DIR_SOURCE_ROOT, 'third_party', 'catapult', 'tracing')
diff --git a/chromium/build/android/pylib/device/commands/BUILD.gn b/chromium/build/android/pylib/device/commands/BUILD.gn
index a3ee6462706..13b69f618cf 100644
--- a/chromium/build/android/pylib/device/commands/BUILD.gn
+++ b/chromium/build/android/pylib/device/commands/BUILD.gn
@@ -8,10 +8,13 @@ group("commands") {
data_deps = [ ":chromium_commands_java" ]
}
-android_library("chromium_commands_java") {
+android_library("unzip_java") {
jacoco_never_instrument = true
sources = [ "java/src/org/chromium/android/commands/unzip/Unzip.java" ]
- dex_path = "$root_build_dir/lib.java/chromium_commands.dex.jar"
- deps = [ "//base:base_java" ]
- data = [ dex_path ]
+}
+
+dist_dex("chromium_commands_java") {
+ deps = [ ":unzip_java" ]
+ output = "$root_build_dir/lib.java/chromium_commands.dex.jar"
+ data = [ output ]
}
diff --git a/chromium/build/android/pylib/gtest/filter/unit_tests_disabled b/chromium/build/android/pylib/gtest/filter/unit_tests_disabled
index 706e1abcf57..97811c83a4a 100644
--- a/chromium/build/android/pylib/gtest/filter/unit_tests_disabled
+++ b/chromium/build/android/pylib/gtest/filter/unit_tests_disabled
@@ -19,9 +19,6 @@ AutofillTableTest.UpdateAutofillProfile
AutofillProfileTest.*
CreditCardTest.SetInfoExpirationMonth
-# crbug.com/139398
-DownloadItemModelTest.InterruptTooltip
-
# Tests crashing in the APK
# l10n_util.cc(655)] Check failed: std::string::npos != pos
DownloadItemModelTest.InterruptStatus
diff --git a/chromium/build/android/pylib/instrumentation/instrumentation_parser.py b/chromium/build/android/pylib/instrumentation/instrumentation_parser.py
index 8605178924f..d38f6a5551c 100644
--- a/chromium/build/android/pylib/instrumentation/instrumentation_parser.py
+++ b/chromium/build/android/pylib/instrumentation/instrumentation_parser.py
@@ -20,6 +20,8 @@ STATUS_CODE_SKIP = -3
# http://junit.org/junit4/javadoc/4.12/org/junit/AssumptionViolatedException.html
STATUS_CODE_ASSUMPTION_FAILURE = -4
+STATUS_CODE_TEST_DURATION = 1337
+
# http://developer.android.com/reference/android/app/Activity.html
RESULT_CODE_OK = -1
RESULT_CODE_CANCELED = 0
diff --git a/chromium/build/android/pylib/instrumentation/instrumentation_test_instance.py b/chromium/build/android/pylib/instrumentation/instrumentation_test_instance.py
index 3b61977278e..a30334c6d09 100644
--- a/chromium/build/android/pylib/instrumentation/instrumentation_test_instance.py
+++ b/chromium/build/android/pylib/instrumentation/instrumentation_test_instance.py
@@ -61,6 +61,23 @@ _PARAMETERIZED_COMMAND_LINE_FLAGS_SWITCHES = (
_NATIVE_CRASH_RE = re.compile('(process|native) crash', re.IGNORECASE)
_PICKLE_FORMAT_VERSION = 12
+# The ID of the bundle value Instrumentation uses to report which test index the
+# results are for in a collection of tests. Note that this index is 1-based.
+_BUNDLE_CURRENT_ID = 'current'
+# The ID of the bundle value Instrumentation uses to report the test class.
+_BUNDLE_CLASS_ID = 'class'
+# The ID of the bundle value Instrumentation uses to report the test name.
+_BUNDLE_TEST_ID = 'test'
+# The ID of the bundle value Instrumentation uses to report if a test was
+# skipped.
+_BUNDLE_SKIPPED_ID = 'test_skipped'
+# The ID of the bundle value Instrumentation uses to report the crash stack, if
+# the test crashed.
+_BUNDLE_STACK_ID = 'stack'
+
+# The ID of the bundle value Chrome uses to report the test duration.
+_BUNDLE_DURATION_ID = 'duration_ms'
+
class MissingSizeAnnotationError(test_exception.TestException):
def __init__(self, class_name):
@@ -103,9 +120,8 @@ def ParseAmInstrumentRawOutput(raw_output):
return (code, bundle, statuses)
-def GenerateTestResults(
- result_code, result_bundle, statuses, start_ms, duration_ms, device_abi,
- symbolizer):
+def GenerateTestResults(result_code, result_bundle, statuses, duration_ms,
+ device_abi, symbolizer):
"""Generate test results from |statuses|.
Args:
@@ -116,7 +132,6 @@ def GenerateTestResults(
- the bundle dump as a dict mapping string keys to string values
Note that this is the same as the third item in the 3-tuple returned by
|_ParseAmInstrumentRawOutput|.
- start_ms: The start time of the test in milliseconds.
duration_ms: The duration of the test in milliseconds.
device_abi: The device_abi, which is needed for symbolization.
symbolizer: The symbolizer used to symbolize stack.
@@ -129,10 +144,21 @@ def GenerateTestResults(
results = []
current_result = None
+ cumulative_duration = 0
for status_code, bundle in statuses:
- test_class = bundle.get('class', '')
- test_method = bundle.get('test', '')
+ if status_code == instrumentation_parser.STATUS_CODE_TEST_DURATION:
+ # For the first result, duration will be set below to the difference
+ # between the reported and actual durations to account for overhead like
+ # starting instrumentation.
+ if len(results) > 1:
+ current_duration = int(bundle.get(_BUNDLE_DURATION_ID, duration_ms))
+ current_result.SetDuration(current_duration)
+ cumulative_duration += current_duration
+ continue
+
+ test_class = bundle.get(_BUNDLE_CLASS_ID, '')
+ test_method = bundle.get(_BUNDLE_TEST_ID, '')
if test_class and test_method:
test_name = '%s#%s' % (test_class, test_method)
else:
@@ -142,10 +168,10 @@ def GenerateTestResults(
if current_result:
results.append(current_result)
current_result = test_result.InstrumentationTestResult(
- test_name, base_test_result.ResultType.UNKNOWN, start_ms, duration_ms)
+ test_name, base_test_result.ResultType.UNKNOWN, duration_ms)
else:
if status_code == instrumentation_parser.STATUS_CODE_OK:
- if bundle.get('test_skipped', '').lower() in ('true', '1', 'yes'):
+ if bundle.get(_BUNDLE_SKIPPED_ID, '').lower() in ('true', '1', 'yes'):
current_result.SetType(base_test_result.ResultType.SKIP)
elif current_result.GetType() == base_test_result.ResultType.UNKNOWN:
current_result.SetType(base_test_result.ResultType.PASS)
@@ -159,15 +185,13 @@ def GenerateTestResults(
logging.error('Unrecognized status code %d. Handling as an error.',
status_code)
current_result.SetType(base_test_result.ResultType.FAIL)
- if 'stack' in bundle:
+ if _BUNDLE_STACK_ID in bundle:
if symbolizer and device_abi:
- current_result.SetLog(
- '%s\n%s' % (
- bundle['stack'],
- '\n'.join(symbolizer.ExtractAndResolveNativeStackTraces(
- bundle['stack'], device_abi))))
+ current_result.SetLog('%s\n%s' % (bundle[_BUNDLE_STACK_ID], '\n'.join(
+ symbolizer.ExtractAndResolveNativeStackTraces(
+ bundle[_BUNDLE_STACK_ID], device_abi))))
else:
- current_result.SetLog(bundle['stack'])
+ current_result.SetLog(bundle[_BUNDLE_STACK_ID])
if current_result:
if current_result.GetType() == base_test_result.ResultType.UNKNOWN:
@@ -179,6 +203,9 @@ def GenerateTestResults(
results.append(current_result)
+ if results:
+ results[0].SetDuration(duration_ms - cumulative_duration)
+
return results
@@ -521,6 +548,8 @@ class InstrumentationTestInstance(test_instance.TestInstance):
self._skia_gold_properties = None
self._initializeSkiaGoldAttributes(args)
+ self._wpr_enable_record = args.wpr_enable_record
+
self._external_shard_index = args.test_launcher_shard_index
self._total_external_shards = args.test_launcher_total_shards
@@ -731,7 +760,7 @@ class InstrumentationTestInstance(test_instance.TestInstance):
self._use_webview_provider = args.use_webview_provider
def _initializeSkiaGoldAttributes(self, args):
- self._skia_gold_properties = gold_utils.SkiaGoldProperties(args)
+ self._skia_gold_properties = gold_utils.AndroidSkiaGoldProperties(args)
@property
def additional_apks(self):
@@ -865,6 +894,14 @@ class InstrumentationTestInstance(test_instance.TestInstance):
def wait_for_java_debugger(self):
return self._wait_for_java_debugger
+ @property
+ def wpr_record_mode(self):
+ return self._wpr_enable_record
+
+ @property
+ def wpr_replay_mode(self):
+ return not self._wpr_enable_record
+
#override
def TestType(self):
return 'instrumentation'
@@ -930,7 +967,8 @@ class InstrumentationTestInstance(test_instance.TestInstance):
'class': c['class'],
'method': m['method'],
'annotations': a,
- 'is_junit4': c['superclass'] == 'java.lang.Object'
+ # TODO(https://crbug.com/1084729): Remove is_junit4.
+ 'is_junit4': True
})
return inflated_tests
@@ -1005,11 +1043,10 @@ class InstrumentationTestInstance(test_instance.TestInstance):
return ParseAmInstrumentRawOutput(raw_output)
@staticmethod
- def GenerateTestResults(
- result_code, result_bundle, statuses, start_ms, duration_ms,
- device_abi, symbolizer):
+ def GenerateTestResults(result_code, result_bundle, statuses, duration_ms,
+ device_abi, symbolizer):
return GenerateTestResults(result_code, result_bundle, statuses,
- start_ms, duration_ms, device_abi, symbolizer)
+ duration_ms, device_abi, symbolizer)
#override
def TearDown(self):
diff --git a/chromium/build/android/pylib/instrumentation/instrumentation_test_instance_test.py b/chromium/build/android/pylib/instrumentation/instrumentation_test_instance_test.py
index d3003b8239e..fdb4114a63d 100755
--- a/chromium/build/android/pylib/instrumentation/instrumentation_test_instance_test.py
+++ b/chromium/build/android/pylib/instrumentation/instrumentation_test_instance_test.py
@@ -12,11 +12,9 @@ import tempfile
import unittest
from pylib.base import base_test_result
-from pylib.constants import host_paths
from pylib.instrumentation import instrumentation_test_instance
-with host_paths.SysPath(host_paths.PYMOCK_PATH):
- import mock # pylint: disable=import-error
+import mock # pylint: disable=import-error
_INSTRUMENTATION_TEST_INSTANCE_PATH = (
'pylib.instrumentation.instrumentation_test_instance.%s')
@@ -497,15 +495,17 @@ class InstrumentationTestInstanceTest(unittest.TestCase):
]
expected_tests = [
- {
- 'annotations': {
- 'Feature': {'value': ['Foo']},
- 'MediumTest': None,
+ {
+ 'annotations': {
+ 'Feature': {
+ 'value': ['Foo']
+ },
+ 'MediumTest': None,
+ },
+ 'class': 'org.chromium.test.SampleTest',
+ 'is_junit4': True,
+ 'method': 'testMethod2',
},
- 'class': 'org.chromium.test.SampleTest',
- 'is_junit4': False,
- 'method': 'testMethod2',
- },
]
o._excluded_annotations = [('SmallTest', None)]
@@ -556,16 +556,18 @@ class InstrumentationTestInstanceTest(unittest.TestCase):
]
expected_tests = [
- {
- 'annotations': {
- 'Feature': {'value': ['Foo']},
- 'SmallTest': None,
- 'TestValue': '1',
+ {
+ 'annotations': {
+ 'Feature': {
+ 'value': ['Foo']
+ },
+ 'SmallTest': None,
+ 'TestValue': '1',
+ },
+ 'class': 'org.chromium.test.SampleTest',
+ 'is_junit4': True,
+ 'method': 'testMethod1',
},
- 'class': 'org.chromium.test.SampleTest',
- 'is_junit4': False,
- 'method': 'testMethod1',
- },
]
o._annotations = [('TestValue', '1')]
@@ -724,24 +726,28 @@ class InstrumentationTestInstanceTest(unittest.TestCase):
]
expected_tests = [
- {
- 'annotations': {
- 'Feature': {'value': ['Baz']},
- 'MediumTest': None,
+ {
+ 'annotations': {
+ 'Feature': {
+ 'value': ['Baz']
+ },
+ 'MediumTest': None,
+ },
+ 'class': 'org.chromium.test.SampleTest',
+ 'is_junit4': True,
+ 'method': 'testMethod2',
},
- 'class': 'org.chromium.test.SampleTest',
- 'is_junit4': False,
- 'method': 'testMethod2',
- },
- {
- 'annotations': {
- 'Feature': {'value': ['Bar']},
- 'SmallTest': None,
+ {
+ 'annotations': {
+ 'Feature': {
+ 'value': ['Bar']
+ },
+ 'SmallTest': None,
+ },
+ 'class': 'org.chromium.test.SampleTest2',
+ 'is_junit4': True,
+ 'method': 'testMethod1',
},
- 'class': 'org.chromium.test.SampleTest2',
- 'is_junit4': False,
- 'method': 'testMethod1',
- },
]
o._annotations = [('Feature', 'Bar'), ('Feature', 'Baz')]
@@ -753,7 +759,7 @@ class InstrumentationTestInstanceTest(unittest.TestCase):
def testGenerateTestResults_noStatus(self):
results = instrumentation_test_instance.GenerateTestResults(
- None, None, [], 0, 1000, None, None)
+ None, None, [], 1000, None, None)
self.assertEqual([], results)
def testGenerateTestResults_testPassed(self):
@@ -768,7 +774,7 @@ class InstrumentationTestInstanceTest(unittest.TestCase):
}),
]
results = instrumentation_test_instance.GenerateTestResults(
- None, None, statuses, 0, 1000, None, None)
+ None, None, statuses, 1000, None, None)
self.assertEqual(1, len(results))
self.assertEqual(base_test_result.ResultType.PASS, results[0].GetType())
@@ -789,7 +795,7 @@ class InstrumentationTestInstanceTest(unittest.TestCase):
}),
]
results = instrumentation_test_instance.GenerateTestResults(
- None, None, statuses, 0, 1000, None, None)
+ None, None, statuses, 1000, None, None)
self.assertEqual(1, len(results))
self.assertEqual(base_test_result.ResultType.SKIP, results[0].GetType())
@@ -808,7 +814,7 @@ class InstrumentationTestInstanceTest(unittest.TestCase):
}),
]
results = instrumentation_test_instance.GenerateTestResults(
- None, None, statuses, 0, 1000, None, None)
+ None, None, statuses, 1000, None, None)
self.assertEqual(1, len(results))
self.assertEqual(base_test_result.ResultType.PASS, results[0].GetType())
@@ -824,7 +830,7 @@ class InstrumentationTestInstanceTest(unittest.TestCase):
}),
]
results = instrumentation_test_instance.GenerateTestResults(
- None, None, statuses, 0, 1000, None, None)
+ None, None, statuses, 1000, None, None)
self.assertEqual(1, len(results))
self.assertEqual(base_test_result.ResultType.FAIL, results[0].GetType())
@@ -842,7 +848,7 @@ class InstrumentationTestInstanceTest(unittest.TestCase):
}),
]
results = instrumentation_test_instance.GenerateTestResults(
- None, None, statuses, 0, 1000, None, None)
+ None, None, statuses, 1000, None, None)
self.assertEqual(1, len(results))
self.assertEqual(base_test_result.ResultType.FAIL, results[0].GetType())
self.assertEqual(stacktrace, results[0].GetLog())
@@ -859,7 +865,7 @@ class InstrumentationTestInstanceTest(unittest.TestCase):
}),
]
results = instrumentation_test_instance.GenerateTestResults(
- None, None, statuses, 0, 1000, None, None)
+ None, None, statuses, 1000, None, None)
self.assertEqual(1, len(results))
self.assertEqual(base_test_result.ResultType.SKIP, results[0].GetType())
diff --git a/chromium/build/android/pylib/instrumentation/test_result.py b/chromium/build/android/pylib/instrumentation/test_result.py
index 24e80a8e5fb..a1c7307fce9 100644
--- a/chromium/build/android/pylib/instrumentation/test_result.py
+++ b/chromium/build/android/pylib/instrumentation/test_result.py
@@ -8,13 +8,12 @@ from pylib.base import base_test_result
class InstrumentationTestResult(base_test_result.BaseTestResult):
"""Result information for a single instrumentation test."""
- def __init__(self, full_name, test_type, start_date, dur, log=''):
+ def __init__(self, full_name, test_type, dur, log=''):
"""Construct an InstrumentationTestResult object.
Args:
full_name: Full name of the test.
test_type: Type of the test result as defined in ResultType.
- start_date: Date in milliseconds when the test began running.
dur: Duration of the test run in milliseconds.
log: A string listing any errors.
"""
@@ -27,4 +26,7 @@ class InstrumentationTestResult(base_test_result.BaseTestResult):
else:
self._class_name = full_name
self._test_name = full_name
- self._start_date = start_date
+
+ def SetDuration(self, duration):
+ """Set the test duration."""
+ self._duration = duration
diff --git a/chromium/build/android/pylib/local/device/local_device_instrumentation_test_run.py b/chromium/build/android/pylib/local/device/local_device_instrumentation_test_run.py
index 6a64e190969..5a46e6fcb1c 100644
--- a/chromium/build/android/pylib/local/device/local_device_instrumentation_test_run.py
+++ b/chromium/build/android/pylib/local/device/local_device_instrumentation_test_run.py
@@ -36,17 +36,16 @@ from pylib.instrumentation import instrumentation_test_instance
from pylib.local.device import local_device_environment
from pylib.local.device import local_device_test_run
from pylib.output import remote_output_manager
+from pylib.utils import chrome_proxy_utils
from pylib.utils import gold_utils
from pylib.utils import instrumentation_tracing
from pylib.utils import shared_preference_utils
-
from py_trace_event import trace_event
from py_trace_event import trace_time
from py_utils import contextlib_ext
from py_utils import tempfile_ext
import tombstones
-
with host_paths.SysPath(
os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party'), 0):
import jinja2 # pylint: disable=import-error
@@ -57,6 +56,10 @@ _JINJA_TEMPLATE_DIR = os.path.join(
host_paths.DIR_SOURCE_ROOT, 'build', 'android', 'pylib', 'instrumentation')
_JINJA_TEMPLATE_FILENAME = 'render_test.html.jinja'
+_WPR_GO_LINUX_X86_64_PATH = os.path.join(host_paths.DIR_SOURCE_ROOT,
+ 'third_party', 'webpagereplay', 'bin',
+ 'linux', 'x86_64', 'wpr')
+
_TAG = 'test_runner_py'
TIMEOUT_ANNOTATIONS = [
@@ -88,6 +91,8 @@ _EXTRA_PACKAGE_UNDER_TEST = ('org.chromium.chrome.test.pagecontroller.rules.'
FEATURE_ANNOTATION = 'Feature'
RENDER_TEST_FEATURE_ANNOTATION = 'RenderTest'
+WPR_ARCHIVE_FILE_PATH_ANNOTATION = 'WPRArchiveDirectory'
+WPR_RECORD_REPLAY_TEST_FEATURE_ANNOTATION = 'WPRRecordReplayTest'
# This needs to be kept in sync with formatting in |RenderUtils.imageName|
RE_RENDER_IMAGE_NAME = re.compile(
@@ -101,6 +106,8 @@ RENDER_TEST_MODEL_SDK_CONFIGS = {
'Nexus 5X': [23],
}
+_TEST_BATCH_MAX_GROUP_SIZE = 256
+
@contextlib.contextmanager
def _LogTestEndpoints(device, test_name):
@@ -136,16 +143,24 @@ _CURRENT_FOCUS_CRASH_RE = re.compile(
r'\s*mCurrentFocus.*Application (Error|Not Responding): (\S+)}')
+def _GetTargetPackageName(test_apk):
+ # apk_under_test does not work for smoke tests, where it is set to an
+ # apk that is not listed as the targetPackage in the test apk's manifest.
+ return test_apk.GetAllInstrumentations()[0]['android:targetPackage']
+
+
class LocalDeviceInstrumentationTestRun(
local_device_test_run.LocalDeviceTestRun):
def __init__(self, env, test_instance):
super(LocalDeviceInstrumentationTestRun, self).__init__(
env, test_instance)
+ self._chrome_proxy = None
self._context_managers = collections.defaultdict(list)
self._flag_changers = {}
+ self._render_tests_device_output_dir = None
self._shared_prefs_to_restore = []
- self._skia_gold_work_dir = None
self._skia_gold_session_manager = None
+ self._skia_gold_work_dir = None
#override
def TestPackage(self):
@@ -153,6 +168,8 @@ class LocalDeviceInstrumentationTestRun(
#override
def SetUp(self):
+ target_package = _GetTargetPackageName(self._test_instance.test_apk)
+
@local_device_environment.handle_shard_failures_with(
self._env.BlacklistDevice)
@trace_event.traced
@@ -267,18 +284,10 @@ class LocalDeviceInstrumentationTestRun(
def set_debug_app(dev):
# Set debug app in order to enable reading command line flags on user
# builds
- package_name = None
- if self._test_instance.apk_under_test:
- package_name = self._test_instance.apk_under_test.GetPackageName()
- elif self._test_instance.test_apk:
- package_name = self._test_instance.test_apk.GetPackageName()
- else:
- logging.error("Couldn't set debug app: no package name found")
- return
cmd = ['am', 'set-debug-app', '--persistent']
if self._test_instance.wait_for_java_debugger:
cmd.append('-w')
- cmd.append(package_name)
+ cmd.append(target_package)
dev.RunShellCommand(cmd, check_return=True)
@trace_event.traced
@@ -379,13 +388,12 @@ class LocalDeviceInstrumentationTestRun(
# expectations can be re-used between tests, saving a significant amount
# of time.
self._skia_gold_work_dir = tempfile.mkdtemp()
- self._skia_gold_session_manager = gold_utils.SkiaGoldSessionManager(
+ self._skia_gold_session_manager = gold_utils.AndroidSkiaGoldSessionManager(
self._skia_gold_work_dir, self._test_instance.skia_gold_properties)
if self._test_instance.wait_for_java_debugger:
- apk = self._test_instance.apk_under_test or self._test_instance.test_apk
logging.warning('*' * 80)
logging.warning('Waiting for debugger to attach to process: %s',
- apk.GetPackageName())
+ target_package)
logging.warning('*' * 80)
#override
@@ -459,6 +467,31 @@ class LocalDeviceInstrumentationTestRun(
return tests
#override
+ def _GroupTests(self, tests):
+ batched_tests = dict()
+ other_tests = []
+ for test in tests:
+ if 'Batch' in test['annotations']:
+ batch_name = test['annotations']['Batch']['value']
+ if not batch_name:
+ batch_name = test['class']
+ if not batch_name in batched_tests:
+ batched_tests[batch_name] = []
+ batched_tests[batch_name].append(test)
+ else:
+ other_tests.append(test)
+
+ all_tests = []
+ for _, tests in batched_tests.items():
+ tests.sort() # Ensure a consistent ordering across external shards.
+ all_tests.extend([
+ tests[i:i + _TEST_BATCH_MAX_GROUP_SIZE]
+ for i in range(0, len(tests), _TEST_BATCH_MAX_GROUP_SIZE)
+ ])
+ all_tests.extend(other_tests)
+ return all_tests
+
+ #override
def _GetUniqueTestName(self, test):
return instrumentation_test_instance.GetUniqueTestName(test)
@@ -506,12 +539,9 @@ class LocalDeviceInstrumentationTestRun(
device.adb, suffix='.json', dir=device.GetExternalStoragePath())
extras[EXTRA_TRACE_FILE] = trace_device_file.name
+ target = '%s/%s' % (self._test_instance.test_package,
+ self._test_instance.junit4_runner_class)
if isinstance(test, list):
- if not self._test_instance.driver_apk:
- raise Exception('driver_apk does not exist. '
- 'Please build it and try again.')
- if any(t.get('is_junit4') for t in test):
- raise Exception('driver apk does not support JUnit4 tests')
def name_and_timeout(t):
n = instrumentation_test_instance.GetTestName(t)
@@ -520,26 +550,15 @@ class LocalDeviceInstrumentationTestRun(
test_names, timeouts = zip(*(name_and_timeout(t) for t in test))
- test_name = ','.join(test_names)
+ test_name = instrumentation_test_instance.GetTestName(test[0]) + '_batch'
+ extras['class'] = ','.join(test_names)
test_display_name = test_name
- target = '%s/%s' % (
- self._test_instance.driver_package,
- self._test_instance.driver_name)
- extras.update(
- self._test_instance.GetDriverEnvironmentVars(
- test_list=test_names))
timeout = sum(timeouts)
else:
+ assert test['is_junit4']
test_name = instrumentation_test_instance.GetTestName(test)
test_display_name = self._GetUniqueTestName(test)
- if test['is_junit4']:
- target = '%s/%s' % (
- self._test_instance.test_package,
- self._test_instance.junit4_runner_class)
- else:
- target = '%s/%s' % (
- self._test_instance.test_package,
- self._test_instance.junit3_runner_class)
+
extras['class'] = test_name
if 'flags' in test and test['flags']:
flags_to_add.extend(test['flags'])
@@ -556,14 +575,39 @@ class LocalDeviceInstrumentationTestRun(
timeout = None
logging.info('preparing to run %s: %s', test_display_name, test)
- render_tests_device_output_dir = None
if _IsRenderTest(test):
# TODO(mikecase): Add DeviceTempDirectory class and use that instead.
- render_tests_device_output_dir = posixpath.join(
- device.GetExternalStoragePath(),
- 'render_test_output_dir')
+ self._render_tests_device_output_dir = posixpath.join(
+ device.GetExternalStoragePath(), 'render_test_output_dir')
flags_to_add.append('--render-test-output-dir=%s' %
- render_tests_device_output_dir)
+ self._render_tests_device_output_dir)
+
+ if _IsWPRRecordReplayTest(test):
+ wpr_archive_relative_path = _GetWPRArchivePath(test)
+ if not wpr_archive_relative_path:
+ raise RuntimeError('Could not find the WPR archive file path '
+ 'from annotation.')
+ wpr_archive_path = os.path.join(host_paths.DIR_SOURCE_ROOT,
+ wpr_archive_relative_path)
+ if not os.path.isdir(wpr_archive_path):
+          raise RuntimeError('WPRArchiveDirectory annotation should point '
+ 'to a directory only.')
+
+ archive_path = os.path.join(wpr_archive_path,
+ self._GetUniqueTestName(test) + '.wprgo')
+
+ if not os.path.exists(_WPR_GO_LINUX_X86_64_PATH):
+ # If we got to this stage, then we should have
+ # checkout_android set.
+ raise RuntimeError(
+ 'WPR Go binary not found at {}'.format(_WPR_GO_LINUX_X86_64_PATH))
+ # Tells the server to use the binaries retrieved from CIPD.
+ chrome_proxy_utils.ChromeProxySession.SetWPRServerBinary(
+ _WPR_GO_LINUX_X86_64_PATH)
+ self._chrome_proxy = chrome_proxy_utils.ChromeProxySession()
+ self._chrome_proxy.wpr_record_mode = self._test_instance.wpr_record_mode
+ self._chrome_proxy.Start(device, archive_path)
+ flags_to_add.extend(self._chrome_proxy.GetFlags())
if flags_to_add:
self._CreateFlagChangerIfNeeded(device)
@@ -588,7 +632,7 @@ class LocalDeviceInstrumentationTestRun(
result_code, result_bundle, statuses = (
self._test_instance.ParseAmInstrumentRawOutput(output))
results = self._test_instance.GenerateTestResults(
- result_code, result_bundle, statuses, start_ms, duration_ms,
+ result_code, result_bundle, statuses, duration_ms,
device.product_cpu_abi, self._test_instance.symbolizer)
if self._env.trace_output:
@@ -620,11 +664,12 @@ class LocalDeviceInstrumentationTestRun(
# check to see if any failure images were generated even if the test
# does not fail.
try:
- self._ProcessRenderTestResults(
- device, render_tests_device_output_dir, results)
+ self._ProcessRenderTestResults(device, results)
finally:
- device.RemovePath(render_tests_device_output_dir,
- recursive=True, force=True)
+ device.RemovePath(self._render_tests_device_output_dir,
+ recursive=True,
+ force=True)
+ self._render_tests_device_output_dir = None
def pull_ui_screen_captures():
screenshots = []
@@ -653,13 +698,23 @@ class LocalDeviceInstrumentationTestRun(
json_data['image_link'] = image_archive.Link()
return json_data
+ def stop_chrome_proxy():
+ # Removes the port forwarding
+ if self._chrome_proxy:
+ self._chrome_proxy.Stop(device)
+ if not self._chrome_proxy.wpr_replay_mode:
+ logging.info('WPR Record test generated archive file %s',
+ self._chrome_proxy.wpr_archive_path)
+ self._chrome_proxy = None
+
+
# While constructing the TestResult objects, we can parallelize several
# steps that involve ADB. These steps should NOT depend on any info in
# the results! Things such as whether the test CRASHED have not yet been
# determined.
post_test_steps = [
- restore_flags, restore_timeout_scale, handle_coverage_data,
- handle_render_test_data, pull_ui_screen_captures
+ restore_flags, restore_timeout_scale, stop_chrome_proxy,
+ handle_coverage_data, handle_render_test_data, pull_ui_screen_captures
]
if self._env.concurrent_adb:
reraiser_thread.RunAsync(post_test_steps)
@@ -920,16 +975,14 @@ class LocalDeviceInstrumentationTestRun(
screenshot_device_file.close()
_SetLinkOnResults(results, link_name, screenshot_host_file.Link())
- def _ProcessRenderTestResults(
- self, device, render_tests_device_output_dir, results):
- self._ProcessSkiaGoldRenderTestResults(
- device, render_tests_device_output_dir, results)
- self._ProcessLocalRenderTestResults(device, render_tests_device_output_dir,
- results)
+ def _ProcessRenderTestResults(self, device, results):
+ if not self._render_tests_device_output_dir:
+ return
+ self._ProcessSkiaGoldRenderTestResults(device, results)
- def _ProcessSkiaGoldRenderTestResults(
- self, device, render_tests_device_output_dir, results):
- gold_dir = posixpath.join(render_tests_device_output_dir, _DEVICE_GOLD_DIR)
+ def _ProcessSkiaGoldRenderTestResults(self, device, results):
+ gold_dir = posixpath.join(self._render_tests_device_output_dir,
+ _DEVICE_GOLD_DIR)
if not device.FileExists(gold_dir):
return
@@ -958,8 +1011,27 @@ class LocalDeviceInstrumentationTestRun(
'when doing Skia Gold comparison.' % image_name)
continue
+ # Add 'ignore': '1' if a comparison failure would not be surfaced, as
+ # that implies that we aren't actively maintaining baselines for the
+ # test. This helps prevent unrelated CLs from getting comments posted to
+ # them.
+ with open(json_path) as infile:
+ # All the key/value pairs in the JSON file are strings, so convert
+ # to a bool.
+ json_dict = json.load(infile)
+ fail_on_unsupported = json_dict.get('fail_on_unsupported_configs',
+ 'false')
+ fail_on_unsupported = fail_on_unsupported.lower() == 'true'
+ should_hide_failure = (
+ device.build_version_sdk not in RENDER_TEST_MODEL_SDK_CONFIGS.get(
+ device.product_model, []) and not fail_on_unsupported)
+ if should_hide_failure:
+ json_dict['ignore'] = '1'
+ with open(json_path, 'w') as outfile:
+ json.dump(json_dict, outfile)
+
gold_session = self._skia_gold_session_manager.GetSkiaGoldSession(
- keys_file=json_path)
+ keys_input=json_path)
try:
status, error = gold_session.RunComparison(
@@ -978,14 +1050,7 @@ class LocalDeviceInstrumentationTestRun(
# Don't fail the test if we ran on an unsupported configuration unless
# the test has explicitly opted in, as it's likely that baselines
# aren't maintained for that configuration.
- with open(json_path) as infile:
- # All the key/value pairs in the JSON file are strings, so convert
- # to a bool.
- fail_on_unsupported = json.load(infile).get(
- 'fail_on_unsupported_configs', 'false')
- fail_on_unsupported = fail_on_unsupported.lower() == 'true'
- if device.build_version_sdk not in RENDER_TEST_MODEL_SDK_CONFIGS.get(
- device.product_model, []) and not fail_on_unsupported:
+ if should_hide_failure:
if self._test_instance.skia_gold_properties.local_pixel_tests:
_AppendToLog(
results, 'Gold comparison for %s failed, but model %s with SDK '
@@ -1004,7 +1069,7 @@ class LocalDeviceInstrumentationTestRun(
failure_log = (
'Skia Gold reported failure for RenderTest %s. See '
'RENDER_TESTS.md for how to fix this failure.' % render_name)
- status_codes = gold_utils.SkiaGoldSession.StatusCodes
+ status_codes = gold_utils.AndroidSkiaGoldSession.StatusCodes
if status == status_codes.AUTH_FAILURE:
_AppendToLog(results,
'Gold authentication failed with output %s' % error)
@@ -1053,63 +1118,6 @@ class LocalDeviceInstrumentationTestRun(
'Given unhandled SkiaGoldSession StatusCode %s with error %s',
status, error)
- def _ProcessLocalRenderTestResults(self, device,
- render_tests_device_output_dir, results):
- failure_images_device_dir = posixpath.join(
- render_tests_device_output_dir, 'failures')
- if not device.FileExists(failure_images_device_dir):
- return
-
- diff_images_device_dir = posixpath.join(
- render_tests_device_output_dir, 'diffs')
-
- golden_images_device_dir = posixpath.join(
- render_tests_device_output_dir, 'goldens')
-
- for failure_filename in device.ListDirectory(failure_images_device_dir):
-
- with self._env.output_manager.ArchivedTempfile(
- 'fail_%s' % failure_filename, 'render_tests',
- output_manager.Datatype.PNG) as failure_image_host_file:
- device.PullFile(
- posixpath.join(failure_images_device_dir, failure_filename),
- failure_image_host_file.name)
- failure_link = failure_image_host_file.Link()
-
- golden_image_device_file = posixpath.join(
- golden_images_device_dir, failure_filename)
- if device.PathExists(golden_image_device_file):
- with self._env.output_manager.ArchivedTempfile(
- 'golden_%s' % failure_filename, 'render_tests',
- output_manager.Datatype.PNG) as golden_image_host_file:
- device.PullFile(
- golden_image_device_file, golden_image_host_file.name)
- golden_link = golden_image_host_file.Link()
- else:
- golden_link = ''
-
- diff_image_device_file = posixpath.join(
- diff_images_device_dir, failure_filename)
- if device.PathExists(diff_image_device_file):
- with self._env.output_manager.ArchivedTempfile(
- 'diff_%s' % failure_filename, 'render_tests',
- output_manager.Datatype.PNG) as diff_image_host_file:
- device.PullFile(
- diff_image_device_file, diff_image_host_file.name)
- diff_link = diff_image_host_file.Link()
- else:
- diff_link = ''
-
- processed_template_output = _GenerateRenderTestHtml(
- failure_filename, failure_link, golden_link, diff_link)
-
- with self._env.output_manager.ArchivedTempfile(
- '%s.html' % failure_filename, 'render_tests',
- output_manager.Datatype.HTML) as html_results:
- html_results.write(processed_template_output)
- html_results.flush()
- _SetLinkOnResults(results, failure_filename, html_results.Link())
-
#override
def _ShouldRetry(self, test, result):
# We've tried to disable retries in the past with mixed results.
@@ -1145,6 +1153,22 @@ class LocalDeviceInstrumentationTestRun(
return timeout
+def _IsWPRRecordReplayTest(test):
+ """Determines whether a test or a list of tests is a WPR RecordReplay Test."""
+ if not isinstance(test, list):
+ test = [test]
+ return any([
+ WPR_RECORD_REPLAY_TEST_FEATURE_ANNOTATION in t['annotations'].get(
+ FEATURE_ANNOTATION, {}).get('value', ()) for t in test
+ ])
+
+
+def _GetWPRArchivePath(test):
+ """Retrieves the archive path from the WPRArchiveDirectory annotation."""
+ return test['annotations'].get(WPR_ARCHIVE_FILE_PATH_ANNOTATION,
+ {}).get('value', ())
+
+
def _IsRenderTest(test):
"""Determines if a test or list of tests has a RenderTest amongst them."""
if not isinstance(test, list):
diff --git a/chromium/build/android/pylib/local/device/local_device_instrumentation_test_run_test.py b/chromium/build/android/pylib/local/device/local_device_instrumentation_test_run_test.py
index 3129c1121b0..dd57d92061e 100755
--- a/chromium/build/android/pylib/local/device/local_device_instrumentation_test_run_test.py
+++ b/chromium/build/android/pylib/local/device/local_device_instrumentation_test_run_test.py
@@ -61,6 +61,88 @@ class LocalDeviceInstrumentationTestRunTest(unittest.TestCase):
'SadTest.testNotRun', base_test_result.ResultType.NOTRUN)
self.assertTrue(self._obj._ShouldRetry(test, result))
+ def testIsWPRRecordReplayTest_matchedWithKey(self):
+ test = {
+ 'annotations': {
+ 'Feature': {
+ 'value': ['WPRRecordReplayTest', 'dummy']
+ }
+ },
+ 'class': 'WPRDummyTest',
+ 'method': 'testRun',
+ 'is_junit4': True,
+ }
+ self.assertTrue(
+ local_device_instrumentation_test_run._IsWPRRecordReplayTest(test))
+
+ def testIsWPRRecordReplayTest_noMatchedKey(self):
+ test = {
+ 'annotations': {
+ 'Feature': {
+ 'value': ['abc', 'dummy']
+ }
+ },
+ 'class': 'WPRDummyTest',
+ 'method': 'testRun',
+ 'is_junit4': True,
+ }
+ self.assertFalse(
+ local_device_instrumentation_test_run._IsWPRRecordReplayTest(test))
+
+ def testGetWPRArchivePath_matchedWithKey(self):
+ test = {
+ 'annotations': {
+ 'WPRArchiveDirectory': {
+ 'value': 'abc'
+ }
+ },
+ 'class': 'WPRDummyTest',
+ 'method': 'testRun',
+ 'is_junit4': True,
+ }
+ self.assertEqual(
+ local_device_instrumentation_test_run._GetWPRArchivePath(test), 'abc')
+
+ def testGetWPRArchivePath_noMatchedWithKey(self):
+ test = {
+ 'annotations': {
+ 'Feature': {
+ 'value': 'abc'
+ }
+ },
+ 'class': 'WPRDummyTest',
+ 'method': 'testRun',
+ 'is_junit4': True,
+ }
+ self.assertFalse(
+ local_device_instrumentation_test_run._GetWPRArchivePath(test))
+
+ def testIsRenderTest_matchedWithKey(self):
+ test = {
+ 'annotations': {
+ 'Feature': {
+ 'value': ['RenderTest', 'dummy']
+ }
+ },
+ 'class': 'DummyTest',
+ 'method': 'testRun',
+ 'is_junit4': True,
+ }
+ self.assertTrue(local_device_instrumentation_test_run._IsRenderTest(test))
+
+ def testIsRenderTest_noMatchedKey(self):
+ test = {
+ 'annotations': {
+ 'Feature': {
+ 'value': ['abc', 'dummy']
+ }
+ },
+ 'class': 'DummyTest',
+ 'method': 'testRun',
+ 'is_junit4': True,
+ }
+ self.assertFalse(local_device_instrumentation_test_run._IsRenderTest(test))
+
if __name__ == '__main__':
unittest.main(verbosity=2)
diff --git a/chromium/build/android/pylib/local/device/local_device_test_run.py b/chromium/build/android/pylib/local/device/local_device_test_run.py
index 2018751fed5..69b27186507 100644
--- a/chromium/build/android/pylib/local/device/local_device_test_run.py
+++ b/chromium/build/android/pylib/local/device/local_device_test_run.py
@@ -137,6 +137,7 @@ class LocalDeviceTestRun(test_run.TestRun):
with signal_handler.AddSignalHandler(signal.SIGTERM, stop_tests):
tries = 0
while tries < self._env.max_tries and tests:
+ grouped_tests = self._GroupTests(tests)
logging.info('STARTING TRY #%d/%d', tries + 1, self._env.max_tries)
if tries > 0 and self._env.recover_devices:
if any(d.build_version_sdk == version_codes.LOLLIPOP_MR1
@@ -171,12 +172,14 @@ class LocalDeviceTestRun(test_run.TestRun):
try:
if self._ShouldShard():
- tc = test_collection.TestCollection(self._CreateShards(tests))
+ tc = test_collection.TestCollection(
+ self._CreateShards(grouped_tests))
self._env.parallel_devices.pMap(
run_tests_on_device, tc, try_results).pGet(None)
else:
- self._env.parallel_devices.pMap(
- run_tests_on_device, tests, try_results).pGet(None)
+ self._env.parallel_devices.pMap(run_tests_on_device,
+ grouped_tests,
+ try_results).pGet(None)
except TestsTerminated:
for unknown_result in try_results.GetUnknown():
try_results.AddResult(
@@ -236,9 +239,16 @@ class LocalDeviceTestRun(test_run.TestRun):
if total_shards < 0 or shard_index < 0 or total_shards <= shard_index:
raise InvalidShardingSettings(shard_index, total_shards)
- return [
- t for t in tests
- if hash(self._GetUniqueTestName(t)) % total_shards == shard_index]
+ sharded_tests = []
+ for t in self._GroupTests(tests):
+ if (hash(self._GetUniqueTestName(t[0] if isinstance(t, list) else t)) %
+ total_shards == shard_index):
+ if isinstance(t, list):
+ sharded_tests.extend(t)
+ else:
+ sharded_tests.append(t)
+
+ return sharded_tests
def GetTool(self, device):
if str(device) not in self._tools:
@@ -260,6 +270,10 @@ class LocalDeviceTestRun(test_run.TestRun):
def _GetTests(self):
raise NotImplementedError
+ def _GroupTests(self, tests):
+ # pylint: disable=no-self-use
+ return tests
+
def _RunTest(self, device, test):
raise NotImplementedError
diff --git a/chromium/build/android/pylib/local/device/local_device_test_run_test.py b/chromium/build/android/pylib/local/device/local_device_test_run_test.py
index 525bf25200b..aeea5881c8c 100755
--- a/chromium/build/android/pylib/local/device/local_device_test_run_test.py
+++ b/chromium/build/android/pylib/local/device/local_device_test_run_test.py
@@ -8,11 +8,9 @@
import unittest
from pylib.base import base_test_result
-from pylib.constants import host_paths
from pylib.local.device import local_device_test_run
-with host_paths.SysPath(host_paths.PYMOCK_PATH):
- import mock # pylint: disable=import-error
+import mock # pylint: disable=import-error
class SubstituteDeviceRootTest(unittest.TestCase):
diff --git a/chromium/build/android/pylib/output/remote_output_manager_test.py b/chromium/build/android/pylib/output/remote_output_manager_test.py
index 6917260dd7c..d87c6eb3a9c 100755
--- a/chromium/build/android/pylib/output/remote_output_manager_test.py
+++ b/chromium/build/android/pylib/output/remote_output_manager_test.py
@@ -9,11 +9,9 @@ import unittest
from pylib.base import output_manager
from pylib.base import output_manager_test_case
-from pylib.constants import host_paths
from pylib.output import remote_output_manager
-with host_paths.SysPath(host_paths.PYMOCK_PATH):
- import mock # pylint: disable=import-error
+import mock # pylint: disable=import-error
@mock.patch('pylib.utils.google_storage_helper')
diff --git a/chromium/build/android/pylib/symbols/deobfuscator.py b/chromium/build/android/pylib/symbols/deobfuscator.py
index 42084ddc789..ffc23b87048 100644
--- a/chromium/build/android/pylib/symbols/deobfuscator.py
+++ b/chromium/build/android/pylib/symbols/deobfuscator.py
@@ -150,7 +150,7 @@ class DeobfuscatorPool(object):
# De-obfuscation is broken.
if self._num_restarts == _MAX_RESTARTS:
- return lines
+ raise Exception('Deobfuscation seems broken.')
# Restart any closed Deobfuscators.
for i, d in enumerate(self._pool):
diff --git a/chromium/build/android/pylib/utils/app_bundle_utils.py b/chromium/build/android/pylib/utils/app_bundle_utils.py
index f076ed39cd6..59efb775a66 100644
--- a/chromium/build/android/pylib/utils/app_bundle_utils.py
+++ b/chromium/build/android/pylib/utils/app_bundle_utils.py
@@ -18,6 +18,8 @@ import bundletool
# List of valid modes for GenerateBundleApks()
BUILD_APKS_MODES = ('default', 'universal', 'system', 'system_compressed')
+OPTIMIZE_FOR_OPTIONS = ('ABI', 'SCREEN_DENSITY', 'LANGUAGE',
+ 'TEXTURE_COMPRESSION_FORMAT')
_SYSTEM_MODES = ('system_compressed', 'system')
_ALL_ABIS = ['armeabi-v7a', 'arm64-v8a', 'x86', 'x86_64']
@@ -50,7 +52,8 @@ def GenerateBundleApks(bundle_path,
minimal=False,
minimal_sdk_version=None,
check_for_noop=True,
- system_image_locales=None):
+ system_image_locales=None,
+ optimize_for=None):
"""Generate an .apks archive from a an app bundle if needed.
Args:
@@ -68,6 +71,8 @@ def GenerateBundleApks(bundle_path,
check_for_noop: Use md5_check to short-circuit when inputs have not changed.
system_image_locales: Locales to package in the APK when mode is "system"
or "system_compressed".
+ optimize_for: Overrides split configuration, which must be None or
+ one of OPTIMIZE_FOR_OPTIONS.
"""
device_spec = None
if minimal_sdk_version:
@@ -110,6 +115,13 @@ def GenerateBundleApks(bundle_path,
(mode, BUILD_APKS_MODES))
cmd_args += ['--mode=' + mode]
+ if optimize_for:
+ if optimize_for not in OPTIMIZE_FOR_OPTIONS:
+ raise Exception('Invalid optimize_for parameter %s '
+ '(should be in %s)' %
+                      (optimize_for, OPTIMIZE_FOR_OPTIONS))
+ cmd_args += ['--optimize-for=' + optimize_for]
+
with tempfile.NamedTemporaryFile(suffix='.json') as spec_file:
if device_spec:
json.dump(device_spec, spec_file)
diff --git a/chromium/build/android/pylib/utils/chrome_proxy_utils.py b/chromium/build/android/pylib/utils/chrome_proxy_utils.py
new file mode 100644
index 00000000000..149d0b9c8c5
--- /dev/null
+++ b/chromium/build/android/pylib/utils/chrome_proxy_utils.py
@@ -0,0 +1,171 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Utilities for setting up and tearing down the WPR and TsProxy services."""
+
+from py_utils import ts_proxy_server
+from py_utils import webpagereplay_go_server
+
+from devil.android import forwarder
+
+PROXY_HOST_IP = '127.0.0.1'
+# From Catapult/WebPageReplay document.
+IGNORE_CERT_ERROR_SPKI_LIST = 'PhrPvGIaAMmd29hj8BCZOq096yj7uMpRNHpn5PDxI6I='
+PROXY_SERVER = 'socks5://localhost'
+DEFAULT_DEVICE_PORT = 1080
+DEFAULT_ROUND_TRIP_LATENCY_MS = 100
+DEFAULT_DOWNLOAD_BANDWIDTH_KBPS = 72000
+DEFAULT_UPLOAD_BANDWIDTH_KBPS = 72000
+
+
+class WPRServer(object):
+ """Utils to set up a webpagereplay_go_server instance."""
+
+ def __init__(self):
+ self._archive_path = None
+ self._host_http_port = 0
+ self._host_https_port = 0
+ self._record_mode = False
+ self._server = None
+
+ def StartServer(self, wpr_archive_path):
+ """Starts a webpagereplay_go_server instance."""
+ if wpr_archive_path == self._archive_path and self._server:
+ # Reuse existing webpagereplay_go_server instance.
+ return
+
+ if self._server:
+ self.StopServer()
+
+ replay_options = []
+ if self._record_mode:
+ replay_options.append('--record')
+
+ ports = {}
+ if not self._server:
+ self._server = webpagereplay_go_server.ReplayServer(
+ wpr_archive_path,
+ PROXY_HOST_IP,
+ http_port=self._host_http_port,
+ https_port=self._host_https_port,
+ replay_options=replay_options)
+ self._archive_path = wpr_archive_path
+ ports = self._server.StartServer()
+
+ self._host_http_port = ports['http']
+ self._host_https_port = ports['https']
+
+ def StopServer(self):
+ """Stops the webpagereplay_go_server instance and resets archive."""
+ self._server.StopServer()
+ self._server = None
+ self._host_http_port = 0
+ self._host_https_port = 0
+
+ @staticmethod
+ def SetServerBinaryPath(go_binary_path):
+ """Sets the go_binary_path for webpagereplay_go_server.ReplayServer."""
+ webpagereplay_go_server.ReplayServer.SetGoBinaryPath(go_binary_path)
+
+ @property
+ def record_mode(self):
+ return self._record_mode
+
+ @record_mode.setter
+ def record_mode(self, value):
+ self._record_mode = value
+
+ @property
+ def http_port(self):
+ return self._host_http_port
+
+ @property
+ def https_port(self):
+ return self._host_https_port
+
+ @property
+ def archive_path(self):
+ return self._archive_path
+
+
+class ChromeProxySession(object):
+ """Utils to help set up a Chrome Proxy."""
+
+ def __init__(self, device_proxy_port=DEFAULT_DEVICE_PORT):
+ self._device_proxy_port = device_proxy_port
+ self._ts_proxy_server = ts_proxy_server.TsProxyServer(PROXY_HOST_IP)
+ self._wpr_server = WPRServer()
+
+ @property
+ def wpr_record_mode(self):
+ """Returns whether this proxy session was running in record mode."""
+ return self._wpr_server.record_mode
+
+ @wpr_record_mode.setter
+ def wpr_record_mode(self, value):
+ self._wpr_server.record_mode = value
+
+ @property
+ def wpr_replay_mode(self):
+ """Returns whether this proxy session was running in replay mode."""
+ return not self._wpr_server.record_mode
+
+ @property
+ def wpr_archive_path(self):
+ """Returns the wpr archive file path used in this proxy session."""
+ return self._wpr_server.archive_path
+
+ @property
+ def device_proxy_port(self):
+ return self._device_proxy_port
+
+ def GetFlags(self):
+    """Gets the chrome command line flags needed by ChromeProxySession."""
+ extra_flags = []
+
+ extra_flags.append('--ignore-certificate-errors-spki-list=%s' %
+ IGNORE_CERT_ERROR_SPKI_LIST)
+ extra_flags.append('--proxy-server=%s:%s' %
+ (PROXY_SERVER, self._device_proxy_port))
+ return extra_flags
+
+ @staticmethod
+ def SetWPRServerBinary(go_binary_path):
+ """Sets the WPR server go_binary_path."""
+ WPRServer.SetServerBinaryPath(go_binary_path)
+
+ def Start(self, device, wpr_archive_path):
+    """Starts the wpr_server as well as the ts_proxy server and sets up env.
+
+ Args:
+ device: A DeviceUtils instance.
+      wpr_archive_path: An absolute path to the wpr archive file.
+
+ """
+ self._wpr_server.StartServer(wpr_archive_path)
+ self._ts_proxy_server.StartServer()
+
+ # Maps device port to host port
+ forwarder.Forwarder.Map(
+ [(self._device_proxy_port, self._ts_proxy_server.port)], device)
+ # Maps tsProxy port to wpr http/https ports
+ self._ts_proxy_server.UpdateOutboundPorts(
+ http_port=self._wpr_server.http_port,
+ https_port=self._wpr_server.https_port)
+ self._ts_proxy_server.UpdateTrafficSettings(
+ round_trip_latency_ms=DEFAULT_ROUND_TRIP_LATENCY_MS,
+ download_bandwidth_kbps=DEFAULT_DOWNLOAD_BANDWIDTH_KBPS,
+ upload_bandwidth_kbps=DEFAULT_UPLOAD_BANDWIDTH_KBPS)
+
+ def Stop(self, device):
+ """Stops the wpr_server, and ts_proxy server and tears down env.
+
+ Note that Stop does not reset wpr_record_mode, wpr_replay_mode,
+ wpr_archive_path property.
+
+ Args:
+ device: A DeviceUtils instance.
+ """
+ self._wpr_server.StopServer()
+ self._ts_proxy_server.StopServer()
+ forwarder.Forwarder.UnmapDevicePort(self._device_proxy_port, device)
diff --git a/chromium/build/android/pylib/utils/chrome_proxy_utils_test.py b/chromium/build/android/pylib/utils/chrome_proxy_utils_test.py
new file mode 100755
index 00000000000..b38b268fe8a
--- /dev/null
+++ b/chromium/build/android/pylib/utils/chrome_proxy_utils_test.py
@@ -0,0 +1,235 @@
+#!/usr/bin/env vpython
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Tests for chrome_proxy_utils."""
+
+#pylint: disable=protected-access
+
+import os
+import unittest
+
+from pylib.utils import chrome_proxy_utils
+
+from devil.android import forwarder
+from devil.android import device_utils
+from devil.android.sdk import adb_wrapper
+from py_utils import ts_proxy_server
+from py_utils import webpagereplay_go_server
+
+import mock # pylint: disable=import-error
+
+
+def _DeviceUtilsMock(test_serial, is_ready=True):
+ """Returns a DeviceUtils instance based on given serial."""
+ adb = mock.Mock(spec=adb_wrapper.AdbWrapper)
+ adb.__str__ = mock.Mock(return_value=test_serial)
+ adb.GetDeviceSerial.return_value = test_serial
+ adb.is_ready = is_ready
+ return device_utils.DeviceUtils(adb)
+
+
+class ChromeProxySessionTest(unittest.TestCase):
+ """Unittest for ChromeProxySession."""
+
+ #pylint: disable=no-self-use
+
+ @mock.patch.object(forwarder.Forwarder, 'Map')
+ @mock.patch.object(chrome_proxy_utils.WPRServer, 'StartServer')
+ @mock.patch.object(ts_proxy_server.TsProxyServer, 'StartServer')
+ @mock.patch.object(ts_proxy_server.TsProxyServer, 'UpdateOutboundPorts')
+ @mock.patch.object(ts_proxy_server.TsProxyServer, 'UpdateTrafficSettings')
+ @mock.patch('py_utils.ts_proxy_server.TsProxyServer.port',
+ new_callable=mock.PropertyMock)
+ def test_Start(self, port_mock, traffic_setting_mock, outboundport_mock,
+ start_server_mock, wpr_mock, forwarder_mock):
+ chrome_proxy = chrome_proxy_utils.ChromeProxySession(4)
+ chrome_proxy._wpr_server._host_http_port = 1
+ chrome_proxy._wpr_server._host_https_port = 2
+ port_mock.return_value = 3
+ device = _DeviceUtilsMock('01234')
+ chrome_proxy.Start(device, 'abc')
+
+ forwarder_mock.assert_called_once_with([(4, 3)], device)
+ wpr_mock.assert_called_once_with('abc')
+ start_server_mock.assert_called_once()
+ outboundport_mock.assert_called_once_with(http_port=1, https_port=2)
+ traffic_setting_mock.assert_called_once_with(download_bandwidth_kbps=72000,
+ round_trip_latency_ms=100,
+ upload_bandwidth_kbps=72000)
+ port_mock.assert_called_once()
+
+ @mock.patch.object(forwarder.Forwarder, 'UnmapDevicePort')
+ @mock.patch.object(chrome_proxy_utils.WPRServer, 'StopServer')
+ @mock.patch.object(ts_proxy_server.TsProxyServer, 'StopServer')
+ def test_Stop(self, ts_proxy_mock, wpr_mock, forwarder_mock):
+ chrome_proxy = chrome_proxy_utils.ChromeProxySession(4)
+ device = _DeviceUtilsMock('01234')
+ chrome_proxy.wpr_record_mode = True
+ chrome_proxy._wpr_server._archive_path = 'abc'
+ chrome_proxy.Stop(device)
+
+ forwarder_mock.assert_called_once_with(4, device)
+ wpr_mock.assert_called_once_with()
+ ts_proxy_mock.assert_called_once_with()
+
+ #pylint: enable=no-self-use
+
+ @mock.patch.object(forwarder.Forwarder, 'UnmapDevicePort')
+ @mock.patch.object(webpagereplay_go_server.ReplayServer, 'StopServer')
+ @mock.patch.object(ts_proxy_server.TsProxyServer, 'StopServer')
+ def test_Stop_WithProperties(self, ts_proxy_mock, wpr_mock, forwarder_mock):
+ chrome_proxy = chrome_proxy_utils.ChromeProxySession(4)
+ chrome_proxy._wpr_server._server = webpagereplay_go_server.ReplayServer(
+ os.path.abspath(__file__), chrome_proxy_utils.PROXY_HOST_IP, 0, 0, [])
+ chrome_proxy._wpr_server._archive_path = os.path.abspath(__file__)
+ device = _DeviceUtilsMock('01234')
+ chrome_proxy.wpr_record_mode = True
+ chrome_proxy.Stop(device)
+
+ forwarder_mock.assert_called_once_with(4, device)
+ wpr_mock.assert_called_once_with()
+ ts_proxy_mock.assert_called_once_with()
+ self.assertFalse(chrome_proxy.wpr_replay_mode)
+ self.assertEquals(chrome_proxy.wpr_archive_path, os.path.abspath(__file__))
+
+ def test_SetWPRRecordMode(self):
+ chrome_proxy = chrome_proxy_utils.ChromeProxySession(4)
+ chrome_proxy.wpr_record_mode = True
+ self.assertTrue(chrome_proxy._wpr_server.record_mode)
+ self.assertTrue(chrome_proxy.wpr_record_mode)
+ self.assertFalse(chrome_proxy.wpr_replay_mode)
+
+ chrome_proxy.wpr_record_mode = False
+ self.assertFalse(chrome_proxy._wpr_server.record_mode)
+ self.assertFalse(chrome_proxy.wpr_record_mode)
+ self.assertTrue(chrome_proxy.wpr_replay_mode)
+
+ def test_SetWPRArchivePath(self):
+ chrome_proxy = chrome_proxy_utils.ChromeProxySession(4)
+ chrome_proxy._wpr_server._archive_path = 'abc'
+ self.assertEquals(chrome_proxy.wpr_archive_path, 'abc')
+
+ def test_UseDefaultDeviceProxyPort(self):
+ chrome_proxy = chrome_proxy_utils.ChromeProxySession()
+ expected_flags = [
+ '--ignore-certificate-errors-spki-list='
+ 'PhrPvGIaAMmd29hj8BCZOq096yj7uMpRNHpn5PDxI6I=',
+ '--proxy-server=socks5://localhost:1080'
+ ]
+ self.assertEquals(chrome_proxy.device_proxy_port, 1080)
+ self.assertListEqual(chrome_proxy.GetFlags(), expected_flags)
+
+ def test_UseNewDeviceProxyPort(self):
+ chrome_proxy = chrome_proxy_utils.ChromeProxySession(1)
+ expected_flags = [
+ '--ignore-certificate-errors-spki-list='
+ 'PhrPvGIaAMmd29hj8BCZOq096yj7uMpRNHpn5PDxI6I=',
+ '--proxy-server=socks5://localhost:1'
+ ]
+ self.assertEquals(chrome_proxy.device_proxy_port, 1)
+ self.assertListEqual(chrome_proxy.GetFlags(), expected_flags)
+
+
+class WPRServerTest(unittest.TestCase):
+ @mock.patch('py_utils.webpagereplay_go_server.ReplayServer')
+  def test_StartServer_fresh_replaymode(self, wpr_mock):
+ wpr_server = chrome_proxy_utils.WPRServer()
+ wpr_archive_file = os.path.abspath(__file__)
+ wpr_server.StartServer(wpr_archive_file)
+
+ wpr_mock.assert_called_once_with(wpr_archive_file,
+ '127.0.0.1',
+ http_port=0,
+ https_port=0,
+ replay_options=[])
+
+ self.assertEqual(wpr_server._archive_path, wpr_archive_file)
+ self.assertTrue(wpr_server._server)
+
+ @mock.patch('py_utils.webpagereplay_go_server.ReplayServer')
+  def test_StartServer_fresh_recordmode(self, wpr_mock):
+ wpr_server = chrome_proxy_utils.WPRServer()
+ wpr_server.record_mode = True
+ wpr_server.StartServer(os.path.abspath(__file__))
+ wpr_archive_file = os.path.abspath(__file__)
+
+ wpr_mock.assert_called_once_with(wpr_archive_file,
+ '127.0.0.1',
+ http_port=0,
+ https_port=0,
+ replay_options=['--record'])
+
+ self.assertEqual(wpr_server._archive_path, os.path.abspath(__file__))
+ self.assertTrue(wpr_server._server)
+
+ #pylint: disable=no-self-use
+
+ @mock.patch.object(webpagereplay_go_server.ReplayServer, 'StartServer')
+  def test_StartServer_recordmode(self, start_server_mock):
+ wpr_server = chrome_proxy_utils.WPRServer()
+ start_server_mock.return_value = {'http': 1, 'https': 2}
+ wpr_server.StartServer(os.path.abspath(__file__))
+
+ start_server_mock.assert_called_once()
+ self.assertEqual(wpr_server._host_http_port, 1)
+ self.assertEqual(wpr_server._host_https_port, 2)
+ self.assertEqual(wpr_server._archive_path, os.path.abspath(__file__))
+ self.assertTrue(wpr_server._server)
+
+ @mock.patch.object(webpagereplay_go_server.ReplayServer, 'StartServer')
+  def test_StartServer_reuseServer(self, start_server_mock):
+ wpr_server = chrome_proxy_utils.WPRServer()
+ wpr_server._server = webpagereplay_go_server.ReplayServer(
+ os.path.abspath(__file__),
+ chrome_proxy_utils.PROXY_HOST_IP,
+ http_port=0,
+ https_port=0,
+ replay_options=[])
+ wpr_server._archive_path = os.path.abspath(__file__)
+ wpr_server.StartServer(os.path.abspath(__file__))
+ start_server_mock.assert_not_called()
+
+ @mock.patch.object(webpagereplay_go_server.ReplayServer, 'StartServer')
+ @mock.patch.object(webpagereplay_go_server.ReplayServer, 'StopServer')
+  def test_StartServer_notReuseServer(self, stop_server_mock, start_server_mock):
+ wpr_server = chrome_proxy_utils.WPRServer()
+ wpr_server._server = webpagereplay_go_server.ReplayServer(
+ os.path.abspath(__file__),
+ chrome_proxy_utils.PROXY_HOST_IP,
+ http_port=0,
+ https_port=0,
+ replay_options=[])
+ wpr_server._archive_path = ''
+ wpr_server.StartServer(os.path.abspath(__file__))
+ start_server_mock.assert_called_once()
+ stop_server_mock.assert_called_once()
+
+ #pylint: enable=no-self-use
+
+ @mock.patch.object(webpagereplay_go_server.ReplayServer, 'StopServer')
+ def test_StopServer(self, stop_server_mock):
+ wpr_server = chrome_proxy_utils.WPRServer()
+ wpr_server._server = webpagereplay_go_server.ReplayServer(
+ os.path.abspath(__file__),
+ chrome_proxy_utils.PROXY_HOST_IP,
+ http_port=0,
+ https_port=0,
+ replay_options=[])
+ wpr_server.StopServer()
+ stop_server_mock.assert_called_once()
+ self.assertFalse(wpr_server._server)
+ self.assertFalse(wpr_server._archive_path)
+ self.assertFalse(wpr_server.http_port)
+ self.assertFalse(wpr_server.https_port)
+
+ def test_SetWPRRecordMode(self):
+ wpr_server = chrome_proxy_utils.WPRServer()
+ wpr_server.record_mode = True
+ self.assertTrue(wpr_server.record_mode)
+ wpr_server.record_mode = False
+ self.assertFalse(wpr_server.record_mode)
+
+
+if __name__ == '__main__':
+ unittest.main(verbosity=2)
diff --git a/chromium/build/android/pylib/utils/gold_utils.py b/chromium/build/android/pylib/utils/gold_utils.py
index 2b0aa60434f..f4f0840e429 100644
--- a/chromium/build/android/pylib/utils/gold_utils.py
+++ b/chromium/build/android/pylib/utils/gold_utils.py
@@ -1,332 +1,31 @@
# Copyright 2020 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-"""Utilities for interacting with the Skia Gold image diffing service."""
+"""//build/android implementations of //testing/skia_gold_common.
+
+Used for interacting with the Skia Gold image diffing service.
+"""
-import json
-import logging
import os
import shutil
-import tempfile
from devil.utils import cmd_helper
from pylib.base.output_manager import Datatype
from pylib.constants import host_paths
-from pylib.utils import local_utils
from pylib.utils import repo_utils
-DEFAULT_INSTANCE = 'chrome'
-
-GOLDCTL_BINARY = os.path.join(host_paths.DIR_SOURCE_ROOT, 'tools',
- 'skia_goldctl', 'linux', 'goldctl')
-
-
-class SkiaGoldSession(object):
- class StatusCodes(object):
- """Status codes for RunComparison."""
- SUCCESS = 0
- AUTH_FAILURE = 1
- INIT_FAILURE = 2
- COMPARISON_FAILURE_REMOTE = 3
- COMPARISON_FAILURE_LOCAL = 4
- LOCAL_DIFF_FAILURE = 5
- NO_OUTPUT_MANAGER = 6
-
- class ComparisonResults(object):
- """Struct-like object for storing results of an image comparison."""
-
- def __init__(self):
- self.triage_link = None
- self.triage_link_omission_reason = None
- self.local_diff_given_image = None
- self.local_diff_closest_image = None
- self.local_diff_diff_image = None
-
- def __init__(self,
- working_dir,
- gold_properties,
- keys_file,
- corpus,
- instance=DEFAULT_INSTANCE):
- """A class to handle all aspects of an image comparison via Skia Gold.
-
- A single SkiaGoldSession is valid for a single instance/corpus/keys_file
- combination.
-
- Args:
- working_dir: The directory to store config files, etc. Sharing the same
- working directory between multiple SkiaGoldSessions allows re-use of
- authentication and downloaded baselines.
- gold_properties: A SkiaGoldProperties instance for the current test run.
- keys_file: A path to a JSON file containing various comparison config
- data such as corpus and debug information like the hardware/software
- configuration the images will be produced on.
- corpus: The corpus that images that will be compared belong to.
- instance: The name of the Skia Gold instance to interact with.
- """
- self._working_dir = working_dir
- self._gold_properties = gold_properties
- self._keys_file = keys_file
- self._corpus = corpus
- self._instance = instance
- self._triage_link_file = tempfile.NamedTemporaryFile(
- suffix='.txt', dir=working_dir, delete=False).name
- # A map of image name (string) to ComparisonResults for that image.
- self._comparison_results = {}
- self._authenticated = False
- self._initialized = False
-
- # pylint: disable=too-many-return-statements
- def RunComparison(self,
- name,
- png_file,
- output_manager,
- use_luci=True):
- """Helper method to run all steps to compare a produced image.
-
- Handles authentication, initialization, comparison, and, if necessary,
- local diffing.
+with host_paths.SysPath(host_paths.BUILD_PATH):
+ from skia_gold_common import skia_gold_session
+ from skia_gold_common import skia_gold_session_manager
+ from skia_gold_common import skia_gold_properties
- Args:
- name: The name of the image being compared.
- png_file: A path to a PNG file containing the image to be compared.
- output_manager: The output manager used to save local diff images if
- necessary. Can be None, but will fail if it ends up needing to be used
- and is not set.
- use_luci: If true, authentication will use the service account provided
- by the LUCI context. If false, will attempt to use whatever is set up
- in gsutil, which is only supported for local runs.
- Returns:
- A tuple (status, error). |status| is a value from
- SkiaGoldSession.StatusCodes signifying the result of the comparison.
- |error| is an error message describing the status if not successful.
- """
- auth_rc, auth_stdout = self.Authenticate(use_luci=use_luci)
- if auth_rc:
- return self.StatusCodes.AUTH_FAILURE, auth_stdout
-
- init_rc, init_stdout = self.Initialize()
- if init_rc:
- return self.StatusCodes.INIT_FAILURE, init_stdout
-
- compare_rc, compare_stdout = self.Compare(name=name, png_file=png_file)
- if not compare_rc:
- return self.StatusCodes.SUCCESS, None
-
- logging.error('Gold comparison failed: %s', compare_stdout)
- if not self._gold_properties.local_pixel_tests:
- return self.StatusCodes.COMPARISON_FAILURE_REMOTE, compare_stdout
-
- if not output_manager:
- return (self.StatusCodes.NO_OUTPUT_MANAGER,
- 'No output manager for local diff images')
- diff_rc, diff_stdout = self.Diff(
- name=name, png_file=png_file, output_manager=output_manager)
- if diff_rc:
- return self.StatusCodes.LOCAL_DIFF_FAILURE, diff_stdout
- return self.StatusCodes.COMPARISON_FAILURE_LOCAL, compare_stdout
-
- def Authenticate(self, use_luci=True):
- """Authenticates with Skia Gold for this session.
+class AndroidSkiaGoldSession(skia_gold_session.SkiaGoldSession):
+ def _StoreDiffLinks(self, image_name, output_manager, output_dir):
+ """See SkiaGoldSession._StoreDiffLinks for general documentation.
- Args:
- use_luci: If true, authentication will use the service account provided
- by the LUCI context. If false, will attempt to use whatever is set up
- in gsutil, which is only supported for local runs.
-
- Returns:
- A tuple (return_code, output). |return_code| is the return code of the
- authentication process. |output| is the stdout + stderr of the
- authentication process.
+ |output_manager| must be a build.android.pylib.base.OutputManager instance.
"""
- if self._authenticated:
- return 0, None
- if self._gold_properties.bypass_skia_gold_functionality:
- logging.warning('Not actually authenticating with Gold due to '
- '--bypass-skia-gold-functionality being present.')
- return 0, None
-
- auth_cmd = [GOLDCTL_BINARY, 'auth', '--work-dir', self._working_dir]
- if use_luci:
- auth_cmd.append('--luci')
- elif not self._gold_properties.local_pixel_tests:
- raise RuntimeError(
- 'Cannot authenticate to Skia Gold with use_luci=False unless running '
- 'local pixel tests')
-
- rc, stdout, _ = cmd_helper.GetCmdStatusOutputAndError(
- auth_cmd, merge_stderr=True)
- if rc == 0:
- self._authenticated = True
- return rc, stdout
-
- def Initialize(self):
- """Initializes the working directory if necessary.
-
- This can technically be skipped if the same information is passed to the
- command used for image comparison, but that is less efficient under the
- hood. Doing it that way effectively requires an initialization for every
- comparison (~250 ms) instead of once at the beginning.
-
- Returns:
- A tuple (return_code, output). |return_code| is the return code of the
- initialization process. |output| is the stdout + stderr of the
- initialization process.
- """
- if self._initialized:
- return 0, None
- if self._gold_properties.bypass_skia_gold_functionality:
- logging.warning('Not actually initializing Gold due to '
- '--bypass-skia-gold-functionality being present.')
- return 0, None
-
- init_cmd = [
- GOLDCTL_BINARY,
- 'imgtest',
- 'init',
- '--passfail',
- '--instance',
- self._instance,
- '--corpus',
- self._corpus,
- '--keys-file',
- self._keys_file,
- '--work-dir',
- self._working_dir,
- '--failure-file',
- self._triage_link_file,
- '--commit',
- self._gold_properties.git_revision,
- ]
- if self._gold_properties.IsTryjobRun():
- init_cmd.extend([
- '--issue',
- str(self._gold_properties.issue),
- '--patchset',
- str(self._gold_properties.patchset),
- '--jobid',
- str(self._gold_properties.job_id),
- '--crs',
- str(self._gold_properties.code_review_system),
- '--cis',
- str(self._gold_properties.continuous_integration_system),
- ])
- rc, stdout, _ = cmd_helper.GetCmdStatusOutputAndError(
- init_cmd, merge_stderr=True)
- if rc == 0:
- self._initialized = True
- return rc, stdout
-
- def Compare(self, name, png_file):
- """Compares the given image to images known to Gold.
-
- Triage links can later be retrieved using GetTriageLink().
-
- Args:
- name: The name of the image being compared.
- png_file: A path to a PNG file containing the image to be compared.
-
- Returns:
- A tuple (return_code, output). |return_code| is the return code of the
- comparison process. |output| is the stdout + stderr of the comparison
- process.
- """
- if self._gold_properties.bypass_skia_gold_functionality:
- logging.warning('Not actually comparing with Gold due to '
- '--bypass-skia-gold-functionality being present.')
- return 0, None
-
- compare_cmd = [
- GOLDCTL_BINARY,
- 'imgtest',
- 'add',
- '--test-name',
- name,
- '--png-file',
- png_file,
- '--work-dir',
- self._working_dir,
- ]
- if self._gold_properties.local_pixel_tests:
- compare_cmd.append('--dryrun')
-
- rc, stdout, _ = cmd_helper.GetCmdStatusOutputAndError(
- compare_cmd, merge_stderr=True)
-
- self._comparison_results[name] = self.ComparisonResults()
- if rc == 0:
- self._comparison_results[name].triage_link_omission_reason = (
- 'Comparison succeeded, no triage link')
- elif self._gold_properties.IsTryjobRun():
- # TODO(skbug.com/9879): Remove the explicit corpus when Gold's UI is
- # updated to show results from all corpora for tryjobs.
- cl_triage_link = ('https://{instance}-gold.skia.org/search?'
- 'issue={issue}&'
- 'new_clstore=true&'
- 'query=source_type%3D{corpus}')
- cl_triage_link = cl_triage_link.format(
- instance=self._instance,
- issue=self._gold_properties.issue,
- corpus=self._corpus)
- self._comparison_results[name].triage_link = cl_triage_link
- else:
- try:
- with open(self._triage_link_file) as tlf:
- triage_link = tlf.read().strip()
- self._comparison_results[name].triage_link = triage_link
- except IOError:
- self._comparison_results[name].triage_link_omission_reason = (
- 'Failed to read triage link from file')
- return rc, stdout
-
- def Diff(self, name, png_file, output_manager):
- """Performs a local image diff against the closest known positive in Gold.
-
- This is used for running tests on a workstation, where uploading data to
- Gold for ingestion is not allowed, and thus the web UI is not available.
-
- Image links can later be retrieved using Get*ImageLink().
-
- Args:
- name: The name of the image being compared.
- png_file: The path to a PNG file containing the image to be diffed.
- output_manager: The output manager used to save local diff images.
-
- Returns:
- A tuple (return_code, output). |return_code| is the return code of the
- diff process. |output| is the stdout + stderr of the diff process.
- """
- # Instead of returning that everything is okay and putting in dummy links,
- # just fail since this should only be called when running locally and
- # --bypass-skia-gold-functionality is only meant for use on the bots.
- if self._gold_properties.bypass_skia_gold_functionality:
- raise RuntimeError(
- '--bypass-skia-gold-functionality is not supported when running '
- 'tests locally.')
-
- # Output managers only support archived files, not directories, so we have
- # to use a temporary directory and later move the data into the archived
- # files.
- output_dir = tempfile.mkdtemp(dir=self._working_dir)
- diff_cmd = [
- GOLDCTL_BINARY,
- 'diff',
- '--corpus',
- self._corpus,
- '--instance',
- self._instance,
- '--input',
- png_file,
- '--test',
- name,
- '--work-dir',
- self._working_dir,
- '--out-dir',
- output_dir,
- ]
- rc, stdout, _ = cmd_helper.GetCmdStatusOutputAndError(
- diff_cmd, merge_stderr=True)
given_path = closest_path = diff_path = None
# The directory should contain "input-<hash>.png", "closest-<hash>.png",
# and "diff.png".
@@ -338,272 +37,46 @@ class SkiaGoldSession(object):
closest_path = filepath
elif f == 'diff.png':
diff_path = filepath
- results = self._comparison_results.setdefault(name,
+ results = self._comparison_results.setdefault(image_name,
self.ComparisonResults())
if given_path:
- with output_manager.ArchivedTempfile('given_%s.png' % name,
+ with output_manager.ArchivedTempfile('given_%s.png' % image_name,
'gold_local_diffs',
Datatype.PNG) as given_file:
shutil.move(given_path, given_file.name)
results.local_diff_given_image = given_file.Link()
if closest_path:
- with output_manager.ArchivedTempfile('closest_%s.png' % name,
+ with output_manager.ArchivedTempfile('closest_%s.png' % image_name,
'gold_local_diffs',
Datatype.PNG) as closest_file:
shutil.move(closest_path, closest_file.name)
results.local_diff_closest_image = closest_file.Link()
if diff_path:
- with output_manager.ArchivedTempfile(
- 'diff_%s.png' % name, 'gold_local_diffs', Datatype.PNG) as diff_file:
+ with output_manager.ArchivedTempfile('diff_%s.png' % image_name,
+ 'gold_local_diffs',
+ Datatype.PNG) as diff_file:
shutil.move(diff_path, diff_file.name)
results.local_diff_diff_image = diff_file.Link()
- return rc, stdout
-
- def GetTriageLink(self, name):
- """Gets the triage link for the given image.
- Args:
- name: The name of the image to retrieve the triage link for.
-
- Returns:
- A string containing the triage link if it is available, or None if it is
- not available for some reason. The reason can be retrieved using
- GetTriageLinkOmissionReason.
- """
- return self._comparison_results.get(name,
- self.ComparisonResults()).triage_link
-
- def GetTriageLinkOmissionReason(self, name):
- """Gets the reason why a triage link is not available for an image.
-
- Args:
- name: The name of the image whose triage link does not exist.
-
- Returns:
- A string containing the reason why a triage link is not available.
- """
- if name not in self._comparison_results:
- return 'No image comparison performed for %s' % name
- results = self._comparison_results[name]
- # This method should not be called if there is a valid triage link.
- assert results.triage_link is None
- if results.triage_link_omission_reason:
- return results.triage_link_omission_reason
- if results.local_diff_given_image:
- return 'Gold only used to do a local image diff'
- raise RuntimeError(
- 'Somehow have a ComparisonResults instance for %s that should not '
- 'exist' % name)
-
- def GetGivenImageLink(self, name):
- """Gets the link to the given image used for local diffing.
-
- Args:
- name: The name of the image that was diffed.
-
- Returns:
- A string containing the link to where the image is saved, or None if it
- does not exist. Since local diffing should only be done when running
- locally, this *should* be a file:// URL, but there is no guarantee of
- that.
- """
- assert name in self._comparison_results
- return self._comparison_results[name].local_diff_given_image
-
- def GetClosestImageLink(self, name):
- """Gets the link to the closest known image used for local diffing.
-
- Args:
- name: The name of the image that was diffed.
-
- Returns:
- A string containing the link to where the image is saved, or None if it
- does not exist. Since local diffing should only be done when running
- locally, this *should* be a file:// URL, but there is no guarantee of
- that.
- """
- assert name in self._comparison_results
- return self._comparison_results[name].local_diff_closest_image
-
- def GetDiffImageLink(self, name):
- """Gets the link to the diff between the given and closest images.
-
- Args:
- name: The name of the image that was diffed.
-
- Returns:
- A string containing the link to where the image is saved, or None if it
- does not exist. Since local diffing should only be done when running
- locally, this *should* be a file:// URL, but there is no guarantee of
- that.
- """
- assert name in self._comparison_results
- return self._comparison_results[name].local_diff_diff_image
-
-
-class SkiaGoldSessionManager(object):
- def __init__(self, working_dir, gold_properties):
- """Class to manage one or more SkiaGoldSessions.
-
- A separate session is required for each instance/corpus/keys_file
- combination, so this class will lazily create them as necessary.
-
- Args:
- working_dir: The working directory under which each individual
- SkiaGoldSessions' working directory will be created.
- gold_properties: A SkiaGoldProperties instance that will be used to create
- any SkiaGoldSessions.
- """
- self._working_dir = working_dir
- self._gold_properties = gold_properties
- self._sessions = {}
-
- def GetSkiaGoldSession(self,
- keys_file,
- corpus=None,
- instance=DEFAULT_INSTANCE):
- """Gets a SkiaGoldSession for the given arguments.
-
- Lazily creates one if necessary.
-
- Args:
- keys_file: A path to a JSON file containing various comparison config
- data such as corpus and debug information like the hardware/software
- configuration the image was produced on.
- corpus: The corpus the session is for. If None, the corpus will be
- determined using available information.
- instance: The name of the Skia Gold instance to interact with.
- """
- with open(keys_file) as f:
- keys = json.load(f)
- keys_string = json.dumps(keys, sort_keys=True)
- if corpus is None:
- corpus = keys.get('source_type', instance)
- # Use the string representation of the keys JSON as a proxy for a hash since
- # dicts themselves are not hashable.
- session = self._sessions.setdefault(instance,
- {}).setdefault(corpus, {}).setdefault(
- keys_string, None)
- if not session:
- working_dir = tempfile.mkdtemp(dir=self._working_dir)
- session = SkiaGoldSession(working_dir, self._gold_properties, keys_file,
- corpus, instance)
- self._sessions[instance][corpus][keys_string] = session
- return session
-
-
-class SkiaGoldProperties(object):
- def __init__(self, args):
- """Class to validate and store properties related to Skia Gold.
-
- Args:
- args: The parsed arguments from an argparse.ArgumentParser.
- """
- self._git_revision = None
- self._issue = None
- self._patchset = None
- self._job_id = None
- self._local_pixel_tests = None
- self._no_luci_auth = None
- self._bypass_skia_gold_functionality = None
-
- # Could in theory be configurable, but hard-coded for now since there's
- # no plan to support anything else.
- self._code_review_system = 'gerrit'
- self._continuous_integration_system = 'buildbucket'
-
- self._InitializeProperties(args)
-
- def IsTryjobRun(self):
- return self.issue is not None
-
- @property
- def continuous_integration_system(self):
- return self._continuous_integration_system
-
- @property
- def code_review_system(self):
- return self._code_review_system
-
- @property
- def git_revision(self):
- return self._GetGitRevision()
-
- @property
- def issue(self):
- return self._issue
-
- @property
- def job_id(self):
- return self._job_id
-
- @property
- def local_pixel_tests(self):
- return self._IsLocalRun()
-
- @property
- def no_luci_auth(self):
- return self._no_luci_auth
-
- @property
- def patchset(self):
- return self._patchset
-
- @property
- def bypass_skia_gold_functionality(self):
- return self._bypass_skia_gold_functionality
-
- def _GetGitRevision(self):
- if not self._git_revision:
- # Automated tests should always pass the revision, so assume we're on
- # a workstation and try to get the local origin/master HEAD.
- if not self._IsLocalRun():
- raise RuntimeError(
- '--git-revision was not passed when running on a bot')
- revision = repo_utils.GetGitOriginMasterHeadSHA1(
- host_paths.DIR_SOURCE_ROOT)
- if not revision or len(revision) != 40:
- raise RuntimeError(
- '--git-revision not passed and unable to determine from git')
- self._git_revision = revision
- return self._git_revision
-
- def _IsLocalRun(self):
- if self._local_pixel_tests is None:
- self._local_pixel_tests = not local_utils.IsOnSwarming()
- if self._local_pixel_tests:
- logging.warning(
- 'Automatically determined that test is running on a workstation')
- else:
- logging.warning(
- 'Automatically determined that test is running on a bot')
- return self._local_pixel_tests
+ @staticmethod
+ def _RunCmdForRcAndOutput(cmd):
+ rc, stdout, _ = cmd_helper.GetCmdStatusOutputAndError(cmd,
+ merge_stderr=True)
+ return rc, stdout
- def _InitializeProperties(self, args):
- if hasattr(args, 'local_pixel_tests'):
- # If not set, will be automatically determined later if needed.
- self._local_pixel_tests = args.local_pixel_tests
- if hasattr(args, 'no_luci_auth'):
- self._no_luci_auth = args.no_luci_auth
+class AndroidSkiaGoldSessionManager(
+ skia_gold_session_manager.SkiaGoldSessionManager):
+ @staticmethod
+ def _GetDefaultInstance():
+ return 'chrome'
- if hasattr(args, 'bypass_skia_gold_functionality'):
- self._bypass_skia_gold_functionality = args.bypass_skia_gold_functionality
+ @staticmethod
+ def _GetSessionClass():
+ return AndroidSkiaGoldSession
- # Will be automatically determined later if needed.
- if not hasattr(args, 'git_revision') or not args.git_revision:
- return
- self._git_revision = args.git_revision
- # Only expected on tryjob runs.
- if not hasattr(args, 'gerrit_issue') or not args.gerrit_issue:
- return
- self._issue = args.gerrit_issue
- if not hasattr(args, 'gerrit_patchset') or not args.gerrit_patchset:
- raise RuntimeError(
- '--gerrit-issue passed, but --gerrit-patchset not passed.')
- self._patchset = args.gerrit_patchset
- if not hasattr(args, 'buildbucket_id') or not args.buildbucket_id:
- raise RuntimeError(
- '--gerrit-issue passed, but --buildbucket-id not passed.')
- self._job_id = args.buildbucket_id
+class AndroidSkiaGoldProperties(skia_gold_properties.SkiaGoldProperties):
+ @staticmethod
+ def _GetGitOriginMasterHeadSha1():
+ return repo_utils.GetGitOriginMasterHeadSHA1(host_paths.DIR_SOURCE_ROOT)
diff --git a/chromium/build/android/pylib/utils/gold_utils_test.py b/chromium/build/android/pylib/utils/gold_utils_test.py
index ae3f7ecb3c3..3499484e612 100755
--- a/chromium/build/android/pylib/utils/gold_utils_test.py
+++ b/chromium/build/android/pylib/utils/gold_utils_test.py
@@ -6,39 +6,21 @@
#pylint: disable=protected-access
-import collections
-import json
+import contextlib
import os
+import tempfile
import unittest
from pylib.constants import host_paths
from pylib.utils import gold_utils
-from py_utils import tempfile_ext
-with host_paths.SysPath(host_paths.PYMOCK_PATH):
- import mock # pylint: disable=import-error
+with host_paths.SysPath(host_paths.BUILD_PATH):
+ from skia_gold_common import unittest_utils
-_SkiaGoldArgs = collections.namedtuple('_SkiaGoldArgs', [
- 'local_pixel_tests',
- 'no_luci_auth',
- 'git_revision',
- 'gerrit_issue',
- 'gerrit_patchset',
- 'buildbucket_id',
- 'bypass_skia_gold_functionality',
-])
+import mock # pylint: disable=import-error
+from pyfakefs import fake_filesystem_unittest # pylint: disable=import-error
-
-def createSkiaGoldArgs(local_pixel_tests=None,
- no_luci_auth=None,
- git_revision=None,
- gerrit_issue=None,
- gerrit_patchset=None,
- buildbucket_id=None,
- bypass_skia_gold_functionality=None):
- return _SkiaGoldArgs(local_pixel_tests, no_luci_auth, git_revision,
- gerrit_issue, gerrit_patchset, buildbucket_id,
- bypass_skia_gold_functionality)
+createSkiaGoldArgs = unittest_utils.createSkiaGoldArgs
def assertArgWith(test, arg_list, arg, value):
@@ -46,852 +28,85 @@ def assertArgWith(test, arg_list, arg, value):
test.assertEqual(arg_list[i + 1], value)
-class SkiaGoldSessionRunComparisonTest(unittest.TestCase):
- """Tests the functionality of SkiaGoldSession.RunComparison."""
-
- @mock.patch.object(gold_utils.SkiaGoldSession, 'Diff')
- @mock.patch.object(gold_utils.SkiaGoldSession, 'Compare')
- @mock.patch.object(gold_utils.SkiaGoldSession, 'Initialize')
- @mock.patch.object(gold_utils.SkiaGoldSession, 'Authenticate')
- def test_comparisonSuccess(self, auth_mock, init_mock, compare_mock,
- diff_mock):
- auth_mock.return_value = (0, None)
- init_mock.return_value = (0, None)
- compare_mock.return_value = (0, None)
- with tempfile_ext.NamedTemporaryDirectory() as working_dir:
- keys_file = os.path.join(working_dir, 'keys.json')
- with open(os.path.join(working_dir, 'keys.json'), 'w') as f:
- json.dump({}, f)
- session = gold_utils.SkiaGoldSession(working_dir, None, keys_file, None)
- status, _ = session.RunComparison(None, None, None)
- self.assertEqual(status, gold_utils.SkiaGoldSession.StatusCodes.SUCCESS)
- self.assertEqual(auth_mock.call_count, 1)
- self.assertEqual(init_mock.call_count, 1)
- self.assertEqual(compare_mock.call_count, 1)
- self.assertEqual(diff_mock.call_count, 0)
-
- @mock.patch.object(gold_utils.SkiaGoldSession, 'Diff')
- @mock.patch.object(gold_utils.SkiaGoldSession, 'Compare')
- @mock.patch.object(gold_utils.SkiaGoldSession, 'Initialize')
- @mock.patch.object(gold_utils.SkiaGoldSession, 'Authenticate')
- def test_authFailure(self, auth_mock, init_mock, compare_mock, diff_mock):
- auth_mock.return_value = (1, 'Auth failed')
- with tempfile_ext.NamedTemporaryDirectory() as working_dir:
- session = gold_utils.SkiaGoldSession(working_dir, None, None, None)
- status, error = session.RunComparison(None, None, None)
- self.assertEqual(status,
- gold_utils.SkiaGoldSession.StatusCodes.AUTH_FAILURE)
- self.assertEqual(error, 'Auth failed')
- self.assertEqual(auth_mock.call_count, 1)
- self.assertEqual(init_mock.call_count, 0)
- self.assertEqual(compare_mock.call_count, 0)
- self.assertEqual(diff_mock.call_count, 0)
-
- @mock.patch.object(gold_utils.SkiaGoldSession, 'Diff')
- @mock.patch.object(gold_utils.SkiaGoldSession, 'Compare')
- @mock.patch.object(gold_utils.SkiaGoldSession, 'Initialize')
- @mock.patch.object(gold_utils.SkiaGoldSession, 'Authenticate')
- def test_initFailure(self, auth_mock, init_mock, compare_mock, diff_mock):
- auth_mock.return_value = (0, None)
- init_mock.return_value = (1, 'Init failed')
- with tempfile_ext.NamedTemporaryDirectory() as working_dir:
- session = gold_utils.SkiaGoldSession(working_dir, None, None, None)
- status, error = session.RunComparison(None, None, None)
- self.assertEqual(status,
- gold_utils.SkiaGoldSession.StatusCodes.INIT_FAILURE)
- self.assertEqual(error, 'Init failed')
- self.assertEqual(auth_mock.call_count, 1)
- self.assertEqual(init_mock.call_count, 1)
- self.assertEqual(compare_mock.call_count, 0)
- self.assertEqual(diff_mock.call_count, 0)
-
- @mock.patch.object(gold_utils.SkiaGoldSession, 'Diff')
- @mock.patch.object(gold_utils.SkiaGoldSession, 'Compare')
- @mock.patch.object(gold_utils.SkiaGoldSession, 'Initialize')
- @mock.patch.object(gold_utils.SkiaGoldSession, 'Authenticate')
- def test_compareFailureRemote(self, auth_mock, init_mock, compare_mock,
- diff_mock):
- auth_mock.return_value = (0, None)
- init_mock.return_value = (0, None)
- compare_mock.return_value = (1, 'Compare failed')
- args = createSkiaGoldArgs(local_pixel_tests=False)
- sgp = gold_utils.SkiaGoldProperties(args)
- with tempfile_ext.NamedTemporaryDirectory() as working_dir:
- keys_file = os.path.join(working_dir, 'keys.json')
- with open(os.path.join(working_dir, 'keys.json'), 'w') as f:
- json.dump({}, f)
- session = gold_utils.SkiaGoldSession(working_dir, sgp, keys_file, None)
- status, error = session.RunComparison(None, None, None)
- self.assertEqual(
- status,
- gold_utils.SkiaGoldSession.StatusCodes.COMPARISON_FAILURE_REMOTE)
- self.assertEqual(error, 'Compare failed')
- self.assertEqual(auth_mock.call_count, 1)
- self.assertEqual(init_mock.call_count, 1)
- self.assertEqual(compare_mock.call_count, 1)
- self.assertEqual(diff_mock.call_count, 0)
-
- @mock.patch.object(gold_utils.SkiaGoldSession, 'Diff')
- @mock.patch.object(gold_utils.SkiaGoldSession, 'Compare')
- @mock.patch.object(gold_utils.SkiaGoldSession, 'Initialize')
- @mock.patch.object(gold_utils.SkiaGoldSession, 'Authenticate')
- def test_compareFailureLocal(self, auth_mock, init_mock, compare_mock,
- diff_mock):
- auth_mock.return_value = (0, None)
- init_mock.return_value = (0, None)
- compare_mock.return_value = (1, 'Compare failed')
- diff_mock.return_value = (0, None)
- args = createSkiaGoldArgs(local_pixel_tests=True)
- sgp = gold_utils.SkiaGoldProperties(args)
- with tempfile_ext.NamedTemporaryDirectory() as working_dir:
- keys_file = os.path.join(working_dir, 'keys.json')
- with open(os.path.join(working_dir, 'keys.json'), 'w') as f:
- json.dump({}, f)
- session = gold_utils.SkiaGoldSession(working_dir, sgp, keys_file, None)
- status, error = session.RunComparison(None, None,
- 'Definitely an output manager')
- self.assertEqual(
- status,
- gold_utils.SkiaGoldSession.StatusCodes.COMPARISON_FAILURE_LOCAL)
- self.assertEqual(error, 'Compare failed')
- self.assertEqual(auth_mock.call_count, 1)
- self.assertEqual(init_mock.call_count, 1)
- self.assertEqual(compare_mock.call_count, 1)
- self.assertEqual(diff_mock.call_count, 1)
-
- @mock.patch.object(gold_utils.SkiaGoldSession, 'Diff')
- @mock.patch.object(gold_utils.SkiaGoldSession, 'Compare')
- @mock.patch.object(gold_utils.SkiaGoldSession, 'Initialize')
- @mock.patch.object(gold_utils.SkiaGoldSession, 'Authenticate')
- def test_diffFailure(self, auth_mock, init_mock, compare_mock, diff_mock):
- auth_mock.return_value = (0, None)
- init_mock.return_value = (0, None)
- compare_mock.return_value = (1, 'Compare failed')
- diff_mock.return_value = (1, 'Diff failed')
- args = createSkiaGoldArgs(local_pixel_tests=True)
- sgp = gold_utils.SkiaGoldProperties(args)
- with tempfile_ext.NamedTemporaryDirectory() as working_dir:
- keys_file = os.path.join(working_dir, 'keys.json')
- with open(os.path.join(working_dir, 'keys.json'), 'w') as f:
- json.dump({}, f)
- session = gold_utils.SkiaGoldSession(working_dir, sgp, keys_file, None)
- status, error = session.RunComparison(None, None,
- 'Definitely an output manager')
- self.assertEqual(
- status, gold_utils.SkiaGoldSession.StatusCodes.LOCAL_DIFF_FAILURE)
- self.assertEqual(error, 'Diff failed')
- self.assertEqual(auth_mock.call_count, 1)
- self.assertEqual(init_mock.call_count, 1)
- self.assertEqual(compare_mock.call_count, 1)
- self.assertEqual(diff_mock.call_count, 1)
-
- @mock.patch.object(gold_utils.SkiaGoldSession, 'Diff')
- @mock.patch.object(gold_utils.SkiaGoldSession, 'Compare')
- @mock.patch.object(gold_utils.SkiaGoldSession, 'Initialize')
- @mock.patch.object(gold_utils.SkiaGoldSession, 'Authenticate')
- def test_noOutputDirLocal(self, auth_mock, init_mock, compare_mock,
- diff_mock):
- auth_mock.return_value = (0, None)
- init_mock.return_value = (0, None)
- compare_mock.return_value = (1, 'Compare failed')
- diff_mock.return_value = (0, None)
- args = createSkiaGoldArgs(local_pixel_tests=True)
- sgp = gold_utils.SkiaGoldProperties(args)
- with tempfile_ext.NamedTemporaryDirectory() as working_dir:
- keys_file = os.path.join(working_dir, 'keys.json')
- with open(os.path.join(working_dir, 'keys.json'), 'w') as f:
- json.dump({}, f)
- session = gold_utils.SkiaGoldSession(working_dir, sgp, keys_file, None)
- status, error = session.RunComparison(None, None, None)
- self.assertEqual(status,
- gold_utils.SkiaGoldSession.StatusCodes.NO_OUTPUT_MANAGER)
- self.assertEqual(error, 'No output manager for local diff images')
- self.assertEqual(auth_mock.call_count, 1)
- self.assertEqual(compare_mock.call_count, 1)
- self.assertEqual(diff_mock.call_count, 0)
-
-
-class SkiaGoldSessionAuthenticateTest(unittest.TestCase):
- """Tests the functionality of SkiaGoldSession.Authenticate."""
-
- @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError')
- def test_commandOutputReturned(self, cmd_mock):
- cmd_mock.return_value = (1, 'Something bad :(', None)
- args = createSkiaGoldArgs(git_revision='a')
- sgp = gold_utils.SkiaGoldProperties(args)
- with tempfile_ext.NamedTemporaryDirectory() as working_dir:
- session = gold_utils.SkiaGoldSession(working_dir, sgp, None, None)
- rc, stdout = session.Authenticate()
- self.assertEqual(cmd_mock.call_count, 1)
- self.assertEqual(rc, 1)
- self.assertEqual(stdout, 'Something bad :(')
-
- @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError')
- def test_bypassSkiaGoldFunctionality(self, cmd_mock):
- cmd_mock.return_value = (None, None, None)
- args = createSkiaGoldArgs(
- git_revision='a', bypass_skia_gold_functionality=True)
- sgp = gold_utils.SkiaGoldProperties(args)
- with tempfile_ext.NamedTemporaryDirectory() as working_dir:
- session = gold_utils.SkiaGoldSession(working_dir, sgp, None, None)
- rc, _ = session.Authenticate()
- self.assertEqual(rc, 0)
- cmd_mock.assert_not_called()
+class AndroidSkiaGoldSessionDiffTest(fake_filesystem_unittest.TestCase):
+ def setUp(self):
+ self.setUpPyfakefs()
+ self._working_dir = tempfile.mkdtemp()
- @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError')
- def test_shortCircuitAlreadyAuthenticated(self, cmd_mock):
- cmd_mock.return_value = (None, None, None)
- args = createSkiaGoldArgs(git_revision='a')
- sgp = gold_utils.SkiaGoldProperties(args)
- with tempfile_ext.NamedTemporaryDirectory() as working_dir:
- session = gold_utils.SkiaGoldSession(working_dir, sgp, None, None)
- session._authenticated = True
- rc, _ = session.Authenticate()
- self.assertEqual(rc, 0)
- cmd_mock.assert_not_called()
-
- @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError')
- def test_successSetsShortCircuit(self, cmd_mock):
- cmd_mock.return_value = (0, None, None)
- args = createSkiaGoldArgs(git_revision='a')
- sgp = gold_utils.SkiaGoldProperties(args)
- with tempfile_ext.NamedTemporaryDirectory() as working_dir:
- session = gold_utils.SkiaGoldSession(working_dir, sgp, None, None)
- self.assertFalse(session._authenticated)
- rc, _ = session.Authenticate()
- self.assertEqual(rc, 0)
- self.assertTrue(session._authenticated)
- cmd_mock.assert_not_called()
-
- @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError')
- def test_failureDoesNotSetShortCircuit(self, cmd_mock):
- cmd_mock.return_value = (1, None, None)
- args = createSkiaGoldArgs(git_revision='a')
- sgp = gold_utils.SkiaGoldProperties(args)
- with tempfile_ext.NamedTemporaryDirectory() as working_dir:
- session = gold_utils.SkiaGoldSession(working_dir, sgp, None, None)
- self.assertFalse(session._authenticated)
- rc, _ = session.Authenticate()
- self.assertEqual(rc, 1)
- self.assertFalse(session._authenticated)
- cmd_mock.assert_not_called()
-
- @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError')
- def test_commandWithUseLuciTrue(self, cmd_mock):
- cmd_mock.return_value = (None, None, None)
- args = createSkiaGoldArgs(git_revision='a')
- sgp = gold_utils.SkiaGoldProperties(args)
- with tempfile_ext.NamedTemporaryDirectory() as working_dir:
- session = gold_utils.SkiaGoldSession(working_dir, sgp, None, None)
- session.Authenticate(use_luci=True)
- self.assertIn('--luci', cmd_mock.call_args[0][0])
-
- @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError')
- def test_commandWithUseLuciFalse(self, cmd_mock):
- cmd_mock.return_value = (None, None, None)
- args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=True)
- sgp = gold_utils.SkiaGoldProperties(args)
- with tempfile_ext.NamedTemporaryDirectory() as working_dir:
- session = gold_utils.SkiaGoldSession(working_dir, sgp, None, None)
- session.Authenticate(use_luci=False)
- self.assertNotIn('--luci', cmd_mock.call_args[0][0])
-
- @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError')
- def test_commandWithUseLuciFalseNotLocal(self, cmd_mock):
- cmd_mock.return_value = (None, None, None)
- args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=False)
- sgp = gold_utils.SkiaGoldProperties(args)
- with tempfile_ext.NamedTemporaryDirectory() as working_dir:
- session = gold_utils.SkiaGoldSession(working_dir, sgp, None, None)
- with self.assertRaises(RuntimeError):
- session.Authenticate(use_luci=False)
-
- @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError')
+ @mock.patch.object(gold_utils.AndroidSkiaGoldSession, '_RunCmdForRcAndOutput')
def test_commandCommonArgs(self, cmd_mock):
- cmd_mock.return_value = (None, None, None)
- args = createSkiaGoldArgs(git_revision='a')
- sgp = gold_utils.SkiaGoldProperties(args)
- with tempfile_ext.NamedTemporaryDirectory() as working_dir:
- session = gold_utils.SkiaGoldSession(working_dir, sgp, None, None)
- session.Authenticate()
- call_args = cmd_mock.call_args[0][0]
- self.assertIn('auth', call_args)
- assertArgWith(self, call_args, '--work-dir', working_dir)
-
-
-class SkiaGoldSessionInitializeTest(unittest.TestCase):
- """Tests the functionality of SkiaGoldSession.Initialize."""
-
- @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError')
- def test_bypassSkiaGoldFunctionality(self, cmd_mock):
- cmd_mock.return_value = (None, None, None)
- args = createSkiaGoldArgs(
- git_revision='a', bypass_skia_gold_functionality=True)
- sgp = gold_utils.SkiaGoldProperties(args)
- with tempfile_ext.NamedTemporaryDirectory() as working_dir:
- session = gold_utils.SkiaGoldSession(working_dir, sgp, None, None)
- rc, _ = session.Initialize()
- self.assertEqual(rc, 0)
- cmd_mock.assert_not_called()
-
- @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError')
- def test_shortCircuitAlreadyInitialized(self, cmd_mock):
- cmd_mock.return_value = (None, None, None)
- args = createSkiaGoldArgs(git_revision='a')
- sgp = gold_utils.SkiaGoldProperties(args)
- with tempfile_ext.NamedTemporaryDirectory() as working_dir:
- session = gold_utils.SkiaGoldSession(working_dir, sgp, None, None)
- session._initialized = True
- rc, _ = session.Initialize()
- self.assertEqual(rc, 0)
- cmd_mock.assert_not_called()
-
- @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError')
- def test_successSetsShortCircuit(self, cmd_mock):
- cmd_mock.return_value = (0, None, None)
- args = createSkiaGoldArgs(git_revision='a')
- sgp = gold_utils.SkiaGoldProperties(args)
- with tempfile_ext.NamedTemporaryDirectory() as working_dir:
- session = gold_utils.SkiaGoldSession(working_dir, sgp, None, None)
- self.assertFalse(session._initialized)
- rc, _ = session.Initialize()
- self.assertEqual(rc, 0)
- self.assertTrue(session._initialized)
- cmd_mock.assert_not_called()
-
- @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError')
- def test_failureDoesNotSetShortCircuit(self, cmd_mock):
- cmd_mock.return_value = (1, None, None)
- args = createSkiaGoldArgs(git_revision='a')
- sgp = gold_utils.SkiaGoldProperties(args)
- with tempfile_ext.NamedTemporaryDirectory() as working_dir:
- session = gold_utils.SkiaGoldSession(working_dir, sgp, None, None)
- self.assertFalse(session._initialized)
- rc, _ = session.Initialize()
- self.assertEqual(rc, 1)
- self.assertFalse(session._initialized)
- cmd_mock.assert_not_called()
-
- @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError')
- def test_commandCommonArgs(self, cmd_mock):
- cmd_mock.return_value = (None, None, None)
- args = createSkiaGoldArgs(git_revision='a')
- sgp = gold_utils.SkiaGoldProperties(args)
- with tempfile_ext.NamedTemporaryDirectory() as working_dir:
- session = gold_utils.SkiaGoldSession(
- working_dir, sgp, 'keys_file', 'corpus', instance='instance')
- session.Initialize()
- call_args = cmd_mock.call_args[0][0]
- self.assertIn('imgtest', call_args)
- self.assertIn('init', call_args)
- self.assertIn('--passfail', call_args)
- assertArgWith(self, call_args, '--instance', 'instance')
- assertArgWith(self, call_args, '--corpus', 'corpus')
- assertArgWith(self, call_args, '--keys-file', 'keys_file')
- assertArgWith(self, call_args, '--work-dir', working_dir)
- assertArgWith(self, call_args, '--failure-file', session._triage_link_file)
- assertArgWith(self, call_args, '--commit', 'a')
-
- @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError')
- def test_commandTryjobArgs(self, cmd_mock):
- cmd_mock.return_value = (None, None, None)
- args = createSkiaGoldArgs(
- git_revision='a', gerrit_issue=1, gerrit_patchset=2, buildbucket_id=3)
- sgp = gold_utils.SkiaGoldProperties(args)
- with tempfile_ext.NamedTemporaryDirectory() as working_dir:
- session = gold_utils.SkiaGoldSession(working_dir, sgp, None, None)
- session.Initialize()
- call_args = cmd_mock.call_args[0][0]
- assertArgWith(self, call_args, '--issue', '1')
- assertArgWith(self, call_args, '--patchset', '2')
- assertArgWith(self, call_args, '--jobid', '3')
- assertArgWith(self, call_args, '--crs', 'gerrit')
- assertArgWith(self, call_args, '--cis', 'buildbucket')
-
- @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError')
- def test_commandTryjobArgsMissing(self, cmd_mock):
- cmd_mock.return_value = (None, None, None)
- args = createSkiaGoldArgs(git_revision='a')
- sgp = gold_utils.SkiaGoldProperties(args)
- with tempfile_ext.NamedTemporaryDirectory() as working_dir:
- session = gold_utils.SkiaGoldSession(working_dir, sgp, None, None)
- session.Initialize()
- call_args = cmd_mock.call_args[0][0]
- self.assertNotIn('--issue', call_args)
- self.assertNotIn('--patchset', call_args)
- self.assertNotIn('--jobid', call_args)
- self.assertNotIn('--crs', call_args)
- self.assertNotIn('--cis', call_args)
-
-
-class SkiaGoldSessionCompareTest(unittest.TestCase):
- """Tests the functionality of SkiaGoldSession.Compare."""
-
- @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError')
- def test_commandOutputReturned(self, cmd_mock):
- cmd_mock.return_value = (1, 'Something bad :(', None)
- args = createSkiaGoldArgs(git_revision='a')
- sgp = gold_utils.SkiaGoldProperties(args)
- with tempfile_ext.NamedTemporaryDirectory() as working_dir:
- session = gold_utils.SkiaGoldSession(working_dir, sgp, None, None)
- rc, stdout = session.Compare(None, None)
- self.assertEqual(cmd_mock.call_count, 1)
- self.assertEqual(rc, 1)
- self.assertEqual(stdout, 'Something bad :(')
-
- @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError')
- def test_bypassSkiaGoldFunctionality(self, cmd_mock):
- cmd_mock.return_value = (None, None, None)
- args = createSkiaGoldArgs(
- git_revision='a', bypass_skia_gold_functionality=True)
- sgp = gold_utils.SkiaGoldProperties(args)
- with tempfile_ext.NamedTemporaryDirectory() as working_dir:
- session = gold_utils.SkiaGoldSession(working_dir, sgp, None, None)
- rc, _ = session.Compare(None, None)
- self.assertEqual(rc, 0)
- cmd_mock.assert_not_called()
-
- @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError')
- def test_commandWithLocalPixelTestsTrue(self, cmd_mock):
- cmd_mock.return_value = (None, None, None)
- args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=True)
- sgp = gold_utils.SkiaGoldProperties(args)
- with tempfile_ext.NamedTemporaryDirectory() as working_dir:
- session = gold_utils.SkiaGoldSession(working_dir, sgp, None, None)
- session.Compare(None, None)
- self.assertIn('--dryrun', cmd_mock.call_args[0][0])
-
- @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError')
- def test_commandWithLocalPixelTestsFalse(self, cmd_mock):
- cmd_mock.return_value = (None, None, None)
+ cmd_mock.return_value = (None, None)
args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=False)
- sgp = gold_utils.SkiaGoldProperties(args)
- with tempfile_ext.NamedTemporaryDirectory() as working_dir:
- session = gold_utils.SkiaGoldSession(working_dir, sgp, None, None)
- session.Compare(None, None)
- self.assertNotIn('--dryrun', cmd_mock.call_args[0][0])
-
- @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError')
- def test_commandCommonArgs(self, cmd_mock):
- cmd_mock.return_value = (None, None, None)
- args = createSkiaGoldArgs(git_revision='a')
- sgp = gold_utils.SkiaGoldProperties(args)
- with tempfile_ext.NamedTemporaryDirectory() as working_dir:
- session = gold_utils.SkiaGoldSession(
- working_dir, sgp, 'keys_file', 'corpus', instance='instance')
- session.Compare('name', 'png_file')
- call_args = cmd_mock.call_args[0][0]
- self.assertIn('imgtest', call_args)
- self.assertIn('add', call_args)
- assertArgWith(self, call_args, '--test-name', 'name')
- assertArgWith(self, call_args, '--png-file', 'png_file')
- assertArgWith(self, call_args, '--work-dir', working_dir)
-
- @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError')
- def test_noLinkOnSuccess(self, cmd_mock):
- cmd_mock.return_value = (0, None, None)
- args = createSkiaGoldArgs(git_revision='a')
- sgp = gold_utils.SkiaGoldProperties(args)
- with tempfile_ext.NamedTemporaryDirectory() as working_dir:
- session = gold_utils.SkiaGoldSession(working_dir, sgp, 'keys_file', None)
- rc, _ = session.Compare('name', 'png_file')
- self.assertEqual(rc, 0)
- self.assertEqual(session._comparison_results['name'].triage_link, None)
- self.assertNotEqual(
- session._comparison_results['name'].triage_link_omission_reason, None)
-
- @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError')
- def test_clLinkOnTrybot(self, cmd_mock):
- cmd_mock.return_value = (1, None, None)
- args = createSkiaGoldArgs(
- git_revision='a', gerrit_issue=1, gerrit_patchset=2, buildbucket_id=3)
- sgp = gold_utils.SkiaGoldProperties(args)
- with tempfile_ext.NamedTemporaryDirectory() as working_dir:
- session = gold_utils.SkiaGoldSession(working_dir, sgp, 'keys_file', None)
- rc, _ = session.Compare('name', 'png_file')
- self.assertEqual(rc, 1)
- self.assertNotEqual(session._comparison_results['name'].triage_link, None)
- self.assertIn('issue=1', session._comparison_results['name'].triage_link)
- self.assertEqual(
- session._comparison_results['name'].triage_link_omission_reason, None)
-
- @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError')
- def test_individualLinkOnCi(self, cmd_mock):
- cmd_mock.return_value = (1, None, None)
- args = createSkiaGoldArgs(git_revision='a')
- sgp = gold_utils.SkiaGoldProperties(args)
- with tempfile_ext.NamedTemporaryDirectory() as working_dir:
- session = gold_utils.SkiaGoldSession(working_dir, sgp, 'keys_file', None)
- m = mock.mock_open(read_data='foobar')
- with mock.patch('__builtin__.open', m, create=True):
- rc, _ = session.Compare('name', 'png_file')
- self.assertEqual(rc, 1)
- self.assertNotEqual(session._comparison_results['name'].triage_link, None)
- self.assertEqual(session._comparison_results['name'].triage_link, 'foobar')
- self.assertEqual(
- session._comparison_results['name'].triage_link_omission_reason, None)
-
- @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError')
- def test_validOmissionOnIoError(self, cmd_mock):
- cmd_mock.return_value = (1, None, None)
- args = createSkiaGoldArgs(git_revision='a')
- sgp = gold_utils.SkiaGoldProperties(args)
- with tempfile_ext.NamedTemporaryDirectory() as working_dir:
- session = gold_utils.SkiaGoldSession(working_dir, sgp, 'keys_file', None)
- m = mock.mock_open()
- m.side_effect = IOError('No read today')
- with mock.patch('__builtin__.open', m, create=True):
- rc, _ = session.Compare('name', 'png_file')
- self.assertEqual(rc, 1)
- self.assertEqual(session._comparison_results['name'].triage_link, None)
- self.assertNotEqual(
- session._comparison_results['name'].triage_link_omission_reason, None)
- self.assertIn(
- 'Failed to read',
- session._comparison_results['name'].triage_link_omission_reason)
-
-
-class SkiaGoldSessionDiffTest(unittest.TestCase):
- """Tests the functionality of SkiaGoldSession.Diff."""
-
- @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError')
- def test_commandOutputReturned(self, cmd_mock):
- cmd_mock.return_value = (1, 'Something bad :(', None)
- args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=False)
- sgp = gold_utils.SkiaGoldProperties(args)
- with tempfile_ext.NamedTemporaryDirectory() as working_dir:
- session = gold_utils.SkiaGoldSession(working_dir, sgp, None, None)
- rc, stdout = session.Diff(None, None, None)
- self.assertEqual(cmd_mock.call_count, 1)
- self.assertEqual(rc, 1)
- self.assertEqual(stdout, 'Something bad :(')
-
- @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError')
- def test_bypassSkiaGoldFunctionality(self, cmd_mock):
- cmd_mock.return_value = (None, None, None)
- args = createSkiaGoldArgs(
- git_revision='a', bypass_skia_gold_functionality=True)
- sgp = gold_utils.SkiaGoldProperties(args)
- with tempfile_ext.NamedTemporaryDirectory() as working_dir:
- session = gold_utils.SkiaGoldSession(working_dir, sgp, None, None)
- with self.assertRaises(RuntimeError):
- session.Diff(None, None, None)
-
- @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError')
- def test_commandCommonArgs(self, cmd_mock):
- cmd_mock.return_value = (None, None, None)
- args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=False)
- sgp = gold_utils.SkiaGoldProperties(args)
- with tempfile_ext.NamedTemporaryDirectory() as working_dir:
- session = gold_utils.SkiaGoldSession(
- working_dir, sgp, None, 'corpus', instance='instance')
- session.Diff('name', 'png_file', None)
+ sgp = gold_utils.AndroidSkiaGoldProperties(args)
+ session = gold_utils.AndroidSkiaGoldSession(self._working_dir,
+ sgp,
+ None,
+ 'corpus',
+ instance='instance')
+ session.Diff('name', 'png_file', None)
call_args = cmd_mock.call_args[0][0]
self.assertIn('diff', call_args)
assertArgWith(self, call_args, '--corpus', 'corpus')
assertArgWith(self, call_args, '--instance', 'instance')
assertArgWith(self, call_args, '--input', 'png_file')
assertArgWith(self, call_args, '--test', 'name')
- assertArgWith(self, call_args, '--work-dir', working_dir)
+ assertArgWith(self, call_args, '--work-dir', self._working_dir)
i = call_args.index('--out-dir')
# The output directory should be a subdirectory of the working directory.
- self.assertIn(working_dir, call_args[i + 1])
-
-
-class SkiaGoldSessionTriageLinkOmissionTest(unittest.TestCase):
- """Tests the functionality of SkiaGoldSession.GetTriageLinkOmissionReason."""
-
- # Avoid having to bother with the working directory.
- class FakeGoldSession(gold_utils.SkiaGoldSession):
- def __init__(self): # pylint: disable=super-init-not-called
- self._comparison_results = {
- 'foo': gold_utils.SkiaGoldSession.ComparisonResults(),
- }
-
- def test_noComparison(self):
- session = self.FakeGoldSession()
- session._comparison_results = {}
- reason = session.GetTriageLinkOmissionReason('foo')
- self.assertEqual(reason, 'No image comparison performed for foo')
-
- def test_validReason(self):
- session = self.FakeGoldSession()
- session._comparison_results['foo'].triage_link_omission_reason = 'bar'
- reason = session.GetTriageLinkOmissionReason('foo')
- self.assertEqual(reason, 'bar')
-
- def test_onlyLocal(self):
- session = self.FakeGoldSession()
- session._comparison_results['foo'].local_diff_given_image = 'bar'
- reason = session.GetTriageLinkOmissionReason('foo')
- self.assertEqual(reason, 'Gold only used to do a local image diff')
-
- def test_onlyWithoutTriageLink(self):
- session = self.FakeGoldSession()
- session._comparison_results['foo'].triage_link = 'bar'
- with self.assertRaises(AssertionError):
- session.GetTriageLinkOmissionReason('foo')
-
- def test_resultsShouldNotExist(self):
- session = self.FakeGoldSession()
- with self.assertRaises(RuntimeError):
- session.GetTriageLinkOmissionReason('foo')
-
-
-class SkiaGoldSessionManagerGetSessionTest(unittest.TestCase):
- """Tests the functionality of SkiaGoldSessionManager.GetSkiaGoldSession."""
-
- @mock.patch('gold_utils.SkiaGoldSession')
- def test_ArgsForwardedToSession(self, _):
- args = createSkiaGoldArgs()
- sgp = gold_utils.SkiaGoldProperties(args)
- with tempfile_ext.NamedTemporaryDirectory() as working_dir:
- sgsm = gold_utils.SkiaGoldSessionManager(working_dir, sgp)
- keys_file = os.path.join(working_dir, 'keys.json')
- with open(keys_file, 'w') as f:
- json.dump({}, f)
- session = sgsm.GetSkiaGoldSession(keys_file, 'corpus', 'instance')
- self.assertEqual(session._keys_file, keys_file)
- self.assertEqual(session._corpus, 'corpus')
- self.assertEqual(session._instance, 'instance')
- # Make sure the session's working directory is a subdirectory of the
- # manager's working directory.
- self.assertEqual(os.path.dirname(session._working_dir), working_dir)
-
- @mock.patch('gold_utils.SkiaGoldSession')
- def test_corpusFromJson(self, _):
- args = createSkiaGoldArgs()
- sgp = gold_utils.SkiaGoldProperties(args)
- with tempfile_ext.NamedTemporaryDirectory() as working_dir:
- sgsm = gold_utils.SkiaGoldSessionManager(working_dir, sgp)
- keys_file = os.path.join(working_dir, 'keys.json')
- with open(keys_file, 'w') as f:
- json.dump({'source_type': 'foobar'}, f)
- session = sgsm.GetSkiaGoldSession(keys_file, None, 'instance')
- self.assertEqual(session._keys_file, keys_file)
- self.assertEqual(session._corpus, 'foobar')
- self.assertEqual(session._instance, 'instance')
+ self.assertIn(self._working_dir, call_args[i + 1])
- @mock.patch('gold_utils.SkiaGoldSession')
- def test_corpusDefaultsToInstance(self, _):
- args = createSkiaGoldArgs()
- sgp = gold_utils.SkiaGoldProperties(args)
- with tempfile_ext.NamedTemporaryDirectory() as working_dir:
- sgsm = gold_utils.SkiaGoldSessionManager(working_dir, sgp)
- keys_file = os.path.join(working_dir, 'keys.json')
- with open(keys_file, 'w') as f:
- json.dump({}, f)
- session = sgsm.GetSkiaGoldSession(keys_file, None, 'instance')
- self.assertEqual(session._keys_file, keys_file)
- self.assertEqual(session._corpus, 'instance')
- self.assertEqual(session._instance, 'instance')
- @mock.patch.object(gold_utils.SkiaGoldSession, '__init__')
- def test_matchingSessionReused(self, session_mock):
- session_mock.return_value = None
- args = createSkiaGoldArgs()
- sgp = gold_utils.SkiaGoldProperties(args)
- with tempfile_ext.NamedTemporaryDirectory() as working_dir:
- sgsm = gold_utils.SkiaGoldSessionManager(working_dir, sgp)
- keys_file = os.path.join(working_dir, 'keys.json')
- with open(keys_file, 'w') as f:
- json.dump({}, f)
- session1 = sgsm.GetSkiaGoldSession(keys_file, 'corpus', 'instance')
- session2 = sgsm.GetSkiaGoldSession(keys_file, 'corpus', 'instance')
- self.assertEqual(session1, session2)
- # For some reason, session_mock.assert_called_once() always passes,
- # so check the call count directly.
- self.assertEqual(session_mock.call_count, 1)
+class AndroidSkiaGoldSessionDiffLinksTest(fake_filesystem_unittest.TestCase):
+ class FakeArchivedFile(object):
+ def __init__(self, path):
+ self.name = path
- @mock.patch.object(gold_utils.SkiaGoldSession, '__init__')
- def test_separateSessionsFromKeys(self, session_mock):
- session_mock.return_value = None
- args = createSkiaGoldArgs()
- sgp = gold_utils.SkiaGoldProperties(args)
- with tempfile_ext.NamedTemporaryDirectory() as working_dir:
- sgsm = gold_utils.SkiaGoldSessionManager(working_dir, sgp)
- keys_file1 = os.path.join(working_dir, 'keys1.json')
- with open(keys_file1, 'w') as f:
- json.dump({}, f)
- keys_file2 = os.path.join(working_dir, 'keys2.json')
- with open(keys_file2, 'w') as f:
- json.dump({'something different': 1}, f)
- session1 = sgsm.GetSkiaGoldSession(keys_file1, 'corpus', 'instance')
- session2 = sgsm.GetSkiaGoldSession(keys_file2, 'corpus', 'instance')
- self.assertNotEqual(session1, session2)
- self.assertEqual(session_mock.call_count, 2)
+ def Link(self):
+ return 'file://' + self.name
- @mock.patch.object(gold_utils.SkiaGoldSession, '__init__')
- def test_separateSessionsFromCorpus(self, session_mock):
- session_mock.return_value = None
- args = createSkiaGoldArgs()
- sgp = gold_utils.SkiaGoldProperties(args)
- with tempfile_ext.NamedTemporaryDirectory() as working_dir:
- sgsm = gold_utils.SkiaGoldSessionManager(working_dir, sgp)
- keys_file = os.path.join(working_dir, 'keys.json')
- with open(keys_file, 'w') as f:
- json.dump({}, f)
- session1 = sgsm.GetSkiaGoldSession(keys_file, 'corpus1', 'instance')
- session2 = sgsm.GetSkiaGoldSession(keys_file, 'corpus2', 'instance')
- self.assertNotEqual(session1, session2)
- self.assertEqual(session_mock.call_count, 2)
+ class FakeOutputManager(object):
+ def __init__(self):
+ self.output_dir = tempfile.mkdtemp()
- @mock.patch.object(gold_utils.SkiaGoldSession, '__init__')
- def test_separateSessionsFromInstance(self, session_mock):
- session_mock.return_value = None
- args = createSkiaGoldArgs()
- sgp = gold_utils.SkiaGoldProperties(args)
- with tempfile_ext.NamedTemporaryDirectory() as working_dir:
- sgsm = gold_utils.SkiaGoldSessionManager(working_dir, sgp)
- keys_file = os.path.join(working_dir, 'keys.json')
- with open(keys_file, 'w') as f:
- json.dump({}, f)
- session1 = sgsm.GetSkiaGoldSession(keys_file, 'corpus', 'instance1')
- session2 = sgsm.GetSkiaGoldSession(keys_file, 'corpus', 'instance2')
- self.assertNotEqual(session1, session2)
- self.assertEqual(session_mock.call_count, 2)
+ @contextlib.contextmanager
+ def ArchivedTempfile(self, image_name, _, __):
+ filepath = os.path.join(self.output_dir, image_name)
+ yield AndroidSkiaGoldSessionDiffLinksTest.FakeArchivedFile(filepath)
+ def setUp(self):
+ self.setUpPyfakefs()
+ self._working_dir = tempfile.mkdtemp()
-class SkiaGoldPropertiesInitializationTest(unittest.TestCase):
- """Tests that SkiaGoldProperties initializes (or doesn't) when expected."""
-
- def verifySkiaGoldProperties(self, instance, expected):
- self.assertEqual(instance._local_pixel_tests,
- expected.get('local_pixel_tests'))
- self.assertEqual(instance._no_luci_auth, expected.get('no_luci_auth'))
- self.assertEqual(instance._git_revision, expected.get('git_revision'))
- self.assertEqual(instance._issue, expected.get('gerrit_issue'))
- self.assertEqual(instance._patchset, expected.get('gerrit_patchset'))
- self.assertEqual(instance._job_id, expected.get('buildbucket_id'))
- self.assertEqual(instance._bypass_skia_gold_functionality,
- expected.get('bypass_skia_gold_functionality'))
-
- def test_initializeSkiaGoldAttributes_unsetLocal(self):
- args = createSkiaGoldArgs()
- sgp = gold_utils.SkiaGoldProperties(args)
- self.verifySkiaGoldProperties(sgp, {})
-
- def test_initializeSkiaGoldAttributes_explicitLocal(self):
- args = createSkiaGoldArgs(local_pixel_tests=True)
- sgp = gold_utils.SkiaGoldProperties(args)
- self.verifySkiaGoldProperties(sgp, {'local_pixel_tests': True})
-
- def test_initializeSkiaGoldAttributes_explicitNonLocal(self):
- args = createSkiaGoldArgs(local_pixel_tests=False)
- sgp = gold_utils.SkiaGoldProperties(args)
- self.verifySkiaGoldProperties(sgp, {'local_pixel_tests': False})
-
- def test_initializeSkiaGoldAttributes_explicitNoLuciAuth(self):
- args = createSkiaGoldArgs(no_luci_auth=True)
- sgp = gold_utils.SkiaGoldProperties(args)
- self.verifySkiaGoldProperties(sgp, {'no_luci_auth': True})
-
- def test_initializeSkiaGoldAttributes_bypassExplicitTrue(self):
- args = createSkiaGoldArgs(bypass_skia_gold_functionality=True)
- sgp = gold_utils.SkiaGoldProperties(args)
- self.verifySkiaGoldProperties(sgp, {'bypass_skia_gold_functionality': True})
-
- def test_initializeSkiaGoldAttributes_explicitGitRevision(self):
- args = createSkiaGoldArgs(git_revision='a')
- sgp = gold_utils.SkiaGoldProperties(args)
- self.verifySkiaGoldProperties(sgp, {'git_revision': 'a'})
-
- def test_initializeSkiaGoldAttributes_tryjobArgsIgnoredWithoutRevision(self):
- args = createSkiaGoldArgs(
- gerrit_issue=1, gerrit_patchset=2, buildbucket_id=3)
- sgp = gold_utils.SkiaGoldProperties(args)
- self.verifySkiaGoldProperties(sgp, {})
-
- def test_initializeSkiaGoldAttributes_tryjobArgs(self):
- args = createSkiaGoldArgs(
- git_revision='a', gerrit_issue=1, gerrit_patchset=2, buildbucket_id=3)
- sgp = gold_utils.SkiaGoldProperties(args)
- self.verifySkiaGoldProperties(
- sgp, {
- 'git_revision': 'a',
- 'gerrit_issue': 1,
- 'gerrit_patchset': 2,
- 'buildbucket_id': 3
- })
-
- def test_initializeSkiaGoldAttributes_tryjobMissingPatchset(self):
- args = createSkiaGoldArgs(
- git_revision='a', gerrit_issue=1, buildbucket_id=3)
- with self.assertRaises(RuntimeError):
- gold_utils.SkiaGoldProperties(args)
-
- def test_initializeSkiaGoldAttributes_tryjobMissingBuildbucket(self):
- args = createSkiaGoldArgs(
- git_revision='a', gerrit_issue=1, gerrit_patchset=2)
- with self.assertRaises(RuntimeError):
- gold_utils.SkiaGoldProperties(args)
-
-
-class SkiaGoldPropertiesCalculationTest(unittest.TestCase):
- """Tests that SkiaGoldProperties properly calculates certain properties."""
-
- def testLocalPixelTests_determineTrue(self):
- args = createSkiaGoldArgs()
- sgp = gold_utils.SkiaGoldProperties(args)
- with mock.patch.dict(os.environ, {}, clear=True):
- self.assertTrue(sgp.local_pixel_tests)
-
- def testLocalPixelTests_determineFalse(self):
- args = createSkiaGoldArgs()
- sgp = gold_utils.SkiaGoldProperties(args)
- with mock.patch.dict(os.environ, {'SWARMING_SERVER': ''}, clear=True):
- self.assertFalse(sgp.local_pixel_tests)
-
- def testIsTryjobRun_noIssue(self):
- args = createSkiaGoldArgs()
- sgp = gold_utils.SkiaGoldProperties(args)
- self.assertFalse(sgp.IsTryjobRun())
-
- def testIsTryjobRun_issue(self):
- args = createSkiaGoldArgs(
- git_revision='a', gerrit_issue=1, gerrit_patchset=2, buildbucket_id=3)
- sgp = gold_utils.SkiaGoldProperties(args)
- self.assertTrue(sgp.IsTryjobRun())
-
- def testGetGitRevision_revisionSet(self):
- args = createSkiaGoldArgs(git_revision='a')
- sgp = gold_utils.SkiaGoldProperties(args)
- self.assertEqual(sgp.git_revision, 'a')
-
- def testGetGitRevision_findValidRevision(self):
- args = createSkiaGoldArgs(local_pixel_tests=True)
- sgp = gold_utils.SkiaGoldProperties(args)
- with mock.patch(
- 'pylib.utils.repo_utils.GetGitOriginMasterHeadSHA1') as patched_head:
- expected = 'a' * 40
- patched_head.return_value = expected
- self.assertEqual(sgp.git_revision, expected)
- # Should be cached.
- self.assertEqual(sgp._git_revision, expected)
-
- def testGetGitRevision_noExplicitOnBot(self):
- args = createSkiaGoldArgs(local_pixel_tests=False)
- sgp = gold_utils.SkiaGoldProperties(args)
- with self.assertRaises(RuntimeError):
- _ = sgp.git_revision
-
- def testGetGitRevision_findEmptyRevision(self):
- args = createSkiaGoldArgs(local_pixel_tests=True)
- sgp = gold_utils.SkiaGoldProperties(args)
- with mock.patch(
- 'pylib.utils.repo_utils.GetGitOriginMasterHeadSHA1') as patched_head:
- patched_head.return_value = ''
- with self.assertRaises(RuntimeError):
- _ = sgp.git_revision
-
- def testGetGitRevision_findMalformedRevision(self):
- args = createSkiaGoldArgs(local_pixel_tests=True)
- sgp = gold_utils.SkiaGoldProperties(args)
- with mock.patch(
- 'pylib.utils.repo_utils.GetGitOriginMasterHeadSHA1') as patched_head:
- patched_head.return_value = 'a' * 39
- with self.assertRaises(RuntimeError):
- _ = sgp.git_revision
+ def test_outputManagerUsed(self):
+ args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=True)
+ sgp = gold_utils.AndroidSkiaGoldProperties(args)
+ session = gold_utils.AndroidSkiaGoldSession(self._working_dir, sgp, None,
+ None, None)
+ with open(os.path.join(self._working_dir, 'input-inputhash.png'), 'w') as f:
+ f.write('input')
+ with open(os.path.join(self._working_dir, 'closest-closesthash.png'),
+ 'w') as f:
+ f.write('closest')
+ with open(os.path.join(self._working_dir, 'diff.png'), 'w') as f:
+ f.write('diff')
+
+ output_manager = AndroidSkiaGoldSessionDiffLinksTest.FakeOutputManager()
+ session._StoreDiffLinks('foo', output_manager, self._working_dir)
+
+ copied_input = os.path.join(output_manager.output_dir, 'given_foo.png')
+ copied_closest = os.path.join(output_manager.output_dir, 'closest_foo.png')
+ copied_diff = os.path.join(output_manager.output_dir, 'diff_foo.png')
+ with open(copied_input) as f:
+ self.assertEqual(f.read(), 'input')
+ with open(copied_closest) as f:
+ self.assertEqual(f.read(), 'closest')
+ with open(copied_diff) as f:
+ self.assertEqual(f.read(), 'diff')
+
+ self.assertEqual(session.GetGivenImageLink('foo'), 'file://' + copied_input)
+ self.assertEqual(session.GetClosestImageLink('foo'),
+ 'file://' + copied_closest)
+ self.assertEqual(session.GetDiffImageLink('foo'), 'file://' + copied_diff)
if __name__ == '__main__':
diff --git a/chromium/build/android/resource_sizes.py b/chromium/build/android/resource_sizes.py
index f9e82088522..cd80694f4d1 100755
--- a/chromium/build/android/resource_sizes.py
+++ b/chromium/build/android/resource_sizes.py
@@ -471,7 +471,7 @@ def _DoApkAnalysis(apk_filename, apks_path, tool_prefix, out_dir, report_func):
padding_fraction = -_PercentageDifference(
native_code.ComputeUncompressedSize(), native_code_unaligned_size)
# Ignore this check for small / no native code
- if native_code.ComputeUncompressedSize() > 100000:
+ if native_code.ComputeUncompressedSize() > 1000000:
assert 0 <= padding_fraction < .02, (
'Padding was: {} (file_size={}, sections_sum={})'.format(
padding_fraction, native_code.ComputeUncompressedSize(),
diff --git a/chromium/build/android/test_runner.py b/chromium/build/android/test_runner.py
index 444af5b5bdd..e4cd353dc84 100755
--- a/chromium/build/android/test_runner.py
+++ b/chromium/build/android/test_runner.py
@@ -539,6 +539,13 @@ def AddInstrumentationTestOptions(parser):
help='Wait for java debugger to attach before running any application '
'code. Also disables test timeouts and sets retries=0.')
+ # WPR record mode.
+ parser.add_argument('--wpr-enable-record',
+ action='store_true',
+ default=False,
+ help='If true, WPR server runs in record mode.'
+ 'otherwise, runs in replay mode.')
+
# These arguments are suppressed from the help text because they should
# only ever be specified by an intermediate script.
parser.add_argument(
diff --git a/chromium/build/android/test_runner.pydeps b/chromium/build/android/test_runner.pydeps
index 3e4a4612cf1..acc2f34da7a 100644
--- a/chromium/build/android/test_runner.pydeps
+++ b/chromium/build/android/test_runner.pydeps
@@ -13,12 +13,17 @@
../../third_party/catapult/common/py_trace_event/third_party/protobuf/encoder.py
../../third_party/catapult/common/py_trace_event/third_party/protobuf/wire_format.py
../../third_party/catapult/common/py_utils/py_utils/__init__.py
+../../third_party/catapult/common/py_utils/py_utils/atexit_with_log.py
+../../third_party/catapult/common/py_utils/py_utils/binary_manager.py
../../third_party/catapult/common/py_utils/py_utils/cloud_storage.py
../../third_party/catapult/common/py_utils/py_utils/cloud_storage_global_lock.py
../../third_party/catapult/common/py_utils/py_utils/contextlib_ext.py
../../third_party/catapult/common/py_utils/py_utils/lock.py
../../third_party/catapult/common/py_utils/py_utils/modules_util.py
+../../third_party/catapult/common/py_utils/py_utils/retry_util.py
../../third_party/catapult/common/py_utils/py_utils/tempfile_ext.py
+../../third_party/catapult/common/py_utils/py_utils/ts_proxy_server.py
+../../third_party/catapult/common/py_utils/py_utils/webpagereplay_go_server.py
../../third_party/catapult/dependency_manager/dependency_manager/__init__.py
../../third_party/catapult/dependency_manager/dependency_manager/archive_info.py
../../third_party/catapult/dependency_manager/dependency_manager/base_config.py
@@ -39,6 +44,7 @@
../../third_party/catapult/devil/devil/android/crash_handler.py
../../third_party/catapult/devil/devil/android/decorators.py
../../third_party/catapult/devil/devil/android/device_blacklist.py
+../../third_party/catapult/devil/devil/android/device_denylist.py
../../third_party/catapult/devil/devil/android/device_errors.py
../../third_party/catapult/devil/devil/android/device_list.py
../../third_party/catapult/devil/devil/android/device_signal.py
@@ -125,6 +131,11 @@
../../tools/swarming_client/libs/logdog/streamname.py
../../tools/swarming_client/libs/logdog/varint.py
../gn_helpers.py
+../print_python_deps.py
+../skia_gold_common/__init__.py
+../skia_gold_common/skia_gold_properties.py
+../skia_gold_common/skia_gold_session.py
+../skia_gold_common/skia_gold_session_manager.py
../util/lib/common/chrome_test_server_spawner.py
../util/lib/common/unittest_util.py
convert_dex_profile.py
@@ -196,6 +207,7 @@ pylib/symbols/__init__.py
pylib/symbols/deobfuscator.py
pylib/symbols/stack_symbolizer.py
pylib/utils/__init__.py
+pylib/utils/chrome_proxy_utils.py
pylib/utils/decorators.py
pylib/utils/device_dependencies.py
pylib/utils/dexdump.py