Diffstat (limited to 'chromium/build')
194 files changed, 6197 insertions, 4761 deletions
diff --git a/chromium/build/OWNERS b/chromium/build/OWNERS
index 96a137d7987..00646fc0104 100644
--- a/chromium/build/OWNERS
+++ b/chromium/build/OWNERS
@@ -1,8 +1,8 @@
 agrieve@chromium.org
 brucedawson@chromium.org
 dpranke@chromium.org
+dpranke@google.com
 jochen@chromium.org
-scottmg@chromium.org
 thakis@chromium.org
 thomasanderson@chromium.org
 tikuta@chromium.org
@@ -14,16 +14,10 @@ per-file .gitignore=*
 per-file check_gn_headers_whitelist.txt=*
 per-file mac_toolchain.py=erikchen@chromium.org
 per-file mac_toolchain.py=justincohen@chromium.org
-per-file package_mac_toolchain.py=erikchen@chromium.org
-per-file package_mac_toolchain.py=justincohen@chromium.org
 per-file whitespace_file.txt=*
 per-file OWNERS.status=*
 per-file OWNERS.setnoparent=set noparent
 per-file OWNERS.setnoparent=file://ENG_REVIEW_OWNERS
-# gn-dev is probably a better team here, but the tooling won't let us
-# have more than one team per component, and infra-dev is a catch-all
-# for other build-related lists.
-#
-# TEAM: infra-dev@chromium.org
+# TEAM: build@chromium.org
 # COMPONENT: Build
diff --git a/chromium/build/OWNERS.setnoparent b/chromium/build/OWNERS.setnoparent
index 79598277b24..ed3f158f1c5 100644
--- a/chromium/build/OWNERS.setnoparent
+++ b/chromium/build/OWNERS.setnoparent
@@ -32,7 +32,7 @@ file://third_party/blink/API_OWNERS
 file://chrome/browser/extensions/component_extensions_whitelist/EXTENSION_WHITELIST_OWNERS
 file://extensions/common/api/API_OWNERS
 
-# This restriction is in place to avoid accidential addition to our top level
+# This restriction is in place to avoid accidental addition to our top level
 # layout files, such as add duplicated assets, or introducing new colors when
 # we don't want them.
 file://ui/android/java/res/LAYOUT_OWNERS
@@ -56,3 +56,7 @@ file://chrome/android/java/src/org/chromium/chrome/browser/notifications/channel
 # The Weblayer API is supposed to be stable and will be used outside of the
 # chromium repository.
 file://weblayer/API_OWNERS
+
+# New features for lock/login UI on Chrome OS need to work stably in all corner
+# cases.
+file://ash/login/LOGIN_LOCK_OWNERS
diff --git a/chromium/build/android/BUILD.gn b/chromium/build/android/BUILD.gn
index d5632d064d2..e9eccf26d25 100644
--- a/chromium/build/android/BUILD.gn
+++ b/chromium/build/android/BUILD.gn
@@ -34,6 +34,7 @@ if (enable_java_templates) {
       "android_tool_prefix=" + rebase_path(android_tool_prefix, root_build_dir),
      "android_configuration_failure_dir=" +
          rebase_path(android_configuration_failure_dir, root_build_dir),
+      "final_android_sdk=$final_android_sdk"
    ]
    if (defined(android_secondary_abi_cpu)) {
      _secondary_label_info =
diff --git a/chromium/build/android/OWNERS b/chromium/build/android/OWNERS
index 654c83ec4af..a10904e8e54 100644
--- a/chromium/build/android/OWNERS
+++ b/chromium/build/android/OWNERS
@@ -4,5 +4,3 @@ pasko@chromium.org
 skyostil@chromium.org
 tiborg@chromium.org
 wnwen@chromium.org
-
-# COMPONENT: Build
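The new "final_android_sdk=$final_android_sdk" entry above joins a list of name=value strings that the template later writes out for the Android build's Python helpers to read. A minimal sketch of consuming that convention (the parser below is illustrative only, not the real build code):

def parse_build_vars(text):
  # Each non-empty, non-comment line is expected to look like "name=value".
  build_vars = {}
  for line in text.splitlines():
    line = line.strip()
    if line and not line.startswith('#'):
      name, _, value = line.partition('=')
      build_vars[name] = value
  return build_vars

assert parse_build_vars('final_android_sdk=true\n')['final_android_sdk'] == 'true'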
diff --git a/chromium/build/android/PRESUBMIT.py b/chromium/build/android/PRESUBMIT.py
index 91f2c886a8d..d6d3a442bc8 100644
--- a/chromium/build/android/PRESUBMIT.py
+++ b/chromium/build/android/PRESUBMIT.py
@@ -21,6 +21,8 @@ def CommonChecks(input_api, output_api):
       r'gyp/.*\.py$',
   ]
   tests = []
+  # yapf likes formatting the extra_paths_list to be less readable.
+  # yapf: disable
   tests.extend(
       input_api.canned_checks.GetPylint(
           input_api,
@@ -41,7 +43,7 @@ def CommonChecks(input_api, output_api):
              J('..', '..', 'third_party', 'catapult', 'tracing'),
              J('..', '..', 'third_party', 'depot_tools'),
              J('..', '..', 'third_party', 'colorama', 'src'),
-              J('..', '..', 'third_party', 'pymock'),
+              J('..', '..', 'build'),
          ]))
   tests.extend(
       input_api.canned_checks.GetPylint(
@@ -52,6 +54,7 @@ def CommonChecks(input_api, output_api):
              r'.*_pb2\.py',
          ],
          extra_paths_list=[J('gyp'), J('gn')]))
+  # yapf: enable

   # Disabled due to http://crbug.com/410936
   #output.extend(input_api.canned_checks.RunUnitTestsInDirectory(
@@ -86,6 +89,7 @@ def CommonChecks(input_api, output_api):
          J('pylib', 'symbols', 'apk_native_libs_unittest.py'),
          J('pylib', 'symbols', 'elf_symbolizer_unittest.py'),
          J('pylib', 'symbols', 'symbol_utils_unittest.py'),
+          J('pylib', 'utils', 'chrome_proxy_utils_test.py'),
          J('pylib', 'utils', 'decorators_test.py'),
          J('pylib', 'utils', 'device_dependencies_test.py'),
          J('pylib', 'utils', 'dexdump_test.py'),
diff --git a/chromium/build/android/adb_gdb b/chromium/build/android/adb_gdb
index 1dc3ce5f3bc..bd0f1f315ca 100755
--- a/chromium/build/android/adb_gdb
+++ b/chromium/build/android/adb_gdb
@@ -581,32 +581,32 @@ get_ndk_toolchain_prebuilt () {
 get_ndk_toolchain_fullprefix () {
   local NDK_DIR="$1"
   local ARCH="$2"
-  local TARGET NAME HOST_OS HOST_ARCH GCC CONFIG
+  local TARGET NAME HOST_OS HOST_ARCH LD CONFIG

   # NOTE: This will need to be updated if the NDK changes the names or moves
   # the location of its prebuilt toolchains.
   #
-  GCC=
+  LD=
   HOST_OS=$(get_ndk_host_system)
   HOST_ARCH=$(get_ndk_host_arch)
   CONFIG=$(get_arch_gnu_config $ARCH)
-  GCC=$(get_ndk_toolchain_prebuilt \
-      "$NDK_DIR" "$ARCH" "$HOST_OS-$HOST_ARCH/bin/$CONFIG-gcc")
-  if [ -z "$GCC" -a "$HOST_ARCH" = "x86_64" ]; then
-    GCC=$(get_ndk_toolchain_prebuilt \
-        "$NDK_DIR" "$ARCH" "$HOST_OS-x86/bin/$CONFIG-gcc")
+  LD=$(get_ndk_toolchain_prebuilt \
+      "$NDK_DIR" "$ARCH" "$HOST_OS-$HOST_ARCH/bin/$CONFIG-ld")
+  if [ -z "$LD" -a "$HOST_ARCH" = "x86_64" ]; then
+    LD=$(get_ndk_toolchain_prebuilt \
+        "$NDK_DIR" "$ARCH" "$HOST_OS-x86/bin/$CONFIG-ld")
   fi
-  if [ ! -f "$GCC" -a "$ARCH" = "x86" ]; then
+  if [ ! -f "$LD" -a "$ARCH" = "x86" ]; then
     # Special case, the x86 toolchain used to be incorrectly
     # named i686-android-linux-gcc!
-    GCC=$(get_ndk_toolchain_prebuilt \
-        "$NDK_DIR" "$ARCH" "$HOST_OS-x86/bin/i686-android-linux-gcc")
+    LD=$(get_ndk_toolchain_prebuilt \
+        "$NDK_DIR" "$ARCH" "$HOST_OS-x86/bin/i686-android-linux-ld")
   fi
-  if [ -z "$GCC" ]; then
+  if [ -z "$LD" ]; then
     panic "Cannot find Android NDK toolchain for '$ARCH' architecture. \
 Please verify your NDK installation!"
   fi
-  echo "${GCC%%gcc}"
+  echo "${LD%%ld}"
 }

 # $1: NDK install path
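The PRESUBMIT change relies on yapf's comment pragmas: everything between "# yapf: disable" and "# yapf: enable" is left exactly as hand-formatted, which keeps the pylint extra_paths_list readable. A small runnable sketch of the same pattern:

import os

def pylint_extra_paths():
  join = os.path.join
  # yapf: disable
  return [
      join('..', '..', 'third_party', 'catapult', 'devil'),
      join('..', '..', 'build'),
  ]
  # yapf: enable

print(pylint_extra_paths())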
diff --git a/chromium/build/android/apk_operations.py b/chromium/build/android/apk_operations.py
index a09fae9e787..d2798147a0b 100755
--- a/chromium/build/android/apk_operations.py
+++ b/chromium/build/android/apk_operations.py
@@ -96,7 +96,8 @@ def _GenerateBundleApks(info,
                         output_path=None,
                         minimal=False,
                         minimal_sdk_version=None,
-                        mode=None):
+                        mode=None,
+                        optimize_for=None):
   """Generate an .apks archive from a bundle on demand.

   Args:
@@ -105,6 +106,8 @@ def _GenerateBundleApks(info,
     minimal: Create the minimal set of apks possible (english-only).
     minimal_sdk_version: When minimal=True, use this sdkVersion.
     mode: Build mode, either None, or one of app_bundle_utils.BUILD_APKS_MODES.
+    optimize_for: Override split config, either None, or one of
+        app_bundle_utils.OPTIMIZE_FOR_OPTIONS.
   """
   logging.info('Generating .apks file')
   app_bundle_utils.GenerateBundleApks(
@@ -118,7 +121,8 @@ def _GenerateBundleApks(info,
       system_image_locales=info.system_image_locales,
       mode=mode,
       minimal=minimal,
-      minimal_sdk_version=minimal_sdk_version)
+      minimal_sdk_version=minimal_sdk_version,
+      optimize_for=optimize_for)


 def _InstallBundle(devices, apk_helper_instance, package_name,
@@ -1732,6 +1736,10 @@ class _BuildBundleApks(_Command):
        'single universal APK, "system" generates an archive with a system '
        'image APK, while "system_compressed" generates a compressed system '
        'APK, with an additional stub APK for the system image.')
+    group.add_argument(
+        '--optimize-for',
+        choices=app_bundle_utils.OPTIMIZE_FOR_OPTIONS,
+        help='Override split configuration.')

   def Run(self):
     _GenerateBundleApks(
@@ -1739,7 +1747,8 @@ class _BuildBundleApks(_Command):
        output_path=self.args.output_apks,
        minimal=self.args.minimal,
        minimal_sdk_version=self.args.sdk_version,
-        mode=self.args.build_mode)
+        mode=self.args.build_mode,
+        optimize_for=self.args.optimize_for)


 class _ManifestCommand(_Command):
diff --git a/chromium/build/android/bytecode/BUILD.gn b/chromium/build/android/bytecode/BUILD.gn
index f9e1baf2604..4d29aca9dbc 100644
--- a/chromium/build/android/bytecode/BUILD.gn
+++ b/chromium/build/android/bytecode/BUILD.gn
@@ -4,13 +4,10 @@

 import("//build/config/android/rules.gni")

-assert(current_toolchain == default_toolchain)
-
-java_binary("java_bytecode_rewriter") {
+java_binary("bytecode_processor") {
   sources = [
     "java/org/chromium/bytecode/ByteCodeProcessor.java",
     "java/org/chromium/bytecode/ClassPathValidator.java",
-    "java/org/chromium/bytecode/ThreadAssertionClassAdapter.java",
     "java/org/chromium/bytecode/TypeUtils.java",
   ]
   main_class = "org.chromium.bytecode.ByteCodeProcessor"
@@ -18,5 +15,6 @@ java_binary("java_bytecode_rewriter") {
     "//third_party/android_deps:org_ow2_asm_asm_java",
     "//third_party/android_deps:org_ow2_asm_asm_util_java",
   ]
-  wrapper_script_name = "helper/java_bytecode_rewriter"
+  wrapper_script_name = "helper/bytecode_processor"
+  enable_bytecode_checks = false
 }
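The new --optimize-for flag follows the usual argparse pattern of validating against a central list of choices. A hedged sketch (the option values below are placeholders; the real list lives in app_bundle_utils.OPTIMIZE_FOR_OPTIONS):

import argparse

_OPTIMIZE_FOR_OPTIONS = ['ABI', 'LANGUAGE']  # Placeholder values, not the real list.

def parse_args(argv):
  parser = argparse.ArgumentParser()
  parser.add_argument('--optimize-for', choices=_OPTIMIZE_FOR_OPTIONS,
                      help='Override split configuration.')
  return parser.parse_args(argv)

args = parse_args(['--optimize-for', 'ABI'])
assert args.optimize_for == 'ABI'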
diff --git a/chromium/build/android/bytecode/java/org/chromium/bytecode/ByteCodeProcessor.java b/chromium/build/android/bytecode/java/org/chromium/bytecode/ByteCodeProcessor.java
index 636e094cf02..b767f4f0890 100644
--- a/chromium/build/android/bytecode/java/org/chromium/bytecode/ByteCodeProcessor.java
+++ b/chromium/build/android/bytecode/java/org/chromium/bytecode/ByteCodeProcessor.java
@@ -5,40 +5,29 @@
 package org.chromium.bytecode;

 import org.objectweb.asm.ClassReader;
-import org.objectweb.asm.ClassVisitor;
-import org.objectweb.asm.ClassWriter;

 import java.io.BufferedInputStream;
-import java.io.BufferedOutputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.File;
 import java.io.FileInputStream;
-import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
-import java.io.PrintStream;
 import java.net.MalformedURLException;
 import java.net.URL;
 import java.net.URLClassLoader;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.nio.file.StandardCopyOption;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
+import java.util.HashMap;
 import java.util.HashSet;
-import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
-import java.util.concurrent.Future;
-import java.util.zip.CRC32;
+import java.util.concurrent.TimeUnit;
 import java.util.zip.ZipEntry;
 import java.util.zip.ZipInputStream;
-import java.util.zip.ZipOutputStream;

 /**
  * Java application that takes in an input jar, performs a series of bytecode
@@ -46,154 +35,55 @@ import java.util.zip.ZipOutputStream;
  */
 class ByteCodeProcessor {
     private static final String CLASS_FILE_SUFFIX = ".class";
-    private static final String TEMPORARY_FILE_SUFFIX = ".temp";
     private static final int BUFFER_SIZE = 16384;
     private static boolean sVerbose;
     private static boolean sIsPrebuilt;
-    private static boolean sShouldUseThreadAnnotations;
-    private static boolean sShouldCheckClassPath;
     private static ClassLoader sDirectClassPathClassLoader;
     private static ClassLoader sFullClassPathClassLoader;
     private static Set<String> sFullClassPathJarPaths;
     private static Set<String> sMissingClassesAllowlist;
+    private static Map<String, String> sJarToGnTarget;
     private static ClassPathValidator sValidator;

-    private static class EntryDataPair {
-        private final ZipEntry mEntry;
-        private final byte[] mData;
-
-        private EntryDataPair(ZipEntry mEntry, byte[] mData) {
-            this.mEntry = mEntry;
-            this.mData = mData;
-        }
-
-        private static EntryDataPair create(String zipPath, byte[] data) {
-            ZipEntry entry = new ZipEntry(zipPath);
-            entry.setMethod(ZipEntry.STORED);
-            entry.setTime(0);
-            entry.setSize(data.length);
-            CRC32 crc = new CRC32();
-            crc.update(data);
-            entry.setCrc(crc.getValue());
-            return new EntryDataPair(entry, data);
-        }
-    }
-
-    private static EntryDataPair processEntry(ZipEntry entry, byte[] data)
-            throws ClassPathValidator.ClassNotLoadedException {
-        // Copy all non-.class files to the output jar.
-        if (entry.isDirectory() || !entry.getName().endsWith(CLASS_FILE_SUFFIX)) {
-            return new EntryDataPair(entry, data);
-        }
-
+    private static Void processEntry(ZipEntry entry, byte[] data) {
         ClassReader reader = new ClassReader(data);
-        if (sShouldCheckClassPath) {
-            sValidator.validateClassPathsAndOutput(reader, sDirectClassPathClassLoader,
-                    sFullClassPathClassLoader, sFullClassPathJarPaths, sIsPrebuilt, sVerbose,
-                    sMissingClassesAllowlist);
-        }
-
-        ClassWriter writer = new ClassWriter(reader, 0);
-        ClassVisitor chain = writer;
-        /* DEBUGGING:
-         To see objectweb.asm code that will generate bytecode for a given class:
-
-         java -cp
-         "third_party/android_deps/libs/org_ow2_asm_asm/asm-7.0.jar:third_party/android_deps/libs/org_ow2_asm_asm_util/asm-util-7.0.jar:out/Debug/lib.java/jar_containing_yourclass.jar"
-         org.objectweb.asm.util.ASMifier org.package.YourClassName
-
-         See this pdf for more details: https://asm.ow2.io/asm4-guide.pdf
-
-         To see the bytecode for a specific class, uncomment this code with your class name:
-
-         if (entry.getName().contains("YOUR_CLASS_NAME")) {
-             chain = new TraceClassVisitor(chain, new PrintWriter(System.out));
+        if (sIsPrebuilt) {
+            sValidator.validateFullClassPath(
+                    reader, sFullClassPathClassLoader, sMissingClassesAllowlist);
+        } else {
+            sValidator.validateDirectClassPath(reader, sDirectClassPathClassLoader,
+                    sFullClassPathClassLoader, sFullClassPathJarPaths, sMissingClassesAllowlist,
+                    sVerbose);
         }
-        */
-        if (sShouldUseThreadAnnotations) {
-            chain = new ThreadAssertionClassAdapter(chain);
-        }
-        reader.accept(chain, 0);
-        byte[] patchedByteCode = writer.toByteArray();
-        return EntryDataPair.create(entry.getName(), patchedByteCode);
+        return null;
     }

-    private static void process(String inputJarPath, String outputJarPath)
-            throws ClassPathValidator.ClassNotLoadedException, ExecutionException,
-            InterruptedException {
-        String tempJarPath = outputJarPath + TEMPORARY_FILE_SUFFIX;
+    private static void process(String gnTarget, String inputJarPath)
+            throws ExecutionException, InterruptedException {
         ExecutorService executorService =
                 Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
         try (ZipInputStream inputStream = new ZipInputStream(
-                     new BufferedInputStream(new FileInputStream(inputJarPath)));
-                ZipOutputStream tempStream = new ZipOutputStream(
-                        new BufferedOutputStream(new FileOutputStream(tempJarPath)))) {
-            List<Future<EntryDataPair>> list = new ArrayList<>();
+                     new BufferedInputStream(new FileInputStream(inputJarPath)))) {
             while (true) {
                 ZipEntry entry = inputStream.getNextEntry();
                 if (entry == null) {
                     break;
                 }
                 byte[] data = readAllBytes(inputStream);
-                list.add(executorService.submit(() -> processEntry(entry, data)));
+                executorService.submit(() -> processEntry(entry, data));
             }
             executorService.shutdown(); // This is essential in order to avoid waiting infinitely.
-            // Write the zip file entries in order to preserve determinism.
-            for (Future<EntryDataPair> futurePair : list) {
-                EntryDataPair pair = futurePair.get();
-                tempStream.putNextEntry(pair.mEntry);
-                tempStream.write(pair.mData);
-                tempStream.closeEntry();
-            }
+            executorService.awaitTermination(1, TimeUnit.HOURS);
         } catch (IOException e) {
             throw new RuntimeException(e);
         }

-        try {
-            Path src = Paths.get(tempJarPath);
-            Path dest = Paths.get(outputJarPath);
-            Files.move(src, dest, StandardCopyOption.REPLACE_EXISTING);
-        } catch (IOException ioException) {
-            throw new RuntimeException(ioException);
-        }
-
         if (sValidator.hasErrors()) {
-            System.err.println("Direct classpath is incomplete. To fix, add deps on the "
-                    + "GN target(s) that provide:");
-            for (Map.Entry<String, Map<String, Set<String>>> entry :
-                    sValidator.getErrors().entrySet()) {
-                printValidationError(System.err, entry.getKey(), entry.getValue());
-            }
+            sValidator.printAll(gnTarget, sJarToGnTarget);
             System.exit(1);
         }
     }

-    private static void printValidationError(
-            PrintStream out, String jarName, Map<String, Set<String>> missingClasses) {
-        out.print(" * ");
-        out.println(jarName);
-        int i = 0;
-        final int numErrorsPerJar = 2;
-        // The list of missing classes is non-exhaustive because each class that fails to validate
-        // reports only the first missing class.
-        for (Map.Entry<String, Set<String>> entry : missingClasses.entrySet()) {
-            String missingClass = entry.getKey();
-            Set<String> filesThatNeededIt = entry.getValue();
-            out.print("     * ");
-            if (i == numErrorsPerJar) {
-                out.print(String.format("And %d more...", missingClasses.size() - numErrorsPerJar));
-                break;
-            }
-            out.print(missingClass.replace('/', '.'));
-            out.print(" (needed by ");
-            out.print(filesThatNeededIt.iterator().next().replace('/', '.'));
-            if (filesThatNeededIt.size() > 1) {
-                out.print(String.format(" and %d more", filesThatNeededIt.size() - 1));
-            }
-            out.println(")");
-            i++;
-        }
-    }
-
     private static byte[] readAllBytes(InputStream inputStream) throws IOException {
         ByteArrayOutputStream buffer = new ByteArrayOutputStream();
         int numRead = 0;
@@ -235,12 +125,10 @@ class ByteCodeProcessor {
             ExecutionException, InterruptedException {
         // Invoke this script using //build/android/gyp/bytecode_processor.py
         int currIndex = 0;
+        String gnTarget = args[currIndex++];
         String inputJarPath = args[currIndex++];
-        String outputJarPath = args[currIndex++];
         sVerbose = args[currIndex++].equals("--verbose");
         sIsPrebuilt = args[currIndex++].equals("--is-prebuilt");
-        sShouldUseThreadAnnotations = args[currIndex++].equals("--enable-thread-annotations");
-        sShouldCheckClassPath = args[currIndex++].equals("--enable-check-class-path");

         sMissingClassesAllowlist = new HashSet<>();
         currIndex = parseListArgument(args, currIndex, sMissingClassesAllowlist);
@@ -254,19 +142,26 @@ class ByteCodeProcessor {
         currIndex = parseListArgument(args, currIndex, directClassPathJarPaths);
         sDirectClassPathClassLoader = loadJars(directClassPathJarPaths);

+        ArrayList<String> fullClassPathJarPaths = new ArrayList<>();
+        currIndex = parseListArgument(args, currIndex, fullClassPathJarPaths);
+        ArrayList<String> gnTargets = new ArrayList<>();
+        parseListArgument(args, currIndex, gnTargets);
+        sJarToGnTarget = new HashMap<>();
+        assert fullClassPathJarPaths.size() == gnTargets.size();
+        for (int i = 0; i < fullClassPathJarPaths.size(); ++i) {
+            sJarToGnTarget.put(fullClassPathJarPaths.get(i), gnTargets.get(i));
+        }
+
         // Load all jars that are on the classpath for the input jar for analyzing class
         // hierarchy.
         sFullClassPathJarPaths = new HashSet<>();
-        sFullClassPathJarPaths.clear();
         sFullClassPathJarPaths.add(inputJarPath);
         sFullClassPathJarPaths.addAll(sdkJarPaths);
-        sFullClassPathJarPaths.addAll(
-                Arrays.asList(Arrays.copyOfRange(args, currIndex, args.length)));
-
+        sFullClassPathJarPaths.addAll(fullClassPathJarPaths);
         sFullClassPathClassLoader = loadJars(sFullClassPathJarPaths);
         sFullClassPathJarPaths.removeAll(directClassPathJarPaths);

         sValidator = new ClassPathValidator();
-        process(inputJarPath, outputJarPath);
+        process(gnTarget, inputJarPath);
     }
 }
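The Python wrapper and this Java tool exchange variable-length lists by prefixing each list with its element count (parseListArgument above; the matching "cmd += [str(len(...))] + items" code appears in bytecode_processor.py further down). A self-contained sketch of both sides of that convention:

def encode_list_arg(items):
  # Mirrors the "cmd += [str(len(...))] + items" pattern.
  return [str(len(items))] + list(items)

def parse_list_arg(args, index):
  count = int(args[index])
  start = index + 1
  return args[start:start + count], start + count

cmd = encode_list_arg(['a.jar', 'b.jar']) + encode_list_arg(['//foo:bar'])
jars, index = parse_list_arg(cmd, 0)
targets, _ = parse_list_arg(cmd, index)
assert jars == ['a.jar', 'b.jar'] and targets == ['//foo:bar']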
diff --git a/chromium/build/android/bytecode/java/org/chromium/bytecode/ClassPathValidator.java b/chromium/build/android/bytecode/java/org/chromium/bytecode/ClassPathValidator.java
index f3ed501873d..ce1803fca8a 100644
--- a/chromium/build/android/bytecode/java/org/chromium/bytecode/ClassPathValidator.java
+++ b/chromium/build/android/bytecode/java/org/chromium/bytecode/ClassPathValidator.java
@@ -6,12 +6,14 @@ package org.chromium.bytecode;

 import org.objectweb.asm.ClassReader;

+import java.io.PrintStream;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.Map;
 import java.util.Set;
 import java.util.TreeMap;
 import java.util.TreeSet;
+import java.util.function.Consumer;

 /**
  * Checks classpaths (given as ClassLoaders) by reading the constant pool of the class file and
@@ -20,9 +22,18 @@ import java.util.TreeSet;
  * can't find the class with any given classpath.
  */
 public class ClassPathValidator {
+    // Number of warnings to print.
+    private static final int MAX_MISSING_CLASS_WARNINGS = 4;
+    // Number of missing classes to show per missing jar.
+    private static final int MAX_ERRORS_PER_JAR = 2;
     // Map of missing .jar -> Missing class -> Classes that failed.
     // TreeMap so that error messages have sorted list of jars.
-    private final Map<String, Map<String, Set<String>>> mErrors = new TreeMap<>();
+    private final Map<String, Map<String, Set<String>>> mDirectErrors =
+            Collections.synchronizedMap(new TreeMap<>());
+    // Missing classes; we only track the first one for each jar.
+    // Map of missingClass -> srcClass.
+    private final Map<String, String> mMissingClasses =
+            Collections.synchronizedMap(new TreeMap<>());

     static class ClassNotLoadedException extends ClassNotFoundException {
         private final String mClassName;
@@ -37,17 +48,6 @@ public class ClassPathValidator {
         }
     }

-    private static void printAndQuit(ClassNotLoadedException e, ClassReader classReader,
-            boolean verbose) throws ClassNotLoadedException {
-        System.err.println("Class \"" + e.getClassName()
-                + "\" not found on any classpath. Used by class \"" + classReader.getClassName()
-                + "\"");
-        if (verbose) {
-            throw e;
-        }
-        System.exit(1);
-    }
-
     private static void validateClass(ClassLoader classLoader, String className)
             throws ClassNotLoadedException {
         if (className.startsWith("[")) {
@@ -87,10 +87,10 @@ public class ClassPathValidator {
      *
      * @param classReader .class file interface for reading the constant pool.
      * @param classLoader classpath you wish to validate.
-     * @throws ClassNotLoadedException thrown if it can't load a certain class.
+     * @param errorConsumer Called for each missing class.
      */
-    private static void validateClassPath(ClassReader classReader, ClassLoader classLoader)
-            throws ClassNotLoadedException {
+    private static void validateClassPath(ClassReader classReader, ClassLoader classLoader,
+            Consumer<ClassNotLoadedException> errorConsumer) {
         char[] charBuffer = new char[classReader.getMaxStringLength()];
         // According to the Java spec, the constant pool is indexed from 1 to constant_pool_count -
         // 1. See https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.4
@@ -99,73 +99,135 @@ public class ClassPathValidator {
             // Class entries correspond to 7 in the constant pool
             // https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.4
             if (offset > 0 && classReader.readByte(offset - 1) == 7) {
-                validateClass(classLoader, classReader.readUTF8(offset, charBuffer));
+                try {
+                    validateClass(classLoader, classReader.readUTF8(offset, charBuffer));
+                } catch (ClassNotLoadedException e) {
+                    errorConsumer.accept(e);
+                }
             }
         }
     }

-    public void validateClassPathsAndOutput(ClassReader classReader,
-            ClassLoader directClassPathClassLoader, ClassLoader fullClassPathClassLoader,
-            Collection<String> jarsOnlyInFullClassPath, boolean isPrebuilt, boolean verbose,
-            Set<String> missingClassAllowlist) throws ClassNotLoadedException {
-        if (isPrebuilt) {
-            // Prebuilts only need transitive dependencies checked, not direct dependencies.
+    public void validateFullClassPath(ClassReader classReader, ClassLoader fullClassLoader,
+            Set<String> missingClassAllowlist) {
+        // Prebuilts only need transitive dependencies checked, not direct dependencies.
+        validateClassPath(classReader, fullClassLoader, (e) -> {
+            if (!missingClassAllowlist.contains(e.getClassName())) {
+                addMissingError(classReader.getClassName(), e.getClassName());
+            }
+        });
+    }
+
+    public void validateDirectClassPath(ClassReader classReader, ClassLoader directClassLoader,
+            ClassLoader fullClassLoader, Collection<String> jarsOnlyInFullClassPath,
+            Set<String> missingClassAllowlist, boolean verbose) {
+        validateClassPath(classReader, directClassLoader, (e) -> {
             try {
-                validateClassPath(classReader, fullClassPathClassLoader);
-            } catch (ClassNotLoadedException e) {
+                validateClass(fullClassLoader, e.getClassName());
+            } catch (ClassNotLoadedException d) {
                 if (!missingClassAllowlist.contains(e.getClassName())) {
-                    printAndQuit(e, classReader, verbose);
+                    addMissingError(classReader.getClassName(), e.getClassName());
                 }
+                return;
             }
-        } else {
-            try {
-                validateClassPath(classReader, directClassPathClassLoader);
-            } catch (ClassNotLoadedException e) {
+            if (verbose) {
+                System.err.println("Class \"" + e.getClassName()
+                        + "\" not found in direct dependencies,"
+                        + " but found in indirect dependencies.");
+            }
+            // Iterating through all jars that are in the full classpath but not the direct
+            // classpath to find which one provides the class we are looking for.
+            for (String jarPath : jarsOnlyInFullClassPath) {
                 try {
-                    validateClass(fullClassPathClassLoader, e.getClassName());
-                } catch (ClassNotLoadedException d) {
-                    if (!missingClassAllowlist.contains(d.getClassName())) {
-                        printAndQuit(d, classReader, verbose);
-                    }
-                }
-                if (verbose) {
-                    System.err.println("Class \"" + e.getClassName()
-                            + "\" not found in direct dependencies,"
-                            + " but found in indirect dependiences.");
-                }
-                // Iterating through all jars that are in the full classpath but not the direct
-                // classpath to find which one provides the class we are looking for.
-                for (String jarPath : jarsOnlyInFullClassPath) {
-                    try {
-                        ClassLoader smallLoader =
-                                ByteCodeProcessor.loadJars(Collections.singletonList(jarPath));
-                        validateClass(smallLoader, e.getClassName());
-                        Map<String, Set<String>> failedClassesByMissingClass = mErrors.get(jarPath);
-                        if (failedClassesByMissingClass == null) {
-                            // TreeMap so that error messages have sorted list of classes.
-                            failedClassesByMissingClass = new TreeMap<>();
-                            mErrors.put(jarPath, failedClassesByMissingClass);
-                        }
-                        Set<String> failedClasses =
-                                failedClassesByMissingClass.get(e.getClassName());
-                        if (failedClasses == null) {
-                            failedClasses = new TreeSet<>();
-                            failedClassesByMissingClass.put(e.getClassName(), failedClasses);
-                        }
-                        failedClasses.add(classReader.getClassName());
-                        break;
-                    } catch (ClassNotLoadedException f) {
-                    }
+                    ClassLoader smallLoader =
+                            ByteCodeProcessor.loadJars(Collections.singletonList(jarPath));
+                    validateClass(smallLoader, e.getClassName());
+                    addDirectError(jarPath, classReader.getClassName(), e.getClassName());
+                    break;
+                } catch (ClassNotLoadedException f) {
                 }
             }
-        }
+        });
+    }
+
+    private void addMissingError(String srcClass, String missingClass) {
+        mMissingClasses.put(missingClass, srcClass);
     }

-    public Map<String, Map<String, Set<String>>> getErrors() {
-        return mErrors;
+    private void addDirectError(String jarPath, String srcClass, String missingClass) {
+        synchronized (mDirectErrors) {
+            Map<String, Set<String>> failedClassesByMissingClass = mDirectErrors.get(jarPath);
+            if (failedClassesByMissingClass == null) {
+                // TreeMap so that error messages have sorted list of classes.
+                failedClassesByMissingClass = new TreeMap<>();
+                mDirectErrors.put(jarPath, failedClassesByMissingClass);
+            }
+            Set<String> failedClasses = failedClassesByMissingClass.get(missingClass);
+            if (failedClasses == null) {
+                failedClasses = new TreeSet<>();
+                failedClassesByMissingClass.put(missingClass, failedClasses);
+            }
+            failedClasses.add(srcClass);
+        }
     }

     public boolean hasErrors() {
-        return !mErrors.isEmpty();
+        return !mDirectErrors.isEmpty() || !mMissingClasses.isEmpty();
+    }
+
+    private static void printValidationError(
+            PrintStream out, String gnTarget, Map<String, Set<String>> missingClasses) {
+        out.print(" * ");
+        out.println(gnTarget);
+        int i = 0;
+        // The list of missing classes is non-exhaustive because each class that fails to validate
+        // reports only the first missing class.
+        for (Map.Entry<String, Set<String>> entry : missingClasses.entrySet()) {
+            String missingClass = entry.getKey();
+            Set<String> filesThatNeededIt = entry.getValue();
+            out.print("     * ");
+            if (i == MAX_ERRORS_PER_JAR) {
+                out.print(String.format(
+                        "And %d more...", missingClasses.size() - MAX_ERRORS_PER_JAR));
+                break;
+            }
+            out.print(missingClass.replace('/', '.'));
+            out.print(" (needed by ");
+            out.print(filesThatNeededIt.iterator().next().replace('/', '.'));
+            if (filesThatNeededIt.size() > 1) {
+                out.print(String.format(" and %d more", filesThatNeededIt.size() - 1));
+            }
+            out.println(")");
+            i++;
+        }
+    }
+
+    public void printAll(String gnTarget, Map<String, String> jarToGnTarget) {
+        String streamer = "=============================";
+        System.err.println();
+        System.err.println(streamer + " Dependency Checks Failed " + streamer);
+        System.err.println("Target: " + gnTarget);
+        if (!mMissingClasses.isEmpty()) {
+            int i = 0;
+            for (Map.Entry<String, String> entry : mMissingClasses.entrySet()) {
+                if (++i > MAX_MISSING_CLASS_WARNINGS) {
+                    System.err.println(String.format("... and %d more.",
+                            mMissingClasses.size() - MAX_MISSING_CLASS_WARNINGS));
+                    break;
+                }
+                System.err.println(String.format(
+                        "Class \"%s\" not found on any classpath. Used by class \"%s\"",
+                        entry.getKey(), entry.getValue()));
+            }
+            System.err.println();
+        }
+        if (!mDirectErrors.isEmpty()) {
+            System.err.println("Direct classpath is incomplete. To fix, add deps on:");
+            for (Map.Entry<String, Map<String, Set<String>>> entry : mDirectErrors.entrySet()) {
+                printValidationError(
+                        System.err, jarToGnTarget.get(entry.getKey()), entry.getValue());
+            }
+            System.err.println();
+        }
     }
 }
diff --git a/chromium/build/android/bytecode/java/org/chromium/bytecode/ThreadAssertionClassAdapter.java b/chromium/build/android/bytecode/java/org/chromium/bytecode/ThreadAssertionClassAdapter.java
deleted file mode 100644
index 0feae07aabf..00000000000
--- a/chromium/build/android/bytecode/java/org/chromium/bytecode/ThreadAssertionClassAdapter.java
+++ /dev/null
@@ -1,83 +0,0 @@
-// Copyright 2018 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package org.chromium.bytecode;
-
-import static org.objectweb.asm.Opcodes.ASM7;
-import static org.objectweb.asm.Opcodes.INVOKESTATIC;
-
-import org.objectweb.asm.AnnotationVisitor;
-import org.objectweb.asm.ClassVisitor;
-import org.objectweb.asm.MethodVisitor;
-
-/**
- * A ClassVisitor which adds calls to
- * {@link org.chromium.base.ThreadUtils}'s assertOnUiThread/assertOnBackgroundThread when the
- * corresponding {@link androidx.annotation.UiThread} or
- * {@link androidx.annotation.WorkerThread} annotations are present. The function calls
- * are placed at the start of the method.
- */
-class ThreadAssertionClassAdapter extends ClassVisitor {
-    private static final String THREAD_UTILS_DESCRIPTOR = "org/chromium/base/ThreadUtils";
-    private static final String THREAD_UTILS_SIGNATURE = "()V";
-    private static final String UI_THREAD_ANNOTATION_DESCRIPTOR =
-            "Landroid/support/annotation/UiThread;";
-    private static final String WORKER_THREAD_ANNOTATION_DESCRIPTOR =
-            "Landroid/support/annotation/WorkerThread;";
-
-    ThreadAssertionClassAdapter(ClassVisitor visitor) {
-        super(ASM7, visitor);
-    }
-
-    @Override
-    public MethodVisitor visitMethod(final int access, final String name, String desc,
-            String signature, String[] exceptions) {
-        return new AddAssertMethodVisitor(
-                super.visitMethod(access, name, desc, signature, exceptions));
-    }
-
-    private static class AddAssertMethodVisitor extends MethodVisitor {
-        String mAssertMethodName = "";
-
-        AddAssertMethodVisitor(MethodVisitor mv) {
-            super(ASM7, mv);
-        }
-
-        /**
-         * Call for annotations on the method. Checks if the annotation is @UiThread
-         * or @WorkerThread, and if so will set the mAssertMethodName property to the name of the
-         * method to call in order to assert that a method is running on the intented thread.
-         *
-         * @param descriptor Annotation descriptor containing its name and package.
-         */
-        @Override
-        public AnnotationVisitor visitAnnotation(String descriptor, boolean visible) {
-            switch (descriptor) {
-                case UI_THREAD_ANNOTATION_DESCRIPTOR:
-                    mAssertMethodName = "assertOnUiThread";
-                    break;
-                case WORKER_THREAD_ANNOTATION_DESCRIPTOR:
-                    mAssertMethodName = "assertOnBackgroundThread";
-                    break;
-                default:
-                    break;
-            }
-
-            return super.visitAnnotation(descriptor, visible);
-        }
-
-        /**
-         * Called to start visiting code. Will also insert the assertOnXThread methods at the start
-         * of the method if needed.
-         */
-        @Override
-        public void visitCode() {
-            super.visitCode();
-            if (!mAssertMethodName.equals("")) {
-                visitMethodInsn(INVOKESTATIC, THREAD_UTILS_DESCRIPTOR, mAssertMethodName,
-                        THREAD_UTILS_SIGNATURE, false);
-            }
-        }
-    }
-}
\ No newline at end of file
diff --git a/chromium/build/android/devil_chromium.json b/chromium/build/android/devil_chromium.json
index 6727072c8d4..5d66730ae8b 100644
--- a/chromium/build/android/devil_chromium.json
+++ b/chromium/build/android/devil_chromium.json
@@ -55,25 +55,6 @@
       }
     }
   },
-  "pymock": {
-    "file_info": {
-      "darwin_x86_64": {
-        "local_paths": [
-          "../../third_party/pymock"
-        ]
-      },
-      "linux2_x86_64": {
-        "local_paths": [
-          "../../third_party/pymock"
-        ]
-      },
-      "win32_AMD64": {
-        "local_paths": [
-          "../../third_party/pymock"
-        ]
-      }
-    }
-  },
   "simpleperf": {
     "file_info": {
       "android_armeabi-v7a": {
diff --git a/chromium/build/android/devil_chromium.py b/chromium/build/android/devil_chromium.py
index 6a6def6afc9..1cd5a87154d 100644
--- a/chromium/build/android/devil_chromium.py
+++ b/chromium/build/android/devil_chromium.py
@@ -113,7 +113,6 @@ def Initialize(output_directory=None, custom_deps=None, adb_path=None):
   This includes:
     - Libraries:
       - the android SDK ("android_sdk")
-      - pymock ("pymock")
     - Build products:
       - host & device forwarder binaries
        ("forwarder_device" and "forwarder_host")
diff --git a/chromium/build/android/docs/android_app_bundles.md b/chromium/build/android/docs/android_app_bundles.md
index 8934477195a..1edcba4a94a 100644
--- a/chromium/build/android/docs/android_app_bundles.md
+++ b/chromium/build/android/docs/android_app_bundles.md
@@ -167,8 +167,8 @@ performed, which consists of the following steps:
 This synchronized proguarding step is added by the `android_app_bundle()` GN
 template. In practice this means the following:

-  - If `proguard_enabled` and `proguard_jar_path` must be passed to
-    `android_app_bundle` targets, but not to `android_app_bundle_module` ones.
+  - `proguard_enabled` must be passed to `android_app_bundle` targets, but not
+    to `android_app_bundle_module` ones.

   - `proguard_configs` can be still passed to individual modules, just like
     regular APKs. All proguard configs will be merged during the
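With the pymock dependency gone from the devil config, initialization is correspondingly simpler. Typical usage of the Initialize() entry point shown above, as a hedged example (assumes a Chromium checkout with devil importable; the output directory name is an assumption):

import devil_chromium
from devil.android import device_utils

# Configure devil to use the binaries built into out/Default.
devil_chromium.Initialize(output_directory='out/Default')
devices = device_utils.DeviceUtils.HealthyDevices()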
diff --git a/chromium/build/android/docs/java_toolchain.md b/chromium/build/android/docs/java_toolchain.md
index f19c9b383fc..9829f8d395c 100644
--- a/chromium/build/android/docs/java_toolchain.md
+++ b/chromium/build/android/docs/java_toolchain.md
@@ -20,6 +20,12 @@ also have a default `jar_excluded_patterns` set (more on that later):
 All target names must end with "_java" so that the build system can distinguish
 them from non-java targets (or [other variations](https://cs.chromium.org/chromium/src/build/config/android/internal_rules.gni?rcl=ec2c17d7b4e424e060c3c7972842af87343526a1&l=20)).

+Most targets produce two separate `.jar` files:
+* Device `.jar`: Used to produce `.dex.jar`, which is used on-device.
+* Host `.jar`: For use on the host machine (`junit_binary` / `java_binary`).
+  * Host `.jar` files live in `lib.java/` so that they are archived in
+    builder/tester bots (which do not archive `obj/`).
+
 ## From Source to Final Dex

 ### Step 1: Create interface .jar with turbine or ijar
@@ -37,11 +43,6 @@ What are interface jars?:
    removed.
 * Dependant targets use interface `.jar` files to skip having to be rebuilt
   when only private implementation details change.
-  * To accomplish this behavior, library targets list only their
-    interface `.jar` files as outputs. Ninja's `restat=1` feature then causes
-    dependent targets to be rebuilt only when the interface `.jar` changes.
-    Final dex targets are always rebuilt because they depend on the
-    non-interface `.jar` through a `depfile`.

 [//third_party/ijar]: /third_party/ijar/README.chromium
 [//third_party/turbine]: /third_party/turbine/README.chromium
@@ -77,20 +78,23 @@ This step can be disabled via GN arg: `use_errorprone_java_compiler = false`
 [ErrorProne]: https://errorprone.info/
 [ep_plugins]: /tools/android/errorprone_plugin/

-### Step 3: Bytecode Processing
-
-* `//build/android/bytecode` runs on the compiled `.jar` in order to:
-  * Enable Java assertions (when dcheck is enabled).
-  * Assert that libraries have properly declared `deps`.
-
-### Step 4: Desugaring
+### Step 3: Desugaring (Device .jar Only)

-This step happens only when targets have `supports_android = true`.
+This step happens only when targets have `supports_android = true`. It is not
+applied to `.jar` files used by `junit_binary`.

 * `//third_party/bazel/desugar` converts certain Java 8 constructs, such as
   lambdas and default interface methods, into constructs that are compatible
   with Java 7.

+### Step 4: Instrumenting (Device .jar Only)
+
+This step happens only when this GN arg is set: `use_jacoco_coverage = true`
+
+* [Jacoco] adds instrumentation hooks to methods.
+
+[Jacoco]: https://www.eclemma.org/jacoco/
+
 ### Step 5: Filtering

 This step happens only when targets that have `jar_excluded_patterns` or
@@ -108,27 +112,12 @@ This step happens only when targets that have `jar_excluded_patterns` or
 [Android Resources]: life_of_a_resource.md
 [apphooks]: /chrome/android/java/src/org/chromium/chrome/browser/AppHooksImpl.java

-### Step 6: Instrumentation
-
-This step happens only when this GN arg is set: `use_jacoco_coverage = true`
-
-* [Jacoco] adds instrumentation hooks to methods.
-
-[Jacoco]: https://www.eclemma.org/jacoco/
-
-### Step 7: Copy to lib.java
-
-* The `.jar` is copied into `$root_build_dir/lib.java` (under target-specific
-  subdirectories) so that it will be included by bot archive steps.
-  * These `.jar` files are the ones used when running `java_binary` and
-    `junit_binary` targets.
-
-### Step 8: Per-Library Dexing
+### Step 6: Per-Library Dexing

 This step happens only when targets have `supports_android = true`.

 * [d8] converts `.jar` files containing `.class` files into `.dex.jar` files
-  containing `.dex` files.
+  containing `classes.dex` files.
 * Dexing is incremental - it will reuse dex'ed classes from a previous build if
   the corresponding `.class` file is unchanged.
 * These per-library `.dex.jar` files are used directly by [incremental install],
@@ -139,7 +128,7 @@ This step happens only when targets have `supports_android = true`.
 [d8]: https://developer.android.com/studio/command-line/d8
 [incremental install]: /build/android/incremental_install/README.md

-### Step 9: Apk / Bundle Module Compile
+### Step 7: Apk / Bundle Module Compile

 * Each `android_apk` and `android_bundle_module` template has a nested
   `java_library` target. The nested library includes final copies of files
@@ -150,7 +139,7 @@ This step happens only when targets have `supports_android = true`.

 [JNI glue]: /base/android/jni_generator/README.md

-### Step 10: Final Dexing
+### Step 8: Final Dexing

 This step is skipped when building using [Incremental Install].

@@ -160,19 +149,11 @@ When `is_java_debug = true`:
 When `is_java_debug = false`:
 * [R8] performs whole-program optimization on all library `lib.java` `.jar`
   files and outputs a final `.r8dex.jar`.
-  * For App Bundles, R8 creates a single `.r8dex.jar` with the code from all
-    modules.
+  * For App Bundles, R8 creates a `.r8dex.jar` for each module.

 [Incremental Install]: /build/android/incremental_install/README.md
 [R8]: https://r8.googlesource.com/r8

-### Step 11: Bundle Module Dex Splitting
-
-This step happens only when `is_java_debug = false`.
-
-* [dexsplitter.py] splits the single `*dex.jar` into per-module `*dex.jar`
-  files.
-
 ## Test APKs with apk_under_test

 Test APKs are normal APKs that contain an `<instrumentation>` tag within their
@@ -266,8 +247,7 @@ We use several tools for static analysis.

 [lint_plugins]: http://tools.android.com/tips/lint-custom-rules

-### [Bytecode Rewriter](/build/android/bytecode/)
-* Runs as part of normal compilation.
+### [Bytecode Processor](/build/android/bytecode/)
 * Performs a single check:
   * That target `deps` are not missing any entries.
   * In other words: Enforces that targets do not rely on indirect dependencies
diff --git a/chromium/build/android/emma_coverage_stats_test.py b/chromium/build/android/emma_coverage_stats_test.py
index 44f6dc3586a..d67f6be2180 100755
--- a/chromium/build/android/emma_coverage_stats_test.py
+++ b/chromium/build/android/emma_coverage_stats_test.py
@@ -9,10 +9,8 @@ import unittest
 from xml.etree import ElementTree

 import emma_coverage_stats
-from pylib.constants import host_paths

-with host_paths.SysPath(host_paths.PYMOCK_PATH):
-  import mock  # pylint: disable=import-error
+import mock  # pylint: disable=import-error

 EMPTY_COVERAGE_STATS_DICT = {
   'files': {},
diff --git a/chromium/build/android/gradle/OWNERS b/chromium/build/android/gradle/OWNERS
index d1f94845f4d..a0e08269724 100644
--- a/chromium/build/android/gradle/OWNERS
+++ b/chromium/build/android/gradle/OWNERS
@@ -1,4 +1,2 @@
 agrieve@chromium.org
 wnwen@chromium.org
-
-# COMPONENT: Build
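After this change the test imports mock directly rather than routing through host_paths.SysPath(PYMOCK_PATH). A minimal sketch of the direct-import style (assumes the mock package is available on sys.path, e.g. via vpython):

import unittest
import mock  # pylint: disable=import-error

class ExampleTest(unittest.TestCase):
  def testPatchedExists(self):
    import os.path
    # mock.patch temporarily replaces os.path.exists for the duration of the block.
    with mock.patch('os.path.exists', return_value=True):
      self.assertTrue(os.path.exists('/definitely/not/there'))

if __name__ == '__main__':
  unittest.main()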
diff --git a/chromium/build/android/gradle/generate_gradle.py b/chromium/build/android/gradle/generate_gradle.py
index 5501aa984d7..85b24410eaa 100755
--- a/chromium/build/android/gradle/generate_gradle.py
+++ b/chromium/build/android/gradle/generate_gradle.py
@@ -16,7 +16,6 @@ import re
 import shutil
 import subprocess
 import sys
-import zipfile

 _BUILD_ANDROID = os.path.join(os.path.dirname(__file__), os.pardir)
 sys.path.append(_BUILD_ANDROID)
@@ -28,6 +27,7 @@ from pylib.constants import host_paths
 sys.path.append(os.path.join(_BUILD_ANDROID, 'gyp'))
 import jinja_template
 from util import build_utils
+from util import resource_utils

 _DEPOT_TOOLS_PATH = os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party',
                                  'depot_tools')
@@ -38,7 +38,6 @@ _FILE_DIR = os.path.dirname(__file__)
 _GENERATED_JAVA_SUBDIR = 'generated_java'
 _JNI_LIBS_SUBDIR = 'symlinked-libs'
 _ARMEABI_SUBDIR = 'armeabi'
-_RES_SUBDIR = 'extracted-res'
 _GRADLE_BUILD_FILE = 'build.gradle'
 _CMAKE_FILE = 'CMakeLists.txt'
 # This needs to come first alphabetically among all modules.
@@ -255,11 +254,8 @@ class _ProjectEntry(object):
         'junit_binary',
     )

-  def ResZips(self):
-    return self.DepsInfo().get('owned_resources_zips', [])
-
-  def ResDirs(self):
-    return self.DepsInfo().get('owned_resources_dirs', [])
+  def ResSources(self):
+    return self.DepsInfo().get('lint_resource_sources', [])

   def JavaFiles(self):
     if self._java_files is None:
@@ -360,24 +356,12 @@ class _ProjectContextGenerator(object):
   def EntryOutputDir(self, entry):
     return os.path.join(self.project_dir, entry.GradleSubdir())

-  def AllResZips(self, root_entry):
-    res_zips = []
-    for entry in self._GetEntries(root_entry):
-      res_zips += entry.ResZips()
-    return set(_RebasePath(res_zips))
-
   def GeneratedInputs(self, root_entry):
     generated_inputs = set()
-    generated_inputs.update(self.AllResZips(root_entry))
     for entry in self._GetEntries(root_entry):
       generated_inputs.update(entry.PrebuiltJars())
     return generated_inputs

-  def GeneratedZips(self, root_entry):
-    entry_output_dir = self.EntryOutputDir(root_entry)
-    return [(s, os.path.join(entry_output_dir, _RES_SUBDIR))
-            for s in self.AllResZips(root_entry)]
-
   def GenerateManifest(self, root_entry):
     android_manifest = root_entry.DepsInfo().get('android_manifest')
     if not android_manifest:
@@ -401,13 +385,15 @@ class _ProjectContextGenerator(object):
         p for e in self._GetEntries(root_entry) for p in e.PrebuiltJars())
     self.processed_prebuilts.update(prebuilts)
     variables['prebuilts'] = self._Relativize(root_entry, prebuilts)
-    res_dirs = set(
-        p for e in self._GetEntries(root_entry) for p in e.ResDirs())
+    res_sources_files = _RebasePath(
+        set(p for e in self._GetEntries(root_entry) for p in e.ResSources()))
+    res_sources = []
+    for res_sources_file in res_sources_files:
+      res_sources.extend(build_utils.ReadSourcesList(res_sources_file))
+    res_dirs = resource_utils.DeduceResourceDirsFromFileList(res_sources)
     # Do not add generated resources for the all module since it creates many
     # duplicates, and currently resources are only used for editing.
     self.processed_res_dirs.update(res_dirs)
-    res_dirs.add(
-        os.path.join(self.EntryOutputDir(root_entry), _RES_SUBDIR))
     variables['res_dirs'] = self._Relativize(root_entry, res_dirs)
     if self.split_projects:
       deps = [_ProjectEntry.FromBuildConfigPath(p)
@@ -527,11 +513,35 @@ def _CreateJniLibsDir(output_dir, entry_output_dir, so_files):


 def _GenerateLocalProperties(sdk_dir):
-  """Returns the data for project.properties as a string."""
+  """Returns the data for local.properties as a string."""
   return '\n'.join([
       '# Generated by //build/android/gradle/generate_gradle.py',
       'sdk.dir=%s' % sdk_dir,
-      ''])
+      '',
+  ])
+
+
+def _GenerateGradleWrapperPropertiesCanary():
+  """Returns the data for gradle-wrapper.properties as a string."""
+  # Before May 2020, this wasn't necessary. Might not be necessary at some
+  # point in the future?
+  return '\n'.join([
+      '# Generated by //build/android/gradle/generate_gradle.py',
+      ('distributionUrl=https\\://services.gradle.org/distributions/'
+       'gradle-6.5-rc-1-all.zip\n'),
+      '',
+  ])
+
+
+def _GenerateGradleProperties():
+  """Returns the data for gradle.properties as a string."""
+  return '\n'.join([
+      '# Generated by //build/android/gradle/generate_gradle.py',
+      '',
+      '# Tells Gradle to show warnings during project sync.',
+      'org.gradle.warning.mode=all',
+      '',
+  ])


 def _GenerateBaseVars(generator, build_vars):
@@ -692,23 +702,6 @@ def _GenerateSettingsGradle(project_entries):
   return '\n'.join(lines)


-def _ExtractFile(zip_path, extracted_path):
-  logging.debug('Extracting %s to %s', zip_path, extracted_path)
-  with zipfile.ZipFile(zip_path) as z:
-    z.extractall(extracted_path)
-
-
-def _ExtractZips(entry_output_dir, zip_tuples):
-  """Extracts all zips to the directory given in the tuples."""
-  extracted_paths = set(s[1] for s in zip_tuples)
-  for extracted_path in extracted_paths:
-    assert _IsSubpathOf(extracted_path, entry_output_dir)
-    shutil.rmtree(extracted_path, True)
-
-  for zip_path, extracted_path in zip_tuples:
-    _ExtractFile(zip_path, extracted_path)
-
-
 def _FindAllProjectEntries(main_entries):
   """Returns the list of all _ProjectEntry instances given the root project."""
   found = set()
@@ -930,8 +923,16 @@ def main():
   _WriteFile(
       os.path.join(generator.project_dir, 'local.properties'),
       _GenerateLocalProperties(args.sdk_path))
+  _WriteFile(os.path.join(generator.project_dir, 'gradle.properties'),
+             _GenerateGradleProperties())
+
+  wrapper_properties = os.path.join(generator.project_dir, 'gradle', 'wrapper',
+                                    'gradle-wrapper.properties')
+  if os.path.exists(wrapper_properties):
+    os.unlink(wrapper_properties)
+  if args.canary:
+    _WriteFile(wrapper_properties, _GenerateGradleWrapperPropertiesCanary())

-  zip_tuples = []
   generated_inputs = set()
   for entry in entries:
     entries_to_gen = [entry]
@@ -939,13 +940,9 @@ def main():
     for entry_to_gen in entries_to_gen:
       # Build all paths references by .gradle that exist within output_dir.
       generated_inputs.update(generator.GeneratedInputs(entry_to_gen))
-      zip_tuples.extend(generator.GeneratedZips(entry_to_gen))
   if generated_inputs:
     targets = _RebasePath(generated_inputs, output_dir)
     _RunNinja(output_dir, targets)
-  if zip_tuples:
-    # This extracts generated xml files (e.g. strings).
-    _ExtractZips(generator.project_dir, zip_tuples)

   logging.warning('Generated files will only appear once you\'ve built them.')
   logging.warning('Generated projects for Android Studio %s', channel)
diff --git a/chromium/build/android/gradle/root.jinja b/chromium/build/android/gradle/root.jinja
index c80292d1226..549dc245593 100644
--- a/chromium/build/android/gradle/root.jinja
+++ b/chromium/build/android/gradle/root.jinja
@@ -9,18 +9,18 @@ buildscript {
         jcenter()
 {% if channel == 'canary' %}
         // Workaround for http://b/144885480.
-        maven() {
-          url "http://dl.bintray.com/kotlin/kotlin-eap"
-        }
+        //maven() {
+        //  url "http://dl.bintray.com/kotlin/kotlin-eap"
+        //}
 {% endif %}
     }
     dependencies {
 {% if channel == 'canary' %}
-        classpath "com.android.tools.build:gradle:4.0.0-alpha04"
+        classpath "com.android.tools.build:gradle:4.1.0-beta01"
 {% elif channel == 'beta' %}
-        classpath "com.android.tools.build:gradle:3.1.0-beta4"
+        classpath "com.android.tools.build:gradle:4.0.0-rc01"
 {% else %}
-        classpath "com.android.tools.build:gradle:3.0.1"
+        classpath "com.android.tools.build:gradle:3.6.3"
 {% endif %}
     }
 }
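generate_gradle.py now derives resource directories from per-target sources lists instead of extracting resource zips. A hedged sketch of what resource_utils.DeduceResourceDirsFromFileList plausibly does (assumption: every resource file sits at <res_dir>/<resource_type>/<file>, so the res dir is two path components up):

import os

def deduce_resource_dirs(res_files):
  res_dirs = set()
  for path in res_files:
    # e.g. ui/android/java/res/values/strings.xml -> ui/android/java/res
    res_dirs.add(os.path.dirname(os.path.dirname(path)))
  return res_dirs

assert deduce_resource_dirs(['a/res/values/strings.xml']) == {'a/res'}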
diff --git a/chromium/build/android/gyp/OWNERS b/chromium/build/android/gyp/OWNERS
index 7defba6b1ae..25557e1fc55 100644
--- a/chromium/build/android/gyp/OWNERS
+++ b/chromium/build/android/gyp/OWNERS
@@ -2,5 +2,3 @@ agrieve@chromium.org
 digit@chromium.org
 smaier@chromium.org
 wnwen@chromium.org
-
-# COMPONENT: Build
diff --git a/chromium/build/android/gyp/aar.py b/chromium/build/android/gyp/aar.py
index 87f189014a1..ffd6cf8fa12 100755
--- a/chromium/build/android/gyp/aar.py
+++ b/chromium/build/android/gyp/aar.py
@@ -23,15 +23,30 @@ sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__),
 import gn_helpers

+# Regular expression to extract -checkdiscard / -check* lines.
+# Does not support nested comments with "}" in them (oh well).
+_CHECKDISCARD_PATTERN = re.compile(r'^\s*?-check.*?}\s*',
+                                   re.DOTALL | re.MULTILINE)
+
+_PROGUARD_TXT = 'proguard.txt'
+_PROGUARD_CHECKS_TXT = 'proguard-checks.txt'
+
+
 def _IsManifestEmpty(manifest_str):
-  """Returns whether the given manifest has merge-worthy elements.
+  """Decides whether the given manifest has merge-worthy elements.

   E.g.: <activity>, <service>, etc.
+
+  Args:
+    manifest_str: Content of a manifest XML.
+
+  Returns:
+    Whether the manifest has merge-worthy elements.
   """
   doc = ElementTree.fromstring(manifest_str)
   for node in doc:
     if node.tag == 'application':
-      if len(node):
+      if node.getchildren():
         return False
     elif node.tag != 'uses-sdk':
       return False
@@ -40,6 +55,14 @@


 def _CreateInfo(aar_file):
+  """Extracts and returns .info data from an .aar file.
+
+  Args:
+    aar_file: Path to an input .aar file.
+
+  Returns:
+    A dict containing .info data.
+  """
   data = {}
   data['aidl'] = []
   data['assets'] = []
@@ -76,16 +99,40 @@
         data['native_libraries'] = [name]
       elif name == 'classes.jar':
         data['has_classes_jar'] = True
-      elif name == 'proguard.txt':
+      elif name == _PROGUARD_TXT:
         data['has_proguard_flags'] = True
       elif name == 'R.txt':
         # Some AARs, e.g. gvr_controller_java, have empty R.txt. Such AARs
         # have no resources as well. We treat empty R.txt as having no R.txt.
-        data['has_r_text_file'] = (z.read('R.txt').strip() != '')
+        data['has_r_text_file'] = bool(z.read('R.txt').strip())
+
+  if data['has_proguard_flags']:
+    config = z.read(_PROGUARD_TXT)
+    if _CHECKDISCARD_PATTERN.search(config):
+      data['has_proguard_check_flags'] = True

   return data


-def _PerformExtract(aar_file, output_dir, name_allowlist):
+def _SplitProguardConfig(tmp_dir):
+  # Put -checkdiscard (and friends) into a separate proguard config.
+  # https://crbug.com/1093831
+  main_flag_path = os.path.join(tmp_dir, _PROGUARD_TXT)
+  check_flag_path = os.path.join(tmp_dir, _PROGUARD_CHECKS_TXT)
+  with open(main_flag_path) as f:
+    config_data = f.read()
+  with open(main_flag_path, 'w') as f:
+    MSG = ('# Check flag moved to proguard-checks.txt by '
+           '//build/android/gyp/aar.py\n')
+    f.write(_CHECKDISCARD_PATTERN.sub(MSG, config_data))
+  with open(check_flag_path, 'w') as f:
+    f.write('# Check flags extracted by //build/android/gyp/aar.py\n\n')
+    for m in _CHECKDISCARD_PATTERN.finditer(config_data):
+      f.write(m.group(0))
+
+
+def _PerformExtract(aar_file, output_dir, name_allowlist,
+                    has_proguard_check_flags):
   with build_utils.TempDir() as tmp_dir:
     tmp_dir = os.path.join(tmp_dir, 'staging')
     os.mkdir(tmp_dir)
@@ -94,6 +141,10 @@
     # Write a breadcrumb so that SuperSize can attribute files back to the .aar.
     with open(os.path.join(tmp_dir, 'source.info'), 'w') as f:
       f.write('source={}\n'.format(aar_file))
+
+    if has_proguard_check_flags:
+      _SplitProguardConfig(tmp_dir)
+
     shutil.rmtree(output_dir, ignore_errors=True)
     shutil.move(tmp_dir, output_dir)

@@ -135,7 +186,7 @@ def main():
 # Generated by //build/android/gyp/aar.py
 # To regenerate, use "update_android_aar_prebuilts = true" and run "gn gen".

-""" + gn_helpers.ToGNString(aar_info)
+""" + gn_helpers.ToGNString(aar_info, pretty=True)

   if args.command == 'extract':
     if args.assert_info_file:
@@ -150,12 +201,20 @@ def main():
     if args.ignore_resources:
       names = [n for n in names if not n.startswith('res')]

+    has_proguard_check_flags = aar_info.get('has_proguard_check_flags')
     output_paths = [os.path.join(args.output_dir, n) for n in names]
     output_paths.append(os.path.join(args.output_dir, 'source.info'))
-    md5_check.CallAndRecordIfStale(
-        lambda: _PerformExtract(args.aar_file, args.output_dir, set(names)),
-        input_paths=[args.aar_file],
-        output_paths=output_paths)
+    if has_proguard_check_flags:
+      output_paths.append(os.path.join(args.output_dir, _PROGUARD_CHECKS_TXT))
+
+    def on_stale_md5():
+      _PerformExtract(args.aar_file, args.output_dir, set(names),
+                      has_proguard_check_flags)
+
+    md5_check.CallAndRecordIfStale(on_stale_md5,
+                                   input_strings=[aar_info],
+                                   input_paths=[args.aar_file],
+                                   output_paths=output_paths)

   elif args.command == 'list':
     aar_output_present = args.output != '-' and os.path.isfile(args.output)
diff --git a/chromium/build/android/gyp/aar.pydeps b/chromium/build/android/gyp/aar.pydeps
index e08c5475e3d..edb351d2fc8 100644
--- a/chromium/build/android/gyp/aar.pydeps
+++ b/chromium/build/android/gyp/aar.pydeps
@@ -1,6 +1,7 @@
 # Generated by running:
 #   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/aar.pydeps build/android/gyp/aar.py
 ../../gn_helpers.py
+../../print_python_deps.py
 aar.py
 util/__init__.py
 util/build_utils.py
diff --git a/chromium/build/android/gyp/apkbuilder.py b/chromium/build/android/gyp/apkbuilder.py
index 7f8403919ca..dd2175bbe9c 100755
--- a/chromium/build/android/gyp/apkbuilder.py
+++ b/chromium/build/android/gyp/apkbuilder.py
@@ -359,8 +359,7 @@ def _MaybeWriteDepAndStampFiles(options, depfile_deps):
     output = options.stamp
   else:
     output = options.output_apk
-  build_utils.WriteDepfile(
-      options.depfile, output, inputs=depfile_deps, add_pydeps=False)
+  build_utils.WriteDepfile(options.depfile, output, inputs=depfile_deps)


 def main(args):
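The _CHECKDISCARD_PATTERN above can be exercised in isolation. This runnable demo uses the exact regex from aar.py on a made-up proguard config to show what would be moved into proguard-checks.txt:

import re

_CHECKDISCARD_PATTERN = re.compile(r'^\s*?-check.*?}\s*',
                                   re.DOTALL | re.MULTILINE)

config = ('-keep class a.b.C { *; }\n'
          '-checkdiscard class a.b.D {\n  *;\n}\n')
moved = [m.group(0) for m in _CHECKDISCARD_PATTERN.finditer(config)]
remaining = _CHECKDISCARD_PATTERN.sub('# moved to proguard-checks.txt\n', config)
assert len(moved) == 1 and '-checkdiscard' not in remaining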
3d78347998d..850a809d5d2 100755 --- a/chromium/build/android/gyp/bytecode_processor.py +++ b/chromium/build/android/gyp/bytecode_processor.py @@ -3,7 +3,7 @@ # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -"""Wraps bin/helper/java_bytecode_rewriter and expands @FileArgs.""" +"""Wraps bin/helper/bytecode_processor and expands @FileArgs.""" import argparse import os @@ -23,45 +23,45 @@ def main(argv): parser = argparse.ArgumentParser() parser.add_argument('--script', required=True, help='Path to the java binary wrapper script.') + parser.add_argument('--gn-target', required=True) parser.add_argument('--input-jar', required=True) - parser.add_argument('--output-jar', required=True) - parser.add_argument('--direct-classpath-jars', required=True) - parser.add_argument('--sdk-classpath-jars', required=True) - parser.add_argument('--extra-classpath-jars', dest='extra_jars', - action='append', default=[], - help='Extra inputs, passed last to the binary script.') + parser.add_argument('--direct-classpath-jars') + parser.add_argument('--sdk-classpath-jars') + parser.add_argument('--full-classpath-jars') + parser.add_argument('--full-classpath-gn-targets') + parser.add_argument('--stamp') parser.add_argument('-v', '--verbose', action='store_true') parser.add_argument('--missing-classes-allowlist') _AddSwitch(parser, '--is-prebuilt') - _AddSwitch(parser, '--enable-thread-annotations') - _AddSwitch(parser, '--enable-check-class-path') args = parser.parse_args(argv) - sdk_jars = build_utils.ParseGnList(args.sdk_classpath_jars) - assert len(sdk_jars) > 0 - - direct_jars = build_utils.ParseGnList(args.direct_classpath_jars) - assert len(direct_jars) > 0 - - extra_classpath_jars = [] - for a in args.extra_jars: - extra_classpath_jars.extend(build_utils.ParseGnList(a)) + args.sdk_classpath_jars = build_utils.ParseGnList(args.sdk_classpath_jars) + args.direct_classpath_jars = build_utils.ParseGnList( + args.direct_classpath_jars) + args.full_classpath_jars = build_utils.ParseGnList(args.full_classpath_jars) + args.full_classpath_gn_targets = build_utils.ParseGnList( + args.full_classpath_gn_targets) args.missing_classes_allowlist = build_utils.ParseGnList( args.missing_classes_allowlist) - if args.verbose: - verbose = '--verbose' - else: - verbose = '--not-verbose' + verbose = '--verbose' if args.verbose else '--not-verbose' - cmd = ([ - args.script, args.input_jar, args.output_jar, verbose, args.is_prebuilt, - args.enable_thread_annotations, args.enable_check_class_path - ] + [str(len(args.missing_classes_allowlist))] + - args.missing_classes_allowlist + [str(len(sdk_jars))] + sdk_jars + - [str(len(direct_jars))] + direct_jars + extra_classpath_jars) + cmd = [args.script, args.gn_target, args.input_jar, verbose, args.is_prebuilt] + cmd += [str(len(args.missing_classes_allowlist))] + cmd += args.missing_classes_allowlist + cmd += [str(len(args.sdk_classpath_jars))] + cmd += args.sdk_classpath_jars + cmd += [str(len(args.direct_classpath_jars))] + cmd += args.direct_classpath_jars + cmd += [str(len(args.full_classpath_jars))] + cmd += args.full_classpath_jars + cmd += [str(len(args.full_classpath_gn_targets))] + cmd += args.full_classpath_gn_targets subprocess.check_call(cmd) + if args.stamp: + build_utils.Touch(args.stamp) + if __name__ == '__main__': sys.exit(main(sys.argv)) diff --git a/chromium/build/android/gyp/compile_java.pydeps b/chromium/build/android/gyp/compile_java.pydeps index a128f47280a..f24bdcbb879 100644 --- 
a/chromium/build/android/gyp/compile_java.pydeps +++ b/chromium/build/android/gyp/compile_java.pydeps @@ -7,6 +7,7 @@ ../../../third_party/colorama/src/colorama/win32.py ../../../third_party/colorama/src/colorama/winterm.py ../../gn_helpers.py +../../print_python_deps.py compile_java.py util/__init__.py util/build_utils.py diff --git a/chromium/build/android/gyp/compile_resources.py b/chromium/build/android/gyp/compile_resources.py index 2ca4ec781c8..eece2eb3fea 100755 --- a/chromium/build/android/gyp/compile_resources.py +++ b/chromium/build/android/gyp/compile_resources.py @@ -18,7 +18,6 @@ import contextlib import filecmp import hashlib import logging -import multiprocessing.dummy import os import re import shutil @@ -26,7 +25,6 @@ import subprocess import sys import tempfile import textwrap -import time import zipfile from xml.etree import ElementTree @@ -34,9 +32,11 @@ from util import build_utils from util import diff_utils from util import manifest_utils from util import md5_check +from util import parallel from util import protoresources from util import resource_utils + # Pngs that we shouldn't convert to webp. Please add rationale when updating. _PNG_WEBP_EXCLUSION_PATTERN = re.compile('|'.join([ # Crashes on Galaxy S5 running L (https://crbug.com/807059). @@ -546,68 +546,64 @@ def _CreateKeepPredicate(resource_exclusion_regex, build_utils.MatchesGlob(path, resource_exclusion_exceptions)) -def _ConvertToWebP(webp_binary, png_paths, path_info, webp_cache_dir): - pool = multiprocessing.dummy.Pool(10) +def _ComputeSha1(path): + with open(path, 'rb') as f: + data = f.read() + return hashlib.sha1(data).hexdigest() - build_utils.MakeDirectory(webp_cache_dir) - cwebp_version = subprocess.check_output([webp_binary, '-version']).rstrip() - cwebp_arguments = ['-mt', '-quiet', '-m', '6', '-q', '100', '-lossless'] +def _ConvertToWebPSingle(png_path, cwebp_binary, cwebp_version, webp_cache_dir): + sha1_hash = _ComputeSha1(png_path) - sha1_time = [0] - cwebp_time = [0] - cache_hits = [0] + # The set of arguments that will appear in the cache key. + quality_args = ['-m', '6', '-q', '100', '-lossless'] - def cal_sha1(png_path): - start = time.time() - with open(png_path, 'rb') as f: - png_content = f.read() + webp_cache_path = os.path.join( + webp_cache_dir, '{}-{}-{}'.format(sha1_hash, cwebp_version, + ''.join(quality_args))) + # No need to add .webp. Android can load images fine without the extension. + webp_path = os.path.splitext(png_path)[0] - sha1_hex = hashlib.sha1(png_content).hexdigest() - sha1_time[0] += time.time() - start - return sha1_hex + cache_hit = os.path.exists(webp_cache_path) + if cache_hit: + os.link(webp_cache_path, webp_path) + else: + # We place the generated webp image in webp_path, instead of in the + # webp_cache_dir, to avoid concurrency issues. + args = [cwebp_binary, png_path, '-o', webp_path, '-quiet'] + quality_args + subprocess.check_call(args) - def get_converted_image(png_path): - sha1_hash = cal_sha1(png_path) + try: + os.link(webp_path, webp_cache_path) + except OSError: + # Because of concurrent runs, a webp image may already exist in + # webp_cache_path. + pass - webp_cache_path = os.path.join( - webp_cache_dir, '{}-{}-{}'.format(sha1_hash, cwebp_version, - ''.join(cwebp_arguments))) - # No need to add an extension, android can load images fine without them.
- webp_path = os.path.splitext(png_path)[0] + os.remove(png_path) + original_dir = os.path.dirname(os.path.dirname(png_path)) + rename_tuple = (os.path.relpath(png_path, original_dir), + os.path.relpath(webp_path, original_dir)) + return rename_tuple, cache_hit - if os.path.exists(webp_cache_path): - cache_hits[0] += 1 - os.link(webp_cache_path, webp_path) - else: - # We place the generated webp image to webp_path, instead of in the - # webp_cache_dir to avoid concurrency issues. - start = time.time() - args = [webp_binary, png_path] + cwebp_arguments + ['-o', webp_path] - subprocess.check_call(args) - cwebp_time[0] += time.time() - start - - try: - os.link(webp_path, webp_cache_path) - except OSError: - # Because of concurrent run, a webp image may already exists in - # webp_cache_path. - pass - - os.remove(png_path) - original_dir = os.path.dirname(os.path.dirname(png_path)) - path_info.RegisterRename( - os.path.relpath(png_path, original_dir), - os.path.relpath(webp_path, original_dir)) - - png_paths = [f for f in png_paths if not _PNG_WEBP_EXCLUSION_PATTERN.match(f)] - try: - pool.map(get_converted_image, png_paths) - finally: - pool.close() - pool.join() - logging.debug('png->webp: cache: %d/%d sha1 time: %.1fms cwebp time: %.1fms', - cache_hits[0], len(png_paths), sha1_time[0], cwebp_time[0]) + +def _ConvertToWebP(cwebp_binary, png_paths, path_info, webp_cache_dir): + cwebp_version = subprocess.check_output([cwebp_binary, '-version']).rstrip() + shard_args = [(f, ) for f in png_paths + if not _PNG_WEBP_EXCLUSION_PATTERN.match(f)] + + build_utils.MakeDirectory(webp_cache_dir) + results = parallel.BulkForkAndCall(_ConvertToWebPSingle, + shard_args, + cwebp_binary=cwebp_binary, + cwebp_version=cwebp_version, + webp_cache_dir=webp_cache_dir) + total_cache_hits = 0 + for rename_tuple, cache_hit in results: + path_info.RegisterRename(*rename_tuple) + total_cache_hits += int(cache_hit) + + logging.debug('png->webp cache: %d/%d', total_cache_hits, len(shard_args)) def _RemoveImageExtensions(directory, path_info): @@ -627,10 +623,9 @@ def _RemoveImageExtensions(directory, path_info): os.path.relpath(path_no_extension, directory)) -def _CompileSingleDep(args): - index, dep_path, aapt2_path, partials_dir, exclusion_rules = args - basename = os.path.basename(dep_path) - unique_name = '{}_{}'.format(index, basename) +def _CompileSingleDep(index, dep_subdir, keep_predicate, aapt2_path, + partials_dir): + unique_name = '{}_{}'.format(index, os.path.basename(dep_subdir)) partial_path = os.path.join(partials_dir, '{}.zip'.format(unique_name)) compile_command = [ @@ -639,7 +634,7 @@ def _CompileSingleDep(args): # TODO(wnwen): Turn this on once aapt2 forces 9-patch to be crunched. # '--no-crunch', '--dir', - dep_path, + dep_subdir, '-o', partial_path ] @@ -654,33 +649,16 @@ def _CompileSingleDep(args): # Filtering these files is expensive, so only apply filters to the partials # that have been explicitly targeted. 
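A minimal standalone sketch of the sha1-keyed cache that _ConvertToWebPSingle above relies on, assuming only that a cwebp binary is on PATH (the function name and paths here are illustrative, not part of the change):

import hashlib
import os
import subprocess

def convert_png_cached(png_path, cache_dir):
  # Key = content hash + encoder version + encoder flags, so stale entries
  # are never reused after an encoder roll or a flag change.
  with open(png_path, 'rb') as f:
    sha1 = hashlib.sha1(f.read()).hexdigest()
  version = subprocess.check_output(['cwebp', '-version']).rstrip().decode()
  quality_args = ['-m', '6', '-q', '100', '-lossless']
  cache_path = os.path.join(
      cache_dir, '{}-{}-{}'.format(sha1, version, ''.join(quality_args)))
  webp_path = os.path.splitext(png_path)[0]
  if os.path.exists(cache_path):
    os.link(cache_path, webp_path)  # Cache hit: hardlink, no data copied.
  else:
    subprocess.check_call(
        ['cwebp', png_path, '-o', webp_path, '-quiet'] + quality_args)
    try:
      os.link(webp_path, cache_path)  # Publish the result into the cache.
    except OSError:
      pass  # A concurrent worker already published this key.
  return webp_path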
- keep_predicate = _CreateValuesKeepPredicate(exclusion_rules, dep_path) if keep_predicate: - logging.debug('Applying .arsc filtering to %s', dep_path) + logging.debug('Applying .arsc filtering to %s', dep_subdir) protoresources.StripUnwantedResources(partial_path, keep_predicate) return partial_path -def _CompileDeps(aapt2_path, dep_subdirs, temp_dir, exclusion_rules): - partials_dir = os.path.join(temp_dir, 'partials') - build_utils.MakeDirectory(partials_dir) - - def iter_params(): - for i, dep_path in enumerate(dep_subdirs): - yield i, dep_path, aapt2_path, partials_dir, exclusion_rules - - pool = multiprocessing.dummy.Pool(10) - try: - return pool.map(_CompileSingleDep, iter_params()) - finally: - pool.close() - pool.join() - - -def _CreateValuesKeepPredicate(exclusion_rules, dep_path): +def _CreateValuesKeepPredicate(exclusion_rules, dep_subdir): patterns = [ x[1] for x in exclusion_rules - if build_utils.MatchesGlob(dep_path, [x[0]]) + if build_utils.MatchesGlob(dep_subdir, [x[0]]) ] if not patterns: return None @@ -689,6 +667,23 @@ def _CreateValuesKeepPredicate(exclusion_rules, dep_path): return lambda x: not any(r.search(x) for r in regexes) +def _CompileDeps(aapt2_path, dep_subdirs, temp_dir, exclusion_rules): + partials_dir = os.path.join(temp_dir, 'partials') + build_utils.MakeDirectory(partials_dir) + + job_params = [(i, dep_subdir, + _CreateValuesKeepPredicate(exclusion_rules, dep_subdir)) + for i, dep_subdir in enumerate(dep_subdirs)] + + # Filtering is slow, so ensure jobs with keep_predicate are started first. + job_params.sort(key=lambda x: not x[2]) + return list( + parallel.BulkForkAndCall(_CompileSingleDep, + job_params, + aapt2_path=aapt2_path, + partials_dir=partials_dir)) + + def _CreateResourceInfoFile(path_info, info_path, dependencies_res_zips): for zip_file in dependencies_res_zips: zip_info_file_path = zip_file + '.info' diff --git a/chromium/build/android/gyp/compile_resources.pydeps b/chromium/build/android/gyp/compile_resources.pydeps index f34926c185e..cb1d7621cd7 100644 --- a/chromium/build/android/gyp/compile_resources.pydeps +++ b/chromium/build/android/gyp/compile_resources.pydeps @@ -46,6 +46,7 @@ ../../../third_party/protobuf/python/google/protobuf/text_format.py ../../../third_party/six/src/six.py ../../gn_helpers.py +../../print_python_deps.py compile_resources.py proto/Configuration_pb2.py proto/Resources_pb2.py @@ -55,5 +56,6 @@ util/build_utils.py util/diff_utils.py util/manifest_utils.py util/md5_check.py +util/parallel.py util/protoresources.py util/resource_utils.py diff --git a/chromium/build/android/gyp/copy_ex.py b/chromium/build/android/gyp/copy_ex.py index 8451555ee57..f93597f973f 100755 --- a/chromium/build/android/gyp/copy_ex.py +++ b/chromium/build/android/gyp/copy_ex.py @@ -119,8 +119,7 @@ def main(args): DoRenaming(options, deps) if options.depfile: - build_utils.WriteDepfile( - options.depfile, options.stamp, deps, add_pydeps=False) + build_utils.WriteDepfile(options.depfile, options.stamp, deps) if options.stamp: build_utils.Touch(options.stamp) diff --git a/chromium/build/android/gyp/create_app_bundle_apks.pydeps b/chromium/build/android/gyp/create_app_bundle_apks.pydeps index bdee0af2c2d..064ab48f0f6 100644 --- a/chromium/build/android/gyp/create_app_bundle_apks.pydeps +++ b/chromium/build/android/gyp/create_app_bundle_apks.pydeps @@ -22,6 +22,7 @@ ../../../third_party/markupsafe/_compat.py ../../../third_party/markupsafe/_native.py ../../gn_helpers.py +../../print_python_deps.py ../pylib/__init__.py ../pylib/utils/__init__.py 
../pylib/utils/app_bundle_utils.py diff --git a/chromium/build/android/gyp/create_bundle_wrapper_script.pydeps b/chromium/build/android/gyp/create_bundle_wrapper_script.pydeps index d8825145a19..65222c6976d 100644 --- a/chromium/build/android/gyp/create_bundle_wrapper_script.pydeps +++ b/chromium/build/android/gyp/create_bundle_wrapper_script.pydeps @@ -22,7 +22,7 @@ ../../../third_party/catapult/devil/devil/android/constants/chrome.py ../../../third_party/catapult/devil/devil/android/constants/file_system.py ../../../third_party/catapult/devil/devil/android/decorators.py -../../../third_party/catapult/devil/devil/android/device_blacklist.py +../../../third_party/catapult/devil/devil/android/device_denylist.py ../../../third_party/catapult/devil/devil/android/device_errors.py ../../../third_party/catapult/devil/devil/android/device_signal.py ../../../third_party/catapult/devil/devil/android/device_temp_file.py @@ -85,6 +85,7 @@ ../../../third_party/markupsafe/_compat.py ../../../third_party/markupsafe/_native.py ../../gn_helpers.py +../../print_python_deps.py ../adb_command_line.py ../apk_operations.py ../convert_dex_profile.py diff --git a/chromium/build/android/gyp/create_size_info_files.py b/chromium/build/android/gyp/create_size_info_files.py index 27046db1150..b446b7f5dd4 100755 --- a/chromium/build/android/gyp/create_size_info_files.py +++ b/chromium/build/android/gyp/create_size_info_files.py @@ -179,11 +179,9 @@ def main(args): _MergeResInfoFiles(options.res_info_path, res_inputs) all_inputs = jar_inputs + pak_inputs + res_inputs - build_utils.WriteDepfile( - options.depfile, - options.jar_info_path, - inputs=all_inputs, - add_pydeps=False) + build_utils.WriteDepfile(options.depfile, + options.jar_info_path, + inputs=all_inputs) if __name__ == '__main__': diff --git a/chromium/build/android/gyp/create_ui_locale_resources.py b/chromium/build/android/gyp/create_ui_locale_resources.py index 97868cbfde8..007afb37ec5 100755 --- a/chromium/build/android/gyp/create_ui_locale_resources.py +++ b/chromium/build/android/gyp/create_ui_locale_resources.py @@ -60,7 +60,6 @@ def main(): parser = argparse.ArgumentParser( description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter) - build_utils.AddDepfileOption(parser) parser.add_argument( '--locale-list', required=True, @@ -83,9 +82,6 @@ def main(): android_locale = resource_utils.ToAndroidLocaleName(locale) _AddLocaleResourceFileToZip(out_zip, android_locale, locale) - if args.depfile: - build_utils.WriteDepfile(args.depfile, args.output_zip) - if __name__ == '__main__': main() diff --git a/chromium/build/android/gyp/create_ui_locale_resources.pydeps b/chromium/build/android/gyp/create_ui_locale_resources.pydeps new file mode 100644 index 00000000000..663ed03619d --- /dev/null +++ b/chromium/build/android/gyp/create_ui_locale_resources.pydeps @@ -0,0 +1,28 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_ui_locale_resources.pydeps build/android/gyp/create_ui_locale_resources.py +../../../third_party/jinja2/__init__.py +../../../third_party/jinja2/_compat.py +../../../third_party/jinja2/bccache.py +../../../third_party/jinja2/compiler.py +../../../third_party/jinja2/defaults.py +../../../third_party/jinja2/environment.py +../../../third_party/jinja2/exceptions.py +../../../third_party/jinja2/filters.py +../../../third_party/jinja2/idtracking.py +../../../third_party/jinja2/lexer.py +../../../third_party/jinja2/loaders.py +../../../third_party/jinja2/nodes.py 
+../../../third_party/jinja2/optimizer.py +../../../third_party/jinja2/parser.py +../../../third_party/jinja2/runtime.py +../../../third_party/jinja2/tests.py +../../../third_party/jinja2/utils.py +../../../third_party/jinja2/visitor.py +../../../third_party/markupsafe/__init__.py +../../../third_party/markupsafe/_compat.py +../../../third_party/markupsafe/_native.py +../../gn_helpers.py +create_ui_locale_resources.py +util/__init__.py +util/build_utils.py +util/resource_utils.py diff --git a/chromium/build/android/gyp/desugar.py b/chromium/build/android/gyp/desugar.py index 1e1c15678ee..f12aafbe74e 100755 --- a/chromium/build/android/gyp/desugar.py +++ b/chromium/build/android/gyp/desugar.py @@ -53,11 +53,9 @@ def main(): stderr_filter=build_utils.FilterReflectiveAccessJavaWarnings) if options.depfile: - build_utils.WriteDepfile( - options.depfile, - options.output_jar, - inputs=options.bootclasspath + options.classpath, - add_pydeps=False) + build_utils.WriteDepfile(options.depfile, + options.output_jar, + inputs=options.bootclasspath + options.classpath) if __name__ == '__main__': diff --git a/chromium/build/android/gyp/dex.py b/chromium/build/android/gyp/dex.py index 0b3dcbd28b9..6fd0ab35dd8 100755 --- a/chromium/build/android/gyp/dex.py +++ b/chromium/build/android/gyp/dex.py @@ -62,6 +62,9 @@ def _ParseArgs(args): '--multi-dex', action='store_true', help='Allow multiple dex files within output.') + parser.add_argument('--library', + action='store_true', + help='Allow numerous dex files within output.') parser.add_argument('--r8-jar-path', required=True, help='Path to R8 jar.') parser.add_argument('--desugar', action='store_true') parser.add_argument( @@ -159,9 +162,18 @@ def _RunD8(dex_cmd, input_paths, output_path): output = re.sub(r'^Warning in .*?:\n(?! )', '', output, flags=re.MULTILINE) return output - # stdout sometimes spams with things like: - # Stripped invalid locals information from 1 method. - build_utils.CheckOutput(dex_cmd, stderr_filter=stderr_filter) + with tempfile.NamedTemporaryFile() as flag_file: + # Chosen arbitrarily. Needed to avoid command-line length limits. + MAX_ARGS = 50 + if len(dex_cmd) > MAX_ARGS: + flag_file.write('\n'.join(dex_cmd[MAX_ARGS:])) + flag_file.flush() + dex_cmd = dex_cmd[:MAX_ARGS] + dex_cmd.append('@' + flag_file.name) + + # stdout sometimes spams with things like: + # Stripped invalid locals information from 1 method. + build_utils.CheckOutput(dex_cmd, stderr_filter=stderr_filter) def _EnvWithArtLibPath(binary_path): @@ -325,13 +337,15 @@ def _PerformDexlayout(tmp_dir, tmp_dex_output, options): def _CreateFinalDex(d8_inputs, output, tmp_dir, dex_cmd, options=None): tmp_dex_output = os.path.join(tmp_dir, 'tmp_dex_output.zip') - if (output.endswith('.dex') - or not all(f.endswith('.dex') for f in d8_inputs)): + needs_dexing = not all(f.endswith('.dex') for f in d8_inputs) + needs_dexmerge = output.endswith('.dex') or not (options and options.library) + if needs_dexing or needs_dexmerge: if options: if options.main_dex_list_path: dex_cmd = dex_cmd + ['--main-dex-list', options.main_dex_list_path] - elif options.multi_dex and int(options.min_api or 1) < 21: - # When dexing library targets, it doesn't matter what's in the main dex. + elif options.library and int(options.min_api or 1) < 21: + # When dexing, D8 requires a main dex list pre-21. For library targets, + # it doesn't matter what's in the main dex, so just use a dummy one.
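The flag-file dance in _RunD8 above is the usual response-file pattern for dodging OS command-line length limits; a condensed sketch, with the cutoff as arbitrary as the original's MAX_ARGS:

import subprocess
import tempfile

def check_call_with_flag_file(cmd, max_args=50):
  # d8 expands an '@path' argument to the contents of that file,
  # one argument per line, as the hunk above relies on.
  if len(cmd) <= max_args:
    return subprocess.check_call(cmd)
  with tempfile.NamedTemporaryFile(mode='w', suffix='.rsp') as flag_file:
    flag_file.write('\n'.join(cmd[max_args:]))
    flag_file.flush()
    return subprocess.check_call(cmd[:max_args] + ['@' + flag_file.name])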
tmp_main_dex_list_path = os.path.join(tmp_dir, 'main_list.txt') with open(tmp_main_dex_list_path, 'w') as f: f.write('Foo.class\n') @@ -420,7 +434,7 @@ def _CreateIntermediateDexFiles(changes, options, tmp_dir, dex_cmd): # If the only change is deleting a file, class_files will be empty. if class_files: # Dex necessary classes into intermediate dex files. - dex_cmd = dex_cmd + ['--intermediate', '--file-per-class'] + dex_cmd = dex_cmd + ['--intermediate', '--file-per-class-file'] _RunD8(dex_cmd, class_files, options.incremental_dir) logging.debug('Dexed class files.') @@ -444,9 +458,9 @@ def _OnStaleMd5(changes, options, final_dex_inputs, dex_cmd): def MergeDexForIncrementalInstall(r8_jar_path, src_paths, dest_dex_jar): dex_cmd = [ build_utils.JAVA_PATH, - '-jar', + '-cp', r8_jar_path, - 'd8', + 'com.android.tools.r8.D8', ] with build_utils.TempDir() as tmp_dir: _CreateFinalDex(src_paths, dest_dex_jar, tmp_dir, dex_cmd) @@ -479,7 +493,10 @@ def main(args): final_dex_inputs += options.dex_inputs dex_cmd = [ - build_utils.JAVA_PATH, '-jar', options.r8_jar_path, 'd8', + build_utils.JAVA_PATH, + '-cp', + options.r8_jar_path, + 'com.android.tools.r8.D8', ] if options.release: dex_cmd += ['--release'] diff --git a/chromium/build/android/gyp/dex.pydeps b/chromium/build/android/gyp/dex.pydeps index 5fe5b2b99c1..23856f3c847 100644 --- a/chromium/build/android/gyp/dex.pydeps +++ b/chromium/build/android/gyp/dex.pydeps @@ -1,6 +1,7 @@ # Generated by running: # build/print_python_deps.py --root build/android/gyp --output build/android/gyp/dex.pydeps build/android/gyp/dex.py ../../gn_helpers.py +../../print_python_deps.py ../convert_dex_profile.py dex.py util/__init__.py diff --git a/chromium/build/android/gyp/dex_jdk_libs.py b/chromium/build/android/gyp/dex_jdk_libs.py index 0cda991a4c9..01dc3c93091 100755 --- a/chromium/build/android/gyp/dex_jdk_libs.py +++ b/chromium/build/android/gyp/dex_jdk_libs.py @@ -29,30 +29,44 @@ def _ParseArgs(args): return options -def main(args): - options = _ParseArgs(args) - +def DexJdkLibJar(r8_path, min_api, desugar_jdk_libs_json, desugar_jdk_libs_jar, + keep_rule_file, output): # TODO(agrieve): Spews a lot of stderr about missing classes. with build_utils.TempDir() as tmp_dir: cmd = [ build_utils.JAVA_PATH, - '-jar', - options.r8_path, - 'l8', + '-cp', + r8_path, + 'com.android.tools.r8.L8', '--min-api', - options.min_api, - #'--lib', build_utils.JAVA_HOME, + min_api, + '--lib', + build_utils.JAVA_HOME, '--desugared-lib', - options.desugar_jdk_libs_json, - '--output', - tmp_dir, - options.desugar_jdk_libs_jar + desugar_jdk_libs_json, ] - subprocess.check_output(cmd, stderr=subprocess.STDOUT) + if keep_rule_file: + cmd += ['--pg-conf', keep_rule_file] + + cmd += ['--output', tmp_dir, desugar_jdk_libs_jar] + + subprocess.check_output(cmd, stderr=subprocess.STDOUT) if os.path.exists(os.path.join(tmp_dir, 'classes2.dex')): raise Exception('Achievement unlocked: desugar_jdk_libs is multidex!') - shutil.move(os.path.join(tmp_dir, 'classes.dex'), options.output) + + # classes.dex might not exist if the "desugar_jdk_libs_jar" is not used + # at all.
+ if os.path.exists(os.path.join(tmp_dir, 'classes.dex')): + shutil.move(os.path.join(tmp_dir, 'classes.dex'), output) + return True + return False + + +def main(args): + options = _ParseArgs(args) + DexJdkLibJar(options.r8_path, options.min_api, options.desugar_jdk_libs_json, + options.desugar_jdk_libs_jar, None, options.output) if __name__ == '__main__': diff --git a/chromium/build/android/gyp/dexsplitter.py b/chromium/build/android/gyp/dexsplitter.py index 8e8230b97bc..47bea7ee80c 100755 --- a/chromium/build/android/gyp/dexsplitter.py +++ b/chromium/build/android/gyp/dexsplitter.py @@ -50,9 +50,9 @@ def _ParseOptions(args): def _RunDexsplitter(options, output_dir): cmd = [ build_utils.JAVA_PATH, - '-jar', + '-cp', options.r8_path, - 'dexsplitter', + 'com.android.tools.r8.dexsplitter.DexSplitter', '--output', output_dir, '--proguard-map', diff --git a/chromium/build/android/gyp/dist_aar.py b/chromium/build/android/gyp/dist_aar.py index a74037af07a..fed1983b9ca 100755 --- a/chromium/build/android/gyp/dist_aar.py +++ b/chromium/build/android/gyp/dist_aar.py @@ -14,7 +14,7 @@ import sys import tempfile import zipfile -from filter_zip import CreatePathTransform +import filter_zip from util import build_utils @@ -117,8 +117,8 @@ def main(args): build_utils.AddToZipHermetic( z, 'AndroidManifest.xml', src_path=options.android_manifest) - path_transform = CreatePathTransform(options.jar_excluded_globs, - options.jar_included_globs, []) + path_transform = filter_zip.CreatePathTransform( + options.jar_excluded_globs, options.jar_included_globs, []) with tempfile.NamedTemporaryFile() as jar_file: build_utils.MergeZips( jar_file.name, options.jars, path_transform=path_transform) @@ -152,8 +152,7 @@ def main(args): if options.depfile: all_inputs = (options.jars + options.dependencies_res_zips + options.r_text_files + options.proguard_configs) - build_utils.WriteDepfile(options.depfile, options.output, all_inputs, - add_pydeps=False) + build_utils.WriteDepfile(options.depfile, options.output, all_inputs) if __name__ == '__main__': diff --git a/chromium/build/android/gyp/filter_zip.py b/chromium/build/android/gyp/filter_zip.py index 6f854191254..9b52288c7a9 100755 --- a/chromium/build/android/gyp/filter_zip.py +++ b/chromium/build/android/gyp/filter_zip.py @@ -5,6 +5,7 @@ # found in the LICENSE file. import argparse +import shutil import sys from util import build_utils @@ -20,6 +21,21 @@ _RESOURCE_CLASSES = [ def CreatePathTransform(exclude_globs, include_globs, strip_resource_classes_for): + """Returns a function to strip paths for the given patterns. + + Args: + exclude_globs: List of globs that if matched should be excluded. + include_globs: List of globs that if not matched should be excluded. + strip_resource_classes_for: List of Java packages for which to strip + R.java classes. + + Returns: + * None if no filters are needed. + * A function "(path) -> path" that returns None when |path| should be + stripped, or |path| otherwise.
+ """ + if not (exclude_globs or include_globs or strip_resource_classes_for): + return None exclude_globs = list(exclude_globs or []) if strip_resource_classes_for: exclude_globs.extend(p.replace('.', '/') + '/' + f @@ -52,19 +68,18 @@ def main(): argv = build_utils.ExpandFileArgs(sys.argv[1:]) args = parser.parse_args(argv) - if args.exclude_globs: - args.exclude_globs = build_utils.ParseGnList(args.exclude_globs) - if args.include_globs: - args.include_globs= build_utils.ParseGnList(args.include_globs) - if args.strip_resource_classes_for: - args.strip_resource_classes_for = build_utils.ParseGnList( - args.strip_resource_classes_for) + args.exclude_globs = build_utils.ParseGnList(args.exclude_globs) + args.include_globs = build_utils.ParseGnList(args.include_globs) + args.strip_resource_classes_for = build_utils.ParseGnList( + args.strip_resource_classes_for) path_transform = CreatePathTransform(args.exclude_globs, args.include_globs, args.strip_resource_classes_for) with build_utils.AtomicOutput(args.output) as f: - build_utils.MergeZips( - f.name, [args.input], path_transform=path_transform) + if path_transform: + build_utils.MergeZips(f.name, [args.input], path_transform=path_transform) + else: + shutil.copy(args.input, f.name) if __name__ == '__main__': diff --git a/chromium/build/android/gyp/gcc_preprocess.py b/chromium/build/android/gyp/gcc_preprocess.py index 8b3444c2b01..8c5c404c744 100755 --- a/chromium/build/android/gyp/gcc_preprocess.py +++ b/chromium/build/android/gyp/gcc_preprocess.py @@ -47,7 +47,7 @@ def main(args): DoGcc(options) if options.depfile: - build_utils.WriteDepfile(options.depfile, options.output, add_pydeps=False) + build_utils.WriteDepfile(options.depfile, options.output) if __name__ == '__main__': diff --git a/chromium/build/android/gyp/java_cpp_enum.py b/chromium/build/android/gyp/java_cpp_enum.py index 39cba3d785d..0b9ee541e4e 100755 --- a/chromium/build/android/gyp/java_cpp_enum.py +++ b/chromium/build/android/gyp/java_cpp_enum.py @@ -124,15 +124,20 @@ class EnumDefinition(object): def _TransformKeys(d, func): """Normalize keys in |d| and update references to old keys in |d| values.""" - normal_keys = {k: func(k) for k in d} + keys_map = {k: func(k) for k in d} ret = collections.OrderedDict() for k, v in d.items(): # Need to transform values as well when the entry value was explicitly set # (since it could contain references to other enum entry values). if isinstance(v, str): - for normal_key in normal_keys: - v = v.replace(normal_key, normal_keys[normal_key]) - ret[normal_keys[k]] = v + # First check if a full replacement is available. This avoids issues when + # one key is a substring of another. + if v in d: + v = keys_map[v] + else: + for old_key, new_key in keys_map.items(): + v = v.replace(old_key, new_key) + ret[keys_map[k]] = v return ret @@ -412,7 +417,6 @@ ${ENUM_ENTRIES} def DoMain(argv): usage = 'usage: %prog [options] [output_dir] input_file(s)...' 
parser = optparse.OptionParser(usage=usage) - build_utils.AddDepfileOption(parser) parser.add_option('--srcjar', help='When specified, a .srcjar at the given path is ' @@ -429,9 +433,6 @@ def DoMain(argv): for output_path, data in DoGenerate(input_paths): build_utils.AddToZipHermetic(srcjar, output_path, data=data) - if options.depfile: - build_utils.WriteDepfile(options.depfile, options.srcjar, add_pydeps=False) - if __name__ == '__main__': DoMain(sys.argv[1:]) diff --git a/chromium/build/android/gyp/java_cpp_enum_tests.py b/chromium/build/android/gyp/java_cpp_enum_tests.py index 088c450aeb6..1acb57f82c4 100755 --- a/chromium/build/android/gyp/java_cpp_enum_tests.py +++ b/chromium/build/android/gyp/java_cpp_enum_tests.py @@ -484,6 +484,42 @@ public @interface ClassName { self.assertEqual(collections.OrderedDict([('A', 0)]), definition.entries) + def testParseEnumClassOneValueSubstringOfAnother(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum class SafeBrowsingStatus { + kChecking = 0, + kEnabled = 1, + kDisabled = 2, + kDisabledByAdmin = 3, + kDisabledByExtension = 4, + kEnabledStandard = 5, + kEnabledEnhanced = 6, + // New enum values must go above here. + kMaxValue = kEnabledEnhanced, + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual(1, len(definitions)) + definition = definitions[0] + self.assertEqual('SafeBrowsingStatus', definition.class_name) + self.assertEqual('test.namespace', definition.enum_package) + self.assertEqual( + collections.OrderedDict([ + ('CHECKING', '0'), + ('ENABLED', '1'), + ('DISABLED', '2'), + ('DISABLED_BY_ADMIN', '3'), + ('DISABLED_BY_EXTENSION', '4'), + ('ENABLED_STANDARD', '5'), + ('ENABLED_ENHANCED', '6'), + ('MAX_VALUE', 'ENABLED_ENHANCED'), + ]), definition.entries) + self.assertEqual( + collections.OrderedDict([ + ('MAX_VALUE', 'New enum values must go above here.') + ]), definition.comments) + def testParseEnumStruct(self): test_data = """ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace diff --git a/chromium/build/android/gyp/java_cpp_strings.py b/chromium/build/android/gyp/java_cpp_strings.py index acaaf223efb..498e05e3540 100755 --- a/chromium/build/android/gyp/java_cpp_strings.py +++ b/chromium/build/android/gyp/java_cpp_strings.py @@ -114,6 +114,8 @@ class StringFileParser(object): if string_line.groups()[1]: self._current_value = string_line.groups()[1] self._AppendString() + else: + self._in_string = True return True else: self._in_string = False @@ -141,19 +143,19 @@ class StringFileParser(object): return self._strings -def _GenerateOutput(template, source_path, template_path, strings): +def _GenerateOutput(template, source_paths, template_path, strings): description_template = """ // This following string constants were inserted by // {SCRIPT_NAME} // From - // {SOURCE_PATH} + // {SOURCE_PATHS} // Into // {TEMPLATE_PATH} """ values = { 'SCRIPT_NAME': java_cpp_utils.GetScriptName(), - 'SOURCE_PATH': source_path, + 'SOURCE_PATHS': ',\n // '.join(source_paths), 'TEMPLATE_PATH': template_path, } description = description_template.format(**values) @@ -173,15 +175,18 @@ def _ParseStringFile(path): def _Generate(source_paths, template_path): with open(template_path) as f: lines = f.readlines() - template = ''.join(lines) - for source_path in source_paths: - strings = _ParseStringFile(source_path) - package, class_name = ParseTemplateFile(lines) - package_path = package.replace('.', os.path.sep) - file_name = class_name + '.java' - output_path = os.path.join(package_path, 
file_name) - output = _GenerateOutput(template, source_path, template_path, strings) - yield output, output_path + + template = ''.join(lines) + package, class_name = ParseTemplateFile(lines) + package_path = package.replace('.', os.path.sep) + file_name = class_name + '.java' + output_path = os.path.join(package_path, file_name) + strings = [] + for source_path in source_paths: + strings.extend(_ParseStringFile(source_path)) + + output = _GenerateOutput(template, source_paths, template_path, strings) + return output, output_path def _Main(argv): @@ -205,8 +210,8 @@ def _Main(argv): with build_utils.AtomicOutput(args.srcjar) as f: with zipfile.ZipFile(f, 'w', zipfile.ZIP_STORED) as srcjar: - for data, path in _Generate(args.inputs, args.template): - build_utils.AddToZipHermetic(srcjar, path, data=data) + data, path = _Generate(args.inputs, args.template) + build_utils.AddToZipHermetic(srcjar, path, data=data) if __name__ == '__main__': diff --git a/chromium/build/android/gyp/java_cpp_strings_tests.py b/chromium/build/android/gyp/java_cpp_strings_tests.py index acf51e428e0..3b7d5ca8f67 100755 --- a/chromium/build/android/gyp/java_cpp_strings_tests.py +++ b/chromium/build/android/gyp/java_cpp_strings_tests.py @@ -51,6 +51,10 @@ const char kAnotherSwitch[] = "another-value"; const char kAString[] = "a-value"; const char kNoComment[] = "no-comment"; +namespace myfeature { +const char kMyFeatureNoComment[] = "myfeature.no-comment"; +} + // Single line switch with a big space. const char kAStringWithSpace[] = "a-value"; @@ -58,23 +62,34 @@ const char kAStringWithSpace[] = "a-value"; const char kAStringWithAVeryLongNameThatWillHaveToWrap[] = "a-string-with-a-very-long-name-that-will-have-to-wrap"; +// This one has no comment before it. + +const char kAStringWithAVeryLongNameThatWillHaveToWrap2[] = + "a-string-with-a-very-long-name-that-will-have-to-wrap2"; + // This is erroneous and should be ignored. const char kInvalidLineBreak[] = "invalid-line-break"; """.split('\n') strings = java_cpp_strings.StringFileParser(test_data).Parse() - self.assertEqual(4, len(strings)) + self.assertEqual(6, len(strings)) self.assertEqual('A_STRING', strings[0].name) self.assertEqual('"a-value"', strings[0].value) self.assertEqual('NO_COMMENT', strings[1].name) self.assertEqual('"no-comment"', strings[1].value) - self.assertEqual('A_STRING_WITH_SPACE', strings[2].name) - self.assertEqual('"a-value"', strings[2].value) + self.assertEqual('MY_FEATURE_NO_COMMENT', strings[2].name) + self.assertEqual('"myfeature.no-comment"', strings[2].value) + self.assertEqual('A_STRING_WITH_SPACE', strings[3].name) + self.assertEqual('"a-value"', strings[3].value) self.assertEqual('A_STRING_WITH_A_VERY_LONG_NAME_THAT_WILL_HAVE_TO_WRAP', - strings[3].name) + strings[4].name) self.assertEqual('"a-string-with-a-very-long-name-that-will-have-to-wrap"', - strings[3].value) + strings[4].value) + self.assertEqual('A_STRING_WITH_A_VERY_LONG_NAME_THAT_WILL_HAVE_TO_WRAP2', + strings[5].name) + self.assertEqual('"a-string-with-a-very-long-name-that-will-have-to-wrap2"', + strings[5].value) def testTemplateParsing(self): test_data = """ diff --git a/chromium/build/android/gyp/lint.py b/chromium/build/android/gyp/lint.py index fb751bd6ed6..fa526e6df88 100755 --- a/chromium/build/android/gyp/lint.py +++ b/chromium/build/android/gyp/lint.py @@ -3,10 +3,8 @@ # Copyright (c) 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file.
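For the _TransformKeys fix in java_cpp_enum.py above (exercised by the new testParseEnumClassOneValueSubstringOfAnother test), a self-contained illustration with a deliberately crude name transform; the entry names mirror the test data, the transform itself is not the script's real one:

import collections

def transform_keys(d, func):
  keys_map = {k: func(k) for k in d}
  ret = collections.OrderedDict()
  for k, v in d.items():
    if isinstance(v, str):
      if v in d:  # Whole value is another key: replace it outright.
        v = keys_map[v]
      else:       # Otherwise fall back to the substring pass.
        for old_key, new_key in keys_map.items():
          v = v.replace(old_key, new_key)
    ret[keys_map[k]] = v
  return ret

entries = collections.OrderedDict([
    ('kEnabled', '1'),
    ('kEnabledEnhanced', '6'),
    ('kMaxValue', 'kEnabledEnhanced'),
])
to_const = lambda k: k[1:].upper()  # Crude: kEnabled -> ENABLED.
print(transform_keys(entries, to_const))
# OrderedDict([('ENABLED', '1'), ('ENABLEDENHANCED', '6'),
#              ('MAXVALUE', 'ENABLEDENHANCED')])
# A substring-only pass would instead have corrupted the last value into
# 'ENABLEDEnhanced', since kEnabled is a prefix of kEnabledEnhanced.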
- """Runs Android's lint tool.""" - from __future__ import print_function import argparse @@ -22,9 +20,8 @@ from xml.etree import ElementTree from util import build_utils from util import manifest_utils -from util import resource_utils -_LINT_MD_URL = 'https://chromium.googlesource.com/chromium/src/+/master/build/android/docs/lint.md' # pylint: disable=line-too-long +_LINT_MD_URL = 'https://chromium.googlesource.com/chromium/src/+/master/build/android/docs/lint.md' # pylint: disable=line-too-long # These checks are not useful for test targets and adds an unnecessary burden # to suppress them. @@ -41,12 +38,83 @@ _DISABLED_FOR_TESTS = [ "UnusedResources", ] - -def _RunLint(lint_path, +_RES_ZIP_DIR = 'RESZIPS' +_SRCJAR_DIR = 'SRCJARS' + + +def _SrcRelative(path): + """Returns relative path to top-level src dir.""" + return os.path.relpath(path, build_utils.DIR_SOURCE_ROOT) + + +def _GenerateProjectFile(android_manifest, + android_sdk_root, + cache_dir, + sources=None, + srcjar_sources=None, + resource_sources=None, + android_sdk_version=None): + project = ElementTree.Element('project') + root = ElementTree.SubElement(project, 'root') + # An absolute path helps error paths to be shorter. + root.set('dir', os.path.abspath(build_utils.DIR_SOURCE_ROOT)) + sdk = ElementTree.SubElement(project, 'sdk') + # Lint requires that the sdk path be an absolute path. + sdk.set('dir', os.path.abspath(android_sdk_root)) + cache = ElementTree.SubElement(project, 'cache') + cache.set('dir', _SrcRelative(cache_dir)) + main_module = ElementTree.SubElement(project, 'module') + main_module.set('name', 'main') + main_module.set('android', 'true') + main_module.set('library', 'false') + if android_sdk_version: + main_module.set('compile_sdk_version', android_sdk_version) + manifest = ElementTree.SubElement(main_module, 'manifest') + manifest.set('file', _SrcRelative(android_manifest)) + if srcjar_sources: + for srcjar_file in srcjar_sources: + src = ElementTree.SubElement(main_module, 'src') + src.set('file', _SrcRelative(srcjar_file)) + if sources: + for source in sources: + src = ElementTree.SubElement(main_module, 'src') + src.set('file', _SrcRelative(source)) + if resource_sources: + for resource_file in resource_sources: + resource = ElementTree.SubElement(main_module, 'resource') + resource.set('file', _SrcRelative(resource_file)) + return project + + +def _GenerateAndroidManifest(original_manifest_path, + min_sdk_version, + manifest_package=None): + # Set minSdkVersion and package in the manifest to the correct values. + doc, manifest, _ = manifest_utils.ParseManifest(original_manifest_path) + uses_sdk = manifest.find('./uses-sdk') + if uses_sdk is None: + uses_sdk = ElementTree.Element('uses-sdk') + manifest.insert(0, uses_sdk) + uses_sdk.set('{%s}minSdkVersion' % manifest_utils.ANDROID_NAMESPACE, + min_sdk_version) + if manifest_package: + manifest.set('package', manifest_package) + return doc + + +def _WriteXmlFile(root, path): + build_utils.MakeDirectory(os.path.dirname(path)) + with build_utils.AtomicOutput(path) as f: + # Although we can write it just with ElementTree.tostring, using minidom + # makes it a lot easier to read as a human (also on code search). 
+ f.write( + minidom.parseString(ElementTree.tostring( + root, encoding='utf-8')).toprettyxml(indent=' ')) + + +def _RunLint(lint_binary_path, config_path, manifest_path, - result_path, - product_dir, sources, cache_dir, android_sdk_version, @@ -56,268 +124,139 @@ def _RunLint(lint_path, resource_sources, resource_zips, android_sdk_root, + lint_gen_dir, testonly_target=False, can_fail_build=False, - include_unexpected=False, silent=False): logging.info('Lint starting') - def _RebasePath(path): - """Returns relative path to top-level src dir. - - Args: - path: A path relative to cwd. - """ - ret = os.path.relpath(os.path.abspath(path), build_utils.DIR_SOURCE_ROOT) - # If it's outside of src/, just use abspath. - if ret.startswith('..'): - ret = os.path.abspath(path) - return ret - - def _ProcessResultFile(): - with open(result_path, 'rb') as f: - content = f.read().replace( - _RebasePath(product_dir), 'PRODUCT_DIR') - - with open(result_path, 'wb') as f: - f.write(content) - - def _ParseAndShowResultFile(): - dom = minidom.parse(result_path) - issues = dom.getElementsByTagName('issue') + cmd = [ + _SrcRelative(lint_binary_path), + # Consider all lint warnings as errors. Warnings should either always be + # fixed or completely suppressed in suppressions.xml. They should not + # bloat build output if they are not important enough to be fixed. + '-Werror', + '--exitcode', # Sets error code if there are errors. + '--quiet', # Silences lint's "." progress updates. + ] + if config_path: + cmd.extend(['--config', _SrcRelative(config_path)]) + if testonly_target: + cmd.extend(['--disable', ','.join(_DISABLED_FOR_TESTS)]) + + if not manifest_path: + manifest_path = os.path.join(build_utils.DIR_SOURCE_ROOT, 'build', + 'android', 'AndroidManifest.xml') + + logging.info('Generating Android manifest file') + android_manifest_tree = _GenerateAndroidManifest(manifest_path, + min_sdk_version, + manifest_package) + # Include the rebased manifest_path in the lint generated path so that it is + # clear in error messages where the original AndroidManifest.xml came from. + lint_android_manifest_path = os.path.join(lint_gen_dir, + _SrcRelative(manifest_path)) + logging.info('Writing xml file %s', lint_android_manifest_path) + _WriteXmlFile(android_manifest_tree.getroot(), lint_android_manifest_path) + + resource_root_dir = os.path.join(lint_gen_dir, _RES_ZIP_DIR) + # These are zip files with generated resources (e.g. strings from GRD). + logging.info('Extracting resource zips') + for resource_zip in resource_zips: + # Use a consistent root and name rather than a temporary file so that + # suppressions can be local to the lint target and the resource target. + resource_dir = os.path.join(resource_root_dir, resource_zip) + shutil.rmtree(resource_dir, True) + os.makedirs(resource_dir) + resource_sources.extend( + build_utils.ExtractAll(resource_zip, path=resource_dir)) + + logging.info('Extracting srcjars') + srcjar_root_dir = os.path.join(lint_gen_dir, _SRCJAR_DIR) + srcjar_sources = [] + if srcjars: + for srcjar in srcjars: + # Use path without extensions since otherwise the file name includes + # .srcjar and lint treats it as a srcjar. + srcjar_dir = os.path.join(srcjar_root_dir, os.path.splitext(srcjar)[0]) + shutil.rmtree(srcjar_dir, True) + os.makedirs(srcjar_dir) + # Sadly lint's srcjar support is broken since it only considers the first + # srcjar. Until we roll a lint version with that fixed, we need to extract + # it ourselves.
+ srcjar_sources.extend(build_utils.ExtractAll(srcjar, path=srcjar_dir)) + + logging.info('Generating project file') + project_file_root = _GenerateProjectFile(lint_android_manifest_path, + android_sdk_root, cache_dir, sources, + srcjar_sources, resource_sources, + android_sdk_version) + + project_xml_path = os.path.join(lint_gen_dir, 'project.xml') + logging.info('Writing xml file %s', project_xml_path) + _WriteXmlFile(project_file_root, project_xml_path) + cmd += ['--project', _SrcRelative(project_xml_path)] + + logging.info('Preparing environment variables') + env = os.environ.copy() + # It is important that lint uses the checked-in JDK11 as it is almost 50% + # faster than JDK8. + env['JAVA_HOME'] = os.path.relpath(build_utils.JAVA_HOME, + build_utils.DIR_SOURCE_ROOT) + # This filter is necessary for JDK11. + stderr_filter = build_utils.FilterReflectiveAccessJavaWarnings + + try: + logging.debug('Lint command %s', cmd) + start = time.time() + # Lint outputs "No issues found" if it succeeds, and uses stderr when it + # fails, so we can safely ignore stdout. + build_utils.CheckOutput(cmd, + cwd=build_utils.DIR_SOURCE_ROOT, + env=env, + stderr_filter=stderr_filter) + end = time.time() - start + logging.info('Lint command took %ss', end) + except build_utils.CalledProcessError as e: if not silent: - print(file=sys.stderr) - for issue in issues: - issue_id = issue.attributes['id'].value - message = issue.attributes['message'].value - location_elem = issue.getElementsByTagName('location')[0] - path = location_elem.attributes['file'].value - line = location_elem.getAttribute('line') - error = '%s:%s %s: %s [warning]' % (path, line, message, issue_id) - print(error.encode('utf-8'), file=sys.stderr) - for attr in ['errorLine1', 'errorLine2']: - error_line = issue.getAttribute(attr) - if error_line: - print(error_line.encode('utf-8'), file=sys.stderr) - return len(issues) - - with build_utils.TempDir() as temp_dir: - cmd = [ - _RebasePath(lint_path), - '-Werror', - '--exitcode', - '--showall', - '--xml', - _RebasePath(result_path), - # An explicit sdk root needs to be specified since we have an extra - # intermediate 'lastest' directory under cmdline-tools which prevents - # lint from automatically deducing the location of the sdk. The sdk is - # required for many checks (e.g. NewApi). Lint also requires absolute - # paths. - '--sdk-home', - os.path.abspath(android_sdk_root), - ] - if config_path: - cmd.extend(['--config', _RebasePath(config_path)]) - if testonly_target: - cmd.extend(['--disable', ','.join(_DISABLED_FOR_TESTS)]) - - tmp_dir_counter = [0] - def _NewTempSubdir(prefix, append_digit=True): - # Helper function to create a new sub directory based on the number of - # subdirs created earlier. - if append_digit: - tmp_dir_counter[0] += 1 - prefix += str(tmp_dir_counter[0]) - new_dir = os.path.join(temp_dir, prefix) - os.makedirs(new_dir) - return new_dir - - resource_dirs = resource_utils.DeduceResourceDirsFromFileList( - resource_sources) - # These are zip files with generated resources (e. g. strings from GRD). - for resource_zip in resource_zips: - resource_dir = _NewTempSubdir(resource_zip, append_digit=False) - resource_dirs.append(resource_dir) - build_utils.ExtractAll(resource_zip, path=resource_dir) - - for resource_dir in resource_dirs: - cmd.extend(['--resources', _RebasePath(resource_dir)]) - - # There may be multiple source files with the same basename (but in - # different directories). 
It is difficult to determine what part of the path - # corresponds to the java package, and so instead just link the source files - # into temporary directories (creating a new one whenever there is a name - # conflict). - def PathInDir(d, src): - subpath = os.path.join(d, _RebasePath(src)) - subdir = os.path.dirname(subpath) - if not os.path.exists(subdir): - os.makedirs(subdir) - return subpath - - src_dirs = [] - for src in sources: - src_dir = None - for d in src_dirs: - if not os.path.exists(PathInDir(d, src)): - src_dir = d - break - if not src_dir: - src_dir = _NewTempSubdir('SRC_ROOT') - src_dirs.append(src_dir) - cmd.extend(['--sources', _RebasePath(src_dir)]) - # In cases where the build dir is outside of the src dir, this can - # result in trying to symlink a file to itself for this file: - # gen/components/version_info/android/java/org/chromium/ - # components/version_info/VersionConstants.java - src = os.path.abspath(src) - dst = PathInDir(src_dir, src) - if src == dst: - continue - os.symlink(src, dst) - - if srcjars: - srcjar_dir = _NewTempSubdir('GENERATED_SRC_ROOT', append_digit=False) - cmd.extend(['--sources', _RebasePath(srcjar_dir)]) - for srcjar in srcjars: - # We choose to allow srcjars that contain java files which have the - # same package and name to clobber each other. This happens for - # generated files like BuildConfig.java. It is generated for - # targets like base_build_config_gen as well as targets like - # chrome_modern_public_base_bundle_module__build_config_srcjar. - # Although we could extract each srcjar to a separate folder, that - # slows down some invocations of lint by 20 seconds or more. - # TODO(wnwen): Switch lint.py to generate a project.xml file which - # supports srcjar inputs by default. - build_utils.ExtractAll(srcjar, path=srcjar_dir, no_clobber=False) - - project_dir = _NewTempSubdir('PROJECT_ROOT', append_digit=False) - if android_sdk_version: - # Create dummy project.properies file in a temporary "project" directory. - # It is the only way to add Android SDK to the Lint's classpath. Proper - # classpath is necessary for most source-level checks. - with open(os.path.join(project_dir, 'project.properties'), 'w') \ - as propfile: - print('target=android-{}'.format(android_sdk_version), file=propfile) - - # Put the manifest in a temporary directory in order to avoid lint detecting - # sibling res/ and src/ directories (which should be pass explicitly if they - # are to be included). - if not manifest_path: - manifest_path = os.path.join( - build_utils.DIR_SOURCE_ROOT, 'build', 'android', - 'AndroidManifest.xml') - lint_manifest_path = os.path.join(project_dir, 'AndroidManifest.xml') - shutil.copyfile(os.path.abspath(manifest_path), lint_manifest_path) - - # Check that minSdkVersion and package is correct and add it to the manifest - # in case it does not exist. 
- doc, manifest, _ = manifest_utils.ParseManifest(lint_manifest_path) - manifest_utils.AssertUsesSdk(manifest, min_sdk_version) - manifest_utils.AssertPackage(manifest, manifest_package) - uses_sdk = manifest.find('./uses-sdk') - if uses_sdk is None: - uses_sdk = ElementTree.Element('uses-sdk') - manifest.insert(0, uses_sdk) - uses_sdk.set('{%s}minSdkVersion' % manifest_utils.ANDROID_NAMESPACE, - min_sdk_version) - if manifest_package: - manifest.set('package', manifest_package) - manifest_utils.SaveManifest(doc, lint_manifest_path) - - cmd.append(project_dir) - - if os.path.exists(result_path): - os.remove(result_path) - - env = os.environ.copy() - stderr_filter = build_utils.FilterReflectiveAccessJavaWarnings - if cache_dir: - env['_JAVA_OPTIONS'] = '-Duser.home=%s' % _RebasePath(cache_dir) - # When _JAVA_OPTIONS is set, java prints to stderr: - # Picked up _JAVA_OPTIONS: ... - # - # We drop all lines that contain _JAVA_OPTIONS from the output - stderr_filter = lambda l: re.sub( - r'.*_JAVA_OPTIONS.*\n?', - '', - build_utils.FilterReflectiveAccessJavaWarnings(l)) - - def fail_func(returncode, stderr): - if returncode != 0: - return True - if (include_unexpected and - 'Unexpected failure during lint analysis' in stderr): - return True - return False - - try: - env['JAVA_HOME'] = os.path.relpath(build_utils.JAVA_HOME, - build_utils.DIR_SOURCE_ROOT) - logging.debug('Lint command %s', cmd) - start = time.time() - build_utils.CheckOutput(cmd, cwd=build_utils.DIR_SOURCE_ROOT, - env=env or None, stderr_filter=stderr_filter, - fail_func=fail_func) - end = time.time() - start - logging.info('Lint command took %ss', end) - except build_utils.CalledProcessError: - # There is a problem with lint usage - if not os.path.exists(result_path): - raise - - # Sometimes produces empty (almost) files: - if os.path.getsize(result_path) < 10: - if can_fail_build: - raise - elif not silent: - traceback.print_exc() - return - - # There are actual lint issues - try: - num_issues = _ParseAndShowResultFile() - except Exception: # pylint: disable=broad-except - if not silent: - print('Lint created unparseable xml file...') - print('File contents:') - with open(result_path) as f: - print(f.read()) - if can_fail_build: - traceback.print_exc() - if can_fail_build: - raise - else: - return - - _ProcessResultFile() - if num_issues == 0 and include_unexpected: - msg = 'Please refer to output above for unexpected lint failures.\n' - else: - msg = ('\nLint found %d new issues.\n' - ' - For full explanation, please refer to %s\n' - ' - For more information about lint and how to fix lint issues,' - ' please refer to %s\n' % - (num_issues, _RebasePath(result_path), _LINT_MD_URL)) - if not silent: - print(msg, file=sys.stderr) - if can_fail_build: - raise Exception('Lint failed.') + print('Lint found new issues.\n' + ' - Here is the project.xml file passed to lint: {}\n' + ' - For more information about lint and how to fix lint issues,' + ' please refer to {}\n'.format(_SrcRelative(project_xml_path), + _LINT_MD_URL)) + if can_fail_build: + raise + else: + print(e) + else: + # Lint succeeded, no need to keep generated files for debugging purposes. 
+ shutil.rmtree(resource_root_dir, ignore_errors=True) + shutil.rmtree(srcjar_root_dir, ignore_errors=True) logging.info('Lint completed') -def _FindInDirectories(directories, filename_filter): - all_files = [] - for directory in directories: - all_files.extend(build_utils.FindInDirectory(directory, filename_filter)) - return all_files - - def _ParseArgs(argv): parser = argparse.ArgumentParser() build_utils.AddDepfileOption(parser) + parser.add_argument('--lint-binary-path', + required=True, + help='Path to lint executable.') + parser.add_argument('--cache-dir', + required=True, + help='Path to the directory in which the android cache ' + 'directory tree should be stored.') + parser.add_argument('--config-path', help='Path to lint suppressions file.') + parser.add_argument('--lint-gen-dir', + required=True, + help='Path to store generated xml files.') + parser.add_argument('--stamp', help='Path to stamp upon success.') + parser.add_argument('--android-sdk-version', + help='Version (API level) of the Android SDK used for ' + 'building.') + parser.add_argument('--min-sdk-version', + required=True, + help='Minimal SDK version to lint against.') parser.add_argument('--android-sdk-root', required=True, help='Lint needs an explicit path to the android sdk.') @@ -326,32 +265,20 @@ def _ParseArgs(argv): help='If set, some checks like UnusedResources will be ' 'disabled since they are not helpful for test ' 'targets.') - parser.add_argument('--lint-path', required=True, - help='Path to lint executable.') - parser.add_argument('--product-dir', required=True, - help='Path to product dir.') - parser.add_argument('--result-path', required=True, - help='Path to XML lint result file.') - parser.add_argument('--cache-dir', required=True, - help='Path to the directory in which the android cache ' - 'directory tree should be stored.') - parser.add_argument('--platform-xml-path', required=True, - help='Path to api-platforms.xml') - parser.add_argument('--android-sdk-version', - help='Version (API level) of the Android SDK used for ' - 'building.') - parser.add_argument('--can-fail-build', action='store_true', - help='If set, script will exit with nonzero exit status' - ' if lint errors are present') - parser.add_argument('--include-unexpected-failures', action='store_true', + parser.add_argument('--manifest-package', + help='Package name of the AndroidManifest.xml.') + parser.add_argument('--can-fail-build', + action='store_true', help='If set, script will exit with nonzero exit status' - ' if lint itself crashes with unexpected failures.') - parser.add_argument('--config-path', - help='Path to lint suppressions file.') + ' if lint errors are present') + parser.add_argument('--silent', + action='store_true', + help='If set, script will not log anything.') parser.add_argument('--java-sources', help='File containing a list of java sources files.') + parser.add_argument('--srcjars', help='GN list of included srcjars.') parser.add_argument('--manifest-path', - help='Path to AndroidManifest.xml') + help='Path to original AndroidManifest.xml') parser.add_argument('--resource-sources', default=[], action='append', @@ -362,25 +289,12 @@ def _ParseArgs(argv): action='append', help='GYP-list of resource zips, zip files of generated ' 'resource files.') - parser.add_argument('--silent', action='store_true', - help='If set, script will not log anything.') - parser.add_argument('--srcjars', - help='GN list of included srcjars.') - parser.add_argument('--stamp', help='Path to stamp upon success.') - parser.add_argument( - 
'--min-sdk-version', - required=True, - help='Minimal SDK version to lint against.') - parser.add_argument( - '--manifest-package', help='Package name of the AndroidManifest.xml.') args = parser.parse_args(build_utils.ExpandFileArgs(argv)) - args.java_sources = build_utils.ParseGnList(args.java_sources) args.srcjars = build_utils.ParseGnList(args.srcjars) args.resource_sources = build_utils.ParseGnList(args.resource_sources) args.resource_zips = build_utils.ParseGnList(args.resource_zips) - return args @@ -391,7 +305,6 @@ def main(): sources = [] for java_sources_file in args.java_sources: sources.extend(build_utils.ReadSourcesList(java_sources_file)) - resource_sources = [] for resource_sources_file in args.resource_sources: resource_sources.extend(build_utils.ReadSourcesList(resource_sources_file)) @@ -400,14 +313,11 @@ def main(): resource_sources + [ args.manifest_path, ]) - depfile_deps = [p for p in possible_depfile_deps if p] - _RunLint(args.lint_path, + _RunLint(args.lint_binary_path, args.config_path, args.manifest_path, - args.result_path, - args.product_dir, sources, args.cache_dir, args.android_sdk_version, @@ -417,18 +327,15 @@ def main(): resource_sources, args.resource_zips, args.android_sdk_root, + args.lint_gen_dir, testonly_target=args.testonly, can_fail_build=args.can_fail_build, - include_unexpected=args.include_unexpected_failures, silent=args.silent) logging.info('Creating stamp file') build_utils.Touch(args.stamp) if args.depfile: - build_utils.WriteDepfile(args.depfile, - args.stamp, - depfile_deps, - add_pydeps=False) # pydeps listed in GN. + build_utils.WriteDepfile(args.depfile, args.stamp, depfile_deps) if __name__ == '__main__': diff --git a/chromium/build/android/gyp/lint.pydeps b/chromium/build/android/gyp/lint.pydeps index d9a96c70194..68a62f6bf94 100644 --- a/chromium/build/android/gyp/lint.pydeps +++ b/chromium/build/android/gyp/lint.pydeps @@ -1,29 +1,7 @@ # Generated by running: # build/print_python_deps.py --root build/android/gyp --output build/android/gyp/lint.pydeps build/android/gyp/lint.py -../../../third_party/jinja2/__init__.py -../../../third_party/jinja2/_compat.py -../../../third_party/jinja2/bccache.py -../../../third_party/jinja2/compiler.py -../../../third_party/jinja2/defaults.py -../../../third_party/jinja2/environment.py -../../../third_party/jinja2/exceptions.py -../../../third_party/jinja2/filters.py -../../../third_party/jinja2/idtracking.py -../../../third_party/jinja2/lexer.py -../../../third_party/jinja2/loaders.py -../../../third_party/jinja2/nodes.py -../../../third_party/jinja2/optimizer.py -../../../third_party/jinja2/parser.py -../../../third_party/jinja2/runtime.py -../../../third_party/jinja2/tests.py -../../../third_party/jinja2/utils.py -../../../third_party/jinja2/visitor.py -../../../third_party/markupsafe/__init__.py -../../../third_party/markupsafe/_compat.py -../../../third_party/markupsafe/_native.py ../../gn_helpers.py lint.py util/__init__.py util/build_utils.py util/manifest_utils.py -util/resource_utils.py diff --git a/chromium/build/android/gyp/main_dex_list.py b/chromium/build/android/gyp/main_dex_list.py index 9c36063468a..75ca886b833 100755 --- a/chromium/build/android/gyp/main_dex_list.py +++ b/chromium/build/android/gyp/main_dex_list.py @@ -56,8 +56,9 @@ def main(): args = _ParseArgs() proguard_cmd = [ build_utils.JAVA_PATH, - '-jar', + '-cp', args.r8_path, + 'com.android.tools.r8.R8', '--classfile', '--no-desugaring', '--lib', @@ -131,11 +132,9 @@ def main(): f.write(main_dex_list) if args.depfile: - 
build_utils.WriteDepfile( - args.depfile, - args.main_dex_list_path, - inputs=args.class_inputs_filearg, - add_pydeps=False) + build_utils.WriteDepfile(args.depfile, + args.main_dex_list_path, + inputs=args.class_inputs_filearg) if __name__ == '__main__': diff --git a/chromium/build/android/gyp/merge_manifest.py b/chromium/build/android/gyp/merge_manifest.py index 3f784588206..f205aa42b48 100755 --- a/chromium/build/android/gyp/merge_manifest.py +++ b/chromium/build/android/gyp/merge_manifest.py @@ -22,8 +22,8 @@ _MANIFEST_MERGER_JARS = [ os.path.join('common', 'common.jar'), os.path.join('sdk-common', 'sdk-common.jar'), os.path.join('sdklib', 'sdklib.jar'), - os.path.join('external', 'com', 'google', 'guava', 'guava', '27.1-jre', - 'guava-27.1-jre.jar'), + os.path.join('external', 'com', 'google', 'guava', 'guava', '28.1-jre', + 'guava-28.1-jre.jar'), os.path.join('external', 'kotlin-plugin-ij', 'Kotlin', 'kotlinc', 'lib', 'kotlin-stdlib.jar'), os.path.join('external', 'com', 'google', 'code', 'gson', 'gson', '2.8.5', @@ -137,8 +137,7 @@ def main(argv): if args.depfile: inputs = extras + classpath.split(':') - build_utils.WriteDepfile(args.depfile, args.output, inputs=inputs, - add_pydeps=False) + build_utils.WriteDepfile(args.depfile, args.output, inputs=inputs) if __name__ == '__main__': diff --git a/chromium/build/android/gyp/prepare_resources.pydeps b/chromium/build/android/gyp/prepare_resources.pydeps index 1066a5ff1ec..c0d225db2c6 100644 --- a/chromium/build/android/gyp/prepare_resources.pydeps +++ b/chromium/build/android/gyp/prepare_resources.pydeps @@ -22,6 +22,7 @@ ../../../third_party/markupsafe/_compat.py ../../../third_party/markupsafe/_native.py ../../gn_helpers.py +../../print_python_deps.py prepare_resources.py util/__init__.py util/build_utils.py diff --git a/chromium/build/android/gyp/proguard.py b/chromium/build/android/gyp/proguard.py index 18919589382..c151be70aef 100755 --- a/chromium/build/android/gyp/proguard.py +++ b/chromium/build/android/gyp/proguard.py @@ -12,6 +12,7 @@ import sys import tempfile import zipfile +import dex_jdk_libs from util import build_utils from util import diff_utils @@ -107,15 +108,17 @@ def _ParseOptions(): args = build_utils.ExpandFileArgs(sys.argv[1:]) parser = argparse.ArgumentParser() build_utils.AddDepfileOption(parser) - group = parser.add_mutually_exclusive_group(required=True) - group.add_argument('--proguard-path', help='Path to the proguard.jar to use.') - group.add_argument('--r8-path', help='Path to the R8.jar to use.') + parser.add_argument('--r8-path', + required=True, + help='Path to the R8.jar to use.') parser.add_argument( '--desugar-jdk-libs-json', help='Path to desugar_jdk_libs.json.') parser.add_argument('--input-paths', action='append', required=True, help='GN-list of .jar files to optimize.') + parser.add_argument('--desugar-jdk-libs-jar', + help='Path to desugar_jdk_libs.jar.') parser.add_argument('--output-path', help='Path to the generated .jar file.') parser.add_argument( '--proguard-configs', @@ -196,6 +199,8 @@ def _ParseOptions(): parser.add_argument( '--stamp', help='File to touch upon success. 
Mutually exclusive with --output-path') + parser.add_argument('--desugared-library-keep-rule-output', + help='Path to desugared library keep rule output file.') options = parser.parse_args(args) @@ -213,9 +218,6 @@ def _ParseOptions(): if options.expected_configs_file and not options.output_config: parser.error('--expected-configs-file requires --output-config') - if options.proguard_path and options.disable_outlining: - parser.error('--disable-outlining requires --r8-path') - if options.only_verify_expectations and not options.stamp: parser.error('--only-verify-expectations requires --stamp') @@ -268,12 +270,18 @@ class _DexPathContext(object): self.staging_dir = os.path.join(work_dir, name) os.mkdir(self.staging_dir) - def CreateOutput(self): + def CreateOutput(self, has_imported_lib=False, keep_rule_output=None): found_files = build_utils.FindInDirectory(self.staging_dir) if not found_files: raise Exception('Missing dex outputs in {}'.format(self.staging_dir)) if self._final_output_path.endswith('.dex'): + if has_imported_lib: + raise Exception( + 'Trying to create a single .dex file, but a dependency requires ' + 'JDK Library Desugaring (which necessitates a second file).' + 'Refer to %s to see what desugaring was required' % + keep_rule_output) if len(found_files) != 1: raise Exception('Expected exactly 1 dex file output, found: {}'.format( '\t'.join(found_files))) @@ -323,8 +331,9 @@ def _OptimizeWithR8(options, cmd = [ build_utils.JAVA_PATH, - '-jar', + '-cp', options.r8_path, + 'com.android.tools.r8.R8', '--no-data-resources', '--output', base_dex_context.staging_dir, @@ -333,7 +342,12 @@ def _OptimizeWithR8(options, ] if options.desugar_jdk_libs_json: - cmd += ['--desugared-lib', options.desugar_jdk_libs_json] + cmd += [ + '--desugared-lib', + options.desugar_jdk_libs_json, + '--desugared-lib-pg-conf-output', + options.desugared_library_keep_rule_output, + ] if options.min_api: cmd += ['--min-api', options.min_api] @@ -357,10 +371,8 @@ def _OptimizeWithR8(options, p for p in feature.input_paths if p not in module_input_jars ] module_input_jars.update(feature_input_jars) - cmd += [ - '--feature-jar', - feature.staging_dir + ':' + ':'.join(feature_input_jars) - ] + for in_jar in feature_input_jars: + cmd += ['--feature', in_jar, feature.staging_dir] cmd += base_dex_context.input_paths # Add any extra input jars to the base module (e.g. desugar runtime). 
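The hunks above switch R8 from a `java -jar` launch to invoking its main class on the classpath, and pass each feature-split jar with its own `--feature` flag pair instead of one colon-joined `--feature-jar` value. A minimal sketch of the resulting command shape, with hypothetical paths standing in for the real GN outputs (only flags shown in the hunks above are used):

    import subprocess

    r8_jar = 'third_party/r8/lib/r8.jar'   # hypothetical path
    staging_dir = 'out/staging'            # hypothetical output dir
    feature_jars = ['feature_a.jar', 'feature_b.jar']

    cmd = ['java', '-cp', r8_jar, 'com.android.tools.r8.R8',
           '--no-data-resources', '--output', staging_dir]
    for in_jar in feature_jars:
        # One (--feature, input_jar, output_dir) triple per feature jar.
        cmd += ['--feature', in_jar, staging_dir]
    subprocess.check_call(cmd)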
@@ -382,7 +394,18 @@ def _OptimizeWithR8(options, 'android/docs/java_optimization.md#Debugging-common-failures\n')) raise ProguardProcessError(err, debugging_link) - base_dex_context.CreateOutput() + base_has_imported_lib = False + if options.desugar_jdk_libs_json: + existing_files = build_utils.FindInDirectory(base_dex_context.staging_dir) + base_has_imported_lib = dex_jdk_libs.DexJdkLibJar( + options.r8_path, options.min_api, options.desugar_jdk_libs_json, + options.desugar_jdk_libs_jar, + options.desugared_library_keep_rule_output, + os.path.join(base_dex_context.staging_dir, + 'classes%d.dex' % (len(existing_files) + 1))) + + base_dex_context.CreateOutput(base_has_imported_lib, + options.desugared_library_keep_rule_output) for feature in feature_contexts: feature.CreateOutput() @@ -393,65 +416,6 @@ def _OptimizeWithR8(options, out_file.writelines(l for l in in_file if not l.startswith('#')) -def _OptimizeWithProguard(options, - config_paths, - libraries, - dynamic_config_data, - print_stdout=False): - with build_utils.TempDir() as tmp_dir: - combined_injars_path = os.path.join(tmp_dir, 'injars.jar') - combined_libjars_path = os.path.join(tmp_dir, 'libjars.jar') - combined_proguard_configs_path = os.path.join(tmp_dir, 'includes.txt') - tmp_mapping_path = os.path.join(tmp_dir, 'mapping.txt') - tmp_output_jar = os.path.join(tmp_dir, 'output.jar') - - build_utils.MergeZips(combined_injars_path, options.input_paths) - build_utils.MergeZips(combined_libjars_path, libraries) - with open(combined_proguard_configs_path, 'w') as f: - f.write(_CombineConfigs(config_paths, dynamic_config_data)) - - if options.proguard_path.endswith('.jar'): - cmd = [ - build_utils.JAVA_PATH, '-jar', options.proguard_path, '-include', - combined_proguard_configs_path - ] - else: - cmd = [options.proguard_path, '@' + combined_proguard_configs_path] - - cmd += [ - '-forceprocessing', - '-libraryjars', - combined_libjars_path, - '-injars', - combined_injars_path, - '-outjars', - tmp_output_jar, - '-printmapping', - tmp_mapping_path, - ] - - # Warning: and Error: are sent to stderr, but messages and Note: are sent - # to stdout. - stdout_filter = None - stderr_filter = None - if print_stdout: - stdout_filter = _ProguardOutputFilter() - stderr_filter = _ProguardOutputFilter() - build_utils.CheckOutput( - cmd, - print_stdout=True, - print_stderr=True, - stdout_filter=stdout_filter, - stderr_filter=stderr_filter) - - # ProGuard will skip writing if the file would be empty. - build_utils.Touch(tmp_mapping_path) - - # Copy output files to correct locations. 
- shutil.move(tmp_output_jar, options.output_path) - shutil.move(tmp_mapping_path, options.mapping_output) - - def _CombineConfigs(configs, dynamic_config_data, exclude_generated=False): ret = [] @@ -508,11 +472,14 @@ def _CreateDynamicConfig(options): if api_level > _min_api: ret.append('-keep @interface %s' % annotation_name) ret.append("""\ --keep,allowobfuscation,allowoptimization @%s class ** { - <methods>; +-if @%s class * { + *** *(...); +} +-keep,allowobfuscation class <1> { + *** <2>(...); }""" % annotation_name) ret.append("""\ --keepclassmembers,allowobfuscation,allowoptimization class ** { +-keepclassmembers,allowobfuscation class ** { @%s <methods>; }""" % annotation_name) return '\n'.join(ret) @@ -545,8 +512,7 @@ def _MaybeWriteStampAndDepFile(options, inputs): build_utils.Touch(options.stamp) output = options.stamp if options.depfile: - build_utils.WriteDepfile( - options.depfile, output, inputs=inputs, add_pydeps=False) + build_utils.WriteDepfile(options.depfile, output, inputs=inputs) def main(): @@ -597,12 +563,8 @@ def main(): with open(options.output_config, 'w') as f: f.write(merged_configs) - if options.r8_path: - _OptimizeWithR8(options, proguard_configs, libraries, dynamic_config_data, - print_stdout) - else: - _OptimizeWithProguard(options, proguard_configs, libraries, - dynamic_config_data, print_stdout) + _OptimizeWithR8(options, proguard_configs, libraries, dynamic_config_data, + print_stdout) # After ProGuard / R8 has run: for output in options.extra_mapping_output_paths: diff --git a/chromium/build/android/gyp/proguard.pydeps b/chromium/build/android/gyp/proguard.pydeps index 98934d7aae2..11f51cc0f0d 100644 --- a/chromium/build/android/gyp/proguard.pydeps +++ b/chromium/build/android/gyp/proguard.pydeps @@ -1,6 +1,7 @@ # Generated by running: # build/print_python_deps.py --root build/android/gyp --output build/android/gyp/proguard.pydeps build/android/gyp/proguard.py ../../gn_helpers.py +dex_jdk_libs.py proguard.py util/__init__.py util/build_utils.py diff --git a/chromium/build/android/gyp/turbine.pydeps b/chromium/build/android/gyp/turbine.pydeps index 19396459519..45b0d27d3f6 100644 --- a/chromium/build/android/gyp/turbine.pydeps +++ b/chromium/build/android/gyp/turbine.pydeps @@ -1,6 +1,7 @@ # Generated by running: # build/print_python_deps.py --root build/android/gyp --output build/android/gyp/turbine.pydeps build/android/gyp/turbine.py ../../gn_helpers.py +../../print_python_deps.py turbine.py util/__init__.py util/build_utils.py diff --git a/chromium/build/android/gyp/util/build_utils.py b/chromium/build/android/gyp/util/build_utils.py index bc15fbb61f2..067f62e4b9b 100644 --- a/chromium/build/android/gyp/util/build_utils.py +++ b/chromium/build/android/gyp/util/build_utils.py @@ -541,49 +541,6 @@ def GetSortedTransitiveDependencies(top, deps_func): return list(deps_map) -def ComputePythonDependencies(): - """Gets the paths of imported non-system python modules. - - A path is assumed to be a "system" import if it is outside of chromium's - src/. The paths will be relative to the current directory. 
- """ - _ForceLazyModulesToLoad() - module_paths = (m.__file__ for m in sys.modules.values() - if m is not None and hasattr(m, '__file__')) - abs_module_paths = map(os.path.abspath, module_paths) - - abs_dir_source_root = os.path.abspath(DIR_SOURCE_ROOT) - non_system_module_paths = [ - p for p in abs_module_paths if p.startswith(abs_dir_source_root) - ] - - def ConvertPycToPy(s): - if s.endswith('.pyc'): - return s[:-1] - return s - - non_system_module_paths = map(ConvertPycToPy, non_system_module_paths) - non_system_module_paths = map(os.path.relpath, non_system_module_paths) - return sorted(set(non_system_module_paths)) - - -def _ForceLazyModulesToLoad(): - """Forces any lazily imported modules to fully load themselves. - - Inspecting the modules' __file__ attribute causes lazily imported modules - (e.g. from email) to get fully imported and update sys.modules. Iterate - over the values until sys.modules stabilizes so that no modules are missed. - """ - while True: - num_modules_before = len(sys.modules.keys()) - for m in sys.modules.values(): - if m is not None and hasattr(m, '__file__'): - _ = m.__file__ - num_modules_after = len(sys.modules.keys()) - if num_modules_before == num_modules_after: - break - - def InitLogging(enabling_env): logging.basicConfig( level=logging.DEBUG if os.environ.get(enabling_env) else logging.WARNING, @@ -611,12 +568,10 @@ def AddDepfileOption(parser): help='Path to depfile (refer to `gn help depfile`)') -def WriteDepfile(depfile_path, first_gn_output, inputs=None, add_pydeps=True): +def WriteDepfile(depfile_path, first_gn_output, inputs=None): assert depfile_path != first_gn_output # http://crbug.com/646165 assert not isinstance(inputs, string_types) # Easy mistake to make inputs = inputs or [] - if add_pydeps: - inputs = ComputePythonDependencies() + inputs MakeDirectory(os.path.dirname(depfile_path)) # Ninja does not support multiple outputs in depfiles. with open(depfile_path, 'w') as depfile: diff --git a/chromium/build/android/gyp/util/md5_check.py b/chromium/build/android/gyp/util/md5_check.py index a8a815e7e4f..2830d25c969 100644 --- a/chromium/build/android/gyp/util/md5_check.py +++ b/chromium/build/android/gyp/util/md5_check.py @@ -14,6 +14,9 @@ import zipfile from util import build_utils +sys.path.insert(1, os.path.join(build_utils.DIR_SOURCE_ROOT, 'build')) +import print_python_deps + # When set and a difference is detected, a diff of what changed is printed. PRINT_EXPLANATIONS = int(os.environ.get('PRINT_BUILD_EXPLANATIONS', 0)) @@ -48,7 +51,7 @@ def CallAndWriteDepfileIfStale(on_stale_md5, input_strings = list(input_strings or []) output_paths = list(output_paths or []) - input_paths += build_utils.ComputePythonDependencies() + input_paths += print_python_deps.ComputePythonDependencies() CallAndRecordIfStale( on_stale_md5, @@ -64,8 +67,7 @@ def CallAndWriteDepfileIfStale(on_stale_md5, # on bots that build with & without patch, and the patch changes the depfile # location. if hasattr(options, 'depfile') and options.depfile: - build_utils.WriteDepfile( - options.depfile, output_paths[0], depfile_deps, add_pydeps=False) + build_utils.WriteDepfile(options.depfile, output_paths[0], depfile_deps) def CallAndRecordIfStale(function, @@ -125,15 +127,21 @@ def CallAndRecordIfStale(function, old_metadata = None force = force or _FORCE_REBUILD missing_outputs = [x for x in output_paths if force or not os.path.exists(x)] + too_new = [] # When outputs are missing, don't bother gathering change information. 
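# A minimal sketch (helper name illustrative, not part of the patch) of the
# mtime comparison the lines below add: any output modified after the record
# file was written must have been touched outside the build, so it is
# reported as stale and triggers a rerun.
import os

def _outputs_newer_than_record(record_path, output_paths):
    record_mtime = os.path.getmtime(record_path)
    return [p for p in output_paths if os.path.getmtime(p) > record_mtime]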
if not missing_outputs and os.path.exists(record_path): - with open(record_path, 'r') as jsonfile: - try: - old_metadata = _Metadata.FromFile(jsonfile) - except: # pylint: disable=bare-except - pass # Not yet using new file format. - - changes = Changes(old_metadata, new_metadata, force, missing_outputs) + record_mtime = os.path.getmtime(record_path) + # Outputs newer than the change information must have been modified outside + # of the build, and should be considered stale. + too_new = [x for x in output_paths if os.path.getmtime(x) > record_mtime] + if not too_new: + with open(record_path, 'r') as jsonfile: + try: + old_metadata = _Metadata.FromFile(jsonfile) + except: # pylint: disable=bare-except + pass # Not yet using new file format. + + changes = Changes(old_metadata, new_metadata, force, missing_outputs, too_new) if not changes.HasChanges(): return @@ -153,11 +161,13 @@ def CallAndRecordIfStale(function, class Changes(object): """Provides and API for querying what changed between runs.""" - def __init__(self, old_metadata, new_metadata, force, missing_outputs): + def __init__(self, old_metadata, new_metadata, force, missing_outputs, + too_new): self.old_metadata = old_metadata self.new_metadata = new_metadata self.force = force self.missing_outputs = missing_outputs + self.too_new = too_new def _GetOldTag(self, path, subpath=None): return self.old_metadata and self.old_metadata.GetTag(path, subpath) @@ -254,6 +264,8 @@ class Changes(object): return 'force=True' elif self.missing_outputs: return 'Outputs do not exist:\n ' + '\n '.join(self.missing_outputs) + elif self.too_new: + return 'Outputs newer than stamp file:\n ' + '\n '.join(self.too_new) elif self.old_metadata is None: return 'Previous stamp file not found.' diff --git a/chromium/build/android/gyp/util/md5_check_test.py b/chromium/build/android/gyp/util/md5_check_test.py index 9b3b9039f39..2169320ee54 100755 --- a/chromium/build/android/gyp/util/md5_check_test.py +++ b/chromium/build/android/gyp/util/md5_check_test.py @@ -47,13 +47,21 @@ class TestMd5Check(unittest.TestCase): outputs_missing=False, expected_changes=None, added_or_modified_only=None, - track_subentries=False): + track_subentries=False, + output_newer_than_record=False): output_paths = None if outputs_specified: output_file1 = tempfile.NamedTemporaryFile() if outputs_missing: output_file1.close() # Gets deleted on close(). 
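# For context, a self-contained sketch (hypothetical temp files, not part of
# the patch) of the os.utime() backdating the test uses below to simulate an
# output that is newer than its record file:
import os
import tempfile

record = tempfile.NamedTemporaryFile()   # deleted when closed
output = tempfile.NamedTemporaryFile()
mtime = os.path.getmtime(output.name)
os.utime(record.name, (mtime - 1, mtime - 1))  # backdate atime and mtime
assert os.path.getmtime(output.name) > os.path.getmtime(record.name)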
output_paths = [output_file1.name] + if output_newer_than_record: + output_mtime = os.path.getmtime(output_file1.name) + os.utime(record_path.name, (output_mtime - 1, output_mtime - 1)) + else: + # touch the record file so it doesn't look like it's older that + # the output we've just created + os.utime(record_path.name, None) self.called = False self.changes = None @@ -97,6 +105,13 @@ class TestMd5Check(unittest.TestCase): outputs_specified=True, outputs_missing=True, expected_changes='Outputs do not exist:*', added_or_modified_only=False) + CheckCallAndRecord(True, + 'should call when output is newer than record', + expected_changes='Outputs newer than stamp file:*', + outputs_specified=True, + outputs_missing=False, + added_or_modified_only=False, + output_newer_than_record=True) CheckCallAndRecord(True, force=True, message='should call when forced', expected_changes='force=True', added_or_modified_only=False) diff --git a/chromium/build/android/gyp/util/parallel.py b/chromium/build/android/gyp/util/parallel.py new file mode 100644 index 00000000000..082ad97225e --- /dev/null +++ b/chromium/build/android/gyp/util/parallel.py @@ -0,0 +1,214 @@ +# Copyright 2020 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Helpers related to multiprocessing. + +Based on: //tools/binary_size/libsupersize/parallel.py +""" + +import atexit +import logging +import multiprocessing +import os +import sys +import threading +import traceback + +DISABLE_ASYNC = os.environ.get('DISABLE_ASYNC') == '1' +if DISABLE_ASYNC: + logging.warning('Running in synchronous mode.') + +_all_pools = None +_is_child_process = False +_silence_exceptions = False + +# Used to pass parameters to forked processes without pickling. +_fork_params = None +_fork_kwargs = None + + +class _ImmediateResult(object): + def __init__(self, value): + self._value = value + + def get(self): + return self._value + + def wait(self): + pass + + def ready(self): + return True + + def successful(self): + return True + + +class _ExceptionWrapper(object): + """Used to marshal exception messages back to main process.""" + + def __init__(self, msg, exception_type=None): + self.msg = msg + self.exception_type = exception_type + + def MaybeThrow(self): + if self.exception_type: + raise getattr(__builtins__, + self.exception_type)('Originally caused by: ' + self.msg) + + +class _FuncWrapper(object): + """Runs on the fork()'ed side to catch exceptions and spread *args.""" + + def __init__(self, func): + global _is_child_process + _is_child_process = True + self._func = func + + def __call__(self, index, _=None): + try: + return self._func(*_fork_params[index], **_fork_kwargs) + except Exception as e: + # Only keep the exception type for builtin exception types or else risk + # further marshalling exceptions. + exception_type = None + if hasattr(__builtins__, type(e).__name__): + exception_type = type(e).__name__ + # multiprocessing is supposed to catch and return exceptions automatically + # but it doesn't seem to work properly :(. + return _ExceptionWrapper(traceback.format_exc(), exception_type) + except: # pylint: disable=bare-except + return _ExceptionWrapper(traceback.format_exc()) + + +class _WrappedResult(object): + """Allows for host-side logic to be run after child process has terminated. + + * Unregisters associated pool _all_pools. + * Raises exception caught by _FuncWrapper. 
+ """ + + def __init__(self, result, pool=None): + self._result = result + self._pool = pool + + def get(self): + self.wait() + value = self._result.get() + _CheckForException(value) + return value + + def wait(self): + self._result.wait() + if self._pool: + _all_pools.remove(self._pool) + self._pool = None + + def ready(self): + return self._result.ready() + + def successful(self): + return self._result.successful() + + +def _TerminatePools(): + """Calls .terminate() on all active process pools. + + Not supposed to be necessary according to the docs, but seems to be required + when child process throws an exception or Ctrl-C is hit. + """ + global _silence_exceptions + _silence_exceptions = True + # Child processes cannot have pools, but atexit runs this function because + # it was registered before fork()ing. + if _is_child_process: + return + + def close_pool(pool): + try: + pool.terminate() + except: # pylint: disable=bare-except + pass + + for i, pool in enumerate(_all_pools): + # Without calling terminate() on a separate thread, the call can block + # forever. + thread = threading.Thread(name='Pool-Terminate-{}'.format(i), + target=close_pool, + args=(pool, )) + thread.daemon = True + thread.start() + + +def _CheckForException(value): + if isinstance(value, _ExceptionWrapper): + global _silence_exceptions + if not _silence_exceptions: + value.MaybeThrow() + _silence_exceptions = True + logging.error('Subprocess raised an exception:\n%s', value.msg) + sys.exit(1) + + +def _MakeProcessPool(job_params, **job_kwargs): + global _all_pools + global _fork_params + global _fork_kwargs + assert _fork_params is None + assert _fork_kwargs is None + pool_size = min(len(job_params), multiprocessing.cpu_count()) + _fork_params = job_params + _fork_kwargs = job_kwargs + ret = multiprocessing.Pool(pool_size) + _fork_params = None + _fork_kwargs = None + if _all_pools is None: + _all_pools = [] + atexit.register(_TerminatePools) + _all_pools.append(ret) + return ret + + +def ForkAndCall(func, args): + """Runs |func| in a fork'ed process. + + Returns: + A Result object (call .get() to get the return value) + """ + if DISABLE_ASYNC: + pool = None + result = _ImmediateResult(func(*args)) + else: + pool = _MakeProcessPool([args]) # Omit |kwargs|. + result = pool.apply_async(_FuncWrapper(func), (0, )) + pool.close() + return _WrappedResult(result, pool=pool) + + +def BulkForkAndCall(func, arg_tuples, **kwargs): + """Calls |func| in a fork'ed process for each set of args within |arg_tuples|. + + Args: + kwargs: Common keyword arguments to be passed to |func|. + + Yields the return values in order. + """ + arg_tuples = list(arg_tuples) + if not arg_tuples: + return + + if DISABLE_ASYNC: + for args in arg_tuples: + yield func(*args, **kwargs) + return + + pool = _MakeProcessPool(arg_tuples, **kwargs) + wrapped_func = _FuncWrapper(func) + try: + for result in pool.imap(wrapped_func, xrange(len(arg_tuples))): + _CheckForException(result) + yield result + finally: + pool.close() + pool.join() + _all_pools.remove(pool) diff --git a/chromium/build/android/gyp/util/resource_utils.py b/chromium/build/android/gyp/util/resource_utils.py index 1b92c4fb49e..7b16949f9d3 100644 --- a/chromium/build/android/gyp/util/resource_utils.py +++ b/chromium/build/android/gyp/util/resource_utils.py @@ -57,6 +57,7 @@ AAPT_IGNORE_PATTERN = ':'.join([ '*~', # Some editors create these as temp files. '.*', # Never makes sense to include dot(files/dirs). 
'*.d.stamp', # Ignore stamp files + '*.backup', # Some tools create temporary backup files. ]) MULTIPLE_RES_MAGIC_STRING = b'magic' diff --git a/chromium/build/android/gyp/write_build_config.py b/chromium/build/android/gyp/write_build_config.py index 02b02fcd538..5e3897c4913 100755 --- a/chromium/build/android/gyp/write_build_config.py +++ b/chromium/build/android/gyp/write_build_config.py @@ -242,18 +242,22 @@ through Proguard or other tools. For most targets this is generated from sources, with a name like `$target_name.javac.jar`. However, when using a prebuilt jar, this will point to the source archive directly. -* `deps_info['jar_path']`: +* `deps_info['device_jar_path']`: Path to a file that is the result of processing -`deps_info['unprocessed_jar_path']` with various tools. +`deps_info['unprocessed_jar_path']` with various tools (ready to be dexed). + +* `deps_info['host_jar_path']`: +Path to a file that is the result of processing +`deps_info['unprocessed_jar_path']` with various tools (use by java_binary). * `deps_info['interface_jar_path']: Path to the interface jar generated for this library. This corresponds to a jar file that only contains declarations. Generated by running the `ijar` on -`deps_info['jar_path']` or the `turbine` tool on source files. +`deps_info['unprocessed_jar_path']` or the `turbine` tool on source files. * `deps_info['dex_path']`: -Path to the `.dex` file generated for this target, from `deps_info['jar_path']` -unless this comes from a prebuilt `.aar` archive. +Path to the `.dex` file generated for this target, from +`deps_info['device_jar_path']` unless this comes from a prebuilt `.aar` archive. * `deps_info['is_prebuilt']`: True to indicate that this target corresponds to a prebuilt `.jar` file. @@ -323,10 +327,10 @@ all entries from the `java_library` type, and adds: * `deps_info['main_class']`: Name of the main Java class that serves as an entry point for the binary. -* `deps_info['java_runtime_classpath']`: +* `deps_info['device_classpath']`: The classpath used when running a Java or Android binary. Essentially the -collection of all `deps_info['jar_path']` entries for the target and all its -dependencies. +collection of all `deps_info['device_jar_path']` entries for the target and all +its dependencies. ## <a name="target_junit_binary">Target type `junit_binary`</a>: @@ -701,8 +705,8 @@ class Deps(object): def helper(cur): for config in cur.Direct('java_library'): if config['is_prebuilt'] or config['gradle_treat_as_prebuilt']: - if config['jar_path'] not in ret: - ret.append(config['jar_path']) + if config['unprocessed_jar_path'] not in ret: + ret.append(config['unprocessed_jar_path']) helper(self) return ret @@ -837,6 +841,15 @@ def _CreateJavaLocaleListFromAssets(assets, locale_paks): return '{%s}' % ','.join(['"%s"' % l for l in sorted(locales)]) +def _AddJarMapping(jar_to_target, configs): + for config in configs: + jar = config.get('unprocessed_jar_path') + if jar: + jar_to_target[jar] = config['gn_target'] + for jar in config.get('extra_classpath_jars', []): + jar_to_target[jar] = config['gn_target'] + + def main(argv): parser = optparse.OptionParser() build_utils.AddDepfileOption(parser) @@ -844,6 +857,7 @@ def main(argv): parser.add_option( '--type', help='Type of this target (e.g. 
android_library).') + parser.add_option('--gn-target', help='GN label for this target') parser.add_option( '--deps-configs', help='GN-list of dependent build_config files.') @@ -875,7 +889,8 @@ def main(argv): help='Consider the assets as locale paks in BuildConfig.java') # java library options - parser.add_option('--jar-path', help='Path to target\'s jar output.') + parser.add_option('--device-jar-path', help='Path to .jar for dexing.') + parser.add_option('--host-jar-path', help='Path to .jar for java_binary.') parser.add_option('--unprocessed-jar-path', help='Path to the .jar to use for javac classpath purposes.') parser.add_option( @@ -884,10 +899,6 @@ def main(argv): parser.add_option( '--jetified-jar-path', help='Path to the jetified.jar to use for javac classpath purposes.') - parser.add_option( - '--skip-jetify', - action='store_true', - help='Whether to use jetified or non-jetified classpath.') parser.add_option('--is-prebuilt', action='store_true', help='Whether the jar was compiled or pre-compiled.') parser.add_option('--java-sources-file', help='Path to .sources file') @@ -1039,11 +1050,13 @@ def main(argv): if options.fail: parser.error('\n'.join(build_utils.ParseGnList(options.fail))) - jar_path_options = ['jar_path', 'unprocessed_jar_path', 'interface_jar_path'] + lib_options = ['unprocessed_jar_path', 'interface_jar_path'] + device_lib_options = ['device_jar_path', 'dex_path'] required_options_map = { - 'android_apk': ['build_config', 'dex_path'] + jar_path_options, - 'android_app_bundle_module': ['build_config', 'dex_path', - 'final_dex_path', 'res_size_info'] + jar_path_options, + 'android_apk': ['build_config'] + lib_options + device_lib_options, + 'android_app_bundle_module': + ['build_config', 'final_dex_path', 'res_size_info'] + lib_options + + device_lib_options, 'android_assets': ['build_config'], 'android_resources': ['build_config', 'resources_zip'], 'dist_aar': ['build_config'], @@ -1051,9 +1064,9 @@ def main(argv): 'group': ['build_config'], 'java_annotation_processor': ['build_config', 'main_class'], 'java_binary': ['build_config'], - 'java_library': ['build_config'] + jar_path_options, + 'java_library': ['build_config', 'host_jar_path'] + lib_options, 'junit_binary': ['build_config'], - 'system_java_library': ['build_config'], + 'system_java_library': ['build_config', 'unprocessed_jar_path'], 'android_app_bundle': ['build_config', 'module_build_configs'], } required_options = required_options_map.get(options.type) @@ -1093,10 +1106,10 @@ def main(argv): '--library-renames can only be used with --type=android_apk or ' '--type=android_app_bundle_module') - if options.jar_path and options.supports_android and not options.dex_path: + if options.device_jar_path and not options.dex_path: raise Exception('java_library that supports Android requires a dex path.') - if any(getattr(options, x) for x in jar_path_options): - for attr in jar_path_options: + if any(getattr(options, x) for x in lib_options): + for attr in lib_options: if not getattr(options, attr): raise('Expected %s to be set.' 
% attr) @@ -1152,6 +1165,7 @@ def main(argv): 'name': os.path.basename(options.build_config), 'path': options.build_config, 'type': options.type, + 'gn_target': options.gn_target, 'deps_configs': deps.direct_deps_config_paths, 'chromium_code': not options.non_chromium_code, }, @@ -1254,20 +1268,21 @@ def main(argv): raise Exception('Not all deps support the Android platform: ' + str(deps_not_support_android)) - if is_apk_or_module_target: + if is_apk_or_module_target or options.type == 'dist_jar': all_dex_files = [c['dex_path'] for c in all_library_deps] if is_java_target: # Classpath values filled in below (after applying tested_apk_config). config['javac'] = {} - if options.jar_path: - deps_info['jar_path'] = options.jar_path + if options.unprocessed_jar_path: deps_info['unprocessed_jar_path'] = options.unprocessed_jar_path deps_info['interface_jar_path'] = options.interface_jar_path - if options.skip_jetify: - deps_info['jetified_jar_path'] = options.interface_jar_path - else: - deps_info['jetified_jar_path'] = options.jetified_jar_path + if options.device_jar_path: + deps_info['device_jar_path'] = options.device_jar_path + if options.host_jar_path: + deps_info['host_jar_path'] = options.host_jar_path + deps_info['jetified_jar_path'] = (options.jetified_jar_path + or options.interface_jar_path) if options.dex_path: deps_info['dex_path'] = options.dex_path if is_apk_or_module_target: @@ -1403,7 +1418,7 @@ def main(argv): # Adding base module to classpath to compile against its R.java file if base_module_build_config: javac_full_classpath.append( - base_module_build_config['deps_info']['jar_path']) + base_module_build_config['deps_info']['unprocessed_jar_path']) javac_full_interface_classpath.append( base_module_build_config['deps_info']['interface_jar_path']) jetified_full_jar_classpath.append( @@ -1459,15 +1474,24 @@ def main(argv): if is_java_target or options.type == 'android_app_bundle': # The classpath to use to run this target (or as an input to ProGuard). - java_full_classpath = [] - if is_java_target and options.jar_path: - java_full_classpath.append(options.jar_path) - java_full_classpath.extend(c['jar_path'] for c in all_library_deps) + device_classpath = [] + if is_java_target and options.device_jar_path: + device_classpath.append(options.device_jar_path) + device_classpath.extend( + c.get('device_jar_path') for c in all_library_deps + if c.get('device_jar_path')) if options.type == 'android_app_bundle': for d in deps.Direct('android_app_bundle_module'): - java_full_classpath.extend( - c for c in d.get('java_runtime_classpath', []) - if c not in java_full_classpath) + device_classpath.extend(c for c in d.get('device_classpath', []) + if c not in device_classpath) + + if options.type in ('dist_jar', 'java_binary', 'junit_binary'): + # The classpath to use to run this target. 
+ host_classpath = [] + if options.host_jar_path: + host_classpath.append(options.host_jar_path) + host_classpath.extend(c['host_jar_path'] for c in all_library_deps) + deps_info['host_classpath'] = host_classpath all_configs = build_utils.ParseGnList(options.proguard_configs) deps_info['proguard_configs'] = list(all_configs) @@ -1563,7 +1587,7 @@ def main(argv): if dep_config['type'] == 'android_app_bundle': base_config = GetDepConfig(dep_config['base_module_config']) extra_main_r_text_files.append(base_config['r_text_path']) - static_lib_jar_paths[config_path] = base_config['jar_path'] + static_lib_jar_paths[config_path] = base_config['device_jar_path'] all_configs.extend(dep_config['proguard_all_configs']) extra_proguard_classpath_jars.extend( dep_config['proguard_classpath_jars']) @@ -1578,19 +1602,19 @@ def main(argv): for package in base_config['extra_package_names']: if package not in extra_package_names: extra_package_names.append(package) - for cp_entry in dep_config['java_runtime_classpath']: + for cp_entry in dep_config['device_classpath']: configs_by_classpath_entry[cp_entry].append(config_path) - for cp_entry in java_full_classpath: + for cp_entry in device_classpath: configs_by_classpath_entry[cp_entry].append(options.build_config) for cp_entry, candidate_configs in configs_by_classpath_entry.iteritems(): config_path = (candidate_configs[0] if len(candidate_configs) == 1 else options.build_config) classpath_entries_by_owning_config[config_path].append(cp_entry) - java_full_classpath.append(cp_entry) + device_classpath.append(cp_entry) - java_full_classpath = sorted(set(java_full_classpath)) + device_classpath = sorted(set(device_classpath)) deps_info['static_library_proguard_mapping_output_paths'] = sorted([ d['proguard_mapping_path'] @@ -1606,7 +1630,7 @@ def main(argv): 'junit_binary'): deps_info['jni']['all_source'] = sorted(set(all_java_sources)) - system_jars = [c['jar_path'] for c in system_library_deps] + system_jars = [c['unprocessed_jar_path'] for c in system_library_deps] system_interface_jars = [c['interface_jar_path'] for c in system_library_deps] if system_library_deps: config['android'] = {} @@ -1635,7 +1659,7 @@ def main(argv): deps_proguard_enabled = [] deps_proguard_disabled = [] for d in deps.Direct('android_app_bundle_module'): - if not d['java_runtime_classpath']: + if not d['device_classpath']: # We don't care about modules that have no Java code for proguarding. continue if d['proguard_enabled']: @@ -1685,9 +1709,10 @@ def main(argv): # Add all tested classes to the test's classpath to ensure that the test's # java code is a superset of the tested apk's java code - java_full_classpath.extend( - p for p in tested_apk_config['java_runtime_classpath'] - if p not in java_full_classpath) + device_classpath_extended = list(device_classpath) + device_classpath_extended.extend( + p for p in tested_apk_config['device_classpath'] + if p not in device_classpath) # Include in the classpath classes that are added directly to the apk under # test (those that are not a part of a java_library). javac_classpath.append(tested_apk_config['unprocessed_jar_path']) @@ -1706,13 +1731,13 @@ def main(argv): p for p in tested_apk_config['javac_full_classpath'] if p not in javac_full_classpath) - # Exclude dex files from the test apk that exist within the apk under test. - # TODO(agrieve): When proguard is enabled, this filtering logic happens - # within proguard.py. Move the logic for the proguard case to here. 
+ # Exclude .jar files from the test apk that exist within the apk under test. tested_apk_library_deps = tested_apk_deps.All('java_library') - tested_apk_deps_dex_files = [c['dex_path'] for c in tested_apk_library_deps] - all_dex_files = [ - p for p in all_dex_files if not p in tested_apk_deps_dex_files + tested_apk_dex_files = {c['dex_path'] for c in tested_apk_library_deps} + all_dex_files = [p for p in all_dex_files if p not in tested_apk_dex_files] + tested_apk_jar_files = set(tested_apk_config['device_classpath']) + device_classpath = [ + p for p in device_classpath if p not in tested_apk_jar_files ] if options.type in ('android_apk', 'dist_aar', 'dist_jar', @@ -1722,20 +1747,27 @@ def main(argv): set(extra_proguard_classpath_jars)) # Dependencies for the final dex file of an apk. - if is_apk_or_module_target or options.final_dex_path: + if (is_apk_or_module_target or options.final_dex_path + or options.type == 'dist_jar'): config['final_dex'] = {} dex_config = config['final_dex'] dex_config['path'] = options.final_dex_path - if is_apk_or_module_target: + if is_apk_or_module_target or options.type == 'dist_jar': dex_config['all_dex_files'] = all_dex_files if is_java_target: config['javac']['classpath'] = javac_classpath config['javac']['interface_classpath'] = javac_interface_classpath - # Direct() will be of type 'java_annotation_processor'. + # Direct() will be of type 'java_annotation_processor', and so not included + # in All('java_library'). + # Annotation processors run as part of the build, so need host_jar_path. config['javac']['processor_classpath'] = [ - c['jar_path'] for c in processor_deps.Direct() if c.get('jar_path')] + [ - c['jar_path'] for c in processor_deps.All('java_library')] + c['host_jar_path'] for c in processor_deps.Direct() + if c.get('host_jar_path') + ] + config['javac']['processor_classpath'] += [ + c['host_jar_path'] for c in processor_deps.All('java_library') + ] config['javac']['processor_classes'] = [ c['main_class'] for c in processor_deps.Direct()] deps_info['javac_full_classpath'] = javac_full_classpath @@ -1746,16 +1778,18 @@ def main(argv): javac_full_classpath = set() for d in deps.Direct('android_app_bundle_module'): javac_full_classpath.update(p for p in d['javac_full_classpath']) - javac_full_classpath.add(d['jar_path']) + javac_full_classpath.add(d['unprocessed_jar_path']) deps_info['javac_full_classpath'] = sorted(javac_full_classpath) - if options.type in ('android_apk', 'dist_jar', 'java_binary', 'junit_binary', - 'android_app_bundle_module', 'android_app_bundle'): - deps_info['java_runtime_classpath'] = java_full_classpath + if options.type in ('android_apk', 'dist_jar', 'android_app_bundle_module', + 'android_app_bundle'): + deps_info['device_classpath'] = device_classpath + if options.tested_apk_config: + deps_info['java_runtime_classpath_extended'] = (device_classpath_extended) if options.type in ('android_apk', 'dist_jar'): all_interface_jars = [] - if options.jar_path: + if options.interface_jar_path: all_interface_jars.append(options.interface_jar_path) all_interface_jars.extend(c['interface_jar_path'] for c in all_library_deps) @@ -1847,7 +1881,7 @@ def main(argv): # are not duplicated on the feature module. 
if base_module_build_config: base = base_module_build_config - RemoveObjDups(config, base, 'deps_info', 'java_runtime_classpath') + RemoveObjDups(config, base, 'deps_info', 'device_classpath') RemoveObjDups(config, base, 'deps_info', 'javac_full_classpath') RemoveObjDups(config, base, 'deps_info', 'javac_full_interface_classpath') RemoveObjDups(config, base, 'deps_info', 'jetified_full_jar_classpath') @@ -1855,11 +1889,29 @@ def main(argv): RemoveObjDups(config, base, 'final_dex', 'all_dex_files') RemoveObjDups(config, base, 'extra_android_manifests') + if is_java_target: + jar_to_target = {} + _AddJarMapping(jar_to_target, [deps_info]) + _AddJarMapping(jar_to_target, deps.all_deps_configs) + if base_module_build_config: + _AddJarMapping(jar_to_target, [base_module_build_config['deps_info']]) + if options.tested_apk_config: + _AddJarMapping(jar_to_target, [tested_apk_config]) + for jar, target in itertools.izip( + tested_apk_config['javac_full_classpath'], + tested_apk_config['javac_full_classpath_targets']): + jar_to_target[jar] = target + + # Used by bytecode_processor to give better error message when missing + # deps are found. + config['deps_info']['javac_full_classpath_targets'] = [ + jar_to_target[x] for x in deps_info['javac_full_classpath'] + ] + build_utils.WriteJson(config, options.build_config, only_if_changed=True) if options.depfile: - build_utils.WriteDepfile(options.depfile, options.build_config, all_inputs, - add_pydeps=False) # pydeps listed in GN. + build_utils.WriteDepfile(options.depfile, options.build_config, all_inputs) if __name__ == '__main__': diff --git a/chromium/build/android/gyp/write_native_libraries_java.py b/chromium/build/android/gyp/write_native_libraries_java.py index 65688b9fd05..cb0c5d398ec 100755 --- a/chromium/build/android/gyp/write_native_libraries_java.py +++ b/chromium/build/android/gyp/write_native_libraries_java.py @@ -26,6 +26,7 @@ def _FormatLibraryName(library_name): def main(): parser = argparse.ArgumentParser() + build_utils.AddDepfileOption(parser) parser.add_argument('--final', action='store_true', help='Use final fields.') parser.add_argument( '--enable-chromium-linker', @@ -97,6 +98,12 @@ def main(): zip_path='org/chromium/base/library_loader/NativeLibraries.java', data=NATIVE_LIBRARIES_TEMPLATE.format(**format_dict)) + if options.depfile: + assert options.native_libraries_list + build_utils.WriteDepfile(options.depfile, + options.output, + inputs=[options.native_libraries_list]) + if __name__ == '__main__': sys.exit(main()) diff --git a/chromium/build/android/gyp/zip.py b/chromium/build/android/gyp/zip.py index b9503960fa3..ed8f61a9c9e 100755 --- a/chromium/build/android/gyp/zip.py +++ b/chromium/build/android/gyp/zip.py @@ -63,8 +63,9 @@ def main(args): # Depfile used only by dist_jar(). if options.depfile: - build_utils.WriteDepfile( - options.depfile, options.output, inputs=depfile_deps, add_pydeps=False) + build_utils.WriteDepfile(options.depfile, + options.output, + inputs=depfile_deps) if __name__ == '__main__': diff --git a/chromium/build/android/incremental_install/BUILD.gn b/chromium/build/android/incremental_install/BUILD.gn index 9cef85696da..8d26e9622b0 100644 --- a/chromium/build/android/incremental_install/BUILD.gn +++ b/chromium/build/android/incremental_install/BUILD.gn @@ -5,8 +5,6 @@ import("//build/config/android/rules.gni") android_library("bootstrap_java") { - # Use .dex rather than .dex.jar to be usable by package_apk(). 
- dex_path = "$target_out_dir/bootstrap.dex" sources = [ "java/org/chromium/incrementalinstall/BootstrapApplication.java", "java/org/chromium/incrementalinstall/BootstrapInstrumentation.java", @@ -18,3 +16,8 @@ android_library("bootstrap_java") { jacoco_never_instrument = true no_build_hooks = true } + +dist_dex("apk_dex") { + output = "$target_out_dir/apk.dex" + deps = [ ":bootstrap_java" ] +} diff --git a/chromium/build/android/lint/suppressions.xml b/chromium/build/android/lint/suppressions.xml index 638b6c82c29..ac9f8e7aa27 100644 --- a/chromium/build/android/lint/suppressions.xml +++ b/chromium/build/android/lint/suppressions.xml @@ -1,4 +1,4 @@ -<?xml version="1.0" encoding="utf-8"?> +<?xml version="1.0" encoding="utf-8" ?> <lint> <!-- STOP! It looks like you want to suppress some lint errors: @@ -6,19 +6,12 @@ STOP! It looks like you want to suppress some lint errors: Ask the author for a fix and/or revert the patch. - It is preferred to add suppressions in the code instead of sweeping it under the rug here. See: - http://developer.android.com/tools/debugging/improving-w-lint.html Still reading? -- You can edit this file manually to suppress an issue - globally if it is not applicable to the project. When inserting new tags, - keep this file in sorted order. -- You can also automatically add issues found so for in the - build process by running: - - build/android/lint/suppress.py - - which will generate this file (Comments are not preserved). +- Edit this file manually to suppress an issue. Please make the suppression as + local as possible, i.e. by warning message or by file. +- When adding new issues, please keep the issue ids in sorted order. --> <issue id="AcceptsUserCertificates"> <!-- See https://crbug.com/827265 and comment in the file for context. --> @@ -26,10 +19,7 @@ Still reading? <ignore regexp="android_webview/tools/system_webview_shell/apk/res/xml/network_security_config.xml"/> <ignore regexp="test"/> </issue> - <!-- AllowBackup defaults to true, and causes a lint warning if not explicitly set. --> - <issue id="AllowBackup"> - <ignore path="AndroidManifest.xml"/> - </issue> + <issue id="AllowBackup" severity="ignore"/> <!-- TODO(crbug.com/804427): Remove this suppression or add rationale. --> <issue id="AppCompatResource" severity="ignore"/> <!-- We use asserts in Chromium. See https://chromium.googlesource.com/chromium/src/+/master/styleguide/java/java.md#Asserts --> @@ -72,7 +62,6 @@ Still reading? <issue id="DefaultLocale"> <ignore regexp="clank"/> <ignore regexp="com/android/tv"/> - <ignore regexp="org/chromium/chrome/browser/payments/PaymentRequestMetricsTest.class"/> <ignore regexp="third_party/cacheinvalidation/src/java/com/google/ipc/invalidation/external/client/contrib/AndroidListenerState.java"/> <!-- TODO(crbug.com/1081240): Fix --> <ignore regexp="chrome/android/feed/core/java/src/org/chromium/chrome/browser/feed/FeedSurfaceMediator.java"/> @@ -101,38 +90,24 @@ Still reading? <!-- TODO(crbug.com/804438): Cannot update until android.media.ExifInterface supports file descriptors --> <ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/photo_picker/BitmapUtils.java"/> </issue> - <issue id="ExportedContentProvider"> - <ignore path="AndroidManifest.xml"/> - </issue> - <issue id="ExportedService" severity="Error"> - <ignore regexp="AndroidManifest.xml"/> - </issue> + <issue id="ExportedContentProvider" severity="ignore"/> + <issue id="ExportedService" severity="ignore"/> <!-- TODO(crbug.com/635567): Fix this properly. 
--> - <issue id="GoogleAppIndexingUrlError" severity="Error"> - <ignore regexp="AndroidManifest.xml"/> - </issue> + <issue id="GoogleAppIndexingUrlError" severity="ignore"/> <!-- TODO(crbug.com/635567): Fix this properly. --> - <issue id="GoogleAppIndexingWarning" severity="Error"> - <ignore regexp="AndroidManifest.xml"/> - </issue> + <issue id="GoogleAppIndexingWarning" severity="ignore"/> <issue id="HandlerLeak"> <ignore regexp="android_webview/glue/java/src/com/android/webview/chromium/WebViewContentsClientAdapter.java"/> <ignore regexp="chromecast/internal"/> <ignore regexp="remoting/android/java/src/org/chromium/chromoting/TapGestureDetector.java"/> </issue> - <issue id="HardcodedDebugMode" severity="Fatal"> - <ignore path="AndroidManifest.xml"/> - </issue> + <issue id="HardcodedDebugMode" severity="ignore"/> <issue id="HardcodedText" severity="Error"> <ignore regexp="chromecast/internal"/> <ignore regexp="remoting/android/host/res/layout/main.xml"/> </issue> <issue id="IconColors" severity="Error"> - <ignore regexp="tools/android/audio_focus_grabber/java/res/drawable-hdpi/notification_icon.png"/> - <ignore regexp="tools/android/audio_focus_grabber/java/res/drawable-mdpi/notification_icon.png"/> - <ignore regexp="tools/android/audio_focus_grabber/java/res/drawable-xhdpi/notification_icon.png"/> - <ignore regexp="tools/android/audio_focus_grabber/java/res/drawable-xxhdpi/notification_icon.png"/> - <ignore regexp="tools/android/audio_focus_grabber/java/res/drawable-xxxhdpi/notification_icon.png"/> + <ignore regexp="tools/android/audio_focus_grabber/java/res/drawable-.*/notification_icon.png"/> </issue> <issue id="IconDensities"> <!-- This is intentional to reduce APK size. See: http://crrev/c/1352161 --> @@ -207,6 +182,7 @@ Still reading? <ignore regexp="android_webview/tools/system_webview_shell/apk/res/xml/network_security_config.xml"/> <ignore regexp="test"/> </issue> + <issue id="Instantiatable" severity="ignore"/> <issue id="InconsistentArrays" severity="Error"> <ignore regexp="android_webview/locale_paks.resources.zip/values/locale-paks.xml"/> <ignore regexp="chrome/android/chrome_locale_paks.resources.zip/values/locale-paks.xml"/> @@ -236,6 +212,7 @@ Still reading? <ignore regexp="chrome/android/feed/core/java/res/layout/feed_more_button.xml"/> </issue> <issue id="MissingApplicationIcon" severity="ignore"/> + <issue id="MissingClass" severity="ignore"/> <issue id="MissingDefaultResource"> <!-- Only used by ToolbarControlContainer guarded by tablet form-factor. --> <ignore regexp="toolbar_background.9.png"/> @@ -247,12 +224,7 @@ Still reading? <issue id="MissingPermission" severity="ignore"/> <!-- TODO(yolandyan) remove this once all tests are converted to junit4 --> <issue id="MissingPrefix" severity="ignore"/> - <issue id="MissingQuantity"> - <ignore regexp="android_chrome_strings.xml"/> - <ignore regexp="android_chrome_tab_ui_strings.xml"/> - <ignore regexp="components/browser_ui/strings/android/browser_ui_strings_grd"/> - <ignore regexp="clank/third_party/chime/chime_systemtray_strings_grd.resources.zip"/> - </issue> + <issue id="MissingQuantity" severity="ignore"/> <issue id="MissingRegistered" severity="ignore"/> <issue id="MissingSuperCall" severity="Error"> <ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/widget/selection/SelectionToolbar.java"/> @@ -263,9 +235,7 @@ Still reading? 
<ignore regexp="restriction_values.xml.*"/> <ignore regexp="remoting/resources/strings_java.resources.zip"/> </issue> - <issue id="MissingVersion"> - <ignore path="AndroidManifest.xml"/> - </issue> + <issue id="MissingVersion" severity="ignore"/> <issue id="NewApi"> <!-- Do not add new suppressions without rationale. --> <!-- 2: We support these via desugar. --> @@ -275,21 +245,17 @@ Still reading? <ignore regexp="Field requires API level .*`android.app.TaskInfo"/> <!-- 1: This is for testonly target android_support_chromium_java in android_sdk. --> <ignore regexp="third_party/android_sdk/public/extras/chromium/support/src/org/chromium/android/support/PackageManagerWrapper.java"/> - <!-- 1: TODO(crbug.com/1081242): Fix --> - <ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/multiwindow/MultiWindowUtils.java"/> - <!-- 1: TODO(crbug.com/1081243): Fix --> - <ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/photo_picker/PickerVideoPlayer.java"/> - <!-- 1: TODO(crbug.com/1081280): Fix --> - <ignore regexp="chrome/android/features/tab_ui/javatests/src/org/chromium/chrome/browser/tasks/tab_management/ConditionalTabStripTest.java"/> <!-- 1: TODO(crbug.com/1082222): Fix --> <ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/omnibox/suggestions/header/HeaderView.java"/> + <!-- 1: TODO(crbug.com/1085410): Fix --> + <ignore regexp="components/content_capture/android/java/src/org/chromium/components/content_capture"/> + <!-- 1: TODO(crbug.com/1085487): Fix --> + <ignore regexp="chrome/android/javatests/src/org/chromium/chrome/browser/directactions/DirectActionTestRule.java"/> <!-- Endnote: Please specify number of suppressions when adding more --> </issue> <!-- This warning just adds a lot of false positives. --> <issue id="ObsoleteSdkInt" severity="ignore"/> - <issue id="OldTargetApi"> - <ignore path="AndroidManifest.xml"/> - </issue> + <issue id="OldTargetApi" severity="ignore"/> <issue id="OnClick"> <!-- False positive, see: http://issuetracker.google.com/148523770 for similar issue. --> <ignore regexp="tools/android/audio_focus_grabber/java/res/layout/audio_focus_grabber_activity.xml"/> @@ -322,6 +288,10 @@ Still reading? <ignore regexp="chrome/android/java/res/layout/sheet_tab_toolbar.xml"/> </issue> <issue id="RtlSymmetry" severity="ignore"/> + <issue id="SetTextI18n"> + <!-- Tests can use setText without translating. --> + <ignore regexp="/javatests/"/> + </issue> <issue id="SetJavaScriptEnabled" severity="ignore"/> <issue id="SignatureOrSystemPermissions" severity="ignore"/> <issue id="SpUsage" severity="Error"> @@ -336,23 +306,14 @@ Still reading? <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-cs/android_chrome_strings.xml"/> <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-fr/android_chrome_strings.xml"/> <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-pl/android_chrome_strings.xml"/> + <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-ur/android_chrome_strings.xml"/> <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values/android_chrome_strings.xml"/> + <!-- This string has a % in it. --> + <ignore regexp="data_reduction_promo_infobar_title"/> </issue> - <!-- Most .xtb files in this group have a % that is not part of a formatted string. 
https://crbug.com/941164 --> - <issue id="StringFormatInvalid" severity="Error"> - <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-cs/android_chrome_strings.xml"/> - <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-da/android_chrome_strings.xml"/> - <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-et/android_chrome_strings.xml"/> - <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-is/android_chrome_strings.xml"/> - <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-in/android_chrome_strings.xml"/> - <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-pl/android_chrome_strings.xml"/> - <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-pt-rBR/android_chrome_strings.xml"/> - <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-sq/android_chrome_strings.xml"/> - <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-sv/android_chrome_strings.xml"/> - <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-tl/android_chrome_strings.xml"/> - <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-uz/android_chrome_strings.xml"/> - <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-zu/android_chrome_strings.xml"/> - </issue> + <!-- Many .xtb files have a % that is not part of a formatted string. https://crbug.com/941164 --> + <issue id="StringFormatInvalid" severity="ignore"/> + <issue id="StringFormatMatches" severity="ignore"/> <!-- We have many C++ enums that we don't care about in java --> <issue id="SwitchIntDef" severity="ignore"/> <issue id="TextFields" severity="Error"> @@ -367,63 +328,27 @@ Still reading? 
<issue id="UniqueConstants" severity="ignore"/> <issue id="UnusedAttribute" severity="ignore"/> <issue id="UnusedIds" severity="ignore"/> - <issue id="UnusedQuantity" severity="Error"> - <!-- This is needed for suppressing warnings on upstream and downstream build bots --> - <ignore regexp="chrome/android/features/tab_ui/java_strings_grd.resources.zip/values-cs/android_chrome_tab_ui_strings.xml"/> - <ignore regexp="chrome/android/features/tab_ui/java_strings_grd.resources.zip/values-in/android_chrome_tab_ui_strings.xml"/> - <ignore regexp="chrome/android/features/tab_ui/java_strings_grd.resources.zip/values-ja/android_chrome_tab_ui_strings.xml"/> - <ignore regexp="chrome/android/features/tab_ui/java_strings_grd.resources.zip/values-km/android_chrome_tab_ui_strings.xml"/> - <ignore regexp="chrome/android/features/tab_ui/java_strings_grd.resources.zip/values-ko/android_chrome_tab_ui_strings.xml"/> - <ignore regexp="chrome/android/features/tab_ui/java_strings_grd.resources.zip/values-lo/android_chrome_tab_ui_strings.xml"/> - <ignore regexp="chrome/android/features/tab_ui/java_strings_grd.resources.zip/values-lt/android_chrome_tab_ui_strings.xml"/> - <ignore regexp="chrome/android/features/tab_ui/java_strings_grd.resources.zip/values-ms/android_chrome_tab_ui_strings.xml"/> - <ignore regexp="chrome/android/features/tab_ui/java_strings_grd.resources.zip/values-my/android_chrome_tab_ui_strings.xml"/> - <ignore regexp="chrome/android/features/tab_ui/java_strings_grd.resources.zip/values-sk/android_chrome_tab_ui_strings.xml"/> - <ignore regexp="chrome/android/features/tab_ui/java_strings_grd.resources.zip/values-th/android_chrome_tab_ui_strings.xml"/> - <ignore regexp="chrome/android/features/tab_ui/java_strings_grd.resources.zip/values-vi/android_chrome_tab_ui_strings.xml"/> - <ignore regexp="chrome/android/features/tab_ui/java_strings_grd.resources.zip/values-zh-rCN/android_chrome_tab_ui_strings.xml"/> - <ignore regexp="chrome/android/features/tab_ui/java_strings_grd.resources.zip/values-zh-rHK/android_chrome_tab_ui_strings.xml"/> - <ignore regexp="chrome/android/features/tab_ui/java_strings_grd.resources.zip/values-zh-rTW/android_chrome_tab_ui_strings.xml"/> - <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-cs/android_chrome_strings.xml"/> - <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-in/android_chrome_strings.xml"/> - <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-ja/android_chrome_strings.xml"/> - <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-km/android_chrome_strings.xml"/> - <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-ko/android_chrome_strings.xml"/> - <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-lo/android_chrome_strings.xml"/> - <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-lt/android_chrome_strings.xml"/> - <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-ms/android_chrome_strings.xml"/> - <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-my/android_chrome_strings.xml"/> - <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-sk/android_chrome_strings.xml"/> - <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-th/android_chrome_strings.xml"/> - <ignore 
regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-vi/android_chrome_strings.xml"/> - <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-zh-rCN/android_chrome_strings.xml"/> - <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-zh-rHK/android_chrome_strings.xml"/> - <ignore regexp="chrome/browser/ui/android/strings/ui_strings_grd.resources.zip/values-zh-rTW/android_chrome_strings.xml"/> - <ignore regexp="clank/third_party/chime/chime_systemtray_strings_grd.resources.zip"/> - <ignore regexp="components/browser_ui/strings/android/browser_ui_strings_grd"/> - </issue> + <issue id="UnusedQuantity" severity="ignore"/> <issue id="UnusedResources"> <!-- Do not add new suppressions without rationale. --> - <!-- 3 raw resources are accessed by URL in various places --> - <ignore regexp="gen/remoting/android/.*/res/raw/credits.html"/> - <ignore regexp="gen/remoting/android/.*/res/raw/credits_css.css"/> - <ignore regexp="gen/remoting/android/.*/res/raw/credits_js.js"/> - <!-- 1 all resources in remoting internal --> + <!-- 1: raw resources are accessed by URL in various places --> + <ignore regexp="gen/remoting/android/.*/res/raw/credits.*"/> + <!-- 1: all resources in remoting internal --> <ignore regexp="remoting/android/internal"/> - <!-- 1 string test only, used in CronetSmokeTestCase dynamically --> + <!-- 1: string test only, used in CronetSmokeTestCase dynamically --> <ignore regexp="R.string.TestSupportImplClass"/> - <!-- 1 resource used by android webview glue layer, could be refactored --> + <!-- 1: resource used by android webview glue layer, could be refactored --> <ignore regexp="R.string.private_browsing_warning"/> - <!-- 4 The WAM server currently has 2 codes paths for minting a WebAPK, and + <!-- 4: The WAM server currently has 2 codes paths for minting a WebAPK, and it needs these "unused" resources. TODO(crbug.com/1001115): Remove suppression once 2 code paths are merged --> <ignore regexp="The resource `R.mipmap.ic_launcher_background` appears to be unused"/> <ignore regexp="The resource `R.mipmap.ic_launcher_foreground` appears to be unused"/> <ignore regexp="The resource `R.mipmap.maskable_splash_icon_xxhdpi` appears to be unused"/> <ignore regexp="The resource `R.mipmap.maskable_splash_icon_xxxhdpi` appears to be unused"/> - <!-- 1 Module titles may only be used by the Play Store. --> + <!-- 1: Module titles may only be used by the Play Store. --> <ignore regexp="The resource `R.string.*_module_title` appears to be unused"/> - <!-- 2 resource sets used by clank widgets for each channel --> + <!-- 2: resource sets used by clank widgets for each channel --> <ignore regexp="The resource `R.string.bookmark_widget_title.*` appears to be unused"/> <ignore regexp="The resource `R.string.search_widget_title.*` appears to be unused"/> <!-- crbug.com/1004570 remove this line and the following seven lines after the bug resolved --> @@ -502,6 +427,8 @@ Still reading? <ignore regexp="The resource `R.plurals.public_notification_text` appears to be unused"/> <ignore regexp="The resource `R.mipmap.app_shortcut_icon` appears to be unused"/> <ignore regexp="The resource `R.mipmap.app_single_page_icon` appears to be unused"/> + <!-- 1: Some strings in components_strings_grd are not used in other targets. 
--> + <ignore regexp="webview_.*__lint.*components_strings_grd"/> <!-- Endnote: Please specify number of suppressions when adding more --> </issue> <issue id="UsableSpace"> @@ -525,9 +452,7 @@ Still reading? <ignore regexp="chromecast/internal"/> <ignore regexp="tools/android/kerberos/SpnegoAuthenticator/res/layout/activity_account_authenticator.xml"/> </issue> - <issue id="UsesMinSdkAttributes" severity="Error"> - <ignore regexp="AndroidManifest.xml"/> - </issue> + <issue id="UsesMinSdkAttributes" severity="ignore"/> <issue id="ValidFragment" severity="Error"> <ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/media/router/BaseMediaRouteDialogManager.java"/> <ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/media/router/MediaRouteChooserDialogManager.java"/> @@ -550,11 +475,13 @@ Still reading? <issue id="WebViewApiAvailability" severity="ignore"/> <issue id="WrongCall" severity="ignore"/> <issue id="WrongConstant"> - <ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/SSLClientCertificateRequest.java"/> <ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/bookmarks/BookmarkItemsAdapter.java"/> <ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/instantapps/InstantAppsHandler.java"/> <ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/widget/prefeditor/EditorDialog.java"/> + <ignore regexp="components/browser_ui/client_certificate/android/java/src/org/chromium/components/browser_ui/client_certificate/SSLClientCertificateRequest.java"/> <ignore regexp="third_party/android_data_chart/java/src/org/chromium/third_party/android/datausagechart/ChartDataUsageView.java"/> + <!-- 1: TODO(crbug.com/1085411): Fix --> + <ignore regexp="media/base/android/java/src/org/chromium/media/MediaCodecEncoder.java"/> <!-- Discussed in crbug.com/1069204, ignoring this class of errors since these are Q+ constants. 
--> <ignore regexp="Must be one of: LineBreaker.BREAK_STRATEGY_SIMPLE, LineBreaker.BREAK_STRATEGY_HIGH_QUALITY, LineBreaker.BREAK_STRATEGY_BALANCED"/> </issue> diff --git a/chromium/build/android/list_class_verification_failures_test.py b/chromium/build/android/list_class_verification_failures_test.py index a3da0fd6d7e..4248064c9da 100644 --- a/chromium/build/android/list_class_verification_failures_test.py +++ b/chromium/build/android/list_class_verification_failures_test.py @@ -6,16 +6,13 @@ import unittest import list_class_verification_failures as list_verification -from pylib.constants import host_paths - import devil_chromium # pylint: disable=unused-import from devil.android import device_errors from devil.android import device_utils from devil.android.ndk import abis from devil.android.sdk import version_codes -with host_paths.SysPath(host_paths.PYMOCK_PATH): - import mock # pylint: disable=import-error +import mock # pylint: disable=import-error def _CreateOdexLine(java_class_name, type_idx, verification_status): @@ -40,7 +37,9 @@ class _DetermineDeviceToUseTest(unittest.TestCase): return_value=fake_attached_devices) result = list_verification.DetermineDeviceToUse(user_specified_devices) self.assertEqual(result, fake_attached_devices[0]) + # pylint: disable=no-member device_utils.DeviceUtils.HealthyDevices.assert_called_with(device_arg=None) + # pylint: enable=no-member def testDetermineDeviceToUse_emptyListWithNoAttachedDevices(self): user_specified_devices = [] @@ -48,7 +47,9 @@ class _DetermineDeviceToUseTest(unittest.TestCase): side_effect=device_errors.NoDevicesError()) with self.assertRaises(device_errors.NoDevicesError) as _: list_verification.DetermineDeviceToUse(user_specified_devices) + # pylint: disable=no-member device_utils.DeviceUtils.HealthyDevices.assert_called_with(device_arg=None) + # pylint: enable=no-member def testDetermineDeviceToUse_oneElementListWithOneAttachedDevice(self): user_specified_devices = ['123'] @@ -57,8 +58,10 @@ class _DetermineDeviceToUseTest(unittest.TestCase): return_value=fake_attached_devices) result = list_verification.DetermineDeviceToUse(user_specified_devices) self.assertEqual(result, fake_attached_devices[0]) + # pylint: disable=no-member device_utils.DeviceUtils.HealthyDevices.assert_called_with( device_arg=user_specified_devices) + # pylint: enable=no-member class _ListClassVerificationFailuresTest(unittest.TestCase): diff --git a/chromium/build/android/pylib/base/mock_environment.py b/chromium/build/android/pylib/base/mock_environment.py index 9ebb083a086..5bdefd0a0d9 100644 --- a/chromium/build/android/pylib/base/mock_environment.py +++ b/chromium/build/android/pylib/base/mock_environment.py @@ -3,10 +3,8 @@ # found in the LICENSE file. from pylib.base import environment -from pylib.constants import host_paths -with host_paths.SysPath(host_paths.PYMOCK_PATH): - import mock # pylint: disable=import-error +import mock # pylint: disable=import-error MockEnvironment = mock.MagicMock(environment.Environment) diff --git a/chromium/build/android/pylib/base/mock_test_instance.py b/chromium/build/android/pylib/base/mock_test_instance.py index 18def019903..8ef723bf050 100644 --- a/chromium/build/android/pylib/base/mock_test_instance.py +++ b/chromium/build/android/pylib/base/mock_test_instance.py @@ -3,10 +3,8 @@ # found in the LICENSE file. 
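The recurring change across these test modules swaps the vendored third_party/pymock import shim (pulled in via host_paths.SysPath) for a plain `import mock`, at the cost of explicit pylint no-member guards around assertions on patched members. A minimal sketch of the resulting pattern, mirroring the DeviceUtils usage in the tests above (the test class itself is hypothetical):

import unittest

import mock  # pylint: disable=import-error

from devil.android import device_utils


class ExampleDeviceTest(unittest.TestCase):
  def testHealthyDevices(self):
    with mock.patch.object(device_utils.DeviceUtils, 'HealthyDevices',
                           return_value=['fake-device']):
      devices = device_utils.DeviceUtils.HealthyDevices(device_arg=None)
      self.assertEqual(devices, ['fake-device'])
      # Assertions on patched members trip pylint's no-member check, hence
      # the explicit disable/enable pairs added in the hunks above.
      # pylint: disable=no-member
      device_utils.DeviceUtils.HealthyDevices.assert_called_with(
          device_arg=None)
      # pylint: enable=no-member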
from pylib.base import test_instance -from pylib.constants import host_paths -with host_paths.SysPath(host_paths.PYMOCK_PATH): - import mock # pylint: disable=import-error +import mock # pylint: disable=import-error MockTestInstance = mock.MagicMock(test_instance.TestInstance) diff --git a/chromium/build/android/pylib/constants/host_paths.py b/chromium/build/android/pylib/constants/host_paths.py index b249d3c2919..e00e0e79eb8 100644 --- a/chromium/build/android/pylib/constants/host_paths.py +++ b/chromium/build/android/pylib/constants/host_paths.py @@ -20,10 +20,9 @@ BUILD_COMMON_PATH = os.path.join( ANDROID_PLATFORM_DEVELOPMENT_SCRIPTS_PATH = os.path.join( DIR_SOURCE_ROOT, 'third_party', 'android_platform', 'development', 'scripts') +BUILD_PATH = os.path.join(DIR_SOURCE_ROOT, 'build') DEVIL_PATH = os.path.join( DIR_SOURCE_ROOT, 'third_party', 'catapult', 'devil') -PYMOCK_PATH = os.path.join( - DIR_SOURCE_ROOT, 'third_party', 'pymock') TRACING_PATH = os.path.join( DIR_SOURCE_ROOT, 'third_party', 'catapult', 'tracing') diff --git a/chromium/build/android/pylib/device/commands/BUILD.gn b/chromium/build/android/pylib/device/commands/BUILD.gn index a3ee6462706..13b69f618cf 100644 --- a/chromium/build/android/pylib/device/commands/BUILD.gn +++ b/chromium/build/android/pylib/device/commands/BUILD.gn @@ -8,10 +8,13 @@ group("commands") { data_deps = [ ":chromium_commands_java" ] } -android_library("chromium_commands_java") { +android_library("unzip_java") { jacoco_never_instrument = true sources = [ "java/src/org/chromium/android/commands/unzip/Unzip.java" ] - dex_path = "$root_build_dir/lib.java/chromium_commands.dex.jar" - deps = [ "//base:base_java" ] - data = [ dex_path ] +} + +dist_dex("chromium_commands_java") { + deps = [ ":unzip_java" ] + output = "$root_build_dir/lib.java/chromium_commands.dex.jar" + data = [ output ] } diff --git a/chromium/build/android/pylib/gtest/filter/unit_tests_disabled b/chromium/build/android/pylib/gtest/filter/unit_tests_disabled index 706e1abcf57..97811c83a4a 100644 --- a/chromium/build/android/pylib/gtest/filter/unit_tests_disabled +++ b/chromium/build/android/pylib/gtest/filter/unit_tests_disabled @@ -19,9 +19,6 @@ AutofillTableTest.UpdateAutofillProfile AutofillProfileTest.* CreditCardTest.SetInfoExpirationMonth -# crbug.com/139398 -DownloadItemModelTest.InterruptTooltip - # Tests crashing in the APK # l10n_util.cc(655)] Check failed: std::string::npos != pos DownloadItemModelTest.InterruptStatus diff --git a/chromium/build/android/pylib/instrumentation/instrumentation_parser.py b/chromium/build/android/pylib/instrumentation/instrumentation_parser.py index 8605178924f..d38f6a5551c 100644 --- a/chromium/build/android/pylib/instrumentation/instrumentation_parser.py +++ b/chromium/build/android/pylib/instrumentation/instrumentation_parser.py @@ -20,6 +20,8 @@ STATUS_CODE_SKIP = -3 # http://junit.org/junit4/javadoc/4.12/org/junit/AssumptionViolatedException.html STATUS_CODE_ASSUMPTION_FAILURE = -4 +STATUS_CODE_TEST_DURATION = 1337 + # http://developer.android.com/reference/android/app/Activity.html RESULT_CODE_OK = -1 RESULT_CODE_CANCELED = 0 diff --git a/chromium/build/android/pylib/instrumentation/instrumentation_test_instance.py b/chromium/build/android/pylib/instrumentation/instrumentation_test_instance.py index 3b61977278e..a30334c6d09 100644 --- a/chromium/build/android/pylib/instrumentation/instrumentation_test_instance.py +++ b/chromium/build/android/pylib/instrumentation/instrumentation_test_instance.py @@ -61,6 +61,23 @@ 
_PARAMETERIZED_COMMAND_LINE_FLAGS_SWITCHES = ( _NATIVE_CRASH_RE = re.compile('(process|native) crash', re.IGNORECASE) _PICKLE_FORMAT_VERSION = 12 +# The ID of the bundle value Instrumentation uses to report which test index the +# results are for in a collection of tests. Note that this index is 1-based. +_BUNDLE_CURRENT_ID = 'current' +# The ID of the bundle value Instrumentation uses to report the test class. +_BUNDLE_CLASS_ID = 'class' +# The ID of the bundle value Instrumentation uses to report the test name. +_BUNDLE_TEST_ID = 'test' +# The ID of the bundle value Instrumentation uses to report if a test was +# skipped. +_BUNDLE_SKIPPED_ID = 'test_skipped' +# The ID of the bundle value Instrumentation uses to report the crash stack, if +# the test crashed. +_BUNDLE_STACK_ID = 'stack' + +# The ID of the bundle value Chrome uses to report the test duration. +_BUNDLE_DURATION_ID = 'duration_ms' + class MissingSizeAnnotationError(test_exception.TestException): def __init__(self, class_name): @@ -103,9 +120,8 @@ def ParseAmInstrumentRawOutput(raw_output): return (code, bundle, statuses) -def GenerateTestResults( - result_code, result_bundle, statuses, start_ms, duration_ms, device_abi, - symbolizer): +def GenerateTestResults(result_code, result_bundle, statuses, duration_ms, + device_abi, symbolizer): """Generate test results from |statuses|. Args: @@ -116,7 +132,6 @@ def GenerateTestResults( - the bundle dump as a dict mapping string keys to string values Note that this is the same as the third item in the 3-tuple returned by |_ParseAmInstrumentRawOutput|. - start_ms: The start time of the test in milliseconds. duration_ms: The duration of the test in milliseconds. device_abi: The device_abi, which is needed for symbolization. symbolizer: The symbolizer used to symbolize stack. @@ -129,10 +144,21 @@ def GenerateTestResults( results = [] current_result = None + cumulative_duration = 0 for status_code, bundle in statuses: - test_class = bundle.get('class', '') - test_method = bundle.get('test', '') + if status_code == instrumentation_parser.STATUS_CODE_TEST_DURATION: + # For the first result, duration will be set below to the difference + # between the reported and actual durations to account for overhead like + # starting instrumentation. + if len(results) > 1: + current_duration = int(bundle.get(_BUNDLE_DURATION_ID, duration_ms)) + current_result.SetDuration(current_duration) + cumulative_duration += current_duration + continue + + test_class = bundle.get(_BUNDLE_CLASS_ID, '') + test_method = bundle.get(_BUNDLE_TEST_ID, '') if test_class and test_method: test_name = '%s#%s' % (test_class, test_method) else: @@ -142,10 +168,10 @@ def GenerateTestResults( if current_result: results.append(current_result) current_result = test_result.InstrumentationTestResult( - test_name, base_test_result.ResultType.UNKNOWN, start_ms, duration_ms) + test_name, base_test_result.ResultType.UNKNOWN, duration_ms) else: if status_code == instrumentation_parser.STATUS_CODE_OK: - if bundle.get('test_skipped', '').lower() in ('true', '1', 'yes'): + if bundle.get(_BUNDLE_SKIPPED_ID, '').lower() in ('true', '1', 'yes'): current_result.SetType(base_test_result.ResultType.SKIP) elif current_result.GetType() == base_test_result.ResultType.UNKNOWN: current_result.SetType(base_test_result.ResultType.PASS) @@ -159,15 +185,13 @@ def GenerateTestResults( logging.error('Unrecognized status code %d. 
Handling as an error.', status_code) current_result.SetType(base_test_result.ResultType.FAIL) - if 'stack' in bundle: + if _BUNDLE_STACK_ID in bundle: if symbolizer and device_abi: - current_result.SetLog( - '%s\n%s' % ( - bundle['stack'], - '\n'.join(symbolizer.ExtractAndResolveNativeStackTraces( - bundle['stack'], device_abi)))) + current_result.SetLog('%s\n%s' % (bundle[_BUNDLE_STACK_ID], '\n'.join( + symbolizer.ExtractAndResolveNativeStackTraces( + bundle[_BUNDLE_STACK_ID], device_abi)))) else: - current_result.SetLog(bundle['stack']) + current_result.SetLog(bundle[_BUNDLE_STACK_ID]) if current_result: if current_result.GetType() == base_test_result.ResultType.UNKNOWN: @@ -179,6 +203,9 @@ def GenerateTestResults( results.append(current_result) + if results: + results[0].SetDuration(duration_ms - cumulative_duration) + return results @@ -521,6 +548,8 @@ class InstrumentationTestInstance(test_instance.TestInstance): self._skia_gold_properties = None self._initializeSkiaGoldAttributes(args) + self._wpr_enable_record = args.wpr_enable_record + self._external_shard_index = args.test_launcher_shard_index self._total_external_shards = args.test_launcher_total_shards @@ -731,7 +760,7 @@ class InstrumentationTestInstance(test_instance.TestInstance): self._use_webview_provider = args.use_webview_provider def _initializeSkiaGoldAttributes(self, args): - self._skia_gold_properties = gold_utils.SkiaGoldProperties(args) + self._skia_gold_properties = gold_utils.AndroidSkiaGoldProperties(args) @property def additional_apks(self): @@ -865,6 +894,14 @@ class InstrumentationTestInstance(test_instance.TestInstance): def wait_for_java_debugger(self): return self._wait_for_java_debugger + @property + def wpr_record_mode(self): + return self._wpr_enable_record + + @property + def wpr_replay_mode(self): + return not self._wpr_enable_record + #override def TestType(self): return 'instrumentation' @@ -930,7 +967,8 @@ class InstrumentationTestInstance(test_instance.TestInstance): 'class': c['class'], 'method': m['method'], 'annotations': a, - 'is_junit4': c['superclass'] == 'java.lang.Object' + # TODO(https://crbug.com/1084729): Remove is_junit4. 
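To make the new duration bookkeeping concrete: each STATUS_CODE_TEST_DURATION (1337) status carries a per-test 'duration_ms' bundle value, and whatever portion of the overall instrumentation wall time is not claimed by individual tests is charged to the first result as startup overhead. A condensed, standalone sketch of that accounting (the helper name is hypothetical; result objects are assumed to expose SetDuration as above):

STATUS_CODE_TEST_DURATION = 1337
_BUNDLE_DURATION_ID = 'duration_ms'


def _apply_durations(statuses, results, duration_ms):
  """Assigns per-test durations; the first test absorbs the overhead."""
  cumulative_duration = 0
  reported = 0
  for status_code, bundle in statuses:
    if status_code != STATUS_CODE_TEST_DURATION:
      continue
    if reported > 0:
      # Every test after the first gets its own reported duration.
      current = int(bundle.get(_BUNDLE_DURATION_ID, duration_ms))
      results[reported].SetDuration(current)
      cumulative_duration += current
    reported += 1
  if results:
    # The first test is charged with whatever is left of the overall run,
    # i.e. its own time plus instrumentation startup overhead.
    results[0].SetDuration(duration_ms - cumulative_duration)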
+ 'is_junit4': True }) return inflated_tests @@ -1005,11 +1043,10 @@ class InstrumentationTestInstance(test_instance.TestInstance): return ParseAmInstrumentRawOutput(raw_output) @staticmethod - def GenerateTestResults( - result_code, result_bundle, statuses, start_ms, duration_ms, - device_abi, symbolizer): + def GenerateTestResults(result_code, result_bundle, statuses, duration_ms, + device_abi, symbolizer): return GenerateTestResults(result_code, result_bundle, statuses, - start_ms, duration_ms, device_abi, symbolizer) + duration_ms, device_abi, symbolizer) #override def TearDown(self): diff --git a/chromium/build/android/pylib/instrumentation/instrumentation_test_instance_test.py b/chromium/build/android/pylib/instrumentation/instrumentation_test_instance_test.py index d3003b8239e..fdb4114a63d 100755 --- a/chromium/build/android/pylib/instrumentation/instrumentation_test_instance_test.py +++ b/chromium/build/android/pylib/instrumentation/instrumentation_test_instance_test.py @@ -12,11 +12,9 @@ import tempfile import unittest from pylib.base import base_test_result -from pylib.constants import host_paths from pylib.instrumentation import instrumentation_test_instance -with host_paths.SysPath(host_paths.PYMOCK_PATH): - import mock # pylint: disable=import-error +import mock # pylint: disable=import-error _INSTRUMENTATION_TEST_INSTANCE_PATH = ( 'pylib.instrumentation.instrumentation_test_instance.%s') @@ -497,15 +495,17 @@ class InstrumentationTestInstanceTest(unittest.TestCase): ] expected_tests = [ - { - 'annotations': { - 'Feature': {'value': ['Foo']}, - 'MediumTest': None, + { + 'annotations': { + 'Feature': { + 'value': ['Foo'] + }, + 'MediumTest': None, + }, + 'class': 'org.chromium.test.SampleTest', + 'is_junit4': True, + 'method': 'testMethod2', }, - 'class': 'org.chromium.test.SampleTest', - 'is_junit4': False, - 'method': 'testMethod2', - }, ] o._excluded_annotations = [('SmallTest', None)] @@ -556,16 +556,18 @@ class InstrumentationTestInstanceTest(unittest.TestCase): ] expected_tests = [ - { - 'annotations': { - 'Feature': {'value': ['Foo']}, - 'SmallTest': None, - 'TestValue': '1', + { + 'annotations': { + 'Feature': { + 'value': ['Foo'] + }, + 'SmallTest': None, + 'TestValue': '1', + }, + 'class': 'org.chromium.test.SampleTest', + 'is_junit4': True, + 'method': 'testMethod1', }, - 'class': 'org.chromium.test.SampleTest', - 'is_junit4': False, - 'method': 'testMethod1', - }, ] o._annotations = [('TestValue', '1')] @@ -724,24 +726,28 @@ class InstrumentationTestInstanceTest(unittest.TestCase): ] expected_tests = [ - { - 'annotations': { - 'Feature': {'value': ['Baz']}, - 'MediumTest': None, + { + 'annotations': { + 'Feature': { + 'value': ['Baz'] + }, + 'MediumTest': None, + }, + 'class': 'org.chromium.test.SampleTest', + 'is_junit4': True, + 'method': 'testMethod2', }, - 'class': 'org.chromium.test.SampleTest', - 'is_junit4': False, - 'method': 'testMethod2', - }, - { - 'annotations': { - 'Feature': {'value': ['Bar']}, - 'SmallTest': None, + { + 'annotations': { + 'Feature': { + 'value': ['Bar'] + }, + 'SmallTest': None, + }, + 'class': 'org.chromium.test.SampleTest2', + 'is_junit4': True, + 'method': 'testMethod1', }, - 'class': 'org.chromium.test.SampleTest2', - 'is_junit4': False, - 'method': 'testMethod1', - }, ] o._annotations = [('Feature', 'Bar'), ('Feature', 'Baz')] @@ -753,7 +759,7 @@ class InstrumentationTestInstanceTest(unittest.TestCase): def testGenerateTestResults_noStatus(self): results = instrumentation_test_instance.GenerateTestResults( - None, None, [], 0, 
1000, None, None) + None, None, [], 1000, None, None) self.assertEqual([], results) def testGenerateTestResults_testPassed(self): @@ -768,7 +774,7 @@ class InstrumentationTestInstanceTest(unittest.TestCase): }), ] results = instrumentation_test_instance.GenerateTestResults( - None, None, statuses, 0, 1000, None, None) + None, None, statuses, 1000, None, None) self.assertEqual(1, len(results)) self.assertEqual(base_test_result.ResultType.PASS, results[0].GetType()) @@ -789,7 +795,7 @@ class InstrumentationTestInstanceTest(unittest.TestCase): }), ] results = instrumentation_test_instance.GenerateTestResults( - None, None, statuses, 0, 1000, None, None) + None, None, statuses, 1000, None, None) self.assertEqual(1, len(results)) self.assertEqual(base_test_result.ResultType.SKIP, results[0].GetType()) @@ -808,7 +814,7 @@ class InstrumentationTestInstanceTest(unittest.TestCase): }), ] results = instrumentation_test_instance.GenerateTestResults( - None, None, statuses, 0, 1000, None, None) + None, None, statuses, 1000, None, None) self.assertEqual(1, len(results)) self.assertEqual(base_test_result.ResultType.PASS, results[0].GetType()) @@ -824,7 +830,7 @@ class InstrumentationTestInstanceTest(unittest.TestCase): }), ] results = instrumentation_test_instance.GenerateTestResults( - None, None, statuses, 0, 1000, None, None) + None, None, statuses, 1000, None, None) self.assertEqual(1, len(results)) self.assertEqual(base_test_result.ResultType.FAIL, results[0].GetType()) @@ -842,7 +848,7 @@ class InstrumentationTestInstanceTest(unittest.TestCase): }), ] results = instrumentation_test_instance.GenerateTestResults( - None, None, statuses, 0, 1000, None, None) + None, None, statuses, 1000, None, None) self.assertEqual(1, len(results)) self.assertEqual(base_test_result.ResultType.FAIL, results[0].GetType()) self.assertEqual(stacktrace, results[0].GetLog()) @@ -859,7 +865,7 @@ class InstrumentationTestInstanceTest(unittest.TestCase): }), ] results = instrumentation_test_instance.GenerateTestResults( - None, None, statuses, 0, 1000, None, None) + None, None, statuses, 1000, None, None) self.assertEqual(1, len(results)) self.assertEqual(base_test_result.ResultType.SKIP, results[0].GetType()) diff --git a/chromium/build/android/pylib/instrumentation/test_result.py b/chromium/build/android/pylib/instrumentation/test_result.py index 24e80a8e5fb..a1c7307fce9 100644 --- a/chromium/build/android/pylib/instrumentation/test_result.py +++ b/chromium/build/android/pylib/instrumentation/test_result.py @@ -8,13 +8,12 @@ from pylib.base import base_test_result class InstrumentationTestResult(base_test_result.BaseTestResult): """Result information for a single instrumentation test.""" - def __init__(self, full_name, test_type, start_date, dur, log=''): + def __init__(self, full_name, test_type, dur, log=''): """Construct an InstrumentationTestResult object. Args: full_name: Full name of the test. test_type: Type of the test result as defined in ResultType. - start_date: Date in milliseconds when the test began running. dur: Duration of the test run in milliseconds. log: A string listing any errors. 
""" @@ -27,4 +26,7 @@ class InstrumentationTestResult(base_test_result.BaseTestResult): else: self._class_name = full_name self._test_name = full_name - self._start_date = start_date + + def SetDuration(self, duration): + """Set the test duration.""" + self._duration = duration diff --git a/chromium/build/android/pylib/local/device/local_device_instrumentation_test_run.py b/chromium/build/android/pylib/local/device/local_device_instrumentation_test_run.py index 6a64e190969..5a46e6fcb1c 100644 --- a/chromium/build/android/pylib/local/device/local_device_instrumentation_test_run.py +++ b/chromium/build/android/pylib/local/device/local_device_instrumentation_test_run.py @@ -36,17 +36,16 @@ from pylib.instrumentation import instrumentation_test_instance from pylib.local.device import local_device_environment from pylib.local.device import local_device_test_run from pylib.output import remote_output_manager +from pylib.utils import chrome_proxy_utils from pylib.utils import gold_utils from pylib.utils import instrumentation_tracing from pylib.utils import shared_preference_utils - from py_trace_event import trace_event from py_trace_event import trace_time from py_utils import contextlib_ext from py_utils import tempfile_ext import tombstones - with host_paths.SysPath( os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party'), 0): import jinja2 # pylint: disable=import-error @@ -57,6 +56,10 @@ _JINJA_TEMPLATE_DIR = os.path.join( host_paths.DIR_SOURCE_ROOT, 'build', 'android', 'pylib', 'instrumentation') _JINJA_TEMPLATE_FILENAME = 'render_test.html.jinja' +_WPR_GO_LINUX_X86_64_PATH = os.path.join(host_paths.DIR_SOURCE_ROOT, + 'third_party', 'webpagereplay', 'bin', + 'linux', 'x86_64', 'wpr') + _TAG = 'test_runner_py' TIMEOUT_ANNOTATIONS = [ @@ -88,6 +91,8 @@ _EXTRA_PACKAGE_UNDER_TEST = ('org.chromium.chrome.test.pagecontroller.rules.' FEATURE_ANNOTATION = 'Feature' RENDER_TEST_FEATURE_ANNOTATION = 'RenderTest' +WPR_ARCHIVE_FILE_PATH_ANNOTATION = 'WPRArchiveDirectory' +WPR_RECORD_REPLAY_TEST_FEATURE_ANNOTATION = 'WPRRecordReplayTest' # This needs to be kept in sync with formatting in |RenderUtils.imageName| RE_RENDER_IMAGE_NAME = re.compile( @@ -101,6 +106,8 @@ RENDER_TEST_MODEL_SDK_CONFIGS = { 'Nexus 5X': [23], } +_TEST_BATCH_MAX_GROUP_SIZE = 256 + @contextlib.contextmanager def _LogTestEndpoints(device, test_name): @@ -136,16 +143,24 @@ _CURRENT_FOCUS_CRASH_RE = re.compile( r'\s*mCurrentFocus.*Application (Error|Not Responding): (\S+)}') +def _GetTargetPackageName(test_apk): + # apk_under_test does not work for smoke tests, where it is set to an + # apk that is not listed as the targetPackage in the test apk's manifest. 
+ return test_apk.GetAllInstrumentations()[0]['android:targetPackage'] + + class LocalDeviceInstrumentationTestRun( local_device_test_run.LocalDeviceTestRun): def __init__(self, env, test_instance): super(LocalDeviceInstrumentationTestRun, self).__init__( env, test_instance) + self._chrome_proxy = None self._context_managers = collections.defaultdict(list) self._flag_changers = {} + self._render_tests_device_output_dir = None self._shared_prefs_to_restore = [] - self._skia_gold_work_dir = None self._skia_gold_session_manager = None + self._skia_gold_work_dir = None #override def TestPackage(self): @@ -153,6 +168,8 @@ class LocalDeviceInstrumentationTestRun( #override def SetUp(self): + target_package = _GetTargetPackageName(self._test_instance.test_apk) + @local_device_environment.handle_shard_failures_with( self._env.BlacklistDevice) @trace_event.traced @@ -267,18 +284,10 @@ class LocalDeviceInstrumentationTestRun( def set_debug_app(dev): # Set debug app in order to enable reading command line flags on user # builds - package_name = None - if self._test_instance.apk_under_test: - package_name = self._test_instance.apk_under_test.GetPackageName() - elif self._test_instance.test_apk: - package_name = self._test_instance.test_apk.GetPackageName() - else: - logging.error("Couldn't set debug app: no package name found") - return cmd = ['am', 'set-debug-app', '--persistent'] if self._test_instance.wait_for_java_debugger: cmd.append('-w') - cmd.append(package_name) + cmd.append(target_package) dev.RunShellCommand(cmd, check_return=True) @trace_event.traced @@ -379,13 +388,12 @@ class LocalDeviceInstrumentationTestRun( # expectations can be re-used between tests, saving a significant amount # of time. self._skia_gold_work_dir = tempfile.mkdtemp() - self._skia_gold_session_manager = gold_utils.SkiaGoldSessionManager( + self._skia_gold_session_manager = gold_utils.AndroidSkiaGoldSessionManager( self._skia_gold_work_dir, self._test_instance.skia_gold_properties) if self._test_instance.wait_for_java_debugger: - apk = self._test_instance.apk_under_test or self._test_instance.test_apk logging.warning('*' * 80) logging.warning('Waiting for debugger to attach to process: %s', - apk.GetPackageName()) + target_package) logging.warning('*' * 80) #override @@ -459,6 +467,31 @@ class LocalDeviceInstrumentationTestRun( return tests #override + def _GroupTests(self, tests): + batched_tests = dict() + other_tests = [] + for test in tests: + if 'Batch' in test['annotations']: + batch_name = test['annotations']['Batch']['value'] + if not batch_name: + batch_name = test['class'] + if not batch_name in batched_tests: + batched_tests[batch_name] = [] + batched_tests[batch_name].append(test) + else: + other_tests.append(test) + + all_tests = [] + for _, tests in batched_tests.items(): + tests.sort() # Ensure a consistent ordering across external shards. 
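A worked example of the grouping this method produces once the loop below completes, using hypothetical test dicts: tests sharing a Batch annotation value are collected into lists, while unannotated tests pass through individually.

tests = [
    {'class': 'FooTest', 'method': 'testA',
     'annotations': {'Batch': {'value': 'example_batch'}}},
    {'class': 'FooTest', 'method': 'testB',
     'annotations': {'Batch': {'value': 'example_batch'}}},
    {'class': 'BarTest', 'method': 'testC', 'annotations': {}},
]
# _GroupTests(tests) returns the two annotated tests as one list (run in a
# single instrumentation invocation) and the unannotated test as-is:
#   [[FooTest#testA, FooTest#testB], BarTest#testC]
# An empty Batch value falls back to the test class name, and any batch
# larger than _TEST_BATCH_MAX_GROUP_SIZE (256) is split into chunks.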
+ all_tests.extend([ + tests[i:i + _TEST_BATCH_MAX_GROUP_SIZE] + for i in range(0, len(tests), _TEST_BATCH_MAX_GROUP_SIZE) + ]) + all_tests.extend(other_tests) + return all_tests + + #override def _GetUniqueTestName(self, test): return instrumentation_test_instance.GetUniqueTestName(test) @@ -506,12 +539,9 @@ class LocalDeviceInstrumentationTestRun( device.adb, suffix='.json', dir=device.GetExternalStoragePath()) extras[EXTRA_TRACE_FILE] = trace_device_file.name + target = '%s/%s' % (self._test_instance.test_package, + self._test_instance.junit4_runner_class) if isinstance(test, list): - if not self._test_instance.driver_apk: - raise Exception('driver_apk does not exist. ' - 'Please build it and try again.') - if any(t.get('is_junit4') for t in test): - raise Exception('driver apk does not support JUnit4 tests') def name_and_timeout(t): n = instrumentation_test_instance.GetTestName(t) @@ -520,26 +550,15 @@ test_names, timeouts = zip(*(name_and_timeout(t) for t in test)) - test_name = ','.join(test_names) + test_name = instrumentation_test_instance.GetTestName(test[0]) + '_batch' + extras['class'] = ','.join(test_names) test_display_name = test_name - target = '%s/%s' % ( - self._test_instance.driver_package, - self._test_instance.driver_name) - extras.update( - self._test_instance.GetDriverEnvironmentVars( - test_list=test_names)) timeout = sum(timeouts) else: + assert test['is_junit4'] test_name = instrumentation_test_instance.GetTestName(test) test_display_name = self._GetUniqueTestName(test) - if test['is_junit4']: - target = '%s/%s' % ( - self._test_instance.test_package, - self._test_instance.junit4_runner_class) - else: - target = '%s/%s' % ( - self._test_instance.test_package, - self._test_instance.junit3_runner_class) + extras['class'] = test_name if 'flags' in test and test['flags']: flags_to_add.extend(test['flags']) @@ -556,14 +575,39 @@ timeout = None logging.info('preparing to run %s: %s', test_display_name, test) - render_tests_device_output_dir = None if _IsRenderTest(test): # TODO(mikecase): Add DeviceTempDirectory class and use that instead. - render_tests_device_output_dir = posixpath.join( - device.GetExternalStoragePath(), - 'render_test_output_dir') + self._render_tests_device_output_dir = posixpath.join( + device.GetExternalStoragePath(), 'render_test_output_dir') flags_to_add.append('--render-test-output-dir=%s' % - render_tests_device_output_dir) + self._render_tests_device_output_dir) + + if _IsWPRRecordReplayTest(test): + wpr_archive_relative_path = _GetWPRArchivePath(test) + if not wpr_archive_relative_path: + raise RuntimeError('Could not find the WPR archive file path ' + 'from the annotation.') + wpr_archive_path = os.path.join(host_paths.DIR_SOURCE_ROOT, + wpr_archive_relative_path) + if not os.path.isdir(wpr_archive_path): + raise RuntimeError('WPRArchiveDirectory annotation should point ' + 'to a directory only.') + + archive_path = os.path.join(wpr_archive_path, + self._GetUniqueTestName(test) + '.wprgo') + + if not os.path.exists(_WPR_GO_LINUX_X86_64_PATH): + # If we got to this stage, then we should have + # checkout_android set. + raise RuntimeError( + 'WPR Go binary not found at {}'.format(_WPR_GO_LINUX_X86_64_PATH)) + # Tells the server to use the binaries retrieved from CIPD.
+ chrome_proxy_utils.ChromeProxySession.SetWPRServerBinary( + _WPR_GO_LINUX_X86_64_PATH) + self._chrome_proxy = chrome_proxy_utils.ChromeProxySession() + self._chrome_proxy.wpr_record_mode = self._test_instance.wpr_record_mode + self._chrome_proxy.Start(device, archive_path) + flags_to_add.extend(self._chrome_proxy.GetFlags()) if flags_to_add: self._CreateFlagChangerIfNeeded(device) @@ -588,7 +632,7 @@ class LocalDeviceInstrumentationTestRun( result_code, result_bundle, statuses = ( self._test_instance.ParseAmInstrumentRawOutput(output)) results = self._test_instance.GenerateTestResults( - result_code, result_bundle, statuses, start_ms, duration_ms, + result_code, result_bundle, statuses, duration_ms, device.product_cpu_abi, self._test_instance.symbolizer) if self._env.trace_output: @@ -620,11 +664,12 @@ class LocalDeviceInstrumentationTestRun( # check to see if any failure images were generated even if the test # does not fail. try: - self._ProcessRenderTestResults( - device, render_tests_device_output_dir, results) + self._ProcessRenderTestResults(device, results) finally: - device.RemovePath(render_tests_device_output_dir, - recursive=True, force=True) + device.RemovePath(self._render_tests_device_output_dir, + recursive=True, + force=True) + self._render_tests_device_output_dir = None def pull_ui_screen_captures(): screenshots = [] @@ -653,13 +698,23 @@ class LocalDeviceInstrumentationTestRun( json_data['image_link'] = image_archive.Link() return json_data + def stop_chrome_proxy(): + # Removes the port forwarding + if self._chrome_proxy: + self._chrome_proxy.Stop(device) + if not self._chrome_proxy.wpr_replay_mode: + logging.info('WPR Record test generated archive file %s', + self._chrome_proxy.wpr_archive_path) + self._chrome_proxy = None + + # While constructing the TestResult objects, we can parallelize several # steps that involve ADB. These steps should NOT depend on any info in # the results! Things such as whether the test CRASHED have not yet been # determined. post_test_steps = [ - restore_flags, restore_timeout_scale, handle_coverage_data, - handle_render_test_data, pull_ui_screen_captures + restore_flags, restore_timeout_scale, stop_chrome_proxy, + handle_coverage_data, handle_render_test_data, pull_ui_screen_captures ] if self._env.concurrent_adb: reraiser_thread.RunAsync(post_test_steps) @@ -920,16 +975,14 @@ class LocalDeviceInstrumentationTestRun( screenshot_device_file.close() _SetLinkOnResults(results, link_name, screenshot_host_file.Link()) - def _ProcessRenderTestResults( - self, device, render_tests_device_output_dir, results): - self._ProcessSkiaGoldRenderTestResults( - device, render_tests_device_output_dir, results) - self._ProcessLocalRenderTestResults(device, render_tests_device_output_dir, - results) + def _ProcessRenderTestResults(self, device, results): + if not self._render_tests_device_output_dir: + return + self._ProcessSkiaGoldRenderTestResults(device, results) - def _ProcessSkiaGoldRenderTestResults( - self, device, render_tests_device_output_dir, results): - gold_dir = posixpath.join(render_tests_device_output_dir, _DEVICE_GOLD_DIR) + def _ProcessSkiaGoldRenderTestResults(self, device, results): + gold_dir = posixpath.join(self._render_tests_device_output_dir, + _DEVICE_GOLD_DIR) if not device.FileExists(gold_dir): return @@ -958,8 +1011,27 @@ class LocalDeviceInstrumentationTestRun( 'when doing Skia Gold comparison.' 
% image_name) continue + # Add 'ignore': '1' if a comparison failure would not be surfaced, as + # that implies that we aren't actively maintaining baselines for the + # test. This helps prevent unrelated CLs from getting comments posted to + # them. + with open(json_path) as infile: + # All the key/value pairs in the JSON file are strings, so convert + # to a bool. + json_dict = json.load(infile) + fail_on_unsupported = json_dict.get('fail_on_unsupported_configs', + 'false') + fail_on_unsupported = fail_on_unsupported.lower() == 'true' + should_hide_failure = ( + device.build_version_sdk not in RENDER_TEST_MODEL_SDK_CONFIGS.get( + device.product_model, []) and not fail_on_unsupported) + if should_hide_failure: + json_dict['ignore'] = '1' + with open(json_path, 'w') as outfile: + json.dump(json_dict, outfile) + gold_session = self._skia_gold_session_manager.GetSkiaGoldSession( - keys_file=json_path) + keys_input=json_path) try: status, error = gold_session.RunComparison( @@ -978,14 +1050,7 @@ class LocalDeviceInstrumentationTestRun( # Don't fail the test if we ran on an unsupported configuration unless # the test has explicitly opted in, as it's likely that baselines # aren't maintained for that configuration. - with open(json_path) as infile: - # All the key/value pairs in the JSON file are strings, so convert - # to a bool. - fail_on_unsupported = json.load(infile).get( - 'fail_on_unsupported_configs', 'false') - fail_on_unsupported = fail_on_unsupported.lower() == 'true' - if device.build_version_sdk not in RENDER_TEST_MODEL_SDK_CONFIGS.get( - device.product_model, []) and not fail_on_unsupported: + if should_hide_failure: if self._test_instance.skia_gold_properties.local_pixel_tests: _AppendToLog( results, 'Gold comparison for %s failed, but model %s with SDK ' @@ -1004,7 +1069,7 @@ class LocalDeviceInstrumentationTestRun( failure_log = ( 'Skia Gold reported failure for RenderTest %s. See ' 'RENDER_TESTS.md for how to fix this failure.' 
% render_name) - status_codes = gold_utils.SkiaGoldSession.StatusCodes + status_codes = gold_utils.AndroidSkiaGoldSession.StatusCodes if status == status_codes.AUTH_FAILURE: _AppendToLog(results, 'Gold authentication failed with output %s' % error) @@ -1053,63 +1118,6 @@ class LocalDeviceInstrumentationTestRun( 'Given unhandled SkiaGoldSession StatusCode %s with error %s', status, error) - def _ProcessLocalRenderTestResults(self, device, - render_tests_device_output_dir, results): - failure_images_device_dir = posixpath.join( - render_tests_device_output_dir, 'failures') - if not device.FileExists(failure_images_device_dir): - return - - diff_images_device_dir = posixpath.join( - render_tests_device_output_dir, 'diffs') - - golden_images_device_dir = posixpath.join( - render_tests_device_output_dir, 'goldens') - - for failure_filename in device.ListDirectory(failure_images_device_dir): - - with self._env.output_manager.ArchivedTempfile( - 'fail_%s' % failure_filename, 'render_tests', - output_manager.Datatype.PNG) as failure_image_host_file: - device.PullFile( - posixpath.join(failure_images_device_dir, failure_filename), - failure_image_host_file.name) - failure_link = failure_image_host_file.Link() - - golden_image_device_file = posixpath.join( - golden_images_device_dir, failure_filename) - if device.PathExists(golden_image_device_file): - with self._env.output_manager.ArchivedTempfile( - 'golden_%s' % failure_filename, 'render_tests', - output_manager.Datatype.PNG) as golden_image_host_file: - device.PullFile( - golden_image_device_file, golden_image_host_file.name) - golden_link = golden_image_host_file.Link() - else: - golden_link = '' - - diff_image_device_file = posixpath.join( - diff_images_device_dir, failure_filename) - if device.PathExists(diff_image_device_file): - with self._env.output_manager.ArchivedTempfile( - 'diff_%s' % failure_filename, 'render_tests', - output_manager.Datatype.PNG) as diff_image_host_file: - device.PullFile( - diff_image_device_file, diff_image_host_file.name) - diff_link = diff_image_host_file.Link() - else: - diff_link = '' - - processed_template_output = _GenerateRenderTestHtml( - failure_filename, failure_link, golden_link, diff_link) - - with self._env.output_manager.ArchivedTempfile( - '%s.html' % failure_filename, 'render_tests', - output_manager.Datatype.HTML) as html_results: - html_results.write(processed_template_output) - html_results.flush() - _SetLinkOnResults(results, failure_filename, html_results.Link()) - #override def _ShouldRetry(self, test, result): # We've tried to disable retries in the past with mixed results. 
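The net effect of the new 'ignore' handling above, in brief: before each comparison the Gold keys file is rewritten so that configurations without maintained baselines neither surface failures nor trigger CL comments. A condensed sketch under the same JSON conventions as the code above (the helper name is hypothetical):

import json


def _mark_ignored_if_unmaintained(json_path, device, sdk_configs):
  """Adds ignore=1 to the Gold keys file for unsupported configs."""
  with open(json_path) as infile:
    json_dict = json.load(infile)
  # All values in the keys file are strings, so convert to bool explicitly.
  fail_on_unsupported = json_dict.get(
      'fail_on_unsupported_configs', 'false').lower() == 'true'
  supported = device.build_version_sdk in sdk_configs.get(
      device.product_model, [])
  should_hide_failure = not supported and not fail_on_unsupported
  if should_hide_failure:
    json_dict['ignore'] = '1'
    with open(json_path, 'w') as outfile:
      json.dump(json_dict, outfile)
  return should_hide_failure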
@@ -1145,6 +1153,22 @@ class LocalDeviceInstrumentationTestRun( return timeout +def _IsWPRRecordReplayTest(test): + """Determines whether a test or a list of tests is a WPR RecordReplay Test.""" + if not isinstance(test, list): + test = [test] + return any([ + WPR_RECORD_REPLAY_TEST_FEATURE_ANNOTATION in t['annotations'].get( + FEATURE_ANNOTATION, {}).get('value', ()) for t in test + ]) + + +def _GetWPRArchivePath(test): + """Retrieves the archive path from the WPRArchiveDirectory annotation.""" + return test['annotations'].get(WPR_ARCHIVE_FILE_PATH_ANNOTATION, + {}).get('value', ()) + + def _IsRenderTest(test): """Determines if a test or list of tests has a RenderTest amongst them.""" if not isinstance(test, list): diff --git a/chromium/build/android/pylib/local/device/local_device_instrumentation_test_run_test.py b/chromium/build/android/pylib/local/device/local_device_instrumentation_test_run_test.py index 3129c1121b0..dd57d92061e 100755 --- a/chromium/build/android/pylib/local/device/local_device_instrumentation_test_run_test.py +++ b/chromium/build/android/pylib/local/device/local_device_instrumentation_test_run_test.py @@ -61,6 +61,88 @@ class LocalDeviceInstrumentationTestRunTest(unittest.TestCase): 'SadTest.testNotRun', base_test_result.ResultType.NOTRUN) self.assertTrue(self._obj._ShouldRetry(test, result)) + def testIsWPRRecordReplayTest_matchedWithKey(self): + test = { + 'annotations': { + 'Feature': { + 'value': ['WPRRecordReplayTest', 'dummy'] + } + }, + 'class': 'WPRDummyTest', + 'method': 'testRun', + 'is_junit4': True, + } + self.assertTrue( + local_device_instrumentation_test_run._IsWPRRecordReplayTest(test)) + + def testIsWPRRecordReplayTest_noMatchedKey(self): + test = { + 'annotations': { + 'Feature': { + 'value': ['abc', 'dummy'] + } + }, + 'class': 'WPRDummyTest', + 'method': 'testRun', + 'is_junit4': True, + } + self.assertFalse( + local_device_instrumentation_test_run._IsWPRRecordReplayTest(test)) + + def testGetWPRArchivePath_matchedWithKey(self): + test = { + 'annotations': { + 'WPRArchiveDirectory': { + 'value': 'abc' + } + }, + 'class': 'WPRDummyTest', + 'method': 'testRun', + 'is_junit4': True, + } + self.assertEqual( + local_device_instrumentation_test_run._GetWPRArchivePath(test), 'abc') + + def testGetWPRArchivePath_noMatchedWithKey(self): + test = { + 'annotations': { + 'Feature': { + 'value': 'abc' + } + }, + 'class': 'WPRDummyTest', + 'method': 'testRun', + 'is_junit4': True, + } + self.assertFalse( + local_device_instrumentation_test_run._GetWPRArchivePath(test)) + + def testIsRenderTest_matchedWithKey(self): + test = { + 'annotations': { + 'Feature': { + 'value': ['RenderTest', 'dummy'] + } + }, + 'class': 'DummyTest', + 'method': 'testRun', + 'is_junit4': True, + } + self.assertTrue(local_device_instrumentation_test_run._IsRenderTest(test)) + + def testIsRenderTest_noMatchedKey(self): + test = { + 'annotations': { + 'Feature': { + 'value': ['abc', 'dummy'] + } + }, + 'class': 'DummyTest', + 'method': 'testRun', + 'is_junit4': True, + } + self.assertFalse(local_device_instrumentation_test_run._IsRenderTest(test)) + if __name__ == '__main__': unittest.main(verbosity=2) diff --git a/chromium/build/android/pylib/local/device/local_device_test_run.py b/chromium/build/android/pylib/local/device/local_device_test_run.py index 2018751fed5..69b27186507 100644 --- a/chromium/build/android/pylib/local/device/local_device_test_run.py +++ b/chromium/build/android/pylib/local/device/local_device_test_run.py @@ -137,6 +137,7 @@ class 
LocalDeviceTestRun(test_run.TestRun): with signal_handler.AddSignalHandler(signal.SIGTERM, stop_tests): tries = 0 while tries < self._env.max_tries and tests: + grouped_tests = self._GroupTests(tests) logging.info('STARTING TRY #%d/%d', tries + 1, self._env.max_tries) if tries > 0 and self._env.recover_devices: if any(d.build_version_sdk == version_codes.LOLLIPOP_MR1 @@ -171,12 +172,14 @@ class LocalDeviceTestRun(test_run.TestRun): try: if self._ShouldShard(): - tc = test_collection.TestCollection(self._CreateShards(tests)) + tc = test_collection.TestCollection( + self._CreateShards(grouped_tests)) self._env.parallel_devices.pMap( run_tests_on_device, tc, try_results).pGet(None) else: - self._env.parallel_devices.pMap( - run_tests_on_device, tests, try_results).pGet(None) + self._env.parallel_devices.pMap(run_tests_on_device, + grouped_tests, + try_results).pGet(None) except TestsTerminated: for unknown_result in try_results.GetUnknown(): try_results.AddResult( @@ -236,9 +239,16 @@ class LocalDeviceTestRun(test_run.TestRun): if total_shards < 0 or shard_index < 0 or total_shards <= shard_index: raise InvalidShardingSettings(shard_index, total_shards) - return [ - t for t in tests - if hash(self._GetUniqueTestName(t)) % total_shards == shard_index] + sharded_tests = [] + for t in self._GroupTests(tests): + if (hash(self._GetUniqueTestName(t[0] if isinstance(t, list) else t)) % + total_shards == shard_index): + if isinstance(t, list): + sharded_tests.extend(t) + else: + sharded_tests.append(t) + + return sharded_tests def GetTool(self, device): if str(device) not in self._tools: @@ -260,6 +270,10 @@ class LocalDeviceTestRun(test_run.TestRun): def _GetTests(self): raise NotImplementedError + def _GroupTests(self, tests): + # pylint: disable=no-self-use + return tests + def _RunTest(self, device, test): raise NotImplementedError diff --git a/chromium/build/android/pylib/local/device/local_device_test_run_test.py b/chromium/build/android/pylib/local/device/local_device_test_run_test.py index 525bf25200b..aeea5881c8c 100755 --- a/chromium/build/android/pylib/local/device/local_device_test_run_test.py +++ b/chromium/build/android/pylib/local/device/local_device_test_run_test.py @@ -8,11 +8,9 @@ import unittest from pylib.base import base_test_result -from pylib.constants import host_paths from pylib.local.device import local_device_test_run -with host_paths.SysPath(host_paths.PYMOCK_PATH): - import mock # pylint: disable=import-error +import mock # pylint: disable=import-error class SubstituteDeviceRootTest(unittest.TestCase): diff --git a/chromium/build/android/pylib/output/remote_output_manager_test.py b/chromium/build/android/pylib/output/remote_output_manager_test.py index 6917260dd7c..d87c6eb3a9c 100755 --- a/chromium/build/android/pylib/output/remote_output_manager_test.py +++ b/chromium/build/android/pylib/output/remote_output_manager_test.py @@ -9,11 +9,9 @@ import unittest from pylib.base import output_manager from pylib.base import output_manager_test_case -from pylib.constants import host_paths from pylib.output import remote_output_manager -with host_paths.SysPath(host_paths.PYMOCK_PATH): - import mock # pylint: disable=import-error +import mock # pylint: disable=import-error @mock.patch('pylib.utils.google_storage_helper') diff --git a/chromium/build/android/pylib/symbols/deobfuscator.py b/chromium/build/android/pylib/symbols/deobfuscator.py index 42084ddc789..ffc23b87048 100644 --- a/chromium/build/android/pylib/symbols/deobfuscator.py +++ 
b/chromium/build/android/pylib/symbols/deobfuscator.py @@ -150,7 +150,7 @@ class DeobfuscatorPool(object): # De-obfuscation is broken. if self._num_restarts == _MAX_RESTARTS: - return lines + raise Exception('Deobfuscation seems broken.') # Restart any closed Deobfuscators. for i, d in enumerate(self._pool): diff --git a/chromium/build/android/pylib/utils/app_bundle_utils.py b/chromium/build/android/pylib/utils/app_bundle_utils.py index f076ed39cd6..59efb775a66 100644 --- a/chromium/build/android/pylib/utils/app_bundle_utils.py +++ b/chromium/build/android/pylib/utils/app_bundle_utils.py @@ -18,6 +18,8 @@ import bundletool # List of valid modes for GenerateBundleApks() BUILD_APKS_MODES = ('default', 'universal', 'system', 'system_compressed') +OPTIMIZE_FOR_OPTIONS = ('ABI', 'SCREEN_DENSITY', 'LANGUAGE', + 'TEXTURE_COMPRESSION_FORMAT') _SYSTEM_MODES = ('system_compressed', 'system') _ALL_ABIS = ['armeabi-v7a', 'arm64-v8a', 'x86', 'x86_64'] @@ -50,7 +52,8 @@ def GenerateBundleApks(bundle_path, minimal=False, minimal_sdk_version=None, check_for_noop=True, - system_image_locales=None): + system_image_locales=None, + optimize_for=None): """Generate an .apks archive from an app bundle if needed. Args: @@ -68,6 +71,8 @@ def GenerateBundleApks(bundle_path, check_for_noop: Use md5_check to short-circuit when inputs have not changed. system_image_locales: Locales to package in the APK when mode is "system" or "system_compressed". + optimize_for: Overrides split configuration, which must be None or + one of OPTIMIZE_FOR_OPTIONS. """ device_spec = None if minimal_sdk_version: @@ -110,6 +115,13 @@ def GenerateBundleApks(bundle_path, (mode, BUILD_APKS_MODES)) cmd_args += ['--mode=' + mode] + if optimize_for: + if optimize_for not in OPTIMIZE_FOR_OPTIONS: + raise Exception('Invalid optimize_for parameter %s ' + '(should be in %s)' % + (optimize_for, OPTIMIZE_FOR_OPTIONS)) + cmd_args += ['--optimize-for=' + optimize_for] + with tempfile.NamedTemporaryFile(suffix='.json') as spec_file: if device_spec: json.dump(device_spec, spec_file)
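In isolation, the optimize_for validation and forwarding added above amounts to the following standalone sketch (the helper name is hypothetical; it mirrors the hunk's logic):

OPTIMIZE_FOR_OPTIONS = ('ABI', 'SCREEN_DENSITY', 'LANGUAGE',
                        'TEXTURE_COMPRESSION_FORMAT')


def _optimize_for_args(optimize_for=None):
  """Returns the extra `bundletool build-apks` arguments, if any."""
  if not optimize_for:
    return []
  if optimize_for not in OPTIMIZE_FOR_OPTIONS:
    raise ValueError('Invalid optimize_for parameter %s (should be in %s)' %
                     (optimize_for, OPTIMIZE_FOR_OPTIONS))
  return ['--optimize-for=' + optimize_for]

# _optimize_for_args('LANGUAGE') -> ['--optimize-for=LANGUAGE']
# _optimize_for_args()           -> []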
diff --git a/chromium/build/android/pylib/utils/chrome_proxy_utils.py b/chromium/build/android/pylib/utils/chrome_proxy_utils.py new file mode 100644 index 00000000000..149d0b9c8c5 --- /dev/null +++ b/chromium/build/android/pylib/utils/chrome_proxy_utils.py @@ -0,0 +1,171 @@ +# Copyright 2020 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Utilities for setting up and tearing down the WPR and TsProxy services.""" + +from py_utils import ts_proxy_server +from py_utils import webpagereplay_go_server + +from devil.android import forwarder + +PROXY_HOST_IP = '127.0.0.1' +# From Catapult/WebPageReplay document. +IGNORE_CERT_ERROR_SPKI_LIST = 'PhrPvGIaAMmd29hj8BCZOq096yj7uMpRNHpn5PDxI6I=' +PROXY_SERVER = 'socks5://localhost' +DEFAULT_DEVICE_PORT = 1080 +DEFAULT_ROUND_TRIP_LATENCY_MS = 100 +DEFAULT_DOWNLOAD_BANDWIDTH_KBPS = 72000 +DEFAULT_UPLOAD_BANDWIDTH_KBPS = 72000 + + +class WPRServer(object): + """Utils to set up a webpagereplay_go_server instance.""" + + def __init__(self): + self._archive_path = None + self._host_http_port = 0 + self._host_https_port = 0 + self._record_mode = False + self._server = None + + def StartServer(self, wpr_archive_path): + """Starts a webpagereplay_go_server instance.""" + if wpr_archive_path == self._archive_path and self._server: + # Reuse existing webpagereplay_go_server instance. + return + + if self._server: + self.StopServer() + + replay_options = [] + if self._record_mode: + replay_options.append('--record') + + ports = {} + if not self._server: + self._server = webpagereplay_go_server.ReplayServer( + wpr_archive_path, + PROXY_HOST_IP, + http_port=self._host_http_port, + https_port=self._host_https_port, + replay_options=replay_options) + self._archive_path = wpr_archive_path + ports = self._server.StartServer() + + self._host_http_port = ports['http'] + self._host_https_port = ports['https'] + + def StopServer(self): + """Stops the webpagereplay_go_server instance and resets archive.""" + self._server.StopServer() + self._server = None + self._host_http_port = 0 + self._host_https_port = 0 + + @staticmethod + def SetServerBinaryPath(go_binary_path): + """Sets the go_binary_path for webpagereplay_go_server.ReplayServer.""" + webpagereplay_go_server.ReplayServer.SetGoBinaryPath(go_binary_path) + + @property + def record_mode(self): + return self._record_mode + + @record_mode.setter + def record_mode(self, value): + self._record_mode = value + + @property + def http_port(self): + return self._host_http_port + + @property + def https_port(self): + return self._host_https_port + + @property + def archive_path(self): + return self._archive_path + + +class ChromeProxySession(object): + """Utils to help set up a Chrome Proxy.""" + + def __init__(self, device_proxy_port=DEFAULT_DEVICE_PORT): + self._device_proxy_port = device_proxy_port + self._ts_proxy_server = ts_proxy_server.TsProxyServer(PROXY_HOST_IP) + self._wpr_server = WPRServer() + + @property + def wpr_record_mode(self): + """Returns whether this proxy session is running in record mode.""" + return self._wpr_server.record_mode + + @wpr_record_mode.setter + def wpr_record_mode(self, value): + self._wpr_server.record_mode = value + + @property + def wpr_replay_mode(self): + """Returns whether this proxy session is running in replay mode.""" + return not self._wpr_server.record_mode + + @property + def wpr_archive_path(self): + """Returns the wpr archive file path used in this proxy session.""" + return self._wpr_server.archive_path + + @property + def device_proxy_port(self): + return self._device_proxy_port + + def GetFlags(self): + """Gets the Chrome command-line flags needed by ChromeProxySession.""" + extra_flags = [] + + extra_flags.append('--ignore-certificate-errors-spki-list=%s' % + IGNORE_CERT_ERROR_SPKI_LIST) + extra_flags.append('--proxy-server=%s:%s' % + (PROXY_SERVER, self._device_proxy_port)) + return extra_flags + + @staticmethod + def SetWPRServerBinary(go_binary_path): + """Sets the WPR server go_binary_path.""" + WPRServer.SetServerBinaryPath(go_binary_path) + + def Start(self, device, wpr_archive_path): + """Starts the wpr_server as well as the ts_proxy server and sets up the environment. + + Args: + device: A DeviceUtils instance. + wpr_archive_path: An absolute path to the WPR archive file.
+ + """ + self._wpr_server.StartServer(wpr_archive_path) + self._ts_proxy_server.StartServer() + + # Maps device port to host port + forwarder.Forwarder.Map( + [(self._device_proxy_port, self._ts_proxy_server.port)], device) + # Maps tsProxy port to wpr http/https ports + self._ts_proxy_server.UpdateOutboundPorts( + http_port=self._wpr_server.http_port, + https_port=self._wpr_server.https_port) + self._ts_proxy_server.UpdateTrafficSettings( + round_trip_latency_ms=DEFAULT_ROUND_TRIP_LATENCY_MS, + download_bandwidth_kbps=DEFAULT_DOWNLOAD_BANDWIDTH_KBPS, + upload_bandwidth_kbps=DEFAULT_UPLOAD_BANDWIDTH_KBPS) + + def Stop(self, device): + """Stops the wpr_server, and ts_proxy server and tears down env. + + Note that Stop does not reset wpr_record_mode, wpr_replay_mode, + wpr_archive_path property. + + Args: + device: A DeviceUtils instance. + """ + self._wpr_server.StopServer() + self._ts_proxy_server.StopServer() + forwarder.Forwarder.UnmapDevicePort(self._device_proxy_port, device) diff --git a/chromium/build/android/pylib/utils/chrome_proxy_utils_test.py b/chromium/build/android/pylib/utils/chrome_proxy_utils_test.py new file mode 100755 index 00000000000..b38b268fe8a --- /dev/null +++ b/chromium/build/android/pylib/utils/chrome_proxy_utils_test.py @@ -0,0 +1,235 @@ +#!/usr/bin/env vpython +# Copyright 2020 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Tests for chrome_proxy_utils.""" + +#pylint: disable=protected-access + +import os +import unittest + +from pylib.utils import chrome_proxy_utils + +from devil.android import forwarder +from devil.android import device_utils +from devil.android.sdk import adb_wrapper +from py_utils import ts_proxy_server +from py_utils import webpagereplay_go_server + +import mock # pylint: disable=import-error + + +def _DeviceUtilsMock(test_serial, is_ready=True): + """Returns a DeviceUtils instance based on given serial.""" + adb = mock.Mock(spec=adb_wrapper.AdbWrapper) + adb.__str__ = mock.Mock(return_value=test_serial) + adb.GetDeviceSerial.return_value = test_serial + adb.is_ready = is_ready + return device_utils.DeviceUtils(adb) + + +class ChromeProxySessionTest(unittest.TestCase): + """Unittest for ChromeProxySession.""" + + #pylint: disable=no-self-use + + @mock.patch.object(forwarder.Forwarder, 'Map') + @mock.patch.object(chrome_proxy_utils.WPRServer, 'StartServer') + @mock.patch.object(ts_proxy_server.TsProxyServer, 'StartServer') + @mock.patch.object(ts_proxy_server.TsProxyServer, 'UpdateOutboundPorts') + @mock.patch.object(ts_proxy_server.TsProxyServer, 'UpdateTrafficSettings') + @mock.patch('py_utils.ts_proxy_server.TsProxyServer.port', + new_callable=mock.PropertyMock) + def test_Start(self, port_mock, traffic_setting_mock, outboundport_mock, + start_server_mock, wpr_mock, forwarder_mock): + chrome_proxy = chrome_proxy_utils.ChromeProxySession(4) + chrome_proxy._wpr_server._host_http_port = 1 + chrome_proxy._wpr_server._host_https_port = 2 + port_mock.return_value = 3 + device = _DeviceUtilsMock('01234') + chrome_proxy.Start(device, 'abc') + + forwarder_mock.assert_called_once_with([(4, 3)], device) + wpr_mock.assert_called_once_with('abc') + start_server_mock.assert_called_once() + outboundport_mock.assert_called_once_with(http_port=1, https_port=2) + traffic_setting_mock.assert_called_once_with(download_bandwidth_kbps=72000, + round_trip_latency_ms=100, + upload_bandwidth_kbps=72000) + port_mock.assert_called_once() + + 
@mock.patch.object(forwarder.Forwarder, 'UnmapDevicePort') + @mock.patch.object(chrome_proxy_utils.WPRServer, 'StopServer') + @mock.patch.object(ts_proxy_server.TsProxyServer, 'StopServer') + def test_Stop(self, ts_proxy_mock, wpr_mock, forwarder_mock): + chrome_proxy = chrome_proxy_utils.ChromeProxySession(4) + device = _DeviceUtilsMock('01234') + chrome_proxy.wpr_record_mode = True + chrome_proxy._wpr_server._archive_path = 'abc' + chrome_proxy.Stop(device) + + forwarder_mock.assert_called_once_with(4, device) + wpr_mock.assert_called_once_with() + ts_proxy_mock.assert_called_once_with() + + #pylint: enable=no-self-use + + @mock.patch.object(forwarder.Forwarder, 'UnmapDevicePort') + @mock.patch.object(webpagereplay_go_server.ReplayServer, 'StopServer') + @mock.patch.object(ts_proxy_server.TsProxyServer, 'StopServer') + def test_Stop_WithProperties(self, ts_proxy_mock, wpr_mock, forwarder_mock): + chrome_proxy = chrome_proxy_utils.ChromeProxySession(4) + chrome_proxy._wpr_server._server = webpagereplay_go_server.ReplayServer( + os.path.abspath(__file__), chrome_proxy_utils.PROXY_HOST_IP, 0, 0, []) + chrome_proxy._wpr_server._archive_path = os.path.abspath(__file__) + device = _DeviceUtilsMock('01234') + chrome_proxy.wpr_record_mode = True + chrome_proxy.Stop(device) + + forwarder_mock.assert_called_once_with(4, device) + wpr_mock.assert_called_once_with() + ts_proxy_mock.assert_called_once_with() + self.assertFalse(chrome_proxy.wpr_replay_mode) + self.assertEquals(chrome_proxy.wpr_archive_path, os.path.abspath(__file__)) + + def test_SetWPRRecordMode(self): + chrome_proxy = chrome_proxy_utils.ChromeProxySession(4) + chrome_proxy.wpr_record_mode = True + self.assertTrue(chrome_proxy._wpr_server.record_mode) + self.assertTrue(chrome_proxy.wpr_record_mode) + self.assertFalse(chrome_proxy.wpr_replay_mode) + + chrome_proxy.wpr_record_mode = False + self.assertFalse(chrome_proxy._wpr_server.record_mode) + self.assertFalse(chrome_proxy.wpr_record_mode) + self.assertTrue(chrome_proxy.wpr_replay_mode) + + def test_SetWPRArchivePath(self): + chrome_proxy = chrome_proxy_utils.ChromeProxySession(4) + chrome_proxy._wpr_server._archive_path = 'abc' + self.assertEquals(chrome_proxy.wpr_archive_path, 'abc') + + def test_UseDefaultDeviceProxyPort(self): + chrome_proxy = chrome_proxy_utils.ChromeProxySession() + expected_flags = [ + '--ignore-certificate-errors-spki-list=' + 'PhrPvGIaAMmd29hj8BCZOq096yj7uMpRNHpn5PDxI6I=', + '--proxy-server=socks5://localhost:1080' + ] + self.assertEquals(chrome_proxy.device_proxy_port, 1080) + self.assertListEqual(chrome_proxy.GetFlags(), expected_flags) + + def test_UseNewDeviceProxyPort(self): + chrome_proxy = chrome_proxy_utils.ChromeProxySession(1) + expected_flags = [ + '--ignore-certificate-errors-spki-list=' + 'PhrPvGIaAMmd29hj8BCZOq096yj7uMpRNHpn5PDxI6I=', + '--proxy-server=socks5://localhost:1' + ] + self.assertEquals(chrome_proxy.device_proxy_port, 1) + self.assertListEqual(chrome_proxy.GetFlags(), expected_flags) + + +class WPRServerTest(unittest.TestCase): + @mock.patch('py_utils.webpagereplay_go_server.ReplayServer') + def test_StartSever_fresh_replaymode(self, wpr_mock): + wpr_server = chrome_proxy_utils.WPRServer() + wpr_archive_file = os.path.abspath(__file__) + wpr_server.StartServer(wpr_archive_file) + + wpr_mock.assert_called_once_with(wpr_archive_file, + '127.0.0.1', + http_port=0, + https_port=0, + replay_options=[]) + + self.assertEqual(wpr_server._archive_path, wpr_archive_file) + self.assertTrue(wpr_server._server) + + 
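For reference, the StartServer reuse semantics exercised by the tests that follow, in brief (archive paths are placeholders):

from pylib.utils import chrome_proxy_utils

server = chrome_proxy_utils.WPRServer()
server.StartServer('/abs/path/a.wprgo')  # fresh start: launches a new server
server.StartServer('/abs/path/a.wprgo')  # same archive: instance is reused
server.StartServer('/abs/path/b.wprgo')  # different archive: old instance is
                                         # stopped and a new one started
server.StopServer()                      # clears server, ports, archive path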
@mock.patch('py_utils.webpagereplay_go_server.ReplayServer') + def test_StartServer_fresh_recordmode(self, wpr_mock): + wpr_server = chrome_proxy_utils.WPRServer() + wpr_server.record_mode = True + wpr_archive_file = os.path.abspath(__file__) + wpr_server.StartServer(wpr_archive_file) + + wpr_mock.assert_called_once_with(wpr_archive_file, + '127.0.0.1', + http_port=0, + https_port=0, + replay_options=['--record']) + + self.assertEqual(wpr_server._archive_path, os.path.abspath(__file__)) + self.assertTrue(wpr_server._server) + + #pylint: disable=no-self-use + + @mock.patch.object(webpagereplay_go_server.ReplayServer, 'StartServer') + def test_StartServer_recordmode(self, start_server_mock): + wpr_server = chrome_proxy_utils.WPRServer() + start_server_mock.return_value = {'http': 1, 'https': 2} + wpr_server.StartServer(os.path.abspath(__file__)) + + start_server_mock.assert_called_once() + self.assertEqual(wpr_server._host_http_port, 1) + self.assertEqual(wpr_server._host_https_port, 2) + self.assertEqual(wpr_server._archive_path, os.path.abspath(__file__)) + self.assertTrue(wpr_server._server) + + @mock.patch.object(webpagereplay_go_server.ReplayServer, 'StartServer') + def test_StartServer_reuseServer(self, start_server_mock): + wpr_server = chrome_proxy_utils.WPRServer() + wpr_server._server = webpagereplay_go_server.ReplayServer( + os.path.abspath(__file__), + chrome_proxy_utils.PROXY_HOST_IP, + http_port=0, + https_port=0, + replay_options=[]) + wpr_server._archive_path = os.path.abspath(__file__) + wpr_server.StartServer(os.path.abspath(__file__)) + start_server_mock.assert_not_called() + + @mock.patch.object(webpagereplay_go_server.ReplayServer, 'StartServer') + @mock.patch.object(webpagereplay_go_server.ReplayServer, 'StopServer') + def test_StartServer_notReuseServer(self, stop_server_mock, start_server_mock): + wpr_server = chrome_proxy_utils.WPRServer() + wpr_server._server = webpagereplay_go_server.ReplayServer( + os.path.abspath(__file__), + chrome_proxy_utils.PROXY_HOST_IP, + http_port=0, + https_port=0, + replay_options=[]) + wpr_server._archive_path = '' + wpr_server.StartServer(os.path.abspath(__file__)) + start_server_mock.assert_called_once() + stop_server_mock.assert_called_once() + + #pylint: enable=no-self-use + + @mock.patch.object(webpagereplay_go_server.ReplayServer, 'StopServer') + def test_StopServer(self, stop_server_mock): + wpr_server = chrome_proxy_utils.WPRServer() + wpr_server._server = webpagereplay_go_server.ReplayServer( + os.path.abspath(__file__), + chrome_proxy_utils.PROXY_HOST_IP, + http_port=0, + https_port=0, + replay_options=[]) + wpr_server.StopServer() + stop_server_mock.assert_called_once() + self.assertFalse(wpr_server._server) + self.assertFalse(wpr_server._archive_path) + self.assertFalse(wpr_server.http_port) + self.assertFalse(wpr_server.https_port) + + def test_SetWPRRecordMode(self): + wpr_server = chrome_proxy_utils.WPRServer() + wpr_server.record_mode = True + self.assertTrue(wpr_server.record_mode) + wpr_server.record_mode = False + self.assertFalse(wpr_server.record_mode) + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/chromium/build/android/pylib/utils/gold_utils.py b/chromium/build/android/pylib/utils/gold_utils.py index 2b0aa60434f..f4f0840e429 100644 --- a/chromium/build/android/pylib/utils/gold_utils.py +++ b/chromium/build/android/pylib/utils/gold_utils.py @@ -1,332 +1,31 @@ # Copyright 2020 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -"""Utilities for interacting with the Skia Gold image diffing service.""" +"""//build/android implementations of //testing/skia_gold_common. + +Used for interacting with the Skia Gold image diffing service. +""" -import json -import logging import os import shutil -import tempfile from devil.utils import cmd_helper from pylib.base.output_manager import Datatype from pylib.constants import host_paths -from pylib.utils import local_utils from pylib.utils import repo_utils -DEFAULT_INSTANCE = 'chrome' - -GOLDCTL_BINARY = os.path.join(host_paths.DIR_SOURCE_ROOT, 'tools', - 'skia_goldctl', 'linux', 'goldctl') - - -class SkiaGoldSession(object): - class StatusCodes(object): - """Status codes for RunComparison.""" - SUCCESS = 0 - AUTH_FAILURE = 1 - INIT_FAILURE = 2 - COMPARISON_FAILURE_REMOTE = 3 - COMPARISON_FAILURE_LOCAL = 4 - LOCAL_DIFF_FAILURE = 5 - NO_OUTPUT_MANAGER = 6 - - class ComparisonResults(object): - """Struct-like object for storing results of an image comparison.""" - - def __init__(self): - self.triage_link = None - self.triage_link_omission_reason = None - self.local_diff_given_image = None - self.local_diff_closest_image = None - self.local_diff_diff_image = None - - def __init__(self, - working_dir, - gold_properties, - keys_file, - corpus, - instance=DEFAULT_INSTANCE): - """A class to handle all aspects of an image comparison via Skia Gold. - - A single SkiaGoldSession is valid for a single instance/corpus/keys_file - combination. - - Args: - working_dir: The directory to store config files, etc. Sharing the same - working directory between multiple SkiaGoldSessions allows re-use of - authentication and downloaded baselines. - gold_properties: A SkiaGoldProperties instance for the current test run. - keys_file: A path to a JSON file containing various comparison config - data such as corpus and debug information like the hardware/software - configuration the images will be produced on. - corpus: The corpus that images that will be compared belong to. - instance: The name of the Skia Gold instance to interact with. - """ - self._working_dir = working_dir - self._gold_properties = gold_properties - self._keys_file = keys_file - self._corpus = corpus - self._instance = instance - self._triage_link_file = tempfile.NamedTemporaryFile( - suffix='.txt', dir=working_dir, delete=False).name - # A map of image name (string) to ComparisonResults for that image. - self._comparison_results = {} - self._authenticated = False - self._initialized = False - - # pylint: disable=too-many-return-statements - def RunComparison(self, - name, - png_file, - output_manager, - use_luci=True): - """Helper method to run all steps to compare a produced image. - - Handles authentication, initialization, comparison, and, if necessary, - local diffing. +with host_paths.SysPath(host_paths.BUILD_PATH): + from skia_gold_common import skia_gold_session + from skia_gold_common import skia_gold_session_manager + from skia_gold_common import skia_gold_properties - Args: - name: The name of the image being compared. - png_file: A path to a PNG file containing the image to be compared. - output_manager: The output manager used to save local diff images if - necessary. Can be None, but will fail if it ends up needing to be used - and is not set. - use_luci: If true, authentication will use the service account provided - by the LUCI context. 
If false, will attempt to use whatever is set up - in gsutil, which is only supported for local runs. - Returns: - A tuple (status, error). |status| is a value from - SkiaGoldSession.StatusCodes signifying the result of the comparison. - |error| is an error message describing the status if not successful. - """ - auth_rc, auth_stdout = self.Authenticate(use_luci=use_luci) - if auth_rc: - return self.StatusCodes.AUTH_FAILURE, auth_stdout - - init_rc, init_stdout = self.Initialize() - if init_rc: - return self.StatusCodes.INIT_FAILURE, init_stdout - - compare_rc, compare_stdout = self.Compare(name=name, png_file=png_file) - if not compare_rc: - return self.StatusCodes.SUCCESS, None - - logging.error('Gold comparison failed: %s', compare_stdout) - if not self._gold_properties.local_pixel_tests: - return self.StatusCodes.COMPARISON_FAILURE_REMOTE, compare_stdout - - if not output_manager: - return (self.StatusCodes.NO_OUTPUT_MANAGER, - 'No output manager for local diff images') - diff_rc, diff_stdout = self.Diff( - name=name, png_file=png_file, output_manager=output_manager) - if diff_rc: - return self.StatusCodes.LOCAL_DIFF_FAILURE, diff_stdout - return self.StatusCodes.COMPARISON_FAILURE_LOCAL, compare_stdout - - def Authenticate(self, use_luci=True): - """Authenticates with Skia Gold for this session. +class AndroidSkiaGoldSession(skia_gold_session.SkiaGoldSession): + def _StoreDiffLinks(self, image_name, output_manager, output_dir): + """See SkiaGoldSession._StoreDiffLinks for general documentation. - Args: - use_luci: If true, authentication will use the service account provided - by the LUCI context. If false, will attempt to use whatever is set up - in gsutil, which is only supported for local runs. - - Returns: - A tuple (return_code, output). |return_code| is the return code of the - authentication process. |output| is the stdout + stderr of the - authentication process. + |output_manager| must be a build.android.pylib.base.OutputManager instance. """ - if self._authenticated: - return 0, None - if self._gold_properties.bypass_skia_gold_functionality: - logging.warning('Not actually authenticating with Gold due to ' - '--bypass-skia-gold-functionality being present.') - return 0, None - - auth_cmd = [GOLDCTL_BINARY, 'auth', '--work-dir', self._working_dir] - if use_luci: - auth_cmd.append('--luci') - elif not self._gold_properties.local_pixel_tests: - raise RuntimeError( - 'Cannot authenticate to Skia Gold with use_luci=False unless running ' - 'local pixel tests') - - rc, stdout, _ = cmd_helper.GetCmdStatusOutputAndError( - auth_cmd, merge_stderr=True) - if rc == 0: - self._authenticated = True - return rc, stdout - - def Initialize(self): - """Initializes the working directory if necessary. - - This can technically be skipped if the same information is passed to the - command used for image comparison, but that is less efficient under the - hood. Doing it that way effectively requires an initialization for every - comparison (~250 ms) instead of once at the beginning. - - Returns: - A tuple (return_code, output). |return_code| is the return code of the - initialization process. |output| is the stdout + stderr of the - initialization process. 
- """ - if self._initialized: - return 0, None - if self._gold_properties.bypass_skia_gold_functionality: - logging.warning('Not actually initializing Gold due to ' - '--bypass-skia-gold-functionality being present.') - return 0, None - - init_cmd = [ - GOLDCTL_BINARY, - 'imgtest', - 'init', - '--passfail', - '--instance', - self._instance, - '--corpus', - self._corpus, - '--keys-file', - self._keys_file, - '--work-dir', - self._working_dir, - '--failure-file', - self._triage_link_file, - '--commit', - self._gold_properties.git_revision, - ] - if self._gold_properties.IsTryjobRun(): - init_cmd.extend([ - '--issue', - str(self._gold_properties.issue), - '--patchset', - str(self._gold_properties.patchset), - '--jobid', - str(self._gold_properties.job_id), - '--crs', - str(self._gold_properties.code_review_system), - '--cis', - str(self._gold_properties.continuous_integration_system), - ]) - rc, stdout, _ = cmd_helper.GetCmdStatusOutputAndError( - init_cmd, merge_stderr=True) - if rc == 0: - self._initialized = True - return rc, stdout - - def Compare(self, name, png_file): - """Compares the given image to images known to Gold. - - Triage links can later be retrieved using GetTriageLink(). - - Args: - name: The name of the image being compared. - png_file: A path to a PNG file containing the image to be compared. - - Returns: - A tuple (return_code, output). |return_code| is the return code of the - comparison process. |output| is the stdout + stderr of the comparison - process. - """ - if self._gold_properties.bypass_skia_gold_functionality: - logging.warning('Not actually comparing with Gold due to ' - '--bypass-skia-gold-functionality being present.') - return 0, None - - compare_cmd = [ - GOLDCTL_BINARY, - 'imgtest', - 'add', - '--test-name', - name, - '--png-file', - png_file, - '--work-dir', - self._working_dir, - ] - if self._gold_properties.local_pixel_tests: - compare_cmd.append('--dryrun') - - rc, stdout, _ = cmd_helper.GetCmdStatusOutputAndError( - compare_cmd, merge_stderr=True) - - self._comparison_results[name] = self.ComparisonResults() - if rc == 0: - self._comparison_results[name].triage_link_omission_reason = ( - 'Comparison succeeded, no triage link') - elif self._gold_properties.IsTryjobRun(): - # TODO(skbug.com/9879): Remove the explicit corpus when Gold's UI is - # updated to show results from all corpora for tryjobs. - cl_triage_link = ('https://{instance}-gold.skia.org/search?' - 'issue={issue}&' - 'new_clstore=true&' - 'query=source_type%3D{corpus}') - cl_triage_link = cl_triage_link.format( - instance=self._instance, - issue=self._gold_properties.issue, - corpus=self._corpus) - self._comparison_results[name].triage_link = cl_triage_link - else: - try: - with open(self._triage_link_file) as tlf: - triage_link = tlf.read().strip() - self._comparison_results[name].triage_link = triage_link - except IOError: - self._comparison_results[name].triage_link_omission_reason = ( - 'Failed to read triage link from file') - return rc, stdout - - def Diff(self, name, png_file, output_manager): - """Performs a local image diff against the closest known positive in Gold. - - This is used for running tests on a workstation, where uploading data to - Gold for ingestion is not allowed, and thus the web UI is not available. - - Image links can later be retrieved using Get*ImageLink(). - - Args: - name: The name of the image being compared. - png_file: The path to a PNG file containing the image to be diffed. - output_manager: The output manager used to save local diff images. 
- - Returns: - A tuple (return_code, output). |return_code| is the return code of the - diff process. |output| is the stdout + stderr of the diff process. - """ - # Instead of returning that everything is okay and putting in dummy links, - # just fail since this should only be called when running locally and - # --bypass-skia-gold-functionality is only meant for use on the bots. - if self._gold_properties.bypass_skia_gold_functionality: - raise RuntimeError( - '--bypass-skia-gold-functionality is not supported when running ' - 'tests locally.') - - # Output managers only support archived files, not directories, so we have - # to use a temporary directory and later move the data into the archived - # files. - output_dir = tempfile.mkdtemp(dir=self._working_dir) - diff_cmd = [ - GOLDCTL_BINARY, - 'diff', - '--corpus', - self._corpus, - '--instance', - self._instance, - '--input', - png_file, - '--test', - name, - '--work-dir', - self._working_dir, - '--out-dir', - output_dir, - ] - rc, stdout, _ = cmd_helper.GetCmdStatusOutputAndError( - diff_cmd, merge_stderr=True) given_path = closest_path = diff_path = None # The directory should contain "input-<hash>.png", "closest-<hash>.png", # and "diff.png". @@ -338,272 +37,46 @@ class SkiaGoldSession(object): closest_path = filepath elif f == 'diff.png': diff_path = filepath - results = self._comparison_results.setdefault(name, + results = self._comparison_results.setdefault(image_name, self.ComparisonResults()) if given_path: - with output_manager.ArchivedTempfile('given_%s.png' % name, + with output_manager.ArchivedTempfile('given_%s.png' % image_name, 'gold_local_diffs', Datatype.PNG) as given_file: shutil.move(given_path, given_file.name) results.local_diff_given_image = given_file.Link() if closest_path: - with output_manager.ArchivedTempfile('closest_%s.png' % name, + with output_manager.ArchivedTempfile('closest_%s.png' % image_name, 'gold_local_diffs', Datatype.PNG) as closest_file: shutil.move(closest_path, closest_file.name) results.local_diff_closest_image = closest_file.Link() if diff_path: - with output_manager.ArchivedTempfile( - 'diff_%s.png' % name, 'gold_local_diffs', Datatype.PNG) as diff_file: + with output_manager.ArchivedTempfile('diff_%s.png' % image_name, + 'gold_local_diffs', + Datatype.PNG) as diff_file: shutil.move(diff_path, diff_file.name) results.local_diff_diff_image = diff_file.Link() - return rc, stdout - - def GetTriageLink(self, name): - """Gets the triage link for the given image. - Args: - name: The name of the image to retrieve the triage link for. - - Returns: - A string containing the triage link if it is available, or None if it is - not available for some reason. The reason can be retrieved using - GetTriageLinkOmissionReason. - """ - return self._comparison_results.get(name, - self.ComparisonResults()).triage_link - - def GetTriageLinkOmissionReason(self, name): - """Gets the reason why a triage link is not available for an image. - - Args: - name: The name of the image whose triage link does not exist. - - Returns: - A string containing the reason why a triage link is not available. - """ - if name not in self._comparison_results: - return 'No image comparison performed for %s' % name - results = self._comparison_results[name] - # This method should not be called if there is a valid triage link. 
- assert results.triage_link is None - if results.triage_link_omission_reason: - return results.triage_link_omission_reason - if results.local_diff_given_image: - return 'Gold only used to do a local image diff' - raise RuntimeError( - 'Somehow have a ComparisonResults instance for %s that should not ' - 'exist' % name) - - def GetGivenImageLink(self, name): - """Gets the link to the given image used for local diffing. - - Args: - name: The name of the image that was diffed. - - Returns: - A string containing the link to where the image is saved, or None if it - does not exist. Since local diffing should only be done when running - locally, this *should* be a file:// URL, but there is no guarantee of - that. - """ - assert name in self._comparison_results - return self._comparison_results[name].local_diff_given_image - - def GetClosestImageLink(self, name): - """Gets the link to the closest known image used for local diffing. - - Args: - name: The name of the image that was diffed. - - Returns: - A string containing the link to where the image is saved, or None if it - does not exist. Since local diffing should only be done when running - locally, this *should* be a file:// URL, but there is no guarantee of - that. - """ - assert name in self._comparison_results - return self._comparison_results[name].local_diff_closest_image - - def GetDiffImageLink(self, name): - """Gets the link to the diff between the given and closest images. - - Args: - name: The name of the image that was diffed. - - Returns: - A string containing the link to where the image is saved, or None if it - does not exist. Since local diffing should only be done when running - locally, this *should* be a file:// URL, but there is no guarantee of - that. - """ - assert name in self._comparison_results - return self._comparison_results[name].local_diff_diff_image - - -class SkiaGoldSessionManager(object): - def __init__(self, working_dir, gold_properties): - """Class to manage one or more SkiaGoldSessions. - - A separate session is required for each instance/corpus/keys_file - combination, so this class will lazily create them as necessary. - - Args: - working_dir: The working directory under which each individual - SkiaGoldSessions' working directory will be created. - gold_properties: A SkiaGoldProperties instance that will be used to create - any SkiaGoldSessions. - """ - self._working_dir = working_dir - self._gold_properties = gold_properties - self._sessions = {} - - def GetSkiaGoldSession(self, - keys_file, - corpus=None, - instance=DEFAULT_INSTANCE): - """Gets a SkiaGoldSession for the given arguments. - - Lazily creates one if necessary. - - Args: - keys_file: A path to a JSON file containing various comparison config - data such as corpus and debug information like the hardware/software - configuration the image was produced on. - corpus: The corpus the session is for. If None, the corpus will be - determined using available information. - instance: The name of the Skia Gold instance to interact with. - """ - with open(keys_file) as f: - keys = json.load(f) - keys_string = json.dumps(keys, sort_keys=True) - if corpus is None: - corpus = keys.get('source_type', instance) - # Use the string representation of the keys JSON as a proxy for a hash since - # dicts themselves are not hashable. 
- session = self._sessions.setdefault(instance, - {}).setdefault(corpus, {}).setdefault( - keys_string, None) - if not session: - working_dir = tempfile.mkdtemp(dir=self._working_dir) - session = SkiaGoldSession(working_dir, self._gold_properties, keys_file, - corpus, instance) - self._sessions[instance][corpus][keys_string] = session - return session - - -class SkiaGoldProperties(object): - def __init__(self, args): - """Class to validate and store properties related to Skia Gold. - - Args: - args: The parsed arguments from an argparse.ArgumentParser. - """ - self._git_revision = None - self._issue = None - self._patchset = None - self._job_id = None - self._local_pixel_tests = None - self._no_luci_auth = None - self._bypass_skia_gold_functionality = None - - # Could in theory be configurable, but hard-coded for now since there's - # no plan to support anything else. - self._code_review_system = 'gerrit' - self._continuous_integration_system = 'buildbucket' - - self._InitializeProperties(args) - - def IsTryjobRun(self): - return self.issue is not None - - @property - def continuous_integration_system(self): - return self._continuous_integration_system - - @property - def code_review_system(self): - return self._code_review_system - - @property - def git_revision(self): - return self._GetGitRevision() - - @property - def issue(self): - return self._issue - - @property - def job_id(self): - return self._job_id - - @property - def local_pixel_tests(self): - return self._IsLocalRun() - - @property - def no_luci_auth(self): - return self._no_luci_auth - - @property - def patchset(self): - return self._patchset - - @property - def bypass_skia_gold_functionality(self): - return self._bypass_skia_gold_functionality - - def _GetGitRevision(self): - if not self._git_revision: - # Automated tests should always pass the revision, so assume we're on - # a workstation and try to get the local origin/master HEAD. - if not self._IsLocalRun(): - raise RuntimeError( - '--git-revision was not passed when running on a bot') - revision = repo_utils.GetGitOriginMasterHeadSHA1( - host_paths.DIR_SOURCE_ROOT) - if not revision or len(revision) != 40: - raise RuntimeError( - '--git-revision not passed and unable to determine from git') - self._git_revision = revision - return self._git_revision - - def _IsLocalRun(self): - if self._local_pixel_tests is None: - self._local_pixel_tests = not local_utils.IsOnSwarming() - if self._local_pixel_tests: - logging.warning( - 'Automatically determined that test is running on a workstation') - else: - logging.warning( - 'Automatically determined that test is running on a bot') - return self._local_pixel_tests + @staticmethod + def _RunCmdForRcAndOutput(cmd): + rc, stdout, _ = cmd_helper.GetCmdStatusOutputAndError(cmd, + merge_stderr=True) + return rc, stdout - def _InitializeProperties(self, args): - if hasattr(args, 'local_pixel_tests'): - # If not set, will be automatically determined later if needed. - self._local_pixel_tests = args.local_pixel_tests - if hasattr(args, 'no_luci_auth'): - self._no_luci_auth = args.no_luci_auth +class AndroidSkiaGoldSessionManager( + skia_gold_session_manager.SkiaGoldSessionManager): + @staticmethod + def _GetDefaultInstance(): + return 'chrome' - if hasattr(args, 'bypass_skia_gold_functionality'): - self._bypass_skia_gold_functionality = args.bypass_skia_gold_functionality + @staticmethod + def _GetSessionClass(): + return AndroidSkiaGoldSession - # Will be automatically determined later if needed. 
- if not hasattr(args, 'git_revision') or not args.git_revision: - return - self._git_revision = args.git_revision - # Only expected on tryjob runs. - if not hasattr(args, 'gerrit_issue') or not args.gerrit_issue: - return - self._issue = args.gerrit_issue - if not hasattr(args, 'gerrit_patchset') or not args.gerrit_patchset: - raise RuntimeError( - '--gerrit-issue passed, but --gerrit-patchset not passed.') - self._patchset = args.gerrit_patchset - if not hasattr(args, 'buildbucket_id') or not args.buildbucket_id: - raise RuntimeError( - '--gerrit-issue passed, but --buildbucket-id not passed.') - self._job_id = args.buildbucket_id +class AndroidSkiaGoldProperties(skia_gold_properties.SkiaGoldProperties): + @staticmethod + def _GetGitOriginMasterHeadSha1(): + return repo_utils.GetGitOriginMasterHeadSHA1(host_paths.DIR_SOURCE_ROOT) diff --git a/chromium/build/android/pylib/utils/gold_utils_test.py b/chromium/build/android/pylib/utils/gold_utils_test.py index ae3f7ecb3c3..3499484e612 100755 --- a/chromium/build/android/pylib/utils/gold_utils_test.py +++ b/chromium/build/android/pylib/utils/gold_utils_test.py @@ -6,39 +6,21 @@ #pylint: disable=protected-access -import collections -import json +import contextlib import os +import tempfile import unittest from pylib.constants import host_paths from pylib.utils import gold_utils -from py_utils import tempfile_ext -with host_paths.SysPath(host_paths.PYMOCK_PATH): - import mock # pylint: disable=import-error +with host_paths.SysPath(host_paths.BUILD_PATH): + from skia_gold_common import unittest_utils -_SkiaGoldArgs = collections.namedtuple('_SkiaGoldArgs', [ - 'local_pixel_tests', - 'no_luci_auth', - 'git_revision', - 'gerrit_issue', - 'gerrit_patchset', - 'buildbucket_id', - 'bypass_skia_gold_functionality', -]) +import mock # pylint: disable=import-error +from pyfakefs import fake_filesystem_unittest # pylint: disable=import-error - -def createSkiaGoldArgs(local_pixel_tests=None, - no_luci_auth=None, - git_revision=None, - gerrit_issue=None, - gerrit_patchset=None, - buildbucket_id=None, - bypass_skia_gold_functionality=None): - return _SkiaGoldArgs(local_pixel_tests, no_luci_auth, git_revision, - gerrit_issue, gerrit_patchset, buildbucket_id, - bypass_skia_gold_functionality) +createSkiaGoldArgs = unittest_utils.createSkiaGoldArgs def assertArgWith(test, arg_list, arg, value): @@ -46,852 +28,85 @@ def assertArgWith(test, arg_list, arg, value): test.assertEqual(arg_list[i + 1], value) -class SkiaGoldSessionRunComparisonTest(unittest.TestCase): - """Tests the functionality of SkiaGoldSession.RunComparison.""" - - @mock.patch.object(gold_utils.SkiaGoldSession, 'Diff') - @mock.patch.object(gold_utils.SkiaGoldSession, 'Compare') - @mock.patch.object(gold_utils.SkiaGoldSession, 'Initialize') - @mock.patch.object(gold_utils.SkiaGoldSession, 'Authenticate') - def test_comparisonSuccess(self, auth_mock, init_mock, compare_mock, - diff_mock): - auth_mock.return_value = (0, None) - init_mock.return_value = (0, None) - compare_mock.return_value = (0, None) - with tempfile_ext.NamedTemporaryDirectory() as working_dir: - keys_file = os.path.join(working_dir, 'keys.json') - with open(os.path.join(working_dir, 'keys.json'), 'w') as f: - json.dump({}, f) - session = gold_utils.SkiaGoldSession(working_dir, None, keys_file, None) - status, _ = session.RunComparison(None, None, None) - self.assertEqual(status, gold_utils.SkiaGoldSession.StatusCodes.SUCCESS) - self.assertEqual(auth_mock.call_count, 1) - self.assertEqual(init_mock.call_count, 1) - 
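With those overrides in place, the rewritten gold_utils.py is a thin adapter over //build/skia_gold_common: command execution goes through devil's cmd_helper, diff artifacts land in the pylib OutputManager, and everything else is inherited. A hedged wiring sketch, assuming the shared base classes keep the GetSkiaGoldSession()/RunComparison()/StatusCodes entry points that the removed in-tree implementation exposed (args, working_dir, keys_file, png_file, and output_manager are illustrative):

  import logging

  from pylib.utils import gold_utils

  sgp = gold_utils.AndroidSkiaGoldProperties(args)  # args: a parsed argparse namespace.
  sgsm = gold_utils.AndroidSkiaGoldSessionManager(working_dir, sgp)
  session = sgsm.GetSkiaGoldSession(keys_file)  # Instance defaults to 'chrome' here.
  status, error = session.RunComparison('image_name', png_file, output_manager)
  if status != session.StatusCodes.SUCCESS:
    logging.error('Gold comparison failed: %s', error)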
self.assertEqual(compare_mock.call_count, 1) - self.assertEqual(diff_mock.call_count, 0) - - @mock.patch.object(gold_utils.SkiaGoldSession, 'Diff') - @mock.patch.object(gold_utils.SkiaGoldSession, 'Compare') - @mock.patch.object(gold_utils.SkiaGoldSession, 'Initialize') - @mock.patch.object(gold_utils.SkiaGoldSession, 'Authenticate') - def test_authFailure(self, auth_mock, init_mock, compare_mock, diff_mock): - auth_mock.return_value = (1, 'Auth failed') - with tempfile_ext.NamedTemporaryDirectory() as working_dir: - session = gold_utils.SkiaGoldSession(working_dir, None, None, None) - status, error = session.RunComparison(None, None, None) - self.assertEqual(status, - gold_utils.SkiaGoldSession.StatusCodes.AUTH_FAILURE) - self.assertEqual(error, 'Auth failed') - self.assertEqual(auth_mock.call_count, 1) - self.assertEqual(init_mock.call_count, 0) - self.assertEqual(compare_mock.call_count, 0) - self.assertEqual(diff_mock.call_count, 0) - - @mock.patch.object(gold_utils.SkiaGoldSession, 'Diff') - @mock.patch.object(gold_utils.SkiaGoldSession, 'Compare') - @mock.patch.object(gold_utils.SkiaGoldSession, 'Initialize') - @mock.patch.object(gold_utils.SkiaGoldSession, 'Authenticate') - def test_initFailure(self, auth_mock, init_mock, compare_mock, diff_mock): - auth_mock.return_value = (0, None) - init_mock.return_value = (1, 'Init failed') - with tempfile_ext.NamedTemporaryDirectory() as working_dir: - session = gold_utils.SkiaGoldSession(working_dir, None, None, None) - status, error = session.RunComparison(None, None, None) - self.assertEqual(status, - gold_utils.SkiaGoldSession.StatusCodes.INIT_FAILURE) - self.assertEqual(error, 'Init failed') - self.assertEqual(auth_mock.call_count, 1) - self.assertEqual(init_mock.call_count, 1) - self.assertEqual(compare_mock.call_count, 0) - self.assertEqual(diff_mock.call_count, 0) - - @mock.patch.object(gold_utils.SkiaGoldSession, 'Diff') - @mock.patch.object(gold_utils.SkiaGoldSession, 'Compare') - @mock.patch.object(gold_utils.SkiaGoldSession, 'Initialize') - @mock.patch.object(gold_utils.SkiaGoldSession, 'Authenticate') - def test_compareFailureRemote(self, auth_mock, init_mock, compare_mock, - diff_mock): - auth_mock.return_value = (0, None) - init_mock.return_value = (0, None) - compare_mock.return_value = (1, 'Compare failed') - args = createSkiaGoldArgs(local_pixel_tests=False) - sgp = gold_utils.SkiaGoldProperties(args) - with tempfile_ext.NamedTemporaryDirectory() as working_dir: - keys_file = os.path.join(working_dir, 'keys.json') - with open(os.path.join(working_dir, 'keys.json'), 'w') as f: - json.dump({}, f) - session = gold_utils.SkiaGoldSession(working_dir, sgp, keys_file, None) - status, error = session.RunComparison(None, None, None) - self.assertEqual( - status, - gold_utils.SkiaGoldSession.StatusCodes.COMPARISON_FAILURE_REMOTE) - self.assertEqual(error, 'Compare failed') - self.assertEqual(auth_mock.call_count, 1) - self.assertEqual(init_mock.call_count, 1) - self.assertEqual(compare_mock.call_count, 1) - self.assertEqual(diff_mock.call_count, 0) - - @mock.patch.object(gold_utils.SkiaGoldSession, 'Diff') - @mock.patch.object(gold_utils.SkiaGoldSession, 'Compare') - @mock.patch.object(gold_utils.SkiaGoldSession, 'Initialize') - @mock.patch.object(gold_utils.SkiaGoldSession, 'Authenticate') - def test_compareFailureLocal(self, auth_mock, init_mock, compare_mock, - diff_mock): - auth_mock.return_value = (0, None) - init_mock.return_value = (0, None) - compare_mock.return_value = (1, 'Compare failed') - diff_mock.return_value = (0, None) 
- args = createSkiaGoldArgs(local_pixel_tests=True) - sgp = gold_utils.SkiaGoldProperties(args) - with tempfile_ext.NamedTemporaryDirectory() as working_dir: - keys_file = os.path.join(working_dir, 'keys.json') - with open(os.path.join(working_dir, 'keys.json'), 'w') as f: - json.dump({}, f) - session = gold_utils.SkiaGoldSession(working_dir, sgp, keys_file, None) - status, error = session.RunComparison(None, None, - 'Definitely an output manager') - self.assertEqual( - status, - gold_utils.SkiaGoldSession.StatusCodes.COMPARISON_FAILURE_LOCAL) - self.assertEqual(error, 'Compare failed') - self.assertEqual(auth_mock.call_count, 1) - self.assertEqual(init_mock.call_count, 1) - self.assertEqual(compare_mock.call_count, 1) - self.assertEqual(diff_mock.call_count, 1) - - @mock.patch.object(gold_utils.SkiaGoldSession, 'Diff') - @mock.patch.object(gold_utils.SkiaGoldSession, 'Compare') - @mock.patch.object(gold_utils.SkiaGoldSession, 'Initialize') - @mock.patch.object(gold_utils.SkiaGoldSession, 'Authenticate') - def test_diffFailure(self, auth_mock, init_mock, compare_mock, diff_mock): - auth_mock.return_value = (0, None) - init_mock.return_value = (0, None) - compare_mock.return_value = (1, 'Compare failed') - diff_mock.return_value = (1, 'Diff failed') - args = createSkiaGoldArgs(local_pixel_tests=True) - sgp = gold_utils.SkiaGoldProperties(args) - with tempfile_ext.NamedTemporaryDirectory() as working_dir: - keys_file = os.path.join(working_dir, 'keys.json') - with open(os.path.join(working_dir, 'keys.json'), 'w') as f: - json.dump({}, f) - session = gold_utils.SkiaGoldSession(working_dir, sgp, keys_file, None) - status, error = session.RunComparison(None, None, - 'Definitely an output manager') - self.assertEqual( - status, gold_utils.SkiaGoldSession.StatusCodes.LOCAL_DIFF_FAILURE) - self.assertEqual(error, 'Diff failed') - self.assertEqual(auth_mock.call_count, 1) - self.assertEqual(init_mock.call_count, 1) - self.assertEqual(compare_mock.call_count, 1) - self.assertEqual(diff_mock.call_count, 1) - - @mock.patch.object(gold_utils.SkiaGoldSession, 'Diff') - @mock.patch.object(gold_utils.SkiaGoldSession, 'Compare') - @mock.patch.object(gold_utils.SkiaGoldSession, 'Initialize') - @mock.patch.object(gold_utils.SkiaGoldSession, 'Authenticate') - def test_noOutputDirLocal(self, auth_mock, init_mock, compare_mock, - diff_mock): - auth_mock.return_value = (0, None) - init_mock.return_value = (0, None) - compare_mock.return_value = (1, 'Compare failed') - diff_mock.return_value = (0, None) - args = createSkiaGoldArgs(local_pixel_tests=True) - sgp = gold_utils.SkiaGoldProperties(args) - with tempfile_ext.NamedTemporaryDirectory() as working_dir: - keys_file = os.path.join(working_dir, 'keys.json') - with open(os.path.join(working_dir, 'keys.json'), 'w') as f: - json.dump({}, f) - session = gold_utils.SkiaGoldSession(working_dir, sgp, keys_file, None) - status, error = session.RunComparison(None, None, None) - self.assertEqual(status, - gold_utils.SkiaGoldSession.StatusCodes.NO_OUTPUT_MANAGER) - self.assertEqual(error, 'No output manager for local diff images') - self.assertEqual(auth_mock.call_count, 1) - self.assertEqual(compare_mock.call_count, 1) - self.assertEqual(diff_mock.call_count, 0) - - -class SkiaGoldSessionAuthenticateTest(unittest.TestCase): - """Tests the functionality of SkiaGoldSession.Authenticate.""" - - @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError') - def test_commandOutputReturned(self, cmd_mock): - cmd_mock.return_value = (1, 'Something bad :(', None) - args = 
createSkiaGoldArgs(git_revision='a') - sgp = gold_utils.SkiaGoldProperties(args) - with tempfile_ext.NamedTemporaryDirectory() as working_dir: - session = gold_utils.SkiaGoldSession(working_dir, sgp, None, None) - rc, stdout = session.Authenticate() - self.assertEqual(cmd_mock.call_count, 1) - self.assertEqual(rc, 1) - self.assertEqual(stdout, 'Something bad :(') - - @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError') - def test_bypassSkiaGoldFunctionality(self, cmd_mock): - cmd_mock.return_value = (None, None, None) - args = createSkiaGoldArgs( - git_revision='a', bypass_skia_gold_functionality=True) - sgp = gold_utils.SkiaGoldProperties(args) - with tempfile_ext.NamedTemporaryDirectory() as working_dir: - session = gold_utils.SkiaGoldSession(working_dir, sgp, None, None) - rc, _ = session.Authenticate() - self.assertEqual(rc, 0) - cmd_mock.assert_not_called() +class AndroidSkiaGoldSessionDiffTest(fake_filesystem_unittest.TestCase): + def setUp(self): + self.setUpPyfakefs() + self._working_dir = tempfile.mkdtemp() - @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError') - def test_shortCircuitAlreadyAuthenticated(self, cmd_mock): - cmd_mock.return_value = (None, None, None) - args = createSkiaGoldArgs(git_revision='a') - sgp = gold_utils.SkiaGoldProperties(args) - with tempfile_ext.NamedTemporaryDirectory() as working_dir: - session = gold_utils.SkiaGoldSession(working_dir, sgp, None, None) - session._authenticated = True - rc, _ = session.Authenticate() - self.assertEqual(rc, 0) - cmd_mock.assert_not_called() - - @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError') - def test_successSetsShortCircuit(self, cmd_mock): - cmd_mock.return_value = (0, None, None) - args = createSkiaGoldArgs(git_revision='a') - sgp = gold_utils.SkiaGoldProperties(args) - with tempfile_ext.NamedTemporaryDirectory() as working_dir: - session = gold_utils.SkiaGoldSession(working_dir, sgp, None, None) - self.assertFalse(session._authenticated) - rc, _ = session.Authenticate() - self.assertEqual(rc, 0) - self.assertTrue(session._authenticated) - cmd_mock.assert_not_called() - - @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError') - def test_failureDoesNotSetShortCircuit(self, cmd_mock): - cmd_mock.return_value = (1, None, None) - args = createSkiaGoldArgs(git_revision='a') - sgp = gold_utils.SkiaGoldProperties(args) - with tempfile_ext.NamedTemporaryDirectory() as working_dir: - session = gold_utils.SkiaGoldSession(working_dir, sgp, None, None) - self.assertFalse(session._authenticated) - rc, _ = session.Authenticate() - self.assertEqual(rc, 1) - self.assertFalse(session._authenticated) - cmd_mock.assert_not_called() - - @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError') - def test_commandWithUseLuciTrue(self, cmd_mock): - cmd_mock.return_value = (None, None, None) - args = createSkiaGoldArgs(git_revision='a') - sgp = gold_utils.SkiaGoldProperties(args) - with tempfile_ext.NamedTemporaryDirectory() as working_dir: - session = gold_utils.SkiaGoldSession(working_dir, sgp, None, None) - session.Authenticate(use_luci=True) - self.assertIn('--luci', cmd_mock.call_args[0][0]) - - @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError') - def test_commandWithUseLuciFalse(self, cmd_mock): - cmd_mock.return_value = (None, None, None) - args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=True) - sgp = gold_utils.SkiaGoldProperties(args) - with tempfile_ext.NamedTemporaryDirectory() as working_dir: - session = gold_utils.SkiaGoldSession(working_dir, 
sgp, None, None) - session.Authenticate(use_luci=False) - self.assertNotIn('--luci', cmd_mock.call_args[0][0]) - - @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError') - def test_commandWithUseLuciFalseNotLocal(self, cmd_mock): - cmd_mock.return_value = (None, None, None) - args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=False) - sgp = gold_utils.SkiaGoldProperties(args) - with tempfile_ext.NamedTemporaryDirectory() as working_dir: - session = gold_utils.SkiaGoldSession(working_dir, sgp, None, None) - with self.assertRaises(RuntimeError): - session.Authenticate(use_luci=False) - - @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError') + @mock.patch.object(gold_utils.AndroidSkiaGoldSession, '_RunCmdForRcAndOutput') def test_commandCommonArgs(self, cmd_mock): - cmd_mock.return_value = (None, None, None) - args = createSkiaGoldArgs(git_revision='a') - sgp = gold_utils.SkiaGoldProperties(args) - with tempfile_ext.NamedTemporaryDirectory() as working_dir: - session = gold_utils.SkiaGoldSession(working_dir, sgp, None, None) - session.Authenticate() - call_args = cmd_mock.call_args[0][0] - self.assertIn('auth', call_args) - assertArgWith(self, call_args, '--work-dir', working_dir) - - -class SkiaGoldSessionInitializeTest(unittest.TestCase): - """Tests the functionality of SkiaGoldSession.Initialize.""" - - @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError') - def test_bypassSkiaGoldFunctionality(self, cmd_mock): - cmd_mock.return_value = (None, None, None) - args = createSkiaGoldArgs( - git_revision='a', bypass_skia_gold_functionality=True) - sgp = gold_utils.SkiaGoldProperties(args) - with tempfile_ext.NamedTemporaryDirectory() as working_dir: - session = gold_utils.SkiaGoldSession(working_dir, sgp, None, None) - rc, _ = session.Initialize() - self.assertEqual(rc, 0) - cmd_mock.assert_not_called() - - @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError') - def test_shortCircuitAlreadyInitialized(self, cmd_mock): - cmd_mock.return_value = (None, None, None) - args = createSkiaGoldArgs(git_revision='a') - sgp = gold_utils.SkiaGoldProperties(args) - with tempfile_ext.NamedTemporaryDirectory() as working_dir: - session = gold_utils.SkiaGoldSession(working_dir, sgp, None, None) - session._initialized = True - rc, _ = session.Initialize() - self.assertEqual(rc, 0) - cmd_mock.assert_not_called() - - @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError') - def test_successSetsShortCircuit(self, cmd_mock): - cmd_mock.return_value = (0, None, None) - args = createSkiaGoldArgs(git_revision='a') - sgp = gold_utils.SkiaGoldProperties(args) - with tempfile_ext.NamedTemporaryDirectory() as working_dir: - session = gold_utils.SkiaGoldSession(working_dir, sgp, None, None) - self.assertFalse(session._initialized) - rc, _ = session.Initialize() - self.assertEqual(rc, 0) - self.assertTrue(session._initialized) - cmd_mock.assert_not_called() - - @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError') - def test_failureDoesNotSetShortCircuit(self, cmd_mock): - cmd_mock.return_value = (1, None, None) - args = createSkiaGoldArgs(git_revision='a') - sgp = gold_utils.SkiaGoldProperties(args) - with tempfile_ext.NamedTemporaryDirectory() as working_dir: - session = gold_utils.SkiaGoldSession(working_dir, sgp, None, None) - self.assertFalse(session._initialized) - rc, _ = session.Initialize() - self.assertEqual(rc, 1) - self.assertFalse(session._initialized) - cmd_mock.assert_not_called() - - 
@mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError') - def test_commandCommonArgs(self, cmd_mock): - cmd_mock.return_value = (None, None, None) - args = createSkiaGoldArgs(git_revision='a') - sgp = gold_utils.SkiaGoldProperties(args) - with tempfile_ext.NamedTemporaryDirectory() as working_dir: - session = gold_utils.SkiaGoldSession( - working_dir, sgp, 'keys_file', 'corpus', instance='instance') - session.Initialize() - call_args = cmd_mock.call_args[0][0] - self.assertIn('imgtest', call_args) - self.assertIn('init', call_args) - self.assertIn('--passfail', call_args) - assertArgWith(self, call_args, '--instance', 'instance') - assertArgWith(self, call_args, '--corpus', 'corpus') - assertArgWith(self, call_args, '--keys-file', 'keys_file') - assertArgWith(self, call_args, '--work-dir', working_dir) - assertArgWith(self, call_args, '--failure-file', session._triage_link_file) - assertArgWith(self, call_args, '--commit', 'a') - - @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError') - def test_commandTryjobArgs(self, cmd_mock): - cmd_mock.return_value = (None, None, None) - args = createSkiaGoldArgs( - git_revision='a', gerrit_issue=1, gerrit_patchset=2, buildbucket_id=3) - sgp = gold_utils.SkiaGoldProperties(args) - with tempfile_ext.NamedTemporaryDirectory() as working_dir: - session = gold_utils.SkiaGoldSession(working_dir, sgp, None, None) - session.Initialize() - call_args = cmd_mock.call_args[0][0] - assertArgWith(self, call_args, '--issue', '1') - assertArgWith(self, call_args, '--patchset', '2') - assertArgWith(self, call_args, '--jobid', '3') - assertArgWith(self, call_args, '--crs', 'gerrit') - assertArgWith(self, call_args, '--cis', 'buildbucket') - - @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError') - def test_commandTryjobArgsMissing(self, cmd_mock): - cmd_mock.return_value = (None, None, None) - args = createSkiaGoldArgs(git_revision='a') - sgp = gold_utils.SkiaGoldProperties(args) - with tempfile_ext.NamedTemporaryDirectory() as working_dir: - session = gold_utils.SkiaGoldSession(working_dir, sgp, None, None) - session.Initialize() - call_args = cmd_mock.call_args[0][0] - self.assertNotIn('--issue', call_args) - self.assertNotIn('--patchset', call_args) - self.assertNotIn('--jobid', call_args) - self.assertNotIn('--crs', call_args) - self.assertNotIn('--cis', call_args) - - -class SkiaGoldSessionCompareTest(unittest.TestCase): - """Tests the functionality of SkiaGoldSession.Compare.""" - - @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError') - def test_commandOutputReturned(self, cmd_mock): - cmd_mock.return_value = (1, 'Something bad :(', None) - args = createSkiaGoldArgs(git_revision='a') - sgp = gold_utils.SkiaGoldProperties(args) - with tempfile_ext.NamedTemporaryDirectory() as working_dir: - session = gold_utils.SkiaGoldSession(working_dir, sgp, None, None) - rc, stdout = session.Compare(None, None) - self.assertEqual(cmd_mock.call_count, 1) - self.assertEqual(rc, 1) - self.assertEqual(stdout, 'Something bad :(') - - @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError') - def test_bypassSkiaGoldFunctionality(self, cmd_mock): - cmd_mock.return_value = (None, None, None) - args = createSkiaGoldArgs( - git_revision='a', bypass_skia_gold_functionality=True) - sgp = gold_utils.SkiaGoldProperties(args) - with tempfile_ext.NamedTemporaryDirectory() as working_dir: - session = gold_utils.SkiaGoldSession(working_dir, sgp, None, None) - rc, _ = session.Compare(None, None) - self.assertEqual(rc, 0) - 
cmd_mock.assert_not_called() - - @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError') - def test_commandWithLocalPixelTestsTrue(self, cmd_mock): - cmd_mock.return_value = (None, None, None) - args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=True) - sgp = gold_utils.SkiaGoldProperties(args) - with tempfile_ext.NamedTemporaryDirectory() as working_dir: - session = gold_utils.SkiaGoldSession(working_dir, sgp, None, None) - session.Compare(None, None) - self.assertIn('--dryrun', cmd_mock.call_args[0][0]) - - @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError') - def test_commandWithLocalPixelTestsFalse(self, cmd_mock): - cmd_mock.return_value = (None, None, None) + cmd_mock.return_value = (None, None) args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=False) - sgp = gold_utils.SkiaGoldProperties(args) - with tempfile_ext.NamedTemporaryDirectory() as working_dir: - session = gold_utils.SkiaGoldSession(working_dir, sgp, None, None) - session.Compare(None, None) - self.assertNotIn('--dryrun', cmd_mock.call_args[0][0]) - - @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError') - def test_commandCommonArgs(self, cmd_mock): - cmd_mock.return_value = (None, None, None) - args = createSkiaGoldArgs(git_revision='a') - sgp = gold_utils.SkiaGoldProperties(args) - with tempfile_ext.NamedTemporaryDirectory() as working_dir: - session = gold_utils.SkiaGoldSession( - working_dir, sgp, 'keys_file', 'corpus', instance='instance') - session.Compare('name', 'png_file') - call_args = cmd_mock.call_args[0][0] - self.assertIn('imgtest', call_args) - self.assertIn('add', call_args) - assertArgWith(self, call_args, '--test-name', 'name') - assertArgWith(self, call_args, '--png-file', 'png_file') - assertArgWith(self, call_args, '--work-dir', working_dir) - - @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError') - def test_noLinkOnSuccess(self, cmd_mock): - cmd_mock.return_value = (0, None, None) - args = createSkiaGoldArgs(git_revision='a') - sgp = gold_utils.SkiaGoldProperties(args) - with tempfile_ext.NamedTemporaryDirectory() as working_dir: - session = gold_utils.SkiaGoldSession(working_dir, sgp, 'keys_file', None) - rc, _ = session.Compare('name', 'png_file') - self.assertEqual(rc, 0) - self.assertEqual(session._comparison_results['name'].triage_link, None) - self.assertNotEqual( - session._comparison_results['name'].triage_link_omission_reason, None) - - @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError') - def test_clLinkOnTrybot(self, cmd_mock): - cmd_mock.return_value = (1, None, None) - args = createSkiaGoldArgs( - git_revision='a', gerrit_issue=1, gerrit_patchset=2, buildbucket_id=3) - sgp = gold_utils.SkiaGoldProperties(args) - with tempfile_ext.NamedTemporaryDirectory() as working_dir: - session = gold_utils.SkiaGoldSession(working_dir, sgp, 'keys_file', None) - rc, _ = session.Compare('name', 'png_file') - self.assertEqual(rc, 1) - self.assertNotEqual(session._comparison_results['name'].triage_link, None) - self.assertIn('issue=1', session._comparison_results['name'].triage_link) - self.assertEqual( - session._comparison_results['name'].triage_link_omission_reason, None) - - @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError') - def test_individualLinkOnCi(self, cmd_mock): - cmd_mock.return_value = (1, None, None) - args = createSkiaGoldArgs(git_revision='a') - sgp = gold_utils.SkiaGoldProperties(args) - with tempfile_ext.NamedTemporaryDirectory() as working_dir: - session = 
gold_utils.SkiaGoldSession(working_dir, sgp, 'keys_file', None) - m = mock.mock_open(read_data='foobar') - with mock.patch('__builtin__.open', m, create=True): - rc, _ = session.Compare('name', 'png_file') - self.assertEqual(rc, 1) - self.assertNotEqual(session._comparison_results['name'].triage_link, None) - self.assertEqual(session._comparison_results['name'].triage_link, 'foobar') - self.assertEqual( - session._comparison_results['name'].triage_link_omission_reason, None) - - @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError') - def test_validOmissionOnIoError(self, cmd_mock): - cmd_mock.return_value = (1, None, None) - args = createSkiaGoldArgs(git_revision='a') - sgp = gold_utils.SkiaGoldProperties(args) - with tempfile_ext.NamedTemporaryDirectory() as working_dir: - session = gold_utils.SkiaGoldSession(working_dir, sgp, 'keys_file', None) - m = mock.mock_open() - m.side_effect = IOError('No read today') - with mock.patch('__builtin__.open', m, create=True): - rc, _ = session.Compare('name', 'png_file') - self.assertEqual(rc, 1) - self.assertEqual(session._comparison_results['name'].triage_link, None) - self.assertNotEqual( - session._comparison_results['name'].triage_link_omission_reason, None) - self.assertIn( - 'Failed to read', - session._comparison_results['name'].triage_link_omission_reason) - - -class SkiaGoldSessionDiffTest(unittest.TestCase): - """Tests the functionality of SkiaGoldSession.Diff.""" - - @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError') - def test_commandOutputReturned(self, cmd_mock): - cmd_mock.return_value = (1, 'Something bad :(', None) - args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=False) - sgp = gold_utils.SkiaGoldProperties(args) - with tempfile_ext.NamedTemporaryDirectory() as working_dir: - session = gold_utils.SkiaGoldSession(working_dir, sgp, None, None) - rc, stdout = session.Diff(None, None, None) - self.assertEqual(cmd_mock.call_count, 1) - self.assertEqual(rc, 1) - self.assertEqual(stdout, 'Something bad :(') - - @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError') - def test_bypassSkiaGoldFunctionality(self, cmd_mock): - cmd_mock.return_value = (None, None, None) - args = createSkiaGoldArgs( - git_revision='a', bypass_skia_gold_functionality=True) - sgp = gold_utils.SkiaGoldProperties(args) - with tempfile_ext.NamedTemporaryDirectory() as working_dir: - session = gold_utils.SkiaGoldSession(working_dir, sgp, None, None) - with self.assertRaises(RuntimeError): - session.Diff(None, None, None) - - @mock.patch('devil.utils.cmd_helper.GetCmdStatusOutputAndError') - def test_commandCommonArgs(self, cmd_mock): - cmd_mock.return_value = (None, None, None) - args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=False) - sgp = gold_utils.SkiaGoldProperties(args) - with tempfile_ext.NamedTemporaryDirectory() as working_dir: - session = gold_utils.SkiaGoldSession( - working_dir, sgp, None, 'corpus', instance='instance') - session.Diff('name', 'png_file', None) + sgp = gold_utils.AndroidSkiaGoldProperties(args) + session = gold_utils.AndroidSkiaGoldSession(self._working_dir, + sgp, + None, + 'corpus', + instance='instance') + session.Diff('name', 'png_file', None) call_args = cmd_mock.call_args[0][0] self.assertIn('diff', call_args) assertArgWith(self, call_args, '--corpus', 'corpus') assertArgWith(self, call_args, '--instance', 'instance') assertArgWith(self, call_args, '--input', 'png_file') assertArgWith(self, call_args, '--test', 'name') - assertArgWith(self, call_args, '--work-dir', 
working_dir) + assertArgWith(self, call_args, '--work-dir', self._working_dir) i = call_args.index('--out-dir') # The output directory should be a subdirectory of the working directory. - self.assertIn(working_dir, call_args[i + 1]) - - -class SkiaGoldSessionTriageLinkOmissionTest(unittest.TestCase): - """Tests the functionality of SkiaGoldSession.GetTriageLinkOmissionReason.""" - - # Avoid having to bother with the working directory. - class FakeGoldSession(gold_utils.SkiaGoldSession): - def __init__(self): # pylint: disable=super-init-not-called - self._comparison_results = { - 'foo': gold_utils.SkiaGoldSession.ComparisonResults(), - } - - def test_noComparison(self): - session = self.FakeGoldSession() - session._comparison_results = {} - reason = session.GetTriageLinkOmissionReason('foo') - self.assertEqual(reason, 'No image comparison performed for foo') - - def test_validReason(self): - session = self.FakeGoldSession() - session._comparison_results['foo'].triage_link_omission_reason = 'bar' - reason = session.GetTriageLinkOmissionReason('foo') - self.assertEqual(reason, 'bar') - - def test_onlyLocal(self): - session = self.FakeGoldSession() - session._comparison_results['foo'].local_diff_given_image = 'bar' - reason = session.GetTriageLinkOmissionReason('foo') - self.assertEqual(reason, 'Gold only used to do a local image diff') - - def test_onlyWithoutTriageLink(self): - session = self.FakeGoldSession() - session._comparison_results['foo'].triage_link = 'bar' - with self.assertRaises(AssertionError): - session.GetTriageLinkOmissionReason('foo') - - def test_resultsShouldNotExist(self): - session = self.FakeGoldSession() - with self.assertRaises(RuntimeError): - session.GetTriageLinkOmissionReason('foo') - - -class SkiaGoldSessionManagerGetSessionTest(unittest.TestCase): - """Tests the functionality of SkiaGoldSessionManager.GetSkiaGoldSession.""" - - @mock.patch('gold_utils.SkiaGoldSession') - def test_ArgsForwardedToSession(self, _): - args = createSkiaGoldArgs() - sgp = gold_utils.SkiaGoldProperties(args) - with tempfile_ext.NamedTemporaryDirectory() as working_dir: - sgsm = gold_utils.SkiaGoldSessionManager(working_dir, sgp) - keys_file = os.path.join(working_dir, 'keys.json') - with open(keys_file, 'w') as f: - json.dump({}, f) - session = sgsm.GetSkiaGoldSession(keys_file, 'corpus', 'instance') - self.assertEqual(session._keys_file, keys_file) - self.assertEqual(session._corpus, 'corpus') - self.assertEqual(session._instance, 'instance') - # Make sure the session's working directory is a subdirectory of the - # manager's working directory. 
- self.assertEqual(os.path.dirname(session._working_dir), working_dir) - - @mock.patch('gold_utils.SkiaGoldSession') - def test_corpusFromJson(self, _): - args = createSkiaGoldArgs() - sgp = gold_utils.SkiaGoldProperties(args) - with tempfile_ext.NamedTemporaryDirectory() as working_dir: - sgsm = gold_utils.SkiaGoldSessionManager(working_dir, sgp) - keys_file = os.path.join(working_dir, 'keys.json') - with open(keys_file, 'w') as f: - json.dump({'source_type': 'foobar'}, f) - session = sgsm.GetSkiaGoldSession(keys_file, None, 'instance') - self.assertEqual(session._keys_file, keys_file) - self.assertEqual(session._corpus, 'foobar') - self.assertEqual(session._instance, 'instance') + self.assertIn(self._working_dir, call_args[i + 1]) - @mock.patch('gold_utils.SkiaGoldSession') - def test_corpusDefaultsToInstance(self, _): - args = createSkiaGoldArgs() - sgp = gold_utils.SkiaGoldProperties(args) - with tempfile_ext.NamedTemporaryDirectory() as working_dir: - sgsm = gold_utils.SkiaGoldSessionManager(working_dir, sgp) - keys_file = os.path.join(working_dir, 'keys.json') - with open(keys_file, 'w') as f: - json.dump({}, f) - session = sgsm.GetSkiaGoldSession(keys_file, None, 'instance') - self.assertEqual(session._keys_file, keys_file) - self.assertEqual(session._corpus, 'instance') - self.assertEqual(session._instance, 'instance') - @mock.patch.object(gold_utils.SkiaGoldSession, '__init__') - def test_matchingSessionReused(self, session_mock): - session_mock.return_value = None - args = createSkiaGoldArgs() - sgp = gold_utils.SkiaGoldProperties(args) - with tempfile_ext.NamedTemporaryDirectory() as working_dir: - sgsm = gold_utils.SkiaGoldSessionManager(working_dir, sgp) - keys_file = os.path.join(working_dir, 'keys.json') - with open(keys_file, 'w') as f: - json.dump({}, f) - session1 = sgsm.GetSkiaGoldSession(keys_file, 'corpus', 'instance') - session2 = sgsm.GetSkiaGoldSession(keys_file, 'corpus', 'instance') - self.assertEqual(session1, session2) - # For some reason, session_mock.assert_called_once() always passes, - # so check the call count directly. 
- self.assertEqual(session_mock.call_count, 1) +class AndroidSkiaGoldSessionDiffLinksTest(fake_filesystem_unittest.TestCase): + class FakeArchivedFile(object): + def __init__(self, path): + self.name = path - @mock.patch.object(gold_utils.SkiaGoldSession, '__init__') - def test_separateSessionsFromKeys(self, session_mock): - session_mock.return_value = None - args = createSkiaGoldArgs() - sgp = gold_utils.SkiaGoldProperties(args) - with tempfile_ext.NamedTemporaryDirectory() as working_dir: - sgsm = gold_utils.SkiaGoldSessionManager(working_dir, sgp) - keys_file1 = os.path.join(working_dir, 'keys1.json') - with open(keys_file1, 'w') as f: - json.dump({}, f) - keys_file2 = os.path.join(working_dir, 'keys2.json') - with open(keys_file2, 'w') as f: - json.dump({'something different': 1}, f) - session1 = sgsm.GetSkiaGoldSession(keys_file1, 'corpus', 'instance') - session2 = sgsm.GetSkiaGoldSession(keys_file2, 'corpus', 'instance') - self.assertNotEqual(session1, session2) - self.assertEqual(session_mock.call_count, 2) + def Link(self): + return 'file://' + self.name - @mock.patch.object(gold_utils.SkiaGoldSession, '__init__') - def test_separateSessionsFromCorpus(self, session_mock): - session_mock.return_value = None - args = createSkiaGoldArgs() - sgp = gold_utils.SkiaGoldProperties(args) - with tempfile_ext.NamedTemporaryDirectory() as working_dir: - sgsm = gold_utils.SkiaGoldSessionManager(working_dir, sgp) - keys_file = os.path.join(working_dir, 'keys.json') - with open(keys_file, 'w') as f: - json.dump({}, f) - session1 = sgsm.GetSkiaGoldSession(keys_file, 'corpus1', 'instance') - session2 = sgsm.GetSkiaGoldSession(keys_file, 'corpus2', 'instance') - self.assertNotEqual(session1, session2) - self.assertEqual(session_mock.call_count, 2) + class FakeOutputManager(object): + def __init__(self): + self.output_dir = tempfile.mkdtemp() - @mock.patch.object(gold_utils.SkiaGoldSession, '__init__') - def test_separateSessionsFromInstance(self, session_mock): - session_mock.return_value = None - args = createSkiaGoldArgs() - sgp = gold_utils.SkiaGoldProperties(args) - with tempfile_ext.NamedTemporaryDirectory() as working_dir: - sgsm = gold_utils.SkiaGoldSessionManager(working_dir, sgp) - keys_file = os.path.join(working_dir, 'keys.json') - with open(keys_file, 'w') as f: - json.dump({}, f) - session1 = sgsm.GetSkiaGoldSession(keys_file, 'corpus', 'instance1') - session2 = sgsm.GetSkiaGoldSession(keys_file, 'corpus', 'instance2') - self.assertNotEqual(session1, session2) - self.assertEqual(session_mock.call_count, 2) + @contextlib.contextmanager + def ArchivedTempfile(self, image_name, _, __): + filepath = os.path.join(self.output_dir, image_name) + yield AndroidSkiaGoldSessionDiffLinksTest.FakeArchivedFile(filepath) + def setUp(self): + self.setUpPyfakefs() + self._working_dir = tempfile.mkdtemp() -class SkiaGoldPropertiesInitializationTest(unittest.TestCase): - """Tests that SkiaGoldProperties initializes (or doesn't) when expected.""" - - def verifySkiaGoldProperties(self, instance, expected): - self.assertEqual(instance._local_pixel_tests, - expected.get('local_pixel_tests')) - self.assertEqual(instance._no_luci_auth, expected.get('no_luci_auth')) - self.assertEqual(instance._git_revision, expected.get('git_revision')) - self.assertEqual(instance._issue, expected.get('gerrit_issue')) - self.assertEqual(instance._patchset, expected.get('gerrit_patchset')) - self.assertEqual(instance._job_id, expected.get('buildbucket_id')) - self.assertEqual(instance._bypass_skia_gold_functionality, - 
expected.get('bypass_skia_gold_functionality')) - - def test_initializeSkiaGoldAttributes_unsetLocal(self): - args = createSkiaGoldArgs() - sgp = gold_utils.SkiaGoldProperties(args) - self.verifySkiaGoldProperties(sgp, {}) - - def test_initializeSkiaGoldAttributes_explicitLocal(self): - args = createSkiaGoldArgs(local_pixel_tests=True) - sgp = gold_utils.SkiaGoldProperties(args) - self.verifySkiaGoldProperties(sgp, {'local_pixel_tests': True}) - - def test_initializeSkiaGoldAttributes_explicitNonLocal(self): - args = createSkiaGoldArgs(local_pixel_tests=False) - sgp = gold_utils.SkiaGoldProperties(args) - self.verifySkiaGoldProperties(sgp, {'local_pixel_tests': False}) - - def test_initializeSkiaGoldAttributes_explicitNoLuciAuth(self): - args = createSkiaGoldArgs(no_luci_auth=True) - sgp = gold_utils.SkiaGoldProperties(args) - self.verifySkiaGoldProperties(sgp, {'no_luci_auth': True}) - - def test_initializeSkiaGoldAttributes_bypassExplicitTrue(self): - args = createSkiaGoldArgs(bypass_skia_gold_functionality=True) - sgp = gold_utils.SkiaGoldProperties(args) - self.verifySkiaGoldProperties(sgp, {'bypass_skia_gold_functionality': True}) - - def test_initializeSkiaGoldAttributes_explicitGitRevision(self): - args = createSkiaGoldArgs(git_revision='a') - sgp = gold_utils.SkiaGoldProperties(args) - self.verifySkiaGoldProperties(sgp, {'git_revision': 'a'}) - - def test_initializeSkiaGoldAttributes_tryjobArgsIgnoredWithoutRevision(self): - args = createSkiaGoldArgs( - gerrit_issue=1, gerrit_patchset=2, buildbucket_id=3) - sgp = gold_utils.SkiaGoldProperties(args) - self.verifySkiaGoldProperties(sgp, {}) - - def test_initializeSkiaGoldAttributes_tryjobArgs(self): - args = createSkiaGoldArgs( - git_revision='a', gerrit_issue=1, gerrit_patchset=2, buildbucket_id=3) - sgp = gold_utils.SkiaGoldProperties(args) - self.verifySkiaGoldProperties( - sgp, { - 'git_revision': 'a', - 'gerrit_issue': 1, - 'gerrit_patchset': 2, - 'buildbucket_id': 3 - }) - - def test_initializeSkiaGoldAttributes_tryjobMissingPatchset(self): - args = createSkiaGoldArgs( - git_revision='a', gerrit_issue=1, buildbucket_id=3) - with self.assertRaises(RuntimeError): - gold_utils.SkiaGoldProperties(args) - - def test_initializeSkiaGoldAttributes_tryjobMissingBuildbucket(self): - args = createSkiaGoldArgs( - git_revision='a', gerrit_issue=1, gerrit_patchset=2) - with self.assertRaises(RuntimeError): - gold_utils.SkiaGoldProperties(args) - - -class SkiaGoldPropertiesCalculationTest(unittest.TestCase): - """Tests that SkiaGoldProperties properly calculates certain properties.""" - - def testLocalPixelTests_determineTrue(self): - args = createSkiaGoldArgs() - sgp = gold_utils.SkiaGoldProperties(args) - with mock.patch.dict(os.environ, {}, clear=True): - self.assertTrue(sgp.local_pixel_tests) - - def testLocalPixelTests_determineFalse(self): - args = createSkiaGoldArgs() - sgp = gold_utils.SkiaGoldProperties(args) - with mock.patch.dict(os.environ, {'SWARMING_SERVER': ''}, clear=True): - self.assertFalse(sgp.local_pixel_tests) - - def testIsTryjobRun_noIssue(self): - args = createSkiaGoldArgs() - sgp = gold_utils.SkiaGoldProperties(args) - self.assertFalse(sgp.IsTryjobRun()) - - def testIsTryjobRun_issue(self): - args = createSkiaGoldArgs( - git_revision='a', gerrit_issue=1, gerrit_patchset=2, buildbucket_id=3) - sgp = gold_utils.SkiaGoldProperties(args) - self.assertTrue(sgp.IsTryjobRun()) - - def testGetGitRevision_revisionSet(self): - args = createSkiaGoldArgs(git_revision='a') - sgp = gold_utils.SkiaGoldProperties(args) - 
self.assertEqual(sgp.git_revision, 'a') - - def testGetGitRevision_findValidRevision(self): - args = createSkiaGoldArgs(local_pixel_tests=True) - sgp = gold_utils.SkiaGoldProperties(args) - with mock.patch( - 'pylib.utils.repo_utils.GetGitOriginMasterHeadSHA1') as patched_head: - expected = 'a' * 40 - patched_head.return_value = expected - self.assertEqual(sgp.git_revision, expected) - # Should be cached. - self.assertEqual(sgp._git_revision, expected) - - def testGetGitRevision_noExplicitOnBot(self): - args = createSkiaGoldArgs(local_pixel_tests=False) - sgp = gold_utils.SkiaGoldProperties(args) - with self.assertRaises(RuntimeError): - _ = sgp.git_revision - - def testGetGitRevision_findEmptyRevision(self): - args = createSkiaGoldArgs(local_pixel_tests=True) - sgp = gold_utils.SkiaGoldProperties(args) - with mock.patch( - 'pylib.utils.repo_utils.GetGitOriginMasterHeadSHA1') as patched_head: - patched_head.return_value = '' - with self.assertRaises(RuntimeError): - _ = sgp.git_revision - - def testGetGitRevision_findMalformedRevision(self): - args = createSkiaGoldArgs(local_pixel_tests=True) - sgp = gold_utils.SkiaGoldProperties(args) - with mock.patch( - 'pylib.utils.repo_utils.GetGitOriginMasterHeadSHA1') as patched_head: - patched_head.return_value = 'a' * 39 - with self.assertRaises(RuntimeError): - _ = sgp.git_revision + def test_outputManagerUsed(self): + args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=True) + sgp = gold_utils.AndroidSkiaGoldProperties(args) + session = gold_utils.AndroidSkiaGoldSession(self._working_dir, sgp, None, + None, None) + with open(os.path.join(self._working_dir, 'input-inputhash.png'), 'w') as f: + f.write('input') + with open(os.path.join(self._working_dir, 'closest-closesthash.png'), + 'w') as f: + f.write('closest') + with open(os.path.join(self._working_dir, 'diff.png'), 'w') as f: + f.write('diff') + + output_manager = AndroidSkiaGoldSessionDiffLinksTest.FakeOutputManager() + session._StoreDiffLinks('foo', output_manager, self._working_dir) + + copied_input = os.path.join(output_manager.output_dir, 'given_foo.png') + copied_closest = os.path.join(output_manager.output_dir, 'closest_foo.png') + copied_diff = os.path.join(output_manager.output_dir, 'diff_foo.png') + with open(copied_input) as f: + self.assertEqual(f.read(), 'input') + with open(copied_closest) as f: + self.assertEqual(f.read(), 'closest') + with open(copied_diff) as f: + self.assertEqual(f.read(), 'diff') + + self.assertEqual(session.GetGivenImageLink('foo'), 'file://' + copied_input) + self.assertEqual(session.GetClosestImageLink('foo'), + 'file://' + copied_closest) + self.assertEqual(session.GetDiffImageLink('foo'), 'file://' + copied_diff) if __name__ == '__main__': diff --git a/chromium/build/android/resource_sizes.py b/chromium/build/android/resource_sizes.py index f9e82088522..cd80694f4d1 100755 --- a/chromium/build/android/resource_sizes.py +++ b/chromium/build/android/resource_sizes.py @@ -471,7 +471,7 @@ def _DoApkAnalysis(apk_filename, apks_path, tool_prefix, out_dir, report_func): padding_fraction = -_PercentageDifference( native_code.ComputeUncompressedSize(), native_code_unaligned_size) # Ignore this check for small / no native code - if native_code.ComputeUncompressedSize() > 100000: + if native_code.ComputeUncompressedSize() > 1000000: assert 0 <= padding_fraction < .02, ( 'Padding was: {} (file_size={}, sections_sum={})'.format( padding_fraction, native_code.ComputeUncompressedSize(), diff --git a/chromium/build/android/test_runner.py 
b/chromium/build/android/test_runner.py
index 444af5b5bdd..e4cd353dc84 100755
--- a/chromium/build/android/test_runner.py
+++ b/chromium/build/android/test_runner.py
@@ -539,6 +539,13 @@ def AddInstrumentationTestOptions(parser):
       help='Wait for java debugger to attach before running any application '
       'code. Also disables test timeouts and sets retries=0.')
 
+  # WPR record mode.
+  parser.add_argument('--wpr-enable-record',
+                      action='store_true',
+                      default=False,
+                      help='If true, WPR server runs in record mode. '
+                           'Otherwise, runs in replay mode.')
+
   # These arguments are suppressed from the help text because they should
   # only ever be specified by an intermediate script.
   parser.add_argument(
diff --git a/chromium/build/android/test_runner.pydeps b/chromium/build/android/test_runner.pydeps
index 3e4a4612cf1..acc2f34da7a 100644
--- a/chromium/build/android/test_runner.pydeps
+++ b/chromium/build/android/test_runner.pydeps
@@ -13,12 +13,17 @@
 ../../third_party/catapult/common/py_trace_event/third_party/protobuf/encoder.py
 ../../third_party/catapult/common/py_trace_event/third_party/protobuf/wire_format.py
 ../../third_party/catapult/common/py_utils/py_utils/__init__.py
+../../third_party/catapult/common/py_utils/py_utils/atexit_with_log.py
+../../third_party/catapult/common/py_utils/py_utils/binary_manager.py
 ../../third_party/catapult/common/py_utils/py_utils/cloud_storage.py
 ../../third_party/catapult/common/py_utils/py_utils/cloud_storage_global_lock.py
 ../../third_party/catapult/common/py_utils/py_utils/contextlib_ext.py
 ../../third_party/catapult/common/py_utils/py_utils/lock.py
 ../../third_party/catapult/common/py_utils/py_utils/modules_util.py
+../../third_party/catapult/common/py_utils/py_utils/retry_util.py
 ../../third_party/catapult/common/py_utils/py_utils/tempfile_ext.py
+../../third_party/catapult/common/py_utils/py_utils/ts_proxy_server.py
+../../third_party/catapult/common/py_utils/py_utils/webpagereplay_go_server.py
 ../../third_party/catapult/dependency_manager/dependency_manager/__init__.py
 ../../third_party/catapult/dependency_manager/dependency_manager/archive_info.py
 ../../third_party/catapult/dependency_manager/dependency_manager/base_config.py
@@ -39,6 +44,7 @@
 ../../third_party/catapult/devil/devil/android/crash_handler.py
 ../../third_party/catapult/devil/devil/android/decorators.py
 ../../third_party/catapult/devil/devil/android/device_blacklist.py
+../../third_party/catapult/devil/devil/android/device_denylist.py
 ../../third_party/catapult/devil/devil/android/device_errors.py
 ../../third_party/catapult/devil/devil/android/device_list.py
 ../../third_party/catapult/devil/devil/android/device_signal.py
@@ -125,6 +131,11 @@
 ../../tools/swarming_client/libs/logdog/streamname.py
 ../../tools/swarming_client/libs/logdog/varint.py
 ../gn_helpers.py
+../print_python_deps.py
+../skia_gold_common/__init__.py
+../skia_gold_common/skia_gold_properties.py
+../skia_gold_common/skia_gold_session.py
+../skia_gold_common/skia_gold_session_manager.py
 ../util/lib/common/chrome_test_server_spawner.py
 ../util/lib/common/unittest_util.py
 convert_dex_profile.py
@@ -196,6 +207,7 @@ pylib/symbols/__init__.py
 pylib/symbols/deobfuscator.py
 pylib/symbols/stack_symbolizer.py
 pylib/utils/__init__.py
+pylib/utils/chrome_proxy_utils.py
 pylib/utils/decorators.py
 pylib/utils/device_dependencies.py
 pylib/utils/dexdump.py
diff --git a/chromium/build/chromeos/OWNERS b/chromium/build/chromeos/OWNERS
index 6ba0eaa4307..e1058c853e8 100644
--- a/chromium/build/chromeos/OWNERS
+++ b/chromium/build/chromeos/OWNERS
@@ -1,4 +1 @@
 bpastene@chromium.org
-
-# TEAM: infra-dev@chromium.org
-# COMPONENT: Build
diff --git a/chromium/build/chromeos/test_runner.py b/chromium/build/chromeos/test_runner.py
index a5774274a48..4c384b73365 100755
--- a/chromium/build/chromeos/test_runner.py
+++ b/chromium/build/chromeos/test_runner.py
@@ -55,7 +55,6 @@ SYSTEM_LOG_LOCATIONS = [
     # `journalctl -D ...`.
     '/var/log/journal/',
     '/var/log/messages',
-    '/var/log/power_manager/',
     '/var/log/ui/',
 ]
 
@@ -117,7 +116,12 @@ class RemoteTest(object):
     if args.logs_dir:
       for log in SYSTEM_LOG_LOCATIONS:
         self._test_cmd += ['--results-src', log]
-      self._test_cmd += ['--results-dest-dir', args.logs_dir]
+      self._test_cmd += [
+          '--results-dest-dir',
+          os.path.join(args.logs_dir, 'system_logs')
+      ]
+    if args.flash:
+      self._test_cmd += ['--flash']
 
     # This environment variable is set for tests that have been instrumented
     # for code coverage. Its incoming value is expected to be a location
@@ -722,7 +726,10 @@ def host_cmd(args, unknown_args):
   if args.logs_dir:
     for log in SYSTEM_LOG_LOCATIONS:
       cros_run_test_cmd += ['--results-src', log]
-    cros_run_test_cmd += ['--results-dest-dir', args.logs_dir]
+    cros_run_test_cmd += [
+        '--results-dest-dir',
+        os.path.join(args.logs_dir, 'system_logs')
+    ]
 
   test_env = setup_env()
   if args.deploy_chrome:
@@ -801,6 +808,11 @@ def add_common_args(*parsers):
         dest='logs_dir',
         help='Will copy everything under /var/log/ from the device after the '
         'test into the specified dir.')
+    parser.add_argument(
+        '--flash',
+        action='store_true',
+        help='Will flash the device to the current SDK version before running '
+        'the test.')
 
     vm_or_device_group = parser.add_mutually_exclusive_group()
     vm_or_device_group.add_argument(
diff --git a/chromium/build/config/BUILDCONFIG.gn b/chromium/build/config/BUILDCONFIG.gn
index 6f5d5f123f8..4cc6bfc45e6 100644
--- a/chromium/build/config/BUILDCONFIG.gn
+++ b/chromium/build/config/BUILDCONFIG.gn
@@ -237,7 +237,7 @@ if (target_os == "android") {
     _default_toolchain = "//build/toolchain/mac:ios_clang_$target_cpu"
   } else if (target_os == "mac") {
     assert(host_os == "mac", "Mac cross-compiles are unsupported.")
-    _default_toolchain = host_toolchain
+    _default_toolchain = "//build/toolchain/mac:clang_$target_cpu"
   } else if (target_os == "win") {
     # On Windows, we use the same toolchain for host and target by default.
     # Beware, win cross builds have some caveats, see docs/win_cross.md
@@ -585,10 +585,9 @@ foreach(_target_type,
       # On Android, write shared library output file to metadata. We will use
       # this information to, for instance, collect all shared libraries that
      # should be packaged into an APK.
-      if (!defined(invoker.metadata) && is_android && (_target_type ==
-                                                           "shared_library" ||
-                                                       _target_type ==
-                                                           "loadable_module")) {
+      if (!defined(invoker.metadata) && is_android &&
+          (_target_type == "shared_library" ||
+           _target_type == "loadable_module")) {
         _output_name = _target_name
         if (defined(invoker.output_name)) {
           _output_name = invoker.output_name
@@ -641,7 +640,7 @@ template("component") {
     assert(invoker.static_component_type == "static_library" ||
            invoker.static_component_type == "source_set")
     _component_mode = invoker.static_component_type
-  } else if (!defined(invoker.sources)) {
+  } else if (!defined(invoker.sources) || invoker.sources == []) {
     # When there are no sources defined, use a source set to avoid creating
     # an empty static library (which generally doesn't work).
_component_mode = "source_set" diff --git a/chromium/build/config/OWNERS b/chromium/build/config/OWNERS index 9eccd5b9f14..13d88a2dfc9 100644 --- a/chromium/build/config/OWNERS +++ b/chromium/build/config/OWNERS @@ -1,4 +1,5 @@ dpranke@chromium.org +dpranke@google.com scottmg@chromium.org per-file *jumbo*=bratell.d@gmail.com diff --git a/chromium/build/config/android/OWNERS b/chromium/build/config/android/OWNERS index 0dad8ce2b30..9184df6a8ac 100644 --- a/chromium/build/config/android/OWNERS +++ b/chromium/build/config/android/OWNERS @@ -2,5 +2,3 @@ agrieve@chromium.org digit@chromium.org tiborg@chromium.org wnwen@chromium.org - -# COMPONENT: Build diff --git a/chromium/build/config/android/config.gni b/chromium/build/config/android/config.gni index a560d2698a8..8838f279509 100644 --- a/chromium/build/config/android/config.gni +++ b/chromium/build/config/android/config.gni @@ -45,6 +45,10 @@ if (is_android || is_chromeos) { # repositories to support both public only and internal builds. enable_chrome_android_internal = has_chrome_android_internal + # The default to use for android:minSdkVersion for targets that do + # not explicitly set it. + default_min_sdk_version = 21 + # Android API level for 32 bits platforms android32_ndk_api_level = 16 @@ -68,9 +72,6 @@ if (is_android || is_chromeos) { } } - # Our build rules support only KitKat+. - default_min_sdk_version = 19 - if (!defined(default_android_ndk_root)) { default_android_ndk_root = "//third_party/android_ndk" default_android_ndk_version = "r20" @@ -88,6 +89,11 @@ if (is_android || is_chromeos) { public_android_sdk = true } + # For use downstream when we are building with preview Android SDK + if (!defined(final_android_sdk)) { + final_android_sdk = public_android_sdk + } + if (!defined(default_lint_android_sdk_root)) { # Purposefully repeated so that downstream can change # default_android_sdk_root without changing lint version. @@ -209,6 +215,11 @@ if (is_android || is_chromeos) { # configured to use it. enable_proguard_obfuscation = true + # Controls whether |short_resource_paths| and |strip_resource_names| are + # respected. Useful when trying to analyze APKs using tools that do not + # support mapping these names. + enable_arsc_obfuscation = true + # The target to use as the system WebView implementation. system_webview_apk_target = "//android_webview:system_webview_apk" } @@ -236,7 +247,7 @@ if (is_android || is_chromeos) { enable_bazel_desugar = true # Enables Java library desugaring. - # This will cause an extra 1MB classes.dex file to appear in every apk. + # This will cause an extra classes.dex file to appear in every apk. enable_jdk_library_desugaring = false } diff --git a/chromium/build/config/android/copy_ex.gni b/chromium/build/config/android/copy_ex.gni index 14fbedb3b37..0ea9e706683 100644 --- a/chromium/build/config/android/copy_ex.gni +++ b/chromium/build/config/android/copy_ex.gni @@ -28,6 +28,7 @@ template("copy_ex") { [ "data", "deps", + "public_deps", "testonly", "visibility", ]) diff --git a/chromium/build/config/android/internal_rules.gni b/chromium/build/config/android/internal_rules.gni index 1494124a991..b6b7627dda3 100644 --- a/chromium/build/config/android/internal_rules.gni +++ b/chromium/build/config/android/internal_rules.gni @@ -16,47 +16,69 @@ import("//build/util/generate_wrapper.gni") import("//build_overrides/build.gni") assert(is_android) -# These filters identify most targets that eventually use the java_library_impl -# template. 
-_java_lib_patterns = [ +# The following _java_*_types variables capture all the existing target types. +# If a new type is introduced, please add it to one of these categories, +# preferring the more specific resource/library types. +_java_resource_types = [ + "android_assets", + "android_resources", +] + +_java_library_types = [ + "java_library", + "system_java_library", + "android_app_bundle_module", +] + +# These are leaf java target types. They cannot be passed as deps to other +# targets. Thus their naming schemes are not enforced. +_java_leaf_types = [ + "android_apk", + "android_app_bundle", + "dist_aar", + "dist_jar", + "java_annotation_processor", + "java_binary", + "junit_binary", +] + +# All _java_resource_types targets must conform to these patterns. +_java_resource_patterns = [ + "*:*_assets", + "*android*:assets", + "*:*_apk_*resources", + "*android*:resources", + "*:*_resources", + "*:*_grd", + "*:*locale_paks", + "*:*_java_strings", + "*:*strings_java", +] + +# All _java_library_types targets must conform to these patterns. This includes +# all non-leaf targets that use java_library_impl. +_java_library_patterns = [ "*:*_java", "*:*_javalib", - "*:*_java_*", # e.g. java_test_support + "*:*_java_*", # e.g. chrome_java_test_support "*:java", "*:junit", "*:junit_*", "*:*_junit_*", "*:*javatests", + "*:*_bundle_module", # TODO(agrieve): Rename targets below to match above patterns. - "*android_webview/glue*:glue", + "//android_webview/glue:glue", ] -# These identify targets that have .build_config files (except for android_apk, -# java_binary, android_app_bundle since we never need to depend on these). -_java_target_patterns = _java_lib_patterns + [ - "*:*_assets", - "*android*:assets", - "*:*_apk_*resources", - "*android*:resources", - "*:*_resources", - "*:*_grd", - "*:*locale_paks", - "*_bundle_module", - ] - -# Targets that match _java_target_patterns but are not actually java targets. -_java_target_exceptions = [ "*:*_unpack_aar" ] - -# These targets match _java_lib_patterns but do not use java_library_impl. -_java_lib_exceptions = - _java_target_patterns - _java_lib_patterns + _java_target_exceptions + [ - "*:*__res", # TODO(wnwen): Rename these to the standard ones. - ] +# These identify all non-leaf targets that have .build_config files. +_java_target_patterns = _java_library_patterns + _java_resource_patterns _r8_path = "//third_party/r8/lib/r8.jar" _desugar_jdk_libs_json = "//third_party/r8/desugar_jdk_libs.json" _desugar_jdk_libs_jar = "//third_party/android_deps/libs/com_android_tools_desugar_jdk_libs/desugar_jdk_libs-1.0.5.jar" +_desugar_runtime_jar = "$root_build_dir/obj/third_party/bazel/desugar/Desugar_runtime.processed.jar" _dexdump_path = "$android_sdk_build_tools/dexdump" _dexlayout_path = "//third_party/android_build_tools/art/dexlayout" @@ -120,19 +142,32 @@ build_config_target_suffix = "__build_config_crbug_908819" # build/android/gyp/util/build_utils.py:ExpandFileArgs template("write_build_config") { _type = invoker.type - - # Don't need to enforce naming scheme for these targets since we never - # consider them in dependency chains. 
- if (_type != "android_apk" && _type != "java_binary" && _type != "dist_jar" && - _type != "java_annotation_processor" && _type != "dist_aar" && - _type != "android_app_bundle") { - _parent_invoker = invoker.invoker - _target_label = - get_label_info(":${_parent_invoker.target_name}", "label_no_toolchain") - if (filter_exclude([ _target_label ], _java_target_patterns) != [] && - filter_exclude([ _target_label ], _java_target_exceptions) != []) { + _parent_invoker = invoker.invoker + _target_label = + get_label_info(":${_parent_invoker.target_name}", "label_no_toolchain") + + # Ensure targets match naming patterns so that __assetres, __header, __impl + # targets work properly. Those generated targets allow for effective deps + # filtering. + if (filter_exclude([ _type ], _java_resource_types) == []) { + if (filter_exclude([ _target_label ], _java_resource_patterns) != []) { + assert(false, "Invalid java resource target name: $_target_label") + } + } else if (filter_exclude([ _type ], _java_library_types) == []) { + if (filter_exclude([ _target_label ], _java_library_patterns) != [] || + filter_exclude([ _target_label ], _java_resource_patterns) == []) { + assert(false, "Invalid java library target name: $_target_label") + } + } else if (_type == "group") { + if (filter_exclude([ _target_label ], _java_target_patterns) != []) { assert(false, "Invalid java target name: $_target_label") } + } else if (filter_exclude([ _type ], _java_leaf_types) != []) { + assert(false, "This java type needs a category: $_type") + } + + if (defined(invoker.public_target_label)) { + _target_label = invoker.public_target_label } action_with_pydeps(target_name) { @@ -156,12 +191,11 @@ template("write_build_config") { _deps_configs = [] if (defined(invoker.possible_config_deps)) { foreach(_possible_dep, invoker.possible_config_deps) { - _target_label = get_label_info(_possible_dep, "label_no_toolchain") - if (filter_exclude([ _target_label ], _java_target_patterns) == [] && - filter_exclude([ _target_label ], _java_target_exceptions) != []) { + _dep_label = get_label_info(_possible_dep, "label_no_toolchain") + if (filter_exclude([ _dep_label ], _java_target_patterns) == []) { # Put the bug number in the target name so that false-positives # have a hint in the error message about non-existent dependencies. 
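The category checks above hinge on GN's filter_exclude(): a label belongs to a category exactly when filtering it against the category's patterns removes it, i.e. the result is the empty list. A rough Python equivalent (fnmatch only approximates GN's label-pattern syntax, and the example label is illustrative):

    import fnmatch

    def filter_exclude(values, patterns):
      # Keep only the values that match none of the patterns, like the
      # GN builtin of the same name.
      return [v for v in values
              if not any(fnmatch.fnmatch(v, p) for p in patterns)]

    patterns = ['*:*_assets', '*android*:assets', '*:*_resources']
    label = '//chrome/android:chrome_app_java_resources'
    assert filter_exclude([label], patterns) == []  # conforming resource name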
- deps += [ "$_target_label$build_config_target_suffix" ] + deps += [ "$_dep_label$build_config_target_suffix" ] _dep_gen_dir = get_label_info(_possible_dep, "target_gen_dir") _dep_name = get_label_info(_possible_dep, "name") _deps_configs += [ "$_dep_gen_dir/$_dep_name.build_config" ] @@ -177,6 +211,8 @@ template("write_build_config") { "--deps-configs=$_rebased_deps_configs", "--build-config", rebase_path(invoker.build_config, root_build_dir), + "--gn-target", + _target_label, ] if (defined(invoker.chromium_code) && !invoker.chromium_code) { @@ -184,10 +220,16 @@ template("write_build_config") { args += [ "--non-chromium-code" ] } - if (defined(invoker.jar_path)) { + if (defined(invoker.device_jar_path)) { args += [ - "--jar-path", - rebase_path(invoker.jar_path, root_build_dir), + "--device-jar-path", + rebase_path(invoker.device_jar_path, root_build_dir), + ] + } + if (defined(invoker.host_jar_path)) { + args += [ + "--host-jar-path", + rebase_path(invoker.host_jar_path, root_build_dir), ] } if (defined(invoker.unprocessed_jar_path)) { @@ -208,22 +250,20 @@ template("write_build_config") { rebase_path(invoker.java_resources_jar, root_build_dir), ] } - if (defined(invoker.skip_jetify) && invoker.skip_jetify) { - args += [ "--skip-jetify" ] - } if (defined(invoker.jetified_jar_path)) { args += [ "--jetified-jar-path", rebase_path(invoker.jetified_jar_path, root_build_dir), ] } - if (defined(invoker.annotation_processor_deps)) { + if (defined(invoker.annotation_processor_deps) && + invoker.annotation_processor_deps != []) { _processor_configs = [] foreach(_processor_dep, invoker.annotation_processor_deps) { - _target_label = get_label_info(_processor_dep, "label_no_toolchain") + _dep_label = get_label_info(_processor_dep, "label_no_toolchain") _dep_gen_dir = get_label_info(_processor_dep, "target_gen_dir") _dep_name = get_label_info(_processor_dep, "name") - deps += [ "$_target_label$build_config_target_suffix" ] + deps += [ "$_dep_label$build_config_target_suffix" ] _processor_configs += [ "$_dep_gen_dir/$_dep_name.build_config" ] } _rebased_processor_configs = @@ -471,12 +511,12 @@ template("write_build_config") { } if (defined(invoker.static_library_dependent_targets)) { _dependent_configs = [] - foreach(_target, invoker.static_library_dependent_targets) { - _target_name = _target.name - _target_label = get_label_info(_target_name, "label_no_toolchain") - deps += [ "$_target_label$build_config_target_suffix" ] - _dep_gen_dir = get_label_info(_target_name, "target_gen_dir") - _dep_name = get_label_info(_target_name, "name") + foreach(_dep, invoker.static_library_dependent_targets) { + _dep_name = _dep.name + _dep_label = get_label_info(_dep_name, "label_no_toolchain") + deps += [ "$_dep_label$build_config_target_suffix" ] + _dep_gen_dir = get_label_info(_dep_name, "target_gen_dir") + _dep_name = get_label_info(_dep_name, "name") _config = rebase_path("$_dep_gen_dir/$_dep_name.build_config", root_build_dir) _dependent_configs += [ _config ] @@ -494,11 +534,11 @@ template("write_build_config") { ] } if (defined(invoker.base_module_target)) { - _target_label = + _base_label = get_label_info(invoker.base_module_target, "label_no_toolchain") - _dep_gen_dir = get_label_info(_target_label, "target_gen_dir") - _dep_name = get_label_info(_target_label, "name") - deps += [ "$_target_label$build_config_target_suffix" ] + _dep_gen_dir = get_label_info(_base_label, "target_gen_dir") + _dep_name = get_label_info(_base_label, "name") + deps += [ "$_base_label$build_config_target_suffix" ] args += [ 
"--base-module-build-config", rebase_path("$_dep_gen_dir/$_dep_name.build_config", root_build_dir), @@ -523,7 +563,8 @@ template("write_build_config") { # } _msg = [ "Tried to build an Android target in a non-default toolchain.", - "target: " + get_label_info(":$target_name", "label_with_toolchain"), + "target: $_target_label", + "current_toolchain: $current_toolchain", "default_toolchain: $default_toolchain", ] args += [ "--fail=$_msg" ] @@ -859,70 +900,89 @@ template("test_runner_script") { } if (enable_java_templates) { - android_sdk_jar = "$android_sdk/android.jar" - template("android_lint") { action_with_pydeps(target_name) { forward_variables_from(invoker, [ - "deps", "data_deps", "public_deps", "testonly", ]) + if (!defined(deps)) { + deps = [] + } + + # https://crbug.com/1098752 Fix for bot OOM (https://crbug.com/1098333). + pool = "//build/toolchain:link_pool($default_toolchain)" + + # Lint only requires generated sources and generated resources from the + # build. Since turbine __header targets already depend on and generate all + # the generated sources, and the __assetres targets include all generated + # resources, lint only needs to depend on the header and assetres targets + # rather than the top level java targets. + if (defined(invoker.deps)) { + foreach(_dep, invoker.deps) { + _target_label = get_label_info(_dep, "label_no_toolchain") + if (filter_exclude([ _target_label ], _java_library_patterns) == [] && + filter_exclude([ _target_label ], _java_resource_patterns) != + []) { + deps += [ + # Strictly speaking the __header target is sufficient, since they + # already depend on resources (due to srcjar deps), but prefer to + # be more explicit here since if/when __header targets stop + # depending on resources (e.g. if R.java generation moves to java + # targets), lint will not be affected. + "${_target_label}__assetres", + "${_target_label}__header", + ] + } else { + deps += [ _dep ] + } + } + } + if (defined(invoker.lint_suppressions_file)) { - lint_suppressions_file = invoker.lint_suppressions_file - } else if (!defined(lint_suppressions_file)) { - # WebRTC defines its own lint_suppressions_file: - # //tools_webrtc/android/suppressions.xml - lint_suppressions_file = "//build/android/lint/suppressions.xml" + _suppressions_file = invoker.lint_suppressions_file + + # The custom suppressions file might be a generated file needing a dep. + # e.g. generating it by appending to the default suppressions.xml file. + if (defined(invoker.lint_suppressions_dep)) { + deps += [ invoker.lint_suppressions_dep ] + } + } else { + _suppressions_file = "//build/android/lint/suppressions.xml" } _min_sdk_version = default_min_sdk_version if (defined(invoker.min_sdk_version)) { _min_sdk_version = invoker.min_sdk_version } - _lint_path = "$lint_android_sdk_root/cmdline-tools/latest/bin/lint" + + _lint_binary_path = "$lint_android_sdk_root/cmdline-tools/latest/bin/lint" _cache_dir = "$root_build_dir/android_lint_cache" - _result_path = "$target_out_dir/$target_name/result.xml" - _config_path = "$target_out_dir/$target_name/config.xml" - _stamp_path = "$target_out_dir/$target_name/build.lint.stamp" - _suppressions_file = lint_suppressions_file - _platform_xml_path = - "$lint_android_sdk_root/platform-tools/api/api-versions.xml" + + # Save these generated xml files in a consistent location for debugging. 
+ _lint_gen_dir = "$target_gen_dir/$target_name" script = "//build/android/gyp/lint.py" depfile = "$target_gen_dir/$target_name.d" inputs = [ - _lint_path, - _platform_xml_path, + _lint_binary_path, _suppressions_file, ] - # _result_path is also an output, but do not list it in order to avoid it - # being uploaded to swarming as part of isolates. This happens as a - # side-effect of lint targets being listed as "data_deps" in order to - # have them run concurrently with other targets. - outputs = [ _stamp_path ] - args = [ "--depfile", rebase_path(depfile, root_build_dir), - "--lint-path", - rebase_path(_lint_path, root_build_dir), + "--lint-binary-path", + rebase_path(_lint_binary_path, root_build_dir), "--cache-dir", rebase_path(_cache_dir, root_build_dir), - "--platform-xml-path", - rebase_path(_platform_xml_path, root_build_dir), - "--android-sdk-version=${lint_android_sdk_version}", "--config-path", rebase_path(_suppressions_file, root_build_dir), - "--product-dir=.", - "--result-path", - rebase_path(_result_path, root_build_dir), - "--stamp", - rebase_path(_stamp_path, root_build_dir), - "--include-unexpected-failures", + "--lint-gen-dir", + rebase_path(_lint_gen_dir, root_build_dir), + "--android-sdk-version=${lint_android_sdk_version}", "--min-sdk-version=$_min_sdk_version", "--android-sdk-root", rebase_path(lint_android_sdk_root, root_build_dir), @@ -933,11 +993,27 @@ if (enable_java_templates) { args += [ "--testonly" ] } + if (defined(invoker.manifest_package)) { + args += [ "--manifest-package=${invoker.manifest_package}" ] + } + + if (java_warnings_as_errors) { + args += [ "--can-fail-build" ] + } + + _stamp_path = "$target_out_dir/$target_name/build.lint.stamp" if (defined(invoker.create_cache) && invoker.create_cache) { args += [ "--silent" ] + + # Putting the stamp file in the cache dir allows us to depend on ninja + # to create the cache dir for us. + _stamp_path = "$_cache_dir/build.lint.stamp" } else { + deps += [ + "//build/android:prepare_android_lint_cache", + invoker.build_config_dep, + ] inputs += [ invoker.build_config ] - deps += [ "//build/android:prepare_android_lint_cache" ] _rebased_build_config = rebase_path(invoker.build_config, root_build_dir) args += [ @@ -947,14 +1023,13 @@ if (enable_java_templates) { "--resource-sources=@FileArg($_rebased_build_config:deps_info:lint_resource_sources)", "--resource-zips=@FileArg($_rebased_build_config:deps_info:lint_resource_zips)", ] - if (java_warnings_as_errors) { - args += [ "--can-fail-build" ] - } } - if (defined(invoker.manifest_package)) { - args += [ "--manifest-package=${invoker.manifest_package}" ] - } + outputs = [ _stamp_path ] + args += [ + "--stamp", + rebase_path(_stamp_path, root_build_dir), + ] } } @@ -973,7 +1048,10 @@ if (enable_java_templates) { # http://crbug.com/725224. Fix for bots running out of memory. 
_pool = "//build/toolchain:link_pool($default_toolchain)" - _inputs = [ invoker.build_config ] + _inputs = [ + invoker.build_config, + _r8_path, + ] if (defined(invoker.inputs)) { _inputs += invoker.inputs } @@ -986,6 +1064,11 @@ if (enable_java_templates) { _mapping_path = invoker.proguard_mapping_path } + _enable_jdk_library_desugaring = enable_jdk_library_desugaring + if (defined(invoker.supports_jdk_library_desugaring) && + !invoker.supports_jdk_library_desugaring) { + _enable_jdk_library_desugaring = false + } _proguard_sourcefile_suffix = "" if (defined(invoker.proguard_sourcefile_suffix)) { _proguard_sourcefile_suffix = "-${invoker.proguard_sourcefile_suffix}" @@ -999,6 +1082,8 @@ if (enable_java_templates) { "@FileArg($_rebased_build_config:deps_info:proguard_classpath_jars)", "--classpath", "@FileArg($_rebased_build_config:android:sdk_jars)", + "--r8-path", + rebase_path(_r8_path, root_build_dir), ] if (enable_proguard_obfuscation) { @@ -1020,7 +1105,7 @@ if (enable_java_templates) { _args += [ "--feature-name=${_feature_module.name}", "--dex-dest=@FileArg($_rebased_module_build_config:final_dex:path)", - "--feature-jars=@FileArg($_rebased_module_build_config:deps_info:java_runtime_classpath)", + "--feature-jars=@FileArg($_rebased_module_build_config:deps_info:device_classpath)", ] _deps += [ _feature_module.build_config_target ] } @@ -1066,26 +1151,25 @@ if (enable_java_templates) { ] } - if (!defined(invoker.proguard_jar_path)) { + if (_enable_jdk_library_desugaring) { _args += [ - "--r8-path", - rebase_path(_r8_path, root_build_dir), + "--desugar-jdk-libs-json", + rebase_path(_desugar_jdk_libs_json, root_build_dir), ] - _inputs += [ _r8_path ] - if (enable_jdk_library_desugaring) { - _args += [ - "--desugar-jdk-libs-json", - rebase_path(_desugar_jdk_libs_json, root_build_dir), - ] - _inputs += [ _desugar_jdk_libs_json ] - } - } else { - _proguard_jar_path = invoker.proguard_jar_path + _inputs += [ _desugar_jdk_libs_json ] + + _args += [ + "--desugar-jdk-libs-jar", + rebase_path(_desugar_jdk_libs_jar, root_build_dir), + ] + _inputs += [ _desugar_jdk_libs_jar ] + + _desugared_library_keep_rule_output_path = + "$target_gen_dir/$target_name.desugared_library_keep_rules.flags" _args += [ - "--proguard-path", - rebase_path(_proguard_jar_path, root_build_dir), + "--desugared-library-keep-rule-output", + rebase_path(_desugared_library_keep_rule_output_path, root_build_dir), ] - _inputs += [ _proguard_jar_path ] } _enable_assert = is_java_debug || dcheck_always_on @@ -1099,45 +1183,39 @@ if (enable_java_templates) { _args += invoker.args } - if (defined(invoker.verify_proguard_flags_target_name)) { - _expectations_target = "${invoker.verify_proguard_flags_target_name}" + - "_proguard_expectations" + if (defined(invoker.expected_proguard_config)) { + _expectations_target = + "${invoker.top_target_name}_validate_proguard_config" action_with_pydeps(_expectations_target) { - _expected_configs_file = - get_label_info(":${invoker.verify_proguard_flags_target_name}", - "dir") + - "/java/${invoker.verify_proguard_flags_target_name}" + - ".proguard_flags.expected" - _failed_proguard_expectation_file = - "$android_configuration_failure_dir/" + - string_replace(_expected_configs_file, "/", "_") - _expectation_stamp = "${target_gen_dir}/${_expectations_target}.stamp" - if (fail_on_android_expectations) { - args += [ "--fail-on-expectations" ] - } script = _script # Need to depend on all deps so that proguard.txt within .aar files get # extracted. 
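The *_validate_proguard_config target above follows the build's expectations pattern: diff the config R8 would use against a checked-in expected file, record any mismatch in a failure file under android_configuration_failure_dir, and only fail the build when fail_on_android_expectations is set. A minimal sketch with illustrative helper and parameter names:

    import difflib
    import pathlib

    def check_expectations(actual_path, expected_path, failure_file,
                           fail_on_expectations):
      actual = pathlib.Path(actual_path).read_text().splitlines()
      expected = pathlib.Path(expected_path).read_text().splitlines()
      diff = list(difflib.unified_diff(expected, actual, lineterm=''))
      if diff:
        # Record the mismatch for the bots; a stamp file is written either way.
        pathlib.Path(failure_file).write_text('\n'.join(diff))
        if fail_on_expectations:
          raise SystemExit('ProGuard config does not match expectations.')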
deps = _deps - pool = _pool depfile = "${target_gen_dir}/${target_name}.d" inputs = [ invoker.build_config, - _expected_configs_file, + invoker.expected_proguard_config, ] + _failure_file = + "$android_configuration_failure_dir/" + + string_replace(invoker.expected_proguard_config, "/", "_") + _expectation_stamp = "$target_gen_dir/$target_name.stamp" outputs = [ _expectation_stamp ] args = _args + [ "--depfile", rebase_path(depfile, root_build_dir), "--proguard-expectations-failure-file", - rebase_path(_failed_proguard_expectation_file, root_build_dir), + rebase_path(_failure_file, root_build_dir), "--expected-configs-file", - rebase_path(_expected_configs_file, root_build_dir), + rebase_path(invoker.expected_proguard_config, root_build_dir), "--stamp", rebase_path(_expectation_stamp, root_build_dir), "--only-verify-expectations", ] + if (fail_on_android_expectations) { + args += [ "--fail-on-expectations" ] + } } _deps += [ ":$_expectations_target" ] } @@ -1186,7 +1264,9 @@ if (enable_java_templates) { "--main-class", _main_class, ] - args += [ "--classpath=@FileArg($_rebased_build_config:deps_info:java_runtime_classpath)" ] + args += [ + "--classpath=@FileArg($_rebased_build_config:deps_info:host_classpath)", + ] if (use_jacoco_coverage) { args += [ @@ -1214,8 +1294,6 @@ if (enable_java_templates) { _proguard_enabled = defined(invoker.proguard_enabled) && invoker.proguard_enabled _is_dex_merging = defined(invoker.input_dex_filearg) - _proguarding_with_r8 = - _proguard_enabled && !defined(invoker.proguard_jar_path) _enable_multidex = !defined(invoker.enable_multidex) || invoker.enable_multidex _enable_main_dex_list = _enable_multidex && _min_sdk_version < 21 @@ -1228,6 +1306,9 @@ if (enable_java_templates) { } } + # It's not safe to dex merge with libraries dex'ed at higher api versions. + assert(!_is_dex_merging || _min_sdk_version >= default_min_sdk_version) + # For D8's backported method desugaring to work properly, the dex merge step # must not be set to a higher minSdkVersion than it was for the libraries. 
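The assertion and the comments above encode the same constraint from two sides: dex produced for one minSdkVersion must not be merged at a lower one, and below API 21 an explicit main-dex list is required because pre-Lollipop runtimes only load the primary classes.dex natively. As a sketch:

    def needs_main_dex_list(enable_multidex, min_sdk_version):
      # Pre-Lollipop (API < 21) devices lack native multidex, so classes
      # needed at startup must be pinned into the primary classes.dex.
      return enable_multidex and min_sdk_version < 21

    def safe_to_dex_merge(merge_min_sdk, inputs_min_sdk):
      # Merging at a lower API level than the inputs were dexed for could
      # admit bytecode the older runtime cannot execute.
      return merge_min_sdk >= inputs_min_sdk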
  if (_enable_desugar && _is_dex_merging) {
@@ -1235,7 +1316,7 @@
   }
 
   assert(defined(invoker.output) ||
-         (_proguarding_with_r8 && defined(invoker.modules)))
+         (_proguard_enabled && defined(invoker.modules)))
   assert(!_proguard_enabled || !(defined(invoker.input_dex_filearg) ||
                                  defined(invoker.input_classes_filearg) ||
                                  defined(invoker.input_class_jars)),
@@ -1257,30 +1338,29 @@
   }
 
   if (_proguard_enabled) {
-    if (_proguarding_with_r8) {
-      if (defined(invoker.output)) {
-        _proguard_output_path = invoker.output
-      }
-      _proguard_target_name = target_name
-    } else {
-      _proguard_output_path = invoker.output + ".proguard.jar"
-      _proguard_target_name = "${target_name}__proguard"
+    if (defined(invoker.output)) {
+      _proguard_output_path = invoker.output
     }
+    _proguard_target_name = target_name
 
     proguard(_proguard_target_name) {
       forward_variables_from(invoker,
                              [
                                "build_config",
+                               "data",
+                               "data_deps",
+                               "deps",
                                "disable_checkdiscard",
                                "disable_r8_outlining",
-                               "deps",
+                               "expected_proguard_config",
+                               "is_static_library",
                                "modules",
-                               "verify_proguard_flags_target_name",
-                               "proguard_jar_path",
                                "proguard_mapping_path",
                                "proguard_sourcefile_suffix",
-                               "is_static_library",
+                               "supports_jdk_library_desugaring",
                                "testonly",
+                               "top_target_name",
+                               "visibility",
                              ])
       inputs = []
       if (defined(invoker.inputs)) {
@@ -1292,13 +1372,20 @@
       args = [
         "--proguard-configs=@FileArg($_rebased_build_config:deps_info:proguard_all_configs)",
-        "--input-paths=@FileArg($_rebased_build_config:deps_info:java_runtime_classpath)",
         "--min-api=$_min_sdk_version",
       ]
+      if (defined(invoker.has_apk_under_test) && invoker.has_apk_under_test) {
+        args += [ "--input-paths=@FileArg($_rebased_build_config:deps_info:java_runtime_classpath_extended)" ]
+      } else {
+        args += [ "--input-paths=@FileArg($_rebased_build_config:deps_info:device_classpath)" ]
+      }
       if (enable_bazel_desugar) {
         deps += [ "//third_party/bazel/desugar:desugar_runtime_java" ]
-        inputs += [ "$root_build_dir/lib.java/third_party/bazel/desugar/Desugar_runtime.jar" ]
-        args += [ "--input-paths=lib.java/third_party/bazel/desugar/Desugar_runtime.jar" ]
+        inputs += [ _desugar_runtime_jar ]
+        args += [
+          "--input-paths",
+          rebase_path(_desugar_runtime_jar, root_build_dir),
+        ]
       }
       if (defined(invoker.proguard_args)) {
         args += invoker.proguard_args
@@ -1317,7 +1404,7 @@
         args += [ "--apply-mapping=$_rebased_apply_mapping_path" ]
       }
 
-      if (_proguarding_with_r8 && _enable_main_dex_list) {
+      if (_enable_main_dex_list) {
         if (defined(invoker.extra_main_dex_proguard_config)) {
           args += [
             "--main-dex-rules-path",
@@ -1335,36 +1422,25 @@
       if (defined(_proguard_output_path)) {
         output_path = _proguard_output_path
-        if (_proguarding_with_r8) {
-          config_output_path = "$_proguard_output_path.proguard_flags"
-        }
+        config_output_path = "$_proguard_output_path.proguard_flags"
       } else {
         mapping_path = "$target_out_dir/$target_name.mapping"
-        if (_proguarding_with_r8) {
-          config_output_path = "$target_out_dir/$target_name.proguard_flags"
-        }
+        config_output_path = "$target_out_dir/$target_name.proguard_flags"
       }
     }
-  }
-
-  if (!_proguarding_with_r8) {
+  } else {  # !_proguard_enabled
+    _is_library = defined(invoker.is_library) && invoker.is_library
     _input_class_jars = []
     if (defined(invoker.input_class_jars)) {
       _input_class_jars = invoker.input_class_jars
     }
-    if (_proguard_enabled) {
-      _input_class_jars += [ _proguard_output_path ]
-    }
     _deps = invoker.deps
-    if (_proguard_enabled) {
-      _deps += [ ":${_proguard_target_name}" ]
-    }
-    if (_is_dex_merging && enable_bazel_desugar) {
+    if (!_is_library && enable_bazel_desugar) {
       # It would be more efficient to use the pre-dex'ed copy of the runtime,
       # but it's easier to add it in this way.
       _deps += [ "//third_party/bazel/desugar:desugar_runtime_java" ]
-      _input_class_jars += [ "$root_build_dir/lib.java/third_party/bazel/desugar/Desugar_runtime.jar" ]
+      _input_class_jars += [ _desugar_runtime_jar ]
     }
     if (_input_class_jars != []) {
       _rebased_input_class_jars =
@@ -1442,7 +1518,13 @@
     }
 
     action_with_pydeps(target_name) {
-      forward_variables_from(invoker, [ "testonly" ])
+      forward_variables_from(invoker,
+                             [
+                               "data",
+                               "data_deps",
+                               "testonly",
+                               "visibility",
+                             ])
       script = "//build/android/gyp/dex.py"
       deps = _deps
       depfile = "$target_gen_dir/$target_name.d"
@@ -1459,9 +1541,8 @@
         rebase_path(_r8_path, root_build_dir),
       ]
 
-      if (!_proguard_enabled && enable_incremental_d8 &&
-          !(defined(invoker.disable_incremental) &&
-            invoker.disable_incremental)) {
+      if (enable_incremental_d8 && !(defined(invoker.disable_incremental) &&
+                                     invoker.disable_incremental)) {
        # Don't use incremental dexing for ProGuarded inputs as a precaution.
        args += [
          "--incremental-dir",
@@ -1479,11 +1560,10 @@
           deps += [ ":${_main_dex_list_target_name}" ]
           inputs += [ _main_dex_list_path ]
         }
-      } else if (defined(invoker.enable_library_multidex) &&
-                 invoker.enable_library_multidex) {
-        args += [ "--multi-dex" ]
       }
-
+      if (_is_library) {
+        args += [ "--library" ]
+      }
       if (defined(invoker.input_dex_filearg)) {
         inputs += [ invoker.build_config ]
         args += [ "--dex-inputs-filearg=${invoker.input_dex_filearg}" ]
@@ -1515,20 +1595,13 @@
           invoker.dexlayout_profile,
         ]
         inputs += _default_art_libs
-        if (_proguard_enabled) {
-          args += [
-            "--proguard-mapping-path",
-            rebase_path(invoker.proguard_mapping_path, root_build_dir),
-          ]
-          inputs += [ invoker.proguard_mapping_path ]
-        }
       }
 
      # Never compile intermediates with --release in order to:
      # 1) not require recompiles when toggling is_java_debug,
      # 2) allow incremental_install=1 to still have local variable
      #    information even when is_java_debug=false.
-      if (!is_java_debug && _is_dex_merging) {
+      if (!is_java_debug && !_is_library) {
         args += [ "--release" ]
       }
 
@@ -1605,7 +1678,9 @@
             "testonly",
           ])
 
-    _sources_json_file = "$target_out_dir/${target_name}_sources.json"
+    # The name needs to match the SOURCES_JSON_FILES_SUFFIX in
+    # generate_coverage_metadata_for_java.py.
+    _sources_json_file = "$target_out_dir/${target_name}__jacoco_sources.json"
     _jacococli_jar = "//third_party/jacoco/lib/jacococli.jar"
 
     script = "//build/android/gyp/jacoco_instr.py"
@@ -1638,115 +1713,78 @@
     }
   }
 
-  # TODO(digit): Document this!
-  #
-  # Variables:
-  #   testonly:
-  #   build_config:
-  #   input_jar_path:
-  #   output_jar_path:
-  #   supports_android:
-  #   jacoco_instrument: Use Jacoco-instrumented classes to generate Java
-  #     coverage data.
-  #   jar_excluded_patterns: Optional list of .class file patterns to exclude
-  #     from the final .jar file.
-  #   jar_included_patterns: Optional list of .class file patterns to include
-  #     in the final .jar file. jar_excluded_patterns take precedence over this.
-  #   skip_jetify: A boolean on whether to skip jetifying or not.
- # strip_resource_classes: - # deps: - # java_files: - # java_sources_file: - # inputs: - # data_deps: - # visibility: - # - template("process_java_prebuilt") { - set_sources_assignment_filter([]) - forward_variables_from(invoker, [ "testonly" ]) - - assert(invoker.build_config != "") - _build_config = invoker.build_config - _rebased_build_config = rebase_path(_build_config, root_build_dir) - assert(_rebased_build_config != "" || true) # Mark used. - - _input_jar_path = invoker.input_jar_path - _output_jar_path = invoker.output_jar_path - - # Turned off because of existing code which fails the assertion - _enable_thread_annotations = false - - _jacoco_instrument = invoker.jacoco_instrument - _skip_jetify = defined(invoker.skip_jetify) && invoker.skip_jetify + template("filter_jar") { + action_with_pydeps(target_name) { + script = "//build/android/gyp/filter_zip.py" + forward_variables_from(invoker, + [ + "deps", + "testonly", + ]) + inputs = [ invoker.input_jar ] + if (defined(invoker.inputs)) { + inputs += invoker.inputs + } + outputs = [ invoker.output_jar ] - _enable_bytecode_rewriter = _enable_thread_annotations - _is_prebuilt = defined(invoker.is_prebuilt) && invoker.is_prebuilt - _enable_bytecode_checks = !defined(invoker.enable_bytecode_checks) || - invoker.enable_bytecode_checks - _missing_classes_allowlist = [] - if (defined(invoker.missing_classes_allowlist)) { - _missing_classes_allowlist = invoker.missing_classes_allowlist + _jar_excluded_patterns = [] + if (defined(invoker.jar_excluded_patterns)) { + _jar_excluded_patterns = invoker.jar_excluded_patterns + } + _jar_included_patterns = [] + if (defined(invoker.jar_included_patterns)) { + _jar_included_patterns = invoker.jar_included_patterns + } + _strip_resource_classes = defined(invoker.strip_resource_classes) && + invoker.strip_resource_classes + args = [ + "--input", + rebase_path(invoker.input_jar, root_build_dir), + "--output", + rebase_path(invoker.output_jar, root_build_dir), + "--exclude-globs=${_jar_excluded_patterns}", + "--include-globs=${_jar_included_patterns}", + ] + if (_strip_resource_classes) { + inputs += [ invoker.build_config ] + _rebased_build_config = + rebase_path(invoker.build_config, root_build_dir) + args += [ "--strip-resource-classes-for=@FileArg($_rebased_build_config:javac:resource_packages)" ] + } } + } - # Release builds don't have asserts enabled, so they often will not run the - # bytecode rewriter. We are okay with having release builds not run the - # bytecode checks at all, since the dependency errors can be caught in debug - # mode. 
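The filter_jar template introduced above wraps filter_zip.py; its effective semantics are a copy with glob filters, where excluded patterns take precedence over included ones (as the old jar_excluded_patterns documentation noted). A rough Python equivalent:

    import fnmatch
    import zipfile

    def filter_jar(input_jar, output_jar, excluded=(), included=('*',)):
      with zipfile.ZipFile(input_jar) as src, \
          zipfile.ZipFile(output_jar, 'w') as dst:
        for info in src.infolist():
          if any(fnmatch.fnmatch(info.filename, g) for g in excluded):
            continue  # Excludes win over includes.
          if not any(fnmatch.fnmatch(info.filename, g) for g in included):
            continue
          dst.writestr(info, src.read(info.filename))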
- not_needed([ - "_is_prebuilt", - "_enable_bytecode_checks", - "_missing_classes_allowlist", - ]) - if (defined(invoker.enable_bytecode_rewriter)) { - not_needed([ - "_enable_custom_resources", - "_enable_thread_annotations", - ]) - _enable_bytecode_rewriter = invoker.enable_bytecode_rewriter - } + template("process_java_prebuilt") { + forward_variables_from(invoker, [ "testonly" ]) - _jar_excluded_patterns = [] - if (defined(invoker.jar_excluded_patterns)) { - _jar_excluded_patterns = invoker.jar_excluded_patterns - } - _jar_included_patterns = [] - if (defined(invoker.jar_included_patterns)) { - _jar_included_patterns = invoker.jar_included_patterns - } - _strip_resource_classes = defined(invoker.strip_resource_classes) && - invoker.strip_resource_classes - _filter_jar = _jar_excluded_patterns != [] || - _jar_included_patterns != [] || _strip_resource_classes + _rebased_build_config = rebase_path(invoker.build_config, root_build_dir) + not_needed([ "_rebased_build_config" ]) + not_needed(invoker, [ "build_config_dep" ]) - _deps = [] - _previous_output_jar = _input_jar_path + _deps = invoker.jar_deps + _previous_output_jar = invoker.input_jar_path - if (!_skip_jetify) { + if (defined(invoker.jetified_jar_path)) { _jetify_target = "${target_name}__jetify" - _jetify_input_jar = _previous_output_jar - _jetify_output_jar = "$target_out_dir/$target_name.jetified.jar" action_with_pydeps(_jetify_target) { script = "//build/android/gyp/jetify_jar.py" deps = _deps - if (defined(invoker.deps)) { - deps += invoker.deps - } _jetify_jar_path = "//third_party/jetifier_standalone/bin/jetifier-standalone" _jetify_config_path = "//third_party/jetifier_standalone/config/ignore_R.config" inputs = [ - _jetify_input_jar, + _previous_output_jar, _jetify_jar_path, ] - outputs = [ _jetify_output_jar ] + outputs = [ invoker.jetified_jar_path ] args = [ "--input-path", rebase_path(invoker.input_jar_path, root_build_dir), "--output-path", - rebase_path(_jetify_output_jar, root_build_dir), + rebase_path(invoker.jetified_jar_path, root_build_dir), "--jetify-path", rebase_path(_jetify_jar_path, root_build_dir), "--jetify-config-path", @@ -1756,192 +1794,159 @@ if (enable_java_templates) { _deps = [] _deps = [ ":$_jetify_target" ] - _previous_output_jar = _jetify_output_jar - } - - if (_enable_bytecode_rewriter) { - _java_bytecode_rewriter_target = "${target_name}__bytecode_rewrite" - _java_bytecode_rewriter_input_jar = _previous_output_jar - _java_bytecode_rewriter_output_jar = - "$target_out_dir/$target_name.bytecode-rewritten.jar" - - action_with_pydeps(_java_bytecode_rewriter_target) { - script = "//build/android/gyp/bytecode_processor.py" - _bytecode_rewriter_script = - "$root_build_dir/bin/helper/java_bytecode_rewriter" - deps = _deps + [ "//build/android/bytecode:java_bytecode_rewriter($default_toolchain)" ] - if (defined(invoker.deps)) { - deps += invoker.deps - } - inputs = [ - _bytecode_rewriter_script, - _java_bytecode_rewriter_input_jar, - _build_config, - ] - outputs = [ _java_bytecode_rewriter_output_jar ] - args = [ - "--script", - rebase_path(_bytecode_rewriter_script, root_build_dir), - "--input-jar", - rebase_path(_java_bytecode_rewriter_input_jar, root_build_dir), - "--output-jar", - rebase_path(_java_bytecode_rewriter_output_jar, root_build_dir), - ] - if (_is_prebuilt) { - args += [ "--is-prebuilt" ] - } - if (defined(_enable_custom_resources) && _enable_custom_resources) { - args += [ "--enable-custom-resources" ] - } - if (_enable_thread_annotations) { - args += [ "--enable-thread-annotations" ] 
+ _previous_output_jar = invoker.jetified_jar_path + } + + # Create the .jar in lib.java for use by java_binary. + if (defined(invoker.host_jar_path)) { + filter_jar("${target_name}_host") { + forward_variables_from(invoker, + [ + "jar_excluded_patterns", + "jar_included_patterns", + "strip_resource_classes", + ]) + deps = _deps + input_jar = _previous_output_jar + output_jar = invoker.host_jar_path + inputs = [] + if (defined(strip_resource_classes) && strip_resource_classes) { + inputs += [ invoker.build_config ] + deps += [ invoker.build_config_dep ] + args += [ "--strip-resource-classes-for=@FileArg($_rebased_build_config:javac:resource_packages)" ] } - if (_enable_bytecode_checks) { - args += [ "--enable-check-class-path" ] + if (defined(invoker.inputs)) { + inputs += invoker.inputs + deps += invoker.input_deps } - args += [ - "--missing-classes-allowlist", - "${_missing_classes_allowlist}", - "--direct-classpath-jars", - "@FileArg($_rebased_build_config:javac:classpath)", - "--sdk-classpath-jars", - "@FileArg($_rebased_build_config:android:sdk_jars)", - "--extra-classpath-jars", - "@FileArg($_rebased_build_config:deps_info:javac_full_classpath)", - ] } - - _deps = [] - _deps = [ ":$_java_bytecode_rewriter_target" ] - _previous_output_jar = _java_bytecode_rewriter_output_jar } - if (invoker.enable_desugar) { - _desugar_target = "${target_name}__desugar" - _desugar_input_jar = _previous_output_jar - _desugar_output_jar = "$target_out_dir/$target_name.desugar.jar" + if (defined(invoker.device_jar_path)) { + if (invoker.enable_desugar) { + _desugar_target = "${target_name}_device__desugar" + _desugar_output_jar = "$target_out_dir/$target_name.desugar.jar" - action_with_pydeps(_desugar_target) { - script = "//build/android/gyp/desugar.py" - deps = _deps - depfile = "$target_gen_dir/$target_name.d" - if (defined(invoker.deps)) { - deps += invoker.deps + action_with_pydeps(_desugar_target) { + script = "//build/android/gyp/desugar.py" + deps = _deps + invoker.classpath_deps + depfile = "$target_gen_dir/$target_name.d" + _desugar_jar = "//third_party/bazel/desugar/Desugar.jar" + + inputs = [ + invoker.build_config, + _previous_output_jar, + _desugar_jar, + ] + outputs = [ _desugar_output_jar ] + args = [ + "--desugar-jar", + rebase_path(_desugar_jar, root_build_dir), + "--input-jar", + rebase_path(_previous_output_jar, root_build_dir), + "--output-jar", + rebase_path(_desugar_output_jar, root_build_dir), + + # Temporarily using java_full_interface_classpath until classpath validation of targets + # is implemented, see http://crbug.com/885273 + "--classpath=@FileArg($_rebased_build_config:deps_info:jetified_full_jar_classpath)", + "--bootclasspath=@FileArg($_rebased_build_config:android:sdk_interface_jars)", + "--depfile", + rebase_path(depfile, root_build_dir), + ] } - _desugar_jar = "//third_party/bazel/desugar/Desugar.jar" - inputs = [ - _build_config, - _desugar_input_jar, - _desugar_jar, - ] - outputs = [ _desugar_output_jar ] - args = [ - "--desugar-jar", - rebase_path(_desugar_jar, root_build_dir), - "--input-jar", - rebase_path(_desugar_input_jar, root_build_dir), - "--output-jar", - rebase_path(_desugar_output_jar, root_build_dir), - - # Temporarily using java_full_interface_classpath until classpath validation of targets - # is implemented, see http://crbug.com/885273 - "--classpath=@FileArg($_rebased_build_config:deps_info:jetified_full_jar_classpath)", - "--bootclasspath=@FileArg($_rebased_build_config:android:sdk_interface_jars)", - "--depfile", - rebase_path(depfile, 
root_build_dir), - ] + _deps = [] + _deps = [ ":$_desugar_target" ] + _previous_output_jar = _desugar_output_jar } - _deps = [] - _deps = [ ":$_desugar_target" ] - _previous_output_jar = _desugar_output_jar - } - - if (_filter_jar) { - _filter_target = "${target_name}__filter" - _filter_input_jar = _previous_output_jar - _filter_output_jar = "$target_out_dir/$target_name.filtered.jar" - - action_with_pydeps(_filter_target) { - script = "//build/android/gyp/filter_zip.py" + if (invoker.jacoco_instrument) { + _filter_jar_target_name = "${target_name}_device__filter_jar" + _filter_jar_output_jar = "$target_out_dir/$target_name.filter.jar" + } else { + _filter_jar_target_name = "${target_name}_device" + _filter_jar_output_jar = invoker.device_jar_path + } + filter_jar(_filter_jar_target_name) { + forward_variables_from(invoker, + [ + "jar_excluded_patterns", + "jar_included_patterns", + "strip_resource_classes", + ]) deps = _deps - if (defined(invoker.deps)) { - deps += invoker.deps - } - inputs = [ - _build_config, - _filter_input_jar, - ] - outputs = [ _filter_output_jar ] - args = [ - "--input", - rebase_path(_filter_input_jar, root_build_dir), - "--output", - rebase_path(_filter_output_jar, root_build_dir), - "--exclude-globs=$_jar_excluded_patterns", - "--include-globs=$_jar_included_patterns", - ] - if (_strip_resource_classes) { + input_jar = _previous_output_jar + output_jar = _filter_jar_output_jar + inputs = [] + if (defined(strip_resource_classes) && strip_resource_classes) { + inputs += [ invoker.build_config ] + deps += [ invoker.build_config_dep ] args += [ "--strip-resource-classes-for=@FileArg($_rebased_build_config:javac:resource_packages)" ] } + if (!defined(invoker.host_jar_path) && defined(invoker.inputs)) { + inputs += invoker.inputs + deps += invoker.input_deps + } } - _deps = [] - _deps = [ ":$_filter_target" ] - _previous_output_jar = _filter_output_jar - } - - if (_jacoco_instrument) { - # Jacoco must run after desugar (or else desugar sometimes fails). - _jacoco_target = "${target_name}__jacoco" - _jacoco_input_jar = _previous_output_jar - _jacoco_output_jar = "$target_out_dir/$target_name.instrumented.jar" + if (invoker.jacoco_instrument) { + # Jacoco must run after desugar (or else desugar sometimes fails). + # It must run after filtering to avoid the same (filtered) class mapping + # to multiple .jar files. + jacoco_instr("${target_name}_device") { + deps = [ ":$_filter_jar_target_name" ] + invoker.jar_deps + forward_variables_from(invoker, + [ + "java_files", + "java_sources_file", + ]) - jacoco_instr(_jacoco_target) { - deps = _deps - if (defined(invoker.deps)) { - deps += invoker.deps + input_jar_path = _filter_jar_output_jar + output_jar_path = invoker.device_jar_path } - - forward_variables_from(invoker, - [ - "java_files", - "java_sources_file", - ]) - - input_jar_path = _jacoco_input_jar - output_jar_path = _jacoco_output_jar } - - _deps = [] - _deps = [ ":$_jacoco_target" ] - _previous_output_jar = _jacoco_output_jar } + } - _output_jar_target = "${target_name}__copy" - - # This is copy_ex rather than copy to ensure that JARs (rather than - # possibly broken symlinks to them) get copied into the output - # directory. 
- copy_ex(_output_jar_target) { - forward_variables_from(invoker, [ "inputs" ]) - deps = _deps - if (defined(invoker.deps)) { - deps += invoker.deps + template("bytecode_processor") { + action_with_pydeps(target_name) { + forward_variables_from(invoker, [ "testonly" ]) + _bytecode_checker_script = "$root_build_dir/bin/helper/bytecode_processor" + script = "//build/android/gyp/bytecode_processor.py" + inputs = [ + invoker.build_config, + invoker.input_jar, + _bytecode_checker_script, + ] + outputs = [ "$target_out_dir/$target_name.bytecode.stamp" ] + deps = + invoker.deps + + [ "//build/android/bytecode:bytecode_processor($default_toolchain)" ] + _rebased_build_config = rebase_path(invoker.build_config, root_build_dir) + args = [ + "--script", + rebase_path(_bytecode_checker_script, root_build_dir), + "--gn-target=${invoker.target_label}", + "--input-jar", + rebase_path(invoker.input_jar, root_build_dir), + "--stamp", + rebase_path(outputs[0], root_build_dir), + "--direct-classpath-jars=@FileArg($_rebased_build_config:javac:classpath)", + "--full-classpath-jars=@FileArg($_rebased_build_config:deps_info:javac_full_classpath)", + "--full-classpath-gn-targets=@FileArg($_rebased_build_config:deps_info:javac_full_classpath_targets)", + ] + if (invoker.is_prebuilt) { + args += [ "--is-prebuilt" ] + } + if (invoker.requires_android) { + args += [ "--sdk-classpath-jars=@FileArg($_rebased_build_config:android:sdk_jars)" ] + } + if (defined(invoker.missing_classes_allowlist)) { + args += [ + "--missing-classes-allowlist=${invoker.missing_classes_allowlist}", + ] } - dest = _output_jar_path - sources = [ _previous_output_jar ] - outputs = [ _output_jar_path ] - } - - group(target_name) { - forward_variables_from(invoker, - [ - "data_deps", - "visibility", - ]) - public_deps = [ ":$_output_jar_target" ] } } @@ -2125,10 +2130,14 @@ if (enable_java_templates) { # final R.java sources for all resource packages the binary depends on. # # Input variables: + # android_sdk_dep: The sdk dep that these resources should compile against. + # # deps: Specifies the input dependencies for this target. # # build_config: Path to the .build_config file corresponding to the target. # + # build_config_dep: Dep target to generate the .build_config file. + # # android_manifest: Path to root manifest for the binary. # # version_code: (optional) @@ -2238,7 +2247,23 @@ if (enable_java_templates) { "visibility", ]) - _deps = invoker.deps + _deps = [ + invoker.android_sdk_dep, + invoker.build_config_dep, + ] + if (defined(invoker.android_manifest_dep)) { + _deps += [ invoker.android_manifest_dep ] + } + foreach(_dep, invoker.deps) { + _target_label = get_label_info(_dep, "label_no_toolchain") + if (filter_exclude([ _target_label ], _java_library_patterns) == [] && + filter_exclude([ _target_label ], _java_resource_patterns) != []) { + # Depend on the java libraries' transitive __assetres target instead. 
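+          # For illustration (the pattern lists here are assumptions, not the
+          # real values): filter_exclude() returns its first list with every
+          # entry matching any pattern removed, so
+          #   filter_exclude([ "//a:foo_java" ], [ "*:*_java" ]) == []
+          # means the dep matches a java library pattern, while a non-empty
+          # result from the resource-pattern check means it is not a
+          # resource target.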
+ _deps += [ "${_target_label}__assetres" ] + } else { + _deps += [ _dep ] + } + } if (defined(invoker.arsc_output)) { _arsc_output = invoker.arsc_output @@ -2506,43 +2531,26 @@ if (enable_java_templates) { _args += [ "--is-bundle-module" ] } - if (defined(invoker.verify_manifest_target_name)) { + if (defined(invoker.expected_android_manifest)) { _expectations_target = - "${invoker.verify_manifest_target_name}_manifest_expectations" + "${invoker.top_target_name}_validate_android_manifest" action_with_pydeps(_expectations_target) { - _target_src_dir = get_label_info(":$target_name", "dir") _normalized_output = "${invoker.android_manifest}.normalized" - _manifest_expectations_failure_filepath = + _failure_file = "$android_configuration_failure_dir/" + - "${invoker.verify_manifest_target_name}.AndroidManifest.failed" + string_replace(invoker.expected_android_manifest, "/", "_") inputs = [ invoker.android_manifest, invoker.build_config, + invoker.expected_android_manifest, ] - if (defined(invoker.expected_manifest_base_expectation)) { + if (defined(invoker.expected_android_manifest_base)) { _args += [ "--expected-manifest-base-expectation", - rebase_path(invoker.expected_manifest_base_expectation, - root_build_dir), + rebase_path(invoker.expected_android_manifest_base, root_build_dir), ] - inputs += [ invoker.expected_manifest_base_expectation ] - - # When invoker.expected_manifest_base_expectation is defined, we use - # a different file extension to indicate that the expected file is a - # diff file. - # In addition, we set expected_manifest_base_expectation only for - # internal targets, therefore, the expected file is in a different - # directory. - _expected_file = - "$_target_src_dir/" + - "${invoker.verify_manifest_target_name}.AndroidManifest" + - ".diff.expected" - } else { - _expected_file = - "$_target_src_dir/java/" + - "${invoker.verify_manifest_target_name}.AndroidManifest.expected" + inputs += [ invoker.expected_android_manifest_base ] } - inputs += [ _expected_file ] if (fail_on_android_expectations) { _args += [ "--fail-on-expectations" ] } @@ -2555,12 +2563,11 @@ if (enable_java_templates) { script = _script args = _args + [ "--expected-file", - rebase_path(_expected_file, root_build_dir), + rebase_path(invoker.expected_android_manifest, root_build_dir), "--android-manifest-normalized", rebase_path(_normalized_output, root_build_dir), "--android-manifest-expectations-failure-file", - rebase_path(_manifest_expectations_failure_filepath, - root_build_dir), + rebase_path(_failure_file, root_build_dir), "--only-verify-expectations", ] } @@ -2636,7 +2643,7 @@ if (enable_java_templates) { _rebased_build_config = rebase_path(invoker.build_config, root_build_dir) args += [ - "--jar-files=@FileArg($_rebased_build_config:deps_info:jar_path)", + "--jar-files=@FileArg($_rebased_build_config:deps_info:unprocessed_jar_path)", "--jar-files=@FileArg($_rebased_build_config:deps_info:javac_full_classpath)", "--in-res-info-path", rebase_path(invoker.res_size_info_path, root_build_dir), @@ -2675,9 +2682,8 @@ if (enable_java_templates) { # keystore_password: Keystore password. # uncompress_shared_libraries: (optional, default false) Whether to store # native libraries inside the APK uncompressed and page-aligned. - # verify_native_libs_and_assets_target_name: (optional): If set, will verify - # the list of included native libraries and assets is consistent with an - # expectation file. 
+ # expected_libs_and_assets: (optional): Verify the list of included native + # libraries and assets is consistent with the given expectation file. template("package_apk") { forward_variables_from(invoker, [ @@ -2803,43 +2809,33 @@ if (enable_java_templates) { _args += [ "--secondary-native-lib-placeholders=$_secondary_native_lib_placeholders" ] } - if (defined(invoker.verify_native_libs_and_assets_target_name)) { + if (defined(invoker.expected_libs_and_assets)) { _expectations_target = - "${invoker.verify_native_libs_and_assets_target_name}" + - "_libs_and_assets_expectations" + "${invoker.top_target_name}_validate_libs_and_assets" action_with_pydeps(_expectations_target) { _stamp = "$target_gen_dir/$target_name.stamp" - _target_src_dir = get_label_info(":$target_name", "dir") - _expected_native_libs_and_assets = - "$_target_src_dir/java/" + - "${invoker.verify_native_libs_and_assets_target_name}." + - "${target_cpu}.native_libs_and_assets.expected" - _native_libs_and_assets_expectation_failure_filepath = + _failure_file = "$android_configuration_failure_dir/" + - "${invoker.verify_native_libs_and_assets_target_name}." + - "${target_cpu}.native_libs_and_assets.failed" - if (fail_on_android_expectations) { - _args += [ "--fail-on-expectations" ] - } - + string_replace(invoker.expected_libs_and_assets, "/", "_") inputs = [ invoker.build_config, - _expected_native_libs_and_assets, + invoker.expected_libs_and_assets, ] deps = [ invoker.build_config_dep ] outputs = [ _stamp ] script = _script - args = - _args + [ - "--expected-native-libs-and-assets", - rebase_path(_expected_native_libs_and_assets, root_build_dir), - "--native-libs-and-assets-expectation-failure-file", - rebase_path(_native_libs_and_assets_expectation_failure_filepath, - root_build_dir), - "--stamp", - rebase_path(_stamp, root_build_dir), - "--only-verify-expectations", - ] + args = _args + [ + "--expected-native-libs-and-assets", + rebase_path(invoker.expected_libs_and_assets, root_build_dir), + "--native-libs-and-assets-expectation-failure-file", + rebase_path(_failure_file, root_build_dir), + "--stamp", + rebase_path(_stamp, root_build_dir), + "--only-verify-expectations", + ] + if (fail_on_android_expectations) { + args += [ "--fail-on-expectations" ] + } } _deps += [ ":$_expectations_target" ] } @@ -2889,7 +2885,6 @@ if (enable_java_templates) { # javac:processor_classes # javac_jar_path: Path to the final output .jar file. # javac_args: Optional list of extra arguments to pass to javac. - # never_goma: When true, ignore the value of use_java_goma. # chromium_code: Whether this corresponds to Chromium-specific sources. # requires_android: True if these sources can only run on Android. # additional_jar_files: Optional list of files to copy into the resulting @@ -3016,8 +3011,7 @@ if (enable_java_templates) { if (invoker.supports_android || invoker.use_turbine) { args += [ "--java-version=1.8" ] } - if ((!defined(invoker.never_goma) || !invoker.never_goma) && - use_java_goma) { + if (use_java_goma) { args += [ "--gomacc-path=$goma_dir/gomacc" ] # Override the default action_pool when goma is enabled. @@ -3047,16 +3041,14 @@ if (enable_java_templates) { if (invoker.enable_errorprone) { # Our custom plugin pulls in the main errorprone dep transitively. 
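      # A note on syntax: @FileArg($path:key:subkey) is not expanded by GN.
      # The wrapped python script reads the JSON .build_config file at $path
      # at build time and substitutes the value found under the given keys
      # (here deps_info -> host_classpath), which avoids plumbing
      # build-time-computed jar paths through GN.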
-        _errorprone_dep =
-            "//tools/android/errorprone_plugin:errorprone_plugin_java"
+        _errorprone_dep = "//tools/android/errorprone_plugin:errorprone_plugin"
         deps += [ _errorprone_dep ]
         _dep_gen_dir = get_label_info(_errorprone_dep, "target_gen_dir")
         _dep_name = get_label_info(_errorprone_dep, "name")
         _rebased_errorprone_buildconfig =
             rebase_path("$_dep_gen_dir/$_dep_name.build_config", root_build_dir)
         args += [
-          "--processorpath=@FileArg($_rebased_errorprone_buildconfig:deps_info:jar_path)",
-          "--processorpath=@FileArg($_rebased_errorprone_buildconfig:deps_info:javac_full_classpath)",
+          "--processorpath=@FileArg($_rebased_errorprone_buildconfig:deps_info:host_classpath)",
           "--enable-errorprone",
         ]
       }
     }
@@ -3083,37 +3075,22 @@
     }
   }

-  template("java_header_group") {
+  template("java_lib_group") {
     forward_variables_from(invoker, [ "testonly" ])
+    _group_name = invoker.group_name
+    not_needed([ "_group_name" ])
     group(target_name) {
       if (defined(invoker.deps)) {
         deps = []
         foreach(_dep, invoker.deps) {
           _target_label = get_label_info(_dep, "label_no_toolchain")
-          if (filter_exclude([ _target_label ], _java_lib_patterns) == [] &&
-              filter_exclude([ _target_label ], _java_lib_exceptions) != []) {
-            # This is a java dep, so replace it.
-            deps += [ "${_target_label}__header" ]
-          } else {
-            deps += [ _dep ]
-          }
-        }
-      }
-    }
-  }
-
-  template("java_impl_group") {
-    forward_variables_from(invoker, [ "testonly" ])
-    group(target_name) {
-      if (defined(invoker.deps)) {
-        deps = []
-        foreach(_dep, invoker.deps) {
-          _target_label = get_label_info(_dep, "label_no_toolchain")
-          if (filter_exclude([ _target_label ], _java_lib_patterns) == [] &&
-              filter_exclude([ _target_label ], _java_lib_exceptions) != []) {
-            # This is a java dep, so replace it.
-            deps += [ "${_target_label}__impl" ]
+          if (filter_exclude([ _target_label ], _java_library_patterns) == [] &&
+              filter_exclude([ _target_label ], _java_resource_patterns) !=
+              []) {
+            # This is a java library dep, so replace it.
+            deps += [ "${_target_label}__${_group_name}" ]
           } else {
+            # Transitive java group targets should also include direct deps.
             deps += [ _dep ]
           }
         }
       }
     }
   }
@@ -3184,7 +3161,6 @@
 #   requires_android: Optional. True if target can only run on Android.
 #   java_files: Optional list of Java source file paths for this target.
 #   javac_args: Optional list of extra arguments to pass to javac.
-#   never_goma: When true, ignore the value of use_java_goma.
 #   errorprone_args: Optional list of extra arguments to pass to errorprone.
 #   srcjar_deps: Optional list of .srcjar targets (not file paths). The Java
 #     source files they contain will also be compiled for this target.
@@ -3194,23 +3170,14 @@
 #     java_files is empty. If not
 #   jar_path: Optional path to a prebuilt .jar file for this target.
 #     Mutually exclusive with java_files and srcjar_deps.
-#   final_jar_path: Optional path to the final output .jar file (after
-#     processing). If not provided, the output will go under
-#     $root_build_dir/lib.java/
-#   output_name: Optional output name for the final jar path. Ignored if
-#     final_jar_path is provided. Otherwise, used to determine the name
-#     of the final jar. If not provided, the default is to use the same
+#   output_name: Optional output name for the final jar path. Used to
+#     determine the name of the final jar. Default is to use the same
 #     name as jar_path, if provided, or main_target_name.
-#   dex_path: Optional. Path to the output dex.jar file for this target.
- # Ignored if !supports_android. # main_class: Main Java class name for 'java_binary', 'junit_binary' and # 'java_annotation_processor' target types. Should not be set for other # ones. # deps: Dependencies for this target. # testonly: True iff target should only be used for tests. - # enable_turbine: If exists then will be used to determine whether to run - # turbine or not. Useful for disabling turbine headers for problematic - # targets. # chromium_code: Optional. Whether this is Chromium-specific code. If not # provided, this is determined automatically, based on the location of # the source files (i.e. anything under third_party/ is not @@ -3244,8 +3211,6 @@ if (enable_java_templates) { # from the final .jar file. # jar_included_patterns: Optional list of .class file patterns to include # in the final .jar file. jar_excluded_patterns take precedence over this. - # min_sdk_version: Optional. The minimum Android SDK version this target - # supports. # # For 'android_apk' and 'android_app_bundle_module' targets only: # @@ -3293,15 +3258,12 @@ if (enable_java_templates) { # TODO(crbug.com/1042017): Remove. not_needed(invoker, [ "no_build_hooks" ]) - # TODO(bjoyce): Remove when able to use. - not_needed(invoker, [ "enable_jetify" ]) set_sources_assignment_filter([]) forward_variables_from(invoker, [ "testonly" ]) _is_prebuilt = defined(invoker.jar_path) _is_annotation_processor = invoker.type == "java_annotation_processor" _is_java_binary = invoker.type == "java_binary" || invoker.type == "junit_binary" - _is_system_library = invoker.type == "system_java_library" _supports_android = defined(invoker.supports_android) && invoker.supports_android _requires_android = @@ -3341,8 +3303,6 @@ if (enable_java_templates) { "main_class cannot be used for target of type ${invoker.type}") } - # Don't enable coverage or lint unless the target has some non-generated - # files. if (defined(invoker.chromium_code)) { _chromium_code = invoker.chromium_code } else { @@ -3357,7 +3317,15 @@ if (enable_java_templates) { } } + # Define build_config_deps which will be a list of targets required to + # build the _build_config. + _build_config = "$target_gen_dir/$_main_target_name.build_config" + _build_config_target_name = + "${_main_target_name}$build_config_target_suffix" + # The only target that might have no prebuilt and no sources is a java_binary. + _build_host_jar = false + _build_device_jar = false if (_is_prebuilt || _has_sources) { if (defined(invoker.output_name)) { _output_name = invoker.output_name @@ -3367,20 +3335,25 @@ if (enable_java_templates) { _output_name = _main_target_name } - # Jar files can be needed at runtime (by Robolectric tests or java binaries), - # so do not put them under gen/. - _target_dir_name = get_label_info(":$_main_target_name", "dir") - _final_jar_path = - "$root_out_dir/lib.java$_target_dir_name/$_output_name.jar" - if (defined(invoker.final_jar_path)) { - _final_jar_path = invoker.final_jar_path + _build_host_jar = _is_java_binary || _is_annotation_processor || + invoker.type == "java_library" + _build_device_jar = + invoker.type != "system_java_library" && _supports_android + if (_build_host_jar) { + # Jar files can be needed at runtime (by Robolectric tests or java binaries), + # so do not put them under obj/. + # TODO(agrieve): I suspect it would be better to use dist_jar for java_binary + # rather than archiving unnecessary .jar files within lib.java. 
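+      # Example (hypothetical target): for a java_library named "foo_java"
+      # declared in //base/android, get_label_info() yields "//base/android",
+      # so with $root_out_dir = out/Release the host jar would land at
+      # out/Release/lib.java/base/android/foo_java.jar.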
+ _target_dir_name = get_label_info(":$_main_target_name", "dir") + _host_processed_jar_path = + "$root_out_dir/lib.java$_target_dir_name/$_output_name.jar" } - - # TODO(wnwen): Enable turbine for non-chromium code when r8 optimizes out - # bridge methods. - _enable_turbine = _has_sources && _chromium_code - if (defined(invoker.enable_turbine)) { - _enable_turbine = invoker.enable_turbine + if (_build_device_jar) { + _device_processed_jar_path = + "$target_out_dir/$_output_name.processed.jar" + _dex_path = "$target_out_dir/$_main_target_name.dex.jar" + _enable_desugar = + !defined(invoker.enable_desugar) || !invoker.enable_desugar } # For static libraries, the javac jar output is created at the intermediate @@ -3388,13 +3361,10 @@ if (enable_java_templates) { # spot that the .build_config knows about. Technically this should be done # for the ijar as well, but this is only used for APK targets where # the ijar path isn't actually used. - _build_config_jar_path = _final_jar_path - _final_ijar_path = get_path_info(_final_jar_path, "dir") + "/" + - get_path_info(_final_jar_path, "name") - if (_enable_turbine) { - _final_ijar_path += ".turbine.jar" + if (_has_sources) { + _final_ijar_path = "$target_out_dir/$_output_name.turbine.jar" } else { - _final_ijar_path += ".ijar.jar" + _final_ijar_path = "$target_out_dir/$_output_name.ijar.jar" } if (_has_sources) { @@ -3408,58 +3378,77 @@ if (enable_java_templates) { } else { _unprocessed_jar_path = _javac_jar_path } - - if (_supports_android) { - _dex_path = "$target_out_dir/$_main_target_name.dex.jar" - if (defined(invoker.dex_path)) { - _dex_path = invoker.dex_path - } + } + if (_build_host_jar || _build_device_jar) { + if (defined(invoker.enable_jetify) && invoker.enable_jetify) { + _jetified_jar_path = "$target_out_dir/${_main_target_name}.jetified.jar" } } - _public_deps = [] - _accumulated_deps = [] - _java_header_deps = [] - _java_impl_deps = [] - _java_full_deps = [] - if (defined(invoker.deps)) { - foreach(_dep, invoker.deps) { - _target_label = get_label_info(_dep, "label_no_toolchain") - if (filter_exclude([ _target_label ], _java_lib_patterns) == [] && - filter_exclude([ _target_label ], _java_lib_exceptions) != []) { - # This is a java dep, so replace it with its header. - _java_header_deps += [ "${_target_label}__header" ] - _java_impl_deps += [ "${_target_label}__impl" ] - _java_full_deps += [ _dep ] - } else { - # Not a java header dep, so no need to replace it with its header. - _accumulated_deps += [ _dep ] + if (_is_prebuilt || _has_sources) { + _java_header_deps = [] + _java_impl_deps = [] + _non_java_deps = [] + if (defined(invoker.deps)) { + foreach(_dep, invoker.deps) { + _target_label = get_label_info(_dep, "label_no_toolchain") + if (filter_exclude([ _target_label ], _java_library_patterns) == [] && + filter_exclude([ _target_label ], _java_resource_patterns) != + []) { + # This is a java dep, so replace it with its header. + _java_header_deps += [ "${_target_label}__header" ] + _java_impl_deps += [ "${_target_label}__impl" ] + } else { + _non_java_deps += [ _dep ] + } } } - } - # TODO(crbug.com/1078484): Don't use desugared .jar files for java binaries. - if (_is_java_binary && enable_bazel_desugar) { - _accumulated_deps += - [ "//third_party/bazel/desugar:desugar_runtime_java" ] - } + # Don't need to depend on the apk-under-test to be packaged. 
+ if (defined(invoker.apk_under_test)) { + _java_header_deps += [ "${invoker.apk_under_test}__java__header" ] + _java_impl_deps += [ "${invoker.apk_under_test}__java__impl" ] + } - if (defined(_final_jar_path)) { + _extra_java_deps = [] _jacoco_instrument = use_jacoco_coverage && _chromium_code && _java_files != [] && - !_is_java_binary && !_is_annotation_processor && - (!defined(invoker.testonly) || !invoker.testonly) + _build_device_jar && (!defined(invoker.testonly) || !invoker.testonly) if (defined(invoker.jacoco_never_instrument)) { _jacoco_instrument = !invoker.jacoco_never_instrument && _jacoco_instrument } if (_jacoco_instrument) { - _java_full_deps += [ "//third_party/jacoco:jacocoagent_java" ] - _java_header_deps += [ "//third_party/jacoco:jacocoagent_java__header" ] - _java_impl_deps += [ "//third_party/jacoco:jacocoagent_java__impl" ] + _extra_java_deps += [ "//third_party/jacoco:jacocoagent_java" ] + } + + _include_android_sdk = _build_device_jar + if (defined(invoker.include_android_sdk)) { + _include_android_sdk = invoker.include_android_sdk + } + if (_include_android_sdk) { + _sdk_java_dep = "//third_party/android_sdk:android_sdk_java" + if (defined(invoker.alternative_android_sdk_dep)) { + _sdk_java_dep = invoker.alternative_android_sdk_dep + } + + # This is an android_system_java_prebuilt target, so no headers. + _extra_java_deps += [ _sdk_java_dep ] } + + _classpath_deps = _java_header_deps + _non_java_deps + _extra_java_deps + + [ ":$_build_config_target_name" ] + _full_classpath_deps = + _java_impl_deps + _non_java_deps + _extra_java_deps + + [ ":$_build_config_target_name" ] } + # Often needed, but too hard to figure out when ahead of time. + not_needed([ + "_classpath_deps", + "_full_classpath_deps", + ]) + if (_java_files != []) { _java_sources_file = "$target_gen_dir/$_main_target_name.sources" if (defined(invoker.java_sources_file)) { @@ -3468,28 +3457,6 @@ if (enable_java_templates) { write_file(_java_sources_file, rebase_path(_java_files, root_build_dir)) } - _include_android_sdk = !_is_system_library && _supports_android - if (defined(invoker.include_android_sdk)) { - _include_android_sdk = invoker.include_android_sdk - } - if (_include_android_sdk) { - _sdk_java_dep = "//third_party/android_sdk:android_sdk_java" - if (defined(invoker.alternative_android_sdk_dep)) { - _sdk_java_dep = invoker.alternative_android_sdk_dep - } - - # This is an android_system_java_prebuilt target, so no headers. - _accumulated_deps += [ _sdk_java_dep ] - } - _jetified_jar_path = - "$target_out_dir/${target_name}__process_prebuilt.jetified.jar" - - # Define build_config_deps which will be a list of targets required to - # build the _build_config. 
- _build_config = "$target_gen_dir/$_main_target_name.build_config" - _build_config_target_name = - "${_main_target_name}$build_config_target_suffix" - write_build_config(_build_config_target_name) { forward_variables_from(invoker, [ @@ -3498,6 +3465,7 @@ if (enable_java_templates) { "gradle_treat_as_prebuilt", "input_jars_paths", "main_class", + "public_target_label", "proguard_configs", "proguard_enabled", "proguard_mapping_path", @@ -3545,9 +3513,17 @@ if (enable_java_templates) { chromium_code = _chromium_code build_config = _build_config is_prebuilt = _is_prebuilt - jetified_jar_path = _jetified_jar_path - possible_config_deps = _java_full_deps + _accumulated_deps - skip_jetify = defined(invoker.skip_jetify) && invoker.skip_jetify + if (defined(invoker.enable_jetify) && invoker.enable_jetify) { + jetified_jar_path = _jetified_jar_path + } + + possible_config_deps = [] + if (defined(_extra_java_deps)) { + possible_config_deps = _extra_java_deps + } + if (defined(invoker.deps)) { + possible_config_deps += invoker.deps + } if (defined(apk_under_test)) { possible_config_deps += [ apk_under_test ] } @@ -3556,12 +3532,15 @@ if (enable_java_templates) { bypass_platform_checks = defined(invoker.bypass_platform_checks) && invoker.bypass_platform_checks - if (defined(_final_jar_path)) { - jar_path = _build_config_jar_path + if (_is_prebuilt || _has_sources) { ijar_path = _final_ijar_path unprocessed_jar_path = _unprocessed_jar_path } - if (defined(_dex_path)) { + if (_build_host_jar) { + host_jar_path = _host_processed_jar_path + } + if (_build_device_jar) { + device_jar_path = _device_processed_jar_path dex_path = _dex_path } if (_java_files != []) { @@ -3576,35 +3555,22 @@ if (enable_java_templates) { } if (defined(invoker.include_java_resources) && invoker.include_java_resources) { + java_resources_jar = _unprocessed_jar_path if (defined(invoker.jar_path)) { # Use original jar_path because _jar_path points to a library without # resources. - java_resources_jar = invoker.jar_path } else { - java_resources_jar = _final_jar_path + java_resources_jar = _device_processed_jar_path } } } - # Don't need to depend on the apk-under-test to be packaged. - if (defined(invoker.apk_under_test)) { - # No need to add to _java_full_deps since that is only used for - # write_build_config. - _java_header_deps += [ "${invoker.apk_under_test}__java__header" ] - _java_impl_deps += [ "${invoker.apk_under_test}__java__impl" ] - } - if (defined(invoker.android_manifest_dep)) { - _accumulated_deps += [ invoker.android_manifest_dep ] - } - if (defined(invoker.annotation_processor_deps)) { - # We need the full annotation processors rather than just the headers. - _accumulated_deps += invoker.annotation_processor_deps - } - - if (_has_sources || _is_prebuilt) { + if (_is_prebuilt || _has_sources) { _header_target_name = "${target_name}__header" } + _public_deps = [] + _analysis_public_deps = [] if (_has_sources) { if (defined(invoker.enable_errorprone)) { _enable_errorprone = invoker.enable_errorprone @@ -3635,8 +3601,7 @@ if (enable_java_templates) { if (!defined(deps)) { deps = [] } - deps += _java_header_deps + _accumulated_deps + - [ ":$_build_config_target_name" ] + deps += _classpath_deps # android_apk and junit_binary pass R.java srcjars via srcjar_deps. 
if (_type == "java_library" && _requires_android) { @@ -3652,27 +3617,27 @@ if (enable_java_templates) { "provider_configurations", "javac_args", "jar_excluded_patterns", - "never_goma", ] + _annotation_processor_deps = [] + if (defined(invoker.annotation_processor_deps)) { + _annotation_processor_deps = invoker.annotation_processor_deps + } - if (_enable_turbine) { - compile_java_helper(_header_target_name) { - forward_variables_from(invoker, _compile_java_forward_variables) - use_turbine = true - output_jar_path = _final_ijar_path - generated_jar_path = _generated_jar_path - } + compile_java_helper(_header_target_name) { + forward_variables_from(invoker, _compile_java_forward_variables) + use_turbine = true + output_jar_path = _final_ijar_path + generated_jar_path = _generated_jar_path + deps = _annotation_processor_deps } + _public_deps += [ ":$_header_target_name" ] - _analysis_public_deps = [] _compile_java_target = "${_main_target_name}__compile_java" compile_java_helper(_compile_java_target) { forward_variables_from(invoker, _compile_java_forward_variables) output_jar_path = _javac_jar_path - if (_enable_turbine) { - deps = [ ":$_header_target_name" ] - generated_jar_path = _generated_jar_path - } + deps = [ ":$_header_target_name" ] + generated_jar_path = _generated_jar_path } if (_enable_errorprone) { _compile_java_errorprone_target = "${_main_target_name}__errorprone" @@ -3685,121 +3650,129 @@ if (enable_java_templates) { } javac_args += invoker.errorprone_args } - if (_enable_turbine) { - deps = [ ":$_header_target_name" ] - generated_jar_path = _generated_jar_path - } + deps = [ ":$_header_target_name" ] + generated_jar_path = _generated_jar_path output_jar_path = "$target_out_dir/$target_name.errorprone.stamp" } _analysis_public_deps += [ ":$_compile_java_errorprone_target" ] } } # _has_sources - if (_has_sources || _is_prebuilt) { - if (!_enable_turbine) { - generate_interface_jar(_header_target_name) { - # Always used the unfiltered .jar to create the interface jar so that - # other targets will resolve filtered classes when depending on - # BuildConfig, NativeLibraries, etc. - input_jar = _unprocessed_jar_path - output_jar = _final_ijar_path - - # Some prebuilts have java deps (e.g. //third_party/proguard:retrace_java). - deps = _java_header_deps + _accumulated_deps - if (_has_sources) { - deps += [ ":$_compile_java_target" ] - } + if (_is_prebuilt) { + generate_interface_jar(_header_target_name) { + # Always used the unfiltered .jar to create the interface jar so that + # other targets will resolve filtered classes when depending on + # BuildConfig, NativeLibraries, etc. + input_jar = _unprocessed_jar_path + output_jar = _final_ijar_path + + # ijar does not require classpath, but must declare these as deps so + # that they are transitive deps for targets that depend on this + # target. If we can change compile & desugar steps to use direct + # interface classpath rather than full interface classpath, then this + # could just be _non_java_deps. + deps = _classpath_deps + if (_has_sources) { + deps += [ ":$_compile_java_target" ] } } _public_deps += [ ":$_header_target_name" ] } - if (defined(_final_jar_path)) { - if (_is_system_library) { - # These deps are only needed for the process_java_prebuilt template. - not_needed([ "_java_impl_deps" ]) - - _copy_system_library_target_name = "${target_name}__copy_system_library" - - # Use copy_ex rather than copy to ensure that we copy symlink targets - # rather than the symlink itself. 
- copy_ex(_copy_system_library_target_name) { - sources = [ _unprocessed_jar_path ] - dest = _final_jar_path - outputs = [ _final_jar_path ] + if (_build_host_jar || _build_device_jar) { + _process_prebuilt_target_name = "${target_name}_process" + process_java_prebuilt(_process_prebuilt_target_name) { + forward_variables_from(invoker, + [ + "jar_excluded_patterns", + "jar_included_patterns", + ]) + build_config = _build_config + build_config_dep = ":$_build_config_target_name" + input_jar_path = _unprocessed_jar_path + jar_deps = _non_java_deps + if (_has_sources) { + jar_deps += [ ":$_compile_java_target" ] } - _public_deps += [ ":$_copy_system_library_target_name" ] - } else { - _enable_desugar = (!defined(invoker.enable_desugar) || - !invoker.enable_desugar) && _supports_android - _process_prebuilt_target_name = "${target_name}__process_prebuilt" - process_java_prebuilt(_process_prebuilt_target_name) { - forward_variables_from(invoker, - [ - "enable_bytecode_checks", - "enable_bytecode_rewriter", - "missing_classes_allowlist", - "jar_excluded_patterns", - "jar_included_patterns", - "skip_jetify", - ]) - enable_desugar = _enable_desugar && enable_bazel_desugar - is_prebuilt = _is_prebuilt - build_config = _build_config - input_jar_path = _unprocessed_jar_path + if (_build_host_jar) { + host_jar_path = _host_processed_jar_path + } + if (_build_device_jar) { + device_jar_path = _device_processed_jar_path jacoco_instrument = _jacoco_instrument if (_jacoco_instrument) { java_files = _java_files java_sources_file = _java_sources_file } - output_jar_path = _final_jar_path - deps = _java_impl_deps + _accumulated_deps + - [ ":$_build_config_target_name" ] - if (_has_sources) { - deps += [ ":$_compile_java_target" ] - } - - # proguard_configs listed on java_library targets need to be marked - # as inputs to at least one action so that "gn analyze" will know - # about them. Although ijar doesn't use them, it's a convenient spot - # to list them. - # https://crbug.com/827197 - if (defined(invoker.proguard_configs)) { - inputs = invoker.proguard_configs - if (!defined(deps)) { - deps = [] - } - deps += _srcjar_deps # For the aapt-generated proguard rules. + enable_desugar = _enable_desugar && enable_bazel_desugar + if (enable_desugar) { + classpath_deps = _full_classpath_deps } } - _public_deps += [ ":$_process_prebuilt_target_name" ] + if (defined(invoker.enable_jetify) && invoker.enable_jetify) { + jetified_jar_path = _jetified_jar_path + } - if (defined(_dex_path)) { - dex("${target_name}__dex") { - input_class_jars = [ _final_jar_path ] + # proguard_configs listed on java_library targets need to be marked + # as inputs to at least one action so that "gn analyze" will know + # about them. Although ijar doesn't use them, it's a convenient spot + # to list them. + # https://crbug.com/827197 + if (defined(invoker.proguard_configs)) { + inputs = invoker.proguard_configs + input_deps = _non_java_deps + _srcjar_deps # For the aapt-generated + # proguard rules. 
+ } + } + if (_build_host_jar) { + _public_deps += [ ":${_process_prebuilt_target_name}_host" ] + } + if (_build_device_jar) { + _public_deps += [ ":${_process_prebuilt_target_name}_device" ] + } - enable_desugar = _enable_desugar + if (!defined(invoker.enable_bytecode_checks) || + invoker.enable_bytecode_checks) { + _bytecode_checks_target = "${target_name}__validate_classpath" + bytecode_processor(_bytecode_checks_target) { + forward_variables_from(invoker, [ "missing_classes_allowlist" ]) + deps = _full_classpath_deps + if (_has_sources) { + deps += [ ":$_compile_java_target" ] + } + requires_android = _requires_android + target_label = + get_label_info(":${invoker.target_name}", "label_no_toolchain") + input_jar = _unprocessed_jar_path + build_config = _build_config + is_prebuilt = _is_prebuilt + } + _analysis_public_deps += [ ":$_bytecode_checks_target" ] + } + } - # There's no value in per-class dexing prebuilts since they never - # change just one class at a time. - disable_incremental = _is_prebuilt - output = _dex_path - deps = [ ":$_process_prebuilt_target_name" ] + if (_build_device_jar) { + dex("${target_name}__dex") { + input_class_jars = [ _device_processed_jar_path ] + enable_desugar = _enable_desugar - if (enable_desugar && !enable_bazel_desugar) { - build_config = _build_config - final_ijar_path = _final_ijar_path - deps += _java_header_deps + [ ":$_header_target_name" ] - } + # There's no value in per-class dexing prebuilts since they never + # change just one class at a time. + disable_incremental = _is_prebuilt + output = _dex_path + deps = [ ":${_process_prebuilt_target_name}_device" ] - # For library targets, we do not need a proper main dex list, but do - # need to allow multiple dex files. - enable_multidex = false - enable_library_multidex = true - } - _public_deps += [ ":${target_name}__dex" ] + if (enable_desugar && !enable_bazel_desugar) { + # Desugaring with D8 requires full classpath. + build_config = _build_config + final_ijar_path = _final_ijar_path + deps += _classpath_deps + [ ":$_header_target_name" ] } + + enable_multidex = false + is_library = true } + _public_deps += [ ":${target_name}__dex" ] } if (_is_java_binary) { @@ -3829,6 +3802,11 @@ if (enable_java_templates) { public_deps = _public_deps } + java_lib_group("${target_name}__assetres") { + forward_variables_from(invoker, [ "deps" ]) + group_name = "assetres" + } + group(target_name) { forward_variables_from(invoker, [ @@ -3873,13 +3851,17 @@ if (enable_java_templates) { # packaged into each module. # is_multi_abi: If true will add a library placeholder for the missing ABI if # either the primary or the secondary ABI has no native libraries set. -# verify_native_libs_and_assets_target_name: (optional): If set, will verify -# the list of included native libraries and assets is consistent with an -# expectation file. +# expected_libs_and_assets: (optional): Verify the list of included native +# libraries and assets is consistent with the given expectation file. +# proguard_enabled: Optional. True if proguarding is enabled for this +# bundle. Default is to enable this only for release builds. Note that +# this will always perform synchronized proguarding. 
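# Example (sketch only; the argument values are hypothetical and this is not
# a complete argument list):
#   create_android_app_bundle_module("foo__base__create") {
#     module_name = "base"
#     build_config = "$target_gen_dir/foo.build_config"
#     native_libraries_config = "$target_gen_dir/foo.native_libs.json"
#     proguard_enabled = !is_java_debug
#   }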
template("create_android_app_bundle_module") { _rebased_build_config = rebase_path(invoker.build_config, root_build_dir) _rebased_native_libraries_config = rebase_path(invoker.native_libraries_config, root_build_dir) + _proguard_enabled = + defined(invoker.proguard_enabled) && invoker.proguard_enabled forward_variables_from(invoker, [ @@ -3950,8 +3932,9 @@ template("create_android_app_bundle_module") { _args += [ "--dex-file=@FileArg($_rebased_build_config:final_dex:path)" ] } - # TODO(https://crbug.com/1056751): Add support for proguarding jdk libs. - if (enable_jdk_library_desugaring && invoker.module_name == "base") { + # The library is imported via proguard when proguard is enabled. + if (!_proguard_enabled && enable_jdk_library_desugaring && + invoker.module_name == "base") { _all_jdk_libs = "//build/android:all_jdk_libs" _deps += [ _all_jdk_libs ] _jdk_libs_dex = @@ -3963,26 +3946,14 @@ template("create_android_app_bundle_module") { ] } - if (defined(invoker.verify_native_libs_and_assets_target_name)) { - _expectations_target = - "${invoker.verify_native_libs_and_assets_target_name}" + - "_libs_and_assets_expectations" + if (defined(invoker.expected_libs_and_assets)) { + _expectations_target = "${invoker.top_target_name}_validate_libs_and_assets" action_with_pydeps(_expectations_target) { _stamp = "$target_gen_dir/$target_name.stamp" - _target_src_dir = get_label_info(":$target_name", "dir") - _expected_native_libs_and_assets = - "$_target_src_dir/java/" + - "${invoker.verify_native_libs_and_assets_target_name}." + - "${target_cpu}.native_libs_and_assets.expected" - _native_libs_and_assets_expectation_failure_filepath = - "$android_configuration_failure_dir/" + - "${invoker.verify_native_libs_and_assets_target_name}." + - "${target_cpu}.native_libs_and_assets.failed" - if (fail_on_android_expectations) { - _args += [ "--fail-on-expectations" ] - } + _failure_file = "$android_configuration_failure_dir/" + + string_replace(invoker.expected_libs_and_assets, "/", "_") inputs = [ - _expected_native_libs_and_assets, + invoker.expected_libs_and_assets, invoker.build_config, invoker.native_libraries_config, ] @@ -3998,14 +3969,16 @@ template("create_android_app_bundle_module") { script = _script args = _args + [ "--expected-native-libs-and-assets", - rebase_path(_expected_native_libs_and_assets, root_build_dir), + rebase_path(invoker.expected_libs_and_assets, root_build_dir), "--native-libs-and-assets-expectation-failure-file", - rebase_path(_native_libs_and_assets_expectation_failure_filepath, - root_build_dir), + rebase_path(_failure_file, root_build_dir), "--stamp", rebase_path(_stamp, root_build_dir), "--only-verify-expectations", ] + if (fail_on_android_expectations) { + args += [ "--fail-on-expectations" ] + } } _deps += [ ":$_expectations_target" ] } @@ -4066,7 +4039,7 @@ template("dexsplitter") { "--dex-dest=@FileArg($_rebased_module_build_config:final_dex:path)", ] if (!defined(invoker.feature_jars_args)) { - args += [ "--feature-jars=@FileArg($_rebased_module_build_config:deps_info:java_runtime_classpath)" ] + args += [ "--feature-jars=@FileArg($_rebased_module_build_config:deps_info:device_classpath)" ] } deps += [ _feature_module.build_config_target ] } diff --git a/chromium/build/config/android/rules.gni b/chromium/build/config/android/rules.gni index cd0258af22c..412041fe56e 100644 --- a/chromium/build/config/android/rules.gni +++ b/chromium/build/config/android/rules.gni @@ -524,17 +524,12 @@ if (enable_java_templates) { # The sources aren't compiled so don't check their 
dependencies.
      check_includes = false
      script = "//build/android/gyp/java_cpp_enum.py"
-      depfile = "$target_gen_dir/$target_name.d"

      _srcjar_path = "${target_gen_dir}/${target_name}.srcjar"
      _rebased_srcjar_path = rebase_path(_srcjar_path, root_build_dir)
      _rebased_sources = rebase_path(invoker.sources, root_build_dir)

-      args = [
-        "--depfile",
-        rebase_path(depfile, root_build_dir),
-        "--srcjar=$_rebased_srcjar_path",
-      ] + _rebased_sources
+      args = [ "--srcjar=$_rebased_srcjar_path" ] + _rebased_sources
      outputs = [ _srcjar_path ]
    }
  }
@@ -708,12 +703,16 @@
        _cpu_family,
      ]
      if (invoker.use_final_fields) {
+        # Write native_libraries_list_file via depfile rather than specifying it
+        # as a dep in order to allow R8 to run in parallel with native compilation.
+        depfile = "$target_gen_dir/$target_name.d"
        args += [
          "--final",
+          "--depfile",
+          rebase_path(depfile, root_build_dir),
          "--native-libraries-list",
          rebase_path(invoker.native_libraries_list_file, root_build_dir),
        ]
-        inputs = [ invoker.native_libraries_list_file ]
        if (defined(invoker.version_number)) {
          args += [
            "--version-number",
@@ -1123,12 +1122,9 @@
  #     }
  #   }
  template("java_group") {
-    forward_variables_from(invoker,
-                           [
-                             "testonly",
-                             "input_jars_paths",
-                           ])
+    forward_variables_from(invoker, [ "testonly" ])
    write_build_config("$target_name$build_config_target_suffix") {
+      forward_variables_from(invoker, [ "input_jars_paths" ])
      type = "group"
      build_config = "$target_gen_dir/${invoker.target_name}.build_config"
      supports_android = true
@@ -1136,11 +1132,16 @@
        possible_config_deps = invoker.deps
      }
    }
-    java_header_group("${target_name}__header") {
-      forward_variables_from(invoker, [ "deps" ])
-    }
-    java_impl_group("${target_name}__impl") {
-      forward_variables_from(invoker, [ "deps" ])
+    foreach(_group_name,
+            [
+              "header",
+              "impl",
+              "assetres",
+            ]) {
+      java_lib_group("${target_name}__${_group_name}") {
+        forward_variables_from(invoker, [ "deps" ])
+        group_name = _group_name
+      }
    }
    group(target_name) {
      forward_variables_from(invoker, "*")
@@ -1335,13 +1336,18 @@
    _build_config_target_name = "$target_name$build_config_target_suffix"
    _deps = [
      "//testing/android/junit:junit_test_support",
+      "//third_party/android_deps:robolectric_all_java",
      "//third_party/junit",
      "//third_party/mockito:mockito_java",
-      "//third_party/robolectric:robolectric_all_java",
    ]
    if (defined(invoker.deps)) {
      _deps += invoker.deps
    }
+    if (defined(invoker.alternative_android_sdk_dep)) {
+      _android_sdk_dep = invoker.alternative_android_sdk_dep
+    } else {
+      _android_sdk_dep = "//third_party/android_sdk:android_sdk_java"
+    }

    # a package name or a manifest is required to have resources. This is
    # added so that junit tests that do not care about the package name can
@@ -1356,7 +1362,9 @@
    _compile_resources_target = "${target_name}__compile_resources"
    compile_resources(_compile_resources_target) {
      forward_variables_from(invoker, [ "android_manifest" ])
-      deps = _deps + [ ":$_build_config_target_name" ]
+      deps = _deps
+      android_sdk_dep = _android_sdk_dep
+      build_config_dep = ":$_build_config_target_name"
      build_config = _build_config
      if (defined(_package_name)) {
        rename_manifest_package = _package_name
@@ -1469,7 +1477,6 @@
  #     [ [ path_to_file, path_to_put_in_jar ] ]
  #
  #   javac_args: Additional arguments to pass to javac.
-  #   never_goma: When true, ignore the value of use_java_goma.
# errorprone_args: Additional arguments to pass to errorprone. # # data_deps, testonly @@ -1535,11 +1542,6 @@ if (enable_java_templates) { # deps = [ ":my_java_lib" ] # output = "$root_build_dir/MyLibrary.jar" # } - # dist_jar("sideloaded_dex") { - # deps = [ ":my_java_lib" ] - # output = "$root_build_dir/MyLibrary.jar" - # dex_path = "$root_build_dir/MyLibrary.dex" - # } template("dist_jar") { # TODO(crbug.com/1042017): Remove. not_needed(invoker, [ "no_build_hooks" ]) @@ -1620,12 +1622,20 @@ if (enable_java_templates) { } else if (_use_unprocessed_jars) { args += [ "--input-zips=@FileArg($_rebased_build_config:deps_info:javac_full_classpath)" ] } else { - args += [ "--input-zips=@FileArg($_rebased_build_config:deps_info:java_runtime_classpath)" ] + args += [ "--input-zips=@FileArg($_rebased_build_config:deps_info:device_classpath)" ] } } + _excludes = [] if (defined(invoker.jar_excluded_patterns)) { - args += - [ "--input-zips-excluded-globs=${invoker.jar_excluded_patterns}" ] + _excludes += invoker.jar_excluded_patterns + } + if (_use_interface_jars) { + # Turbine adds files like: META-INF/TRANSITIVE/.../Foo.class + # These confuse proguard: https://crbug.com/1081443 + _excludes += [ "META-INF/*" ] + } + if (_excludes != []) { + args += [ "--input-zips-excluded-globs=$_excludes" ] } } } @@ -1634,21 +1644,15 @@ if (enable_java_templates) { # # Variables: # output: Path to the output dex. + # proguard_enabled: Whether to enable R8. # proguard_configs: List of proguard configs. - # proguard_jar_path: The path to proguard.jar you wish to use. If undefined, - # the proguard used will be the checked in one in //third_party/proguard. # # Example # dist_dex("lib_fatjar") { # deps = [ ":my_java_lib" ] # output = "$root_build_dir/MyLibrary.jar" # } - # dist_jar("sideloaded_dex") { - # deps = [ ":my_java_lib" ] - # output = "$root_build_dir/MyLibrary.jar" - # dex_path = "$root_build_dir/MyLibrary.dex" - # } - template("proguarded_dist_dex") { + template("dist_dex") { _deps = [ "//third_party/android_sdk:android_sdk_java" ] if (defined(invoker.deps)) { _deps += invoker.deps @@ -1659,10 +1663,13 @@ if (enable_java_templates) { write_build_config(_build_config_target_name) { type = "dist_jar" - forward_variables_from(invoker, [ "proguard_configs" ]) + forward_variables_from(invoker, + [ + "proguard_configs", + "proguard_enabled", + ]) supports_android = true requires_android = true - proguard_enabled = true possible_config_deps = _deps build_config = _build_config } @@ -1670,15 +1677,28 @@ if (enable_java_templates) { _deps += [ ":$_build_config_target_name" ] dex(target_name) { - deps = _deps - build_config = _build_config - proguard_enabled = true forward_variables_from(invoker, [ + "data", + "data_deps", + "visibility", + "testonly", "proguard_configs", + "proguard_enabled", "min_sdk_version", ]) + deps = _deps + build_config = _build_config + enable_multidex = false output = invoker.output + if (defined(proguard_enabled) && proguard_enabled) { + # When trying to build a stand-alone .dex, don't add in jdk_libs_dex. + supports_jdk_library_desugaring = false + } else { + _rebased_build_config = rebase_path(_build_config, root_build_dir) + input_dex_filearg = + "@FileArg(${_rebased_build_config}:final_dex:all_dex_files)" + } } } @@ -1807,8 +1827,6 @@ if (enable_java_templates) { # Supports all variables of java_library(), plus: # deps: In addition to defining java deps, this can also include # android_assets() and android_resources() targets. 
- # dex_path: If set, the resulting .dex.jar file will be placed under this - # path. # alternative_android_sdk_ijar: if set, the given android_sdk_ijar file # replaces the default android_sdk_ijar. # alternative_android_sdk_ijar_dep: the target that generates @@ -2073,8 +2091,8 @@ if (enable_java_templates) { # apk or module. # resources_config_path: Path to the aapt2 optimize config file that tags # resources with acceptable/non-acceptable optimizations. - # verify_manifest: Enables verification of expected merged manifest based - # on a golden file. + # expected_android_manifest: Enables verification of expected merged + # manifest based on a golden file. # resource_ids_provider_dep: If passed, this target will use the resource # IDs generated by {resource_ids_provider_dep}__compile_res during # resource compilation. @@ -2113,14 +2131,13 @@ if (enable_java_templates) { # main_component_library: Specifies the name of the base component's library # in a component build. If given, the system will find dependent native # libraries at runtime by inspecting this library (optional). - # verify_native_libs_and_assets: (optional): If true, will verify the list - # of included native libraries and assets is consistent with an + # expected_libs_and_assets: (optional): Verify the list + # of included native libraries and assets is consistent with the given # expectation file. template("android_apk_or_module") { forward_variables_from(invoker, [ "testonly" ]) assert(defined(invoker.android_manifest)) - _out_dir = "$target_out_dir/$target_name" - _base_path = "$_out_dir/$target_name" + _base_path = "$target_out_dir/$target_name/$target_name" _build_config = "$target_gen_dir/$target_name.build_config" _build_config_target = "$target_name$build_config_target_suffix" @@ -2133,9 +2150,6 @@ if (enable_java_templates) { _target_sdk_version = invoker.target_sdk_version } - # JUnit tests use resource zip files. These must not be put in gen/ - # directory or they will not be available to tester bots. 
- _jar_path = "$_base_path.jar" _template_name = target_name _is_bundle_module = @@ -2154,9 +2168,11 @@ if (enable_java_templates) { } _short_resource_paths = - defined(invoker.short_resource_paths) && invoker.short_resource_paths + defined(invoker.short_resource_paths) && invoker.short_resource_paths && + enable_arsc_obfuscation _strip_resource_names = - defined(invoker.strip_resource_names) && invoker.strip_resource_names + defined(invoker.strip_resource_names) && invoker.strip_resource_names && + enable_arsc_obfuscation _optimize_resources = _strip_resource_names || _short_resource_paths if (!_is_bundle_module && _short_resource_paths) { @@ -2330,8 +2346,7 @@ if (enable_java_templates) { _is_static_library_provider = defined(invoker.static_library_dependent_targets) && _proguard_enabled if (_is_static_library_provider) { - _static_library_sync_dex_path = - "$_out_dir/static_library_synchronized_proguard.r8dex.jar" + _static_library_sync_dex_path = "$_base_path.synchronized.r8dex.jar" _resource_ids_provider_deps = [] foreach(_target, invoker.static_library_dependent_targets) { if (_target.is_resource_ids_provider) { @@ -2405,9 +2420,7 @@ if (enable_java_templates) { _final_deps = [] - _enable_main_dex_list = - _enable_multidex && - (!defined(invoker.min_sdk_version) || invoker.min_sdk_version < 21) + _enable_main_dex_list = _enable_multidex && _min_sdk_version < 21 if (_enable_main_dex_list) { _generated_proguard_main_dex_config = "$_base_path.resources.main-dex-proguard.txt" @@ -2453,6 +2466,8 @@ if (enable_java_templates) { [ "aapt_locale_allowlist", "app_as_shared_lib", + "expected_android_manifest", + "expected_android_manifest_base", "manifest_package", "max_sdk_version", "no_xml_namespaces", @@ -2466,22 +2481,19 @@ if (enable_java_templates) { "resources_config_path", "shared_resources", "shared_resources_allowlist_locales", - "short_resource_paths", - "strip_resource_names", "support_zh_hk", ]) + short_resource_paths = _short_resource_paths + strip_resource_names = _strip_resource_names android_manifest = _android_manifest + android_manifest_dep = ":$_merge_manifest_target" version_code = _version_code version_name = _version_name min_sdk_version = _min_sdk_version target_sdk_version = _target_sdk_version - if (defined(invoker.verify_manifest) && invoker.verify_manifest && - !is_java_debug) { - verify_manifest_target_name = _template_name - build_config_dep = ":$_build_config_target" - android_manifest_dep = ":$_merge_manifest_target" - forward_variables_from(invoker, - [ "expected_manifest_base_expectation" ]) + + if (defined(expected_android_manifest)) { + top_target_name = _template_name } if (defined(_resource_ids_provider_dep)) { @@ -2503,11 +2515,9 @@ if (enable_java_templates) { } build_config = _build_config - deps = _deps + [ - ":$_merge_manifest_target", - ":$_build_config_target", - _android_sdk_dep, - ] + build_config_dep = ":$_build_config_target" + android_sdk_dep = _android_sdk_dep + deps = _deps # The static library uses the R.txt files generated by the # static_library_dependent_targets when generating the final R.java file. 
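A sketch of how a client target opts into the renamed expectation checks; the
target and file names here are hypothetical, and other required apk arguments
are omitted:

  android_apk("foo_apk") {
    android_manifest = "AndroidManifest.xml"
    expected_android_manifest = "expectations/AndroidManifest.expected"
    expected_libs_and_assets = "expectations/libs_and_assets.expected"
    # ... remaining apk arguments as before ...
  }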
@@ -2622,16 +2632,17 @@ if (enable_java_templates) { if (_generate_native_libraries_java) { write_native_libraries_java("${_template_name}__native_libraries") { forward_variables_from(invoker, [ "main_component_library" ]) - deps = [ - ":${_template_name}__secondary_abi_shared_library_list", - ":${_template_name}__shared_library_list", - ] + deps = [] if (defined(invoker.native_lib_version_rule)) { deps += [ invoker.native_lib_version_rule ] } if (defined(invoker.native_lib_version_arg)) { version_number = invoker.native_lib_version_arg } + + # Do not add a dep on the generated_file target in order to avoid having + # to build the native libraries before this target. The dependency is + # instead captured via a depfile. if (_native_libs_deps != []) { native_libraries_list_file = _shared_library_list_file } else { @@ -2750,7 +2761,6 @@ if (enable_java_templates) { "javac_args", "native_lib_placeholders", "processor_args_javac", - "skip_jetify", "secondary_abi_loadable_modules", "secondary_native_lib_placeholders", "sources", @@ -2784,7 +2794,6 @@ if (enable_java_templates) { supports_android = true requires_android = true srcjar_deps = _srcjar_deps - final_jar_path = _jar_path if (defined(_final_dex_path)) { final_dex_path = _final_dex_path } @@ -2824,10 +2833,9 @@ if (enable_java_templates) { } } - # Don't depend on the runtime_deps target in order to avoid having to - # build the native libraries just to create the .build_config file. - # The dep is unnecessary since the runtime_deps file is created by gn gen - # and the runtime_deps file is added to write_build_config.py's depfile. + # Do not add a dep on the generated_file target in order to avoid having + # to build the native libraries before this target. The dependency is + # instead captured via a depfile. if (_native_libs_deps != []) { shared_libraries_runtime_deps_file = _shared_library_list_file } @@ -2866,13 +2874,6 @@ if (enable_java_templates) { if (_uses_static_library_synchronized_proguard) { _final_dex_target_dep = "${invoker.static_library_provider}__dexsplitter" } else if (_is_bundle_module && _proguard_enabled) { - # A small sanity check to help developers with a subtle point! - assert( - !defined(invoker.proguard_jar_path), - "proguard_jar_path should not be used for app bundle modules " + - "when proguard is enabled. Pass it to the android_app_bundle() " + - "target instead!") - _final_deps += [ ":$_java_target" ] } else if (_incremental_apk) { if (defined(invoker.negative_main_dex_globs)) { @@ -2910,16 +2911,21 @@ if (enable_java_templates) { ":$_java_target", ] if (_proguard_enabled) { - forward_variables_from(invoker, [ "proguard_jar_path" ]) deps += _deps + [ ":$_compile_resources_target" ] proguard_mapping_path = _proguard_mapping_path proguard_sourcefile_suffix = "$android_channel-$_version_code" - } else { + has_apk_under_test = defined(invoker.apk_under_test) + } else if (_min_sdk_version >= default_min_sdk_version) { + # Enable dex merging only when min_sdk_version is >= what the library + # .dex files were created with. 
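+        # Put differently: library .dex files are built against
+        # default_min_sdk_version, so they can be merged as-is only into an
+        # APK whose own min_sdk_version is at least that value; the else
+        # branch below instead re-dexes from the class .jar files
+        # (deps_info:device_classpath).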
input_dex_filearg = "@FileArg(${_rebased_build_config}:final_dex:all_dex_files)" if (_enable_main_dex_list) { - main_dex_list_input_classes_filearg = "@FileArg(${_rebased_build_config}:deps_info:java_runtime_classpath)" + main_dex_list_input_classes_filearg = "@FileArg(${_rebased_build_config}:deps_info:java_runtime_classpath_extended)" } + } else { + input_classes_filearg = + "@FileArg($_rebased_build_config:deps_info:device_classpath)" } if (_is_static_library_provider) { @@ -3137,6 +3143,7 @@ if (enable_java_templates) { package_apk("$_create_apk_target") { forward_variables_from(invoker, [ + "expected_libs_and_assets", "native_lib_placeholders", "secondary_abi_loadable_modules", "secondary_native_lib_placeholders", @@ -3146,10 +3153,9 @@ if (enable_java_templates) { "library_renames", ]) - if (defined(invoker.verify_native_libs_and_assets) && - invoker.verify_native_libs_and_assets) { - verify_native_libs_and_assets_target_name = _template_name + if (defined(expected_libs_and_assets)) { build_config_dep = ":$_build_config_target" + top_target_name = _template_name } build_config = _build_config @@ -3161,8 +3167,7 @@ if (enable_java_templates) { deps = _deps + [ ":$_build_config_target" ] - # TODO(https://crbug.com/1056751): Add support for proguarding jdk libs. - if (enable_jdk_library_desugaring) { + if (!_proguard_enabled && enable_jdk_library_desugaring) { _all_jdk_libs = "//build/android:all_jdk_libs" deps += [ _all_jdk_libs ] jdk_libs_dex = get_label_info(_all_jdk_libs, "target_out_dir") + @@ -3170,16 +3175,14 @@ if (enable_java_templates) { } if (_incremental_apk) { - _dex_target = - "//build/android/incremental_install:bootstrap_java__dex" + _dex_target = "//build/android/incremental_install:apk_dex" deps += [ ":${_incremental_compile_resources_target_name}", _dex_target, ] - dex_path = - get_label_info(_dex_target, "target_out_dir") + "/bootstrap.dex" + dex_path = get_label_info(_dex_target, "target_out_dir") + "/apk.dex" # All native libraries are side-loaded, so use a placeholder to force # the proper bitness for the app. @@ -3353,17 +3356,22 @@ if (enable_java_templates) { android_lint(_android_lint_target) { forward_variables_from(invoker, [ + "lint_suppressions_dep", + "lint_suppressions_file", "manifest_package", "min_sdk_version", ]) build_config = _build_config - deps = [ - ":$_build_config_target", - ":$_java_target", - ] + build_config_dep = ":$_build_config_target" + deps = [ ":$_java_target" ] } } else { - not_needed(invoker, [ "manifest_package" ]) + not_needed(invoker, + [ + "manifest_package", + "lint_suppressions_dep", + "lint_suppressions_file", + ]) } group(target_name) { @@ -3382,14 +3390,16 @@ if (enable_java_templates) { data_deps = [] } + # Include unstripped native libraries so tests can symbolize stacks. + data_deps += _all_native_libs_deps + if (_enable_lint) { data_deps += [ ":$_android_lint_target" ] } if (_incremental_apk) { # device/commands is used by the installer script to push files via .zip. 
- data_deps += - [ "//build/android/pylib/device/commands" ] + _native_libs_deps + data_deps += [ "//build/android/pylib/device/commands" ] } if (_uses_static_library) { data_deps += [ invoker.static_library_provider ] @@ -3465,6 +3475,8 @@ if (enable_java_templates) { "keystore_name", "keystore_password", "keystore_path", + "lint_suppressions_dep", + "lint_suppressions_file", "load_library_from_apk", "loadable_modules", "manifest_package", @@ -3484,7 +3496,6 @@ if (enable_java_templates) { "product_version_resources_dep", "proguard_configs", "proguard_enabled", - "proguard_jar_path", "r_java_root_package_name", "resource_exclusion_exceptions", "resource_exclusion_regex", @@ -3495,7 +3506,6 @@ if (enable_java_templates) { "secondary_abi_loadable_modules", "secondary_abi_shared_libraries", "secondary_native_lib_placeholders", - "skip_jetify", "shared_libraries", "shared_resources", "shared_resources_allowlist_locales", @@ -3515,9 +3525,9 @@ if (enable_java_templates) { "library_always_compress", "library_renames", "use_chromium_linker", - "verify_manifest", - "expected_manifest_base_expectation", - "verify_native_libs_and_assets", + "expected_android_manifest", + "expected_android_manifest_base", + "expected_libs_and_assets", "version_code", "version_name", ]) @@ -3615,7 +3625,6 @@ if (enable_java_templates) { "product_version_resources_dep", "proguard_configs", "proguard_enabled", - "proguard_jar_path", "resource_exclusion_exceptions", "resource_exclusion_regex", "resource_ids_provider_dep", @@ -3641,8 +3650,8 @@ if (enable_java_templates) { "library_renames", "use_chromium_linker", "use_modern_linker", - "verify_manifest", - "expected_manifest_base_expectation", + "expected_android_manifest", + "expected_android_manifest_base", "version_code", "version_name", ]) @@ -4045,12 +4054,13 @@ if (enable_java_templates) { # Use the regular proto library to generate lite protos. _protoc_bin = "//third_party/android_protoc/protoc" _proto_runtime = "//third_party/android_deps:com_google_protobuf_protobuf_javalite_java" - _protoc_javalite_plugin_dir = "//third_party/protoc_javalite/" } _proto_path = invoker.proto_path _template_name = target_name action_with_pydeps("${_template_name}__protoc_java") { + # The suffix "__protoc_java.srcjar" is used by SuperSize to identify + # protobuf symbols. _srcjar_path = "$target_gen_dir/$target_name.srcjar" script = "//build/protoc_java.py" @@ -4087,11 +4097,6 @@ if (enable_java_templates) { if (_generate_nano) { args += [ "--nano" ] - } else { - args += [ - "--protoc-javalite-plugin-dir", - rebase_path(_protoc_javalite_plugin_dir, root_build_dir), - ] } } @@ -4148,7 +4153,13 @@ if (enable_java_templates) { _info_path = invoker.info_path } _output_path = "${target_out_dir}/${target_name}" - _unpack_target_name = "${target_name}__unpack_aar" + _target_name_without_java_or_junit = + string_replace(string_replace(target_name, "_java", ""), "_junit", "") + + # This unpack target is a python action, not a valid java target. Since the + # java targets below depend on it, its name must not match the java patterns + # in internal_rules.gni. 
+ _unpack_target_name = "${_target_name_without_java_or_junit}__unpack_aar" _ignore_aidl = defined(invoker.ignore_aidl) && invoker.ignore_aidl _ignore_assets = defined(invoker.ignore_assets) && invoker.ignore_assets _ignore_manifest = @@ -4237,8 +4248,14 @@ if (enable_java_templates) { outputs += get_path_info(rebase_path(_scanned_files.subjars, "", _output_path), "abspath") - if (!_ignore_proguard_configs && _scanned_files.has_proguard_flags) { - outputs += [ "${_output_path}/proguard.txt" ] + if (!_ignore_proguard_configs) { + if (_scanned_files.has_proguard_flags) { + outputs += [ "${_output_path}/proguard.txt" ] + } + if (defined(_scanned_files.has_proguard_check_flags) && + _scanned_files.has_proguard_check_flags) { + outputs += [ "${_output_path}/proguard-checks.txt" ] + } } if (_extract_native_libraries && _scanned_files.has_native_libraries) { @@ -4255,7 +4272,7 @@ if (enable_java_templates) { # Create the android_resources target for resources. if (_has_unignored_resources || (!_scanned_files.is_manifest_empty && !_ignore_manifest)) { - _res_target_name = "${target_name}__res" + _res_target_name = "${target_name}__resources" android_resources(_res_target_name) { forward_variables_from(invoker, [ @@ -4285,38 +4302,37 @@ if (enable_java_templates) { not_needed(invoker, [ "strip_drawables" ]) } - _java_library_vars = [ - "enable_bytecode_checks", - "enable_bytecode_rewriter", - "missing_classes_allowlist", - "enable_jetify", - "jar_excluded_patterns", - "jar_included_patterns", - "requires_android", - "skip_jetify", - "testonly", - ] + # Create android_java_prebuilt target for classes.jar. + if (_scanned_files.has_classes_jar) { + _java_library_vars = [ + "enable_bytecode_checks", + "enable_jetify", + "jar_excluded_patterns", + "jar_included_patterns", + "missing_classes_allowlist", + "requires_android", + "testonly", + ] - # Create android_java_prebuilt target for extra jars within jars/. - _subjar_targets = [] - foreach(_tuple, _scanned_files.subjar_tuples) { - _current_target = "${target_name}__subjar_${_tuple[0]}" - _subjar_targets += [ ":$_current_target" ] - java_prebuilt(_current_target) { - forward_variables_from(invoker, _java_library_vars) - deps = [ ":$_unpack_target_name" ] - if (!defined(requires_android)) { - requires_android = true + # Create android_java_prebuilt target for extra jars within jars/. + _subjar_targets = [] + foreach(_tuple, _scanned_files.subjar_tuples) { + _current_target = "${target_name}__subjar_${_tuple[0]}" + _subjar_targets += [ ":$_current_target" ] + java_prebuilt(_current_target) { + forward_variables_from(invoker, _java_library_vars) + deps = [ ":$_unpack_target_name" ] + if (!defined(requires_android)) { + requires_android = true + } + supports_android = true + jar_path = "$_output_path/${_tuple[1]}" + _base_output_name = get_path_info(jar_path, "name") + output_name = "${invoker.target_name}-$_base_output_name" + public_target_label = invoker.target_name } - supports_android = true - jar_path = "$_output_path/${_tuple[1]}" - _base_output_name = get_path_info(jar_path, "name") - output_name = "${invoker.target_name}-$_base_output_name" } - } - # Create android_java_prebuilt target for classes.jar. 
- if (_scanned_files.has_classes_jar) { _jar_target_name = "${target_name}__classes" java_prebuilt(_jar_target_name) { forward_variables_from(invoker, _java_library_vars) @@ -4340,12 +4356,19 @@ if (enable_java_templates) { jar_path = "$_output_path/classes.jar" output_name = invoker.target_name - if (!_ignore_proguard_configs && _scanned_files.has_proguard_flags) { + if (!_ignore_proguard_configs) { if (!defined(proguard_configs)) { proguard_configs = [] } - proguard_configs += [ "$_output_path/proguard.txt" ] + if (_scanned_files.has_proguard_flags) { + proguard_configs += [ "$_output_path/proguard.txt" ] + } + if (defined(_scanned_files.has_proguard_check_flags) && + _scanned_files.has_proguard_check_flags) { + proguard_configs += [ "$_output_path/proguard-checks.txt" ] + } } + public_target_label = invoker.target_name } } @@ -4416,9 +4439,6 @@ if (enable_java_templates) { # bundle. Default is to enable this only for release builds. Note that # this will always perform synchronized proguarding. # - # proguard_jar_path: Optional. Path to custom proguard jar used for - # proguarding. - # # enable_multidex: Optional. Enable multidexing of optimized modules jars # when using synchronized proguarding. Only applies to base module. # @@ -4439,16 +4459,16 @@ if (enable_java_templates) { # avoid library duplication. Effectively, the static library will be # treated as the parent of the base module. # - # verify_proguard_flags: Enables verification of expected merged proguard - # flags based on a golden file. + # expected_proguard_config: Checks that the merged set of proguard flags + # matches the given config. # # version_code: Optional. Version code of the target. # # is_multi_abi: If true will add a library placeholder for the missing ABI # if either the primary or the secondary ABI has no native libraries set. # - # verify_native_libs_and_assets: (optional): If true, will verify the list - # of included native libraries and assets is consistent with an + # expected_libs_and_assets: (optional): Verify the list + # of included native libraries and assets is consistent with the given # expectation file. 
# # default_modules_for_testing: (optional): A list of DFM that the wrapper @@ -4669,28 +4689,22 @@ if (enable_java_templates) { if (defined(invoker.min_sdk_version)) { not_needed(invoker, [ "min_sdk_version" ]) } - if (defined(proguard_jar_path)) { - not_needed(invoker, [ "proguard_jar_path" ]) - } } else { - _verify_proguard_flags = defined(invoker.verify_proguard_flags) && - invoker.verify_proguard_flags dex(_dex_target) { forward_variables_from(invoker, [ - "proguard_jar_path", + "expected_proguard_config", "min_sdk_version", ]) + if (defined(expected_proguard_config)) { + top_target_name = _target_name + } enable_multidex = _enable_multidex proguard_enabled = true proguard_mapping_path = _proguard_mapping_path proguard_sourcefile_suffix = "$android_channel-$_version_code" build_config = _build_config - if (_verify_proguard_flags) { - verify_proguard_flags_target_name = _target_name - } - deps = _module_java_targets + [ ":$_build_config_target" ] modules = _modules } @@ -4725,6 +4739,7 @@ if (enable_java_templates) { "is_multi_abi", "min_sdk_version", "uncompress_dex", + "proguard_enabled", ]) module_name = _module.name build_config = _module_build_config @@ -4732,9 +4747,9 @@ if (enable_java_templates) { native_libraries_config = _native_libraries_config if (module_name == "base" && - defined(invoker.verify_native_libs_and_assets) && - invoker.verify_native_libs_and_assets) { - verify_native_libs_and_assets_target_name = "${_target_name}__base" + defined(invoker.expected_libs_and_assets)) { + top_target_name = _target_name + expected_libs_and_assets = invoker.expected_libs_and_assets build_config_target = _module_build_config_target native_libraries_config_target = ":$_native_libraries_config_target" if (defined(android_app_secondary_abi)) { @@ -4968,15 +4983,22 @@ if (enable_java_templates) { android_lint(_android_lint_target) { forward_variables_from(invoker, [ + "lint_suppressions_dep", + "lint_suppressions_file", "manifest_package", "min_sdk_version", - "lint_suppressions_file", ]) build_config = _build_config - deps = _module_java_targets + [ ":$_build_config_target" ] + build_config_dep = ":$_build_config_target" + deps = _module_java_targets } } else { - not_needed(invoker, [ "manifest_package" ]) + not_needed(invoker, + [ + "manifest_package", + "lint_suppressions_dep", + "lint_suppressions_file", + ]) } group(_target_name) { @@ -5085,17 +5107,11 @@ template("generate_ui_locale_resources") { _output_zip = "${root_out_dir}/resource_zips/${_rebased_output_zip_path}/" + "${target_name}.zip" - _locales = invoker.ui_locales - _depfile = "$target_gen_dir/$target_name.d" - - action(_generating_target_name) { + action_with_pydeps(_generating_target_name) { script = "//build/android/gyp/create_ui_locale_resources.py" - depfile = _depfile outputs = [ _output_zip ] args = [ - "--locale-list=$_locales", - "--depfile", - rebase_path(_depfile, root_build_dir), + "--locale-list=${invoker.ui_locales}", "--output-zip", rebase_path(_output_zip, root_build_dir), ] diff --git a/chromium/build/config/c++/BUILD.gn b/chromium/build/config/c++/BUILD.gn index 9787a0b982d..9cd944dce32 100644 --- a/chromium/build/config/c++/BUILD.gn +++ b/chromium/build/config/c++/BUILD.gn @@ -39,11 +39,12 @@ config("runtime_library") { defines += [ "_LIBCPP_ABI_UNSTABLE" ] } - if (is_component_build) { - # In component builds, symbols from libc++.so are exported for all DSOs to - # use. If the system libc++ gets loaded (indirectly through a system - # library), then it will conflict with our libc++.so. 
Add a custom ABI - # version if we're building with _LIBCPP_ABI_UNSTABLE to avoid conflicts. + if (libcxx_is_shared) { + # When libcxx_is_shared is true, symbols from libc++.so are exported for + # all DSOs to use. If the system libc++ gets loaded (indirectly through + # a system library), then it will conflict with our libc++.so. Add a + # custom ABI version if we're building with _LIBCPP_ABI_UNSTABLE to avoid + # conflicts. # # Windows doesn't need to set _LIBCPP_ABI_VERSION since there's no system # C++ library we could conflict with. diff --git a/chromium/build/config/c++/c++.gni b/chromium/build/config/c++/c++.gni index 5ced4596fac..f4abd1d8ae2 100644 --- a/chromium/build/config/c++/c++.gni +++ b/chromium/build/config/c++/c++.gni @@ -58,7 +58,7 @@ declare_args() { # libc++abi needs to be exported from executables to be picked up by shared # libraries on certain instrumented builds. export_libcxxabi_from_executables = - use_custom_libcxx && !is_win && !is_component_build && + use_custom_libcxx && !is_ios && !is_win && !is_component_build && (is_asan || is_ubsan_vptr) # On Android, many shared libraries get loaded from the context of a JRE. In diff --git a/chromium/build/config/chromecast_build.gni b/chromium/build/config/chromecast_build.gni index 97304c3a4d2..a7552b939e9 100644 --- a/chromium/build/config/chromecast_build.gni +++ b/chromium/build/config/chromecast_build.gni @@ -25,6 +25,9 @@ declare_args() { # If non empty, rpath of executables is set to this. # If empty, default value is used. target_rpath = "" + + # Set true to enable modular_updater. + enable_modular_updater = false } # Note(slan): This arg depends on the value of is_chromecast, and thus must be diff --git a/chromium/build/config/chromeos/rules.gni b/chromium/build/config/chromeos/rules.gni index d0a2f19d47c..3b029a5d32d 100644 --- a/chromium/build/config/chromeos/rules.gni +++ b/chromium/build/config/chromeos/rules.gni @@ -7,11 +7,59 @@ import("//build/config/chromeos/args.gni") assert(is_chromeos) -declare_args() { - # Determines if we're running tests on VMs or on devices. - # TODO(crbug.com/866062): Is there a better way to do this? - cros_is_vm = cros_board == "amd64-generic" || cros_board == "betty" || - cros_board == "betty-pi-arc" +# Determine the real paths for various items in the SDK, which may be used +# in the 'generate_runner_script' template below. We do so outside the template +# to confine exec_script to a single invocation. +cros_is_vm = false +if (is_chromeos_device && cros_sdk_version != "") { + _cache_path_prefix = + "//build/cros_cache/chrome-sdk/symlinks/${cros_board}+${cros_sdk_version}" + + # It's difficult to determine if any given board supports QEMU images. So + # check if there's one present in the expected location. + _qemu_image_path = "${_cache_path_prefix}+chromiumos_qemu_image.tar.xz" + + # TODO(crbug.com/937821): Check for the board in a gclient list var to + # determine if we expect a VM image. + cros_is_vm = exec_script("//build/dir_exists.py", + [ rebase_path(_qemu_image_path) ], + "string") == "True" + + _symlinks = [ + # Tast harness & test data. + rebase_path("${_cache_path_prefix}+chromeos-base/tast-cmd"), + rebase_path("${_cache_path_prefix}+chromeos-base/tast-remote-tests-cros"), + + # Binutils (and other toolchain tools) used to deploy Chrome to the device. + rebase_path( + "${_cache_path_prefix}+environment_chromeos-base_chromeos-chrome.tar.xz"), + rebase_path("${_cache_path_prefix}+target_toolchain"), + ] + if (cros_is_vm) { + # VM-related tools. 
+ _symlinks += [ + rebase_path("${_cache_path_prefix}+sys-firmware/seabios"), + rebase_path(_qemu_image_path), + rebase_path("${_cache_path_prefix}+app-emulation/qemu"), + ] + } + _symlink_targets = + exec_script("//build/get_symlink_targets.py", _symlinks, "list lines") + tast_sdk_items = [ + _symlink_targets[0], + _symlink_targets[1], + ] + toolchain_sdk_items = [ + _symlink_targets[2], + _symlink_targets[3], + ] + if (cros_is_vm) { + vm_sdk_items = [ + _symlink_targets[4], + _symlink_targets[5], + _symlink_targets[6], + ] + } } # Creates a script at $generated_script that can be used to launch a cros VM @@ -44,12 +92,6 @@ template("generate_runner_script") { deploy_chrome = false } - # These are the only 2 conditions when ${_cache_path_prefix} is used, so - # initialize it in a conditional. - if (deploy_chrome || cros_is_vm) { - _cache_path_prefix = "//build/cros_cache/chrome-sdk/tarballs/" - } - assert(defined(generated_script), "Must specify where to place generated test launcher script via " + "'generated_script'") @@ -90,24 +132,6 @@ template("generate_runner_script") { "//third_party/chromite/", ] - # Add the VM/QEMU-launching bits if needed. - if (cros_is_vm) { - assert(defined(_cache_path_prefix)) - _vm_image_path = "${_cache_path_prefix}/chromiumos_qemu_image.tar.xz/" - _qemu_dir = "${_cache_path_prefix}/app-emulation/" - _firmware_dir = "${_cache_path_prefix}/sys-firmware/" - data += [ - _firmware_dir, - _vm_image_path, - _qemu_dir, - ] - } - if (is_tast) { - data += [ - "${_cache_path_prefix}/chromeos-base/tast-cmd/", - "${_cache_path_prefix}/chromeos-base/tast-remote-tests-cros/", - ] - } if (defined(invoker.data)) { deps += invoker.data } @@ -134,15 +158,24 @@ template("generate_runner_script") { if (deploy_chrome) { args += [ "--deploy-chrome" ] + } - # To deploy chrome to the VM, it needs to be stripped down to fit into - # the VM. This is done by using binutils in the toolchain. So add the - # toolchain to the data. - assert(defined(_cache_path_prefix)) - data += [ - "${_cache_path_prefix}/environment_chromeos-base_chromeos-chrome.tar.xz", - "${_cache_path_prefix}/target_toolchain/", - ] + # If we're in the cros chrome-sdk (and not the raw ebuild), the test will + # need some additional runtime data located in the SDK cache. + if (cros_sdk_version != "") { + # Add the VM/QEMU-launching bits if needed. + if (cros_is_vm) { + data += vm_sdk_items + } + if (is_tast) { + data += tast_sdk_items + } + if (deploy_chrome) { + # To deploy chrome to the VM, it needs to be stripped down to fit into + # the VM. This is done by using binutils in the toolchain. So add the + # toolchain to the data. + data += toolchain_sdk_items + } } # When --test-exe is specified, test_runner.py will push the exe to the VM @@ -191,8 +224,7 @@ template("tast_test") { # The following expression filters out all non-critical tests. 
See the link # below for more details: # https://chromium.googlesource.com/chromiumos/platform/tast/+/master/docs/test_attributes.md - tast_attr_expr = - "!disabled && (\"group:mainline\" || !\"group:*\") && \"dep:chrome\"" + tast_attr_expr = "\"group:mainline\" && \"dep:chrome\"" if (defined(enable_tast_informational_tests) && enable_tast_informational_tests) { diff --git a/chromium/build/config/compiler/BUILD.gn b/chromium/build/config/compiler/BUILD.gn index 01d97a3605f..6e219b3994e 100644 --- a/chromium/build/config/compiler/BUILD.gn +++ b/chromium/build/config/compiler/BUILD.gn @@ -121,9 +121,6 @@ declare_args() { thin_lto_enable_optimizations = (is_chromeos || is_android || is_win) && is_official_build - # By default only the binaries in official builds get build IDs. - force_local_build_id = false - # Initialize all local variables with a pattern. This flag will fill # uninitialized floating-point types (and 32-bit pointers) with 0xFF and the # rest with 0xAA. This makes behavior of uninitialized memory bugs consistent, @@ -143,6 +140,15 @@ declare_args() { # reduce TLB misses which gives performance improvement on cpu usage. # The gold linker by default has text section splitting enabled. use_text_section_splitting = false + + # Token limits may not be accurate for build configs not covered by the CQ, + # so only enable them by default for mainstream build configs. + enable_wmax_tokens = + !is_official_build && + (is_mac || (is_linux && !is_chromeos && target_cpu == "x64") || + (is_win && target_cpu == "x86") || (is_win && target_cpu == "x64") || + (is_android && target_cpu == "arm") || + (is_android && target_cpu == "arm64")) } declare_args() { @@ -323,14 +329,16 @@ config("compiler") { } } - if (is_official_build || force_local_build_id) { - # Explicitly pass --build-id to ld. Compilers used to always pass this - # implicitly but don't any more (in particular clang when built without - # ENABLE_LINKER_BUILD_ID=ON). The crash infrastructure does need a build - # id, so explicitly enable it in official builds. It's not needed in - # unofficial builds and computing it does slow down the link, so go with - # faster links in unofficial builds. + # Explicitly pass --build-id to ld. Compilers used to always pass this + # implicitly but don't any more (in particular clang when built without + # ENABLE_LINKER_BUILD_ID=ON). + if (is_official_build) { + # The sha1 build id has lower risk of collision but is more expensive to + # compute, so only use it in the official build to avoid slowing down + # links. ldflags += [ "-Wl,--build-id=sha1" ] + } else if (current_os != "aix") { + ldflags += [ "-Wl,--build-id" ] } if (!is_android) { @@ -543,6 +551,23 @@ config("compiler") { } } + # C++17 removes trigraph support, so preemptively disable trigraphs. This is + # especially useful given the collision with ecmascript's logical assignment + # operators: https://github.com/tc39/proposal-logical-assignment + if (is_clang) { + # clang-cl disables trigraphs by default + if (!is_win) { + # The gnu variants of C++11 and C++14 already disable trigraph support, + # but when building with clang, we use -std=c++11 / -std=c++14, which + # enables trigraph support: override that here. + cflags_cc += [ "-fno-trigraphs" ] + } + + # Don't warn that trigraphs are ignored, since trigraphs are disabled + # anyway. + cflags_cc += [ "-Wno-trigraphs" ] + } + if (is_mac) { # The system libc++ on Mac doesn't have aligned allocation in C++17. 
defines += [ "_LIBCPP_HAS_NO_ALIGNED_ALLOCATION" ] @@ -1468,14 +1493,6 @@ config("default_warnings") { cflags += [ "-Wno-nonportable-include-path" ] } - if (target_os == "chromeos") { - # Disable clang warnings of "-Wmax-tokens" because CQ doesn't cover all CrOS use - # cases, so it's too late to fix when the error goes to CrOS side. - # Also nacl toolchain doesn't recognize the flag, so avoid passing to nacl clang - # See crbug.com/1079053 for more details. - cflags += [ "-Wno-max-tokens" ] - } - if (current_toolchain == host_toolchain || !use_xcode_clang) { # Flags NaCl (Clang 3.7) and Xcode 9.2 (Clang clang-900.0.39.2) do not # recognize. @@ -1504,6 +1521,13 @@ config("default_warnings") { # TODO(https://crbug.com/995993): Clean up and enable. "-Wno-implicit-fallthrough", ] + + if (enable_wmax_tokens) { + cflags += [ "-Wmax-tokens" ] + } else { + # TODO(https://crbug.com/1049569): Remove after Clang 87b235db. + cflags += [ "-Wno-max-tokens" ] + } } } } @@ -1970,6 +1994,14 @@ config("optimize") { } else { cflags = [ "-Os" ] + common_optimize_on_cflags } + } else if (is_chromeos) { + # TODO(gbiv): This is partially favoring size over speed. CrOS exclusively + # uses clang, and -Os in clang is more of a size-conscious -O2 than "size at + # any cost" (AKA -Oz). It'd be nice to: + # - Make `optimize_for_size` apply to all platforms where we're optimizing + # for size by default (so, also Windows) + # - Investigate -Oz here, maybe just for ARM? + cflags = [ "-Os" ] + common_optimize_on_cflags } else { cflags = [ "-O2" ] + common_optimize_on_cflags } @@ -1998,6 +2030,12 @@ config("no_optimize") { } else { cflags = [ "-Os" ] + common_optimize_on_cflags } + + if (!is_component_build) { + # Required for library partitions. Without this all symbols just end up + # in the base partition. + ldflags = [ "-Wl,--gc-sections" ] + } } else if (is_fuchsia) { # On Fuchsia, we optimize for size here to reduce the size of debug build # packages so they can be run in a KVM. See crbug.com/910243 for details. @@ -2249,7 +2287,8 @@ config("symbols") { cflags += [ "-g2" ] } - if (is_clang && !is_nacl && !use_xcode_clang) { + # TODO(https://crbug.com/1050118): Investigate missing debug info on mac. + if (is_clang && !is_nacl && !use_xcode_clang && !is_mac && !is_ios) { cflags += [ "-Xclang", "-debug-info-kind=constructor", diff --git a/chromium/build/config/compiler/compiler.gni b/chromium/build/config/compiler/compiler.gni index bd21ffe5dc3..ef8cc5fdfd5 100644 --- a/chromium/build/config/compiler/compiler.gni +++ b/chromium/build/config/compiler/compiler.gni @@ -110,7 +110,13 @@ declare_args() { # Determine whether to enable or disable frame pointers, based on the platform # and build arguments. -if (is_mac || is_ios || is_linux) { +if (is_chromeos) { + # ChromeOS generally prefers frame pointers, to support CWP. + # However, Clang does not currently generate usable frame pointers in ARM + # 32-bit builds (https://bugs.llvm.org/show_bug.cgi?id=18505) so disable them + # there to avoid the unnecessary overhead. + enable_frame_pointers = current_cpu != "arm" +} else if (is_mac || is_ios || is_linux) { enable_frame_pointers = true } else if (is_win) { # 64-bit Windows ABI doesn't support frame pointers. @@ -119,12 +125,6 @@ if (is_mac || is_ios || is_linux) { } else { enable_frame_pointers = true } -} else if (is_chromeos) { - # ChromeOS generally prefers frame pointers, to support CWP. 
- # However, Clang does not currently generate usable frame pointers in ARM - # 32-bit builds (https://bugs.llvm.org/show_bug.cgi?id=18505) so disable them - # there to avoid the unnecessary overhead. - enable_frame_pointers = current_cpu != "arm" } else if (is_android) { enable_frame_pointers = enable_profiling || diff --git a/chromium/build/config/compiler/pgo/BUILD.gn b/chromium/build/config/compiler/pgo/BUILD.gn index 84c941faaf6..20f01f3cafa 100644 --- a/chromium/build/config/compiler/pgo/BUILD.gn +++ b/chromium/build/config/compiler/pgo/BUILD.gn @@ -14,16 +14,12 @@ config("pgo_instrumentation_flags") { # Only add flags when chrome_pgo_phase == 1, so that variables we would use # are not required to be defined when we're not actually using PGO. if (chrome_pgo_phase == 1 && is_clang && !is_nacl && is_a_target_toolchain) { - # TODO(sebmarchand): Add a GN flag that allows setting the PGO profile - # name or find a way to remove use {target_name} in the filename? - # This uses the "%m" specifier to allow concurrent runs of the - # instrumented image. - cflags = [ "-fprofile-instr-generate=%m.profraw" ] + cflags = [ "-fprofile-generate" ] if (!is_win) { # Windows directly calls link.exe instead of the compiler driver when # linking, and embeds the path to the profile runtime library as # dependent library into each object file. - ldflags = [ "-fprofile-instr-generate" ] + ldflags = [ "-fprofile-generate" ] } } } @@ -37,14 +33,22 @@ config("pgo_optimization_flags") { if (chrome_pgo_phase == 2 && is_clang && !is_nacl && is_a_target_toolchain) { _pgo_target = "" + # There are txt files used by //tools/update_pgo_profiles.py to decide which + # profiles to use; add them as inputs so that the analyzer recognizes the + # dependencies. + inputs = [] + if (is_win) { if (target_cpu == "x64") { _pgo_target = "win64" + inputs = [ "//chrome/build/win64.pgo.txt" ] } else { _pgo_target = "win32" + inputs = [ "//chrome/build/win32.pgo.txt" ] } } else if (is_mac) { _pgo_target = "mac" + inputs = [ "//chrome/build/mac.pgo.txt" ] } if (pgo_data_path == "" && _pgo_target != "") { @@ -66,7 +70,14 @@ config("pgo_optimization_flags") { # and at least some profile data always ends up being considered # out of date, so make sure we don't error for those cases. "-Wno-profile-instr-unprofiled", - "-Wno-error=profile-instr-out-of-date", + "-Wno-profile-instr-out-of-date", + + # Some hashing conflict results in a lot of warnings like this when doing + # a PGO build: + # warning: foo.cc: Function control flow change detected (hash mismatch) + # [-Wbackend-plugin] + # See https://crbug.com/978401 + "-Wno-backend-plugin", ] } } diff --git a/chromium/build/config/compiler/pgo/pgo.gni b/chromium/build/config/compiler/pgo/pgo.gni index a0cbb6acc05..f616b88a052 100644 --- a/chromium/build/config/compiler/pgo/pgo.gni +++ b/chromium/build/config/compiler/pgo/pgo.gni @@ -9,6 +9,9 @@ declare_args() { # 1 : Used during the PGI (instrumentation) phase. # 2 : Used during the PGO (optimization) phase. chrome_pgo_phase = 0 + if (is_official_build && (is_win || is_mac)) { + chrome_pgo_phase = 2 + } # When using chrome_pgo_phase = 2, read profile data from this path.
pgo_data_path = "" diff --git a/chromium/build/config/fuchsia/BUILD.gn b/chromium/build/config/fuchsia/BUILD.gn index 7ee1923d946..427f364a6b4 100644 --- a/chromium/build/config/fuchsia/BUILD.gn +++ b/chromium/build/config/fuchsia/BUILD.gn @@ -13,6 +13,13 @@ config("compiler") { # but on other platforms it's much higher, so a variety of code assumes more # will be available. Raise to 8M which matches e.g. macOS. ldflags = [ "-Wl,-z,stack-size=0x800000" ] + + if (target_cpu != "arm64") { + # TODO(https://crbug.com/1086134): RELR packing of dynamic relocations is + # enabled by default in the Fuchsia toolchain, but is not fully supported + # in Chromium builds except on ARM64. + ldflags += [ "-Wl,--pack-dyn-relocs=none" ] + } } # Settings for executables. diff --git a/chromium/build/config/fuchsia/generate_runner_scripts.gni b/chromium/build/config/fuchsia/generate_runner_scripts.gni index 2f57fc0c607..a0bae5eee6b 100644 --- a/chromium/build/config/fuchsia/generate_runner_scripts.gni +++ b/chromium/build/config/fuchsia/generate_runner_scripts.gni @@ -26,12 +26,9 @@ declare_args() { # package_name_override: Specifies the name of the generated package, if its # name is different than the |package| target name. This value must match # package_name_override in the |package| target. -# archive_name_override: Specifies the filename of the archive file (FAR) -# which contains the package's data. -# If left unset, defaults to |package_name_override|. -# Defaults to the package's name. -# package_deps: An array of [package, archive_basename] pairs that specify -# additional dependency packages to be installed prior to execution. +# package_deps: An array of [package, package_name_override] array pairs +# which specify additional dependency packages to be installed +# prior to execution. # runner_script: The runner script implementation to use, relative to # "build/fuchsia". Defaults to "test_runner.py". # install_only: If true, executing the script will only install the package @@ -39,29 +36,19 @@ declare_args() { template("fuchsia_package_runner") { forward_variables_from(invoker, [ "runner_script" ]) - # Compute the package name. if (defined(invoker.package_name_override)) { - _package_name = invoker.package_name_override + _pkg_shortname = invoker.package_name_override } else { - _package_name = get_label_info(invoker.package, "name") - } - - # Compute the archive (FAR) filename. - if (defined(invoker.archive_name_override)) { - _archive_shortname = invoker.archive_name_override - } else { - _archive_shortname = _package_name + _pkg_shortname = get_label_info(invoker.package, "name") } _pkg_dir = "$root_out_dir/gen/" + get_label_info(invoker.package, "dir") + - "/" + _archive_shortname - _manifest_path = "$_pkg_dir/${_archive_shortname}.archive_manifest" - _package_path = "$_pkg_dir/${_archive_shortname}.far" + "/" + _pkg_shortname + _package_path = "$_pkg_dir/${_pkg_shortname}.far" - generated_run_pkg_script_path = - "$root_build_dir/bin/run_${_archive_shortname}" + generated_run_pkg_script_path = "$root_build_dir/bin/run_${_pkg_shortname}" generated_install_pkg_script_path = - "$root_build_dir/bin/install_${_archive_shortname}" + "$root_build_dir/bin/install_$_pkg_shortname" _generate_runner_target = "${target_name}__generate_runner" _generate_installer_target = "${target_name}__generate_installer" @@ -100,7 +87,6 @@ template("fuchsia_package_runner") { # Declares the files that are needed for test execution on the # swarming test client. 
data = [ - _manifest_path, "//build/fuchsia/", "//build/util/lib/", "//third_party/fuchsia-sdk/sdk/.build-id/", @@ -167,7 +153,7 @@ template("fuchsia_package_runner") { "--target-cpu", target_cpu, "--package-name", - _package_name, + _pkg_shortname, ] if (defined(invoker.use_test_server) && invoker.use_test_server) { diff --git a/chromium/build/config/fuchsia/gfx_tests.cmx b/chromium/build/config/fuchsia/gfx_tests.cmx index 5e51f421120..081fc0f5482 100644 --- a/chromium/build/config/fuchsia/gfx_tests.cmx +++ b/chromium/build/config/fuchsia/gfx_tests.cmx @@ -11,15 +11,20 @@ "zero" ], "services": [ + "fuchsia.accessibility.semantics.SemanticsManager", + "fuchsia.device.NameProvider", "fuchsia.fonts.Provider", "fuchsia.intl.PropertyProvider", "fuchsia.logger.Log", "fuchsia.logger.LogSink", + "fuchsia.memorypressure.Provider", "fuchsia.process.Launcher", "fuchsia.sys.Environment", "fuchsia.sys.Loader", "fuchsia.sysmem.Allocator", "fuchsia.tracing.provider.Registry", + "fuchsia.ui.input.ImeService", + "fuchsia.ui.input.ImeVisibilityService", "fuchsia.ui.policy.Presenter", "fuchsia.ui.scenic.Scenic", "fuchsia.vulkan.loader.Loader" diff --git a/chromium/build/config/fuchsia/package.gni b/chromium/build/config/fuchsia/package.gni index e7bca251c5f..51ebad637b1 100644 --- a/chromium/build/config/fuchsia/package.gni +++ b/chromium/build/config/fuchsia/package.gni @@ -13,9 +13,6 @@ import("//third_party/fuchsia-sdk/sdk/build/package.gni") # Parameters are: # package_name_override: Specifies the name of the package to generate, # if different than |target_name|. -# archive_filename_override: Specifies the filename of the generated FAR. -# If left unset, defaults to |package_name_override|. -# Defaults to the target name. # binary: The executable target which should be launched. # manifest: A path to the manifest that will be used. # "testonly" targets default to using @@ -107,27 +104,7 @@ template("cr_fuchsia_package") { } } - # Generate a Fuchsia ARchive (FAR) of the requested name. - if (defined(invoker.archive_name_override)) { - _archive_name = invoker.archive_name_override - } else { - _archive_name = _package_name - } - - if (_archive_name != _package_name) { - _archive_target = target_name + "__cr-archive" - - copy(target_name) { - deps = [ ":${_archive_target}" ] - _pkg_out_dir = "${target_gen_dir}/${_package_name}" - sources = [ "${_pkg_out_dir}/${_package_name}.far" ] - outputs = [ "${_pkg_out_dir}/${_archive_name}.far" ] - } - } else { - _archive_target = target_name - } - - fuchsia_package(_archive_target) { + fuchsia_package(target_name) { forward_variables_from(invoker, [ "testonly" ]) package_name = _package_name if (defined(invoker.excluded_files)) { diff --git a/chromium/build/config/ios/ios_sdk.gni b/chromium/build/config/ios/ios_sdk.gni index f3aaf810644..917a6902ae1 100644 --- a/chromium/build/config/ios/ios_sdk.gni +++ b/chromium/build/config/ios/ios_sdk.gni @@ -27,12 +27,18 @@ declare_args() { # not work (see build/BUILDCONFIG.gn for pattern that would cause issue). ios_sdk_developer_dir = "" - # The iOS Code signing identity to use - # TODO(GYP), TODO(sdfresne): Consider having a separate - # ios_enable_code_signing_flag=<bool> flag to make the invocation clearer. + # Control whether codesigning is enabled (ignored for simulator builds). ios_enable_code_signing = true + + # Explicitly select the identity to use for codesigning. If defined, must + # be set to a non-empty string that will be passed to codesigning.
Can be + # left unspecified if ios_code_signing_identity_description is used instead. ios_code_signing_identity = "" - ios_code_signing_identity_description = "iPhone Developer" + + # Pattern used to select the identity to use for codesigning. If defined, + # must be a substring of the description of exactly one of the identities + # reported by `security find-identity -v -p codesigning`. + ios_code_signing_identity_description = "Apple Development" # Prefix for CFBundleIdentifier property of iOS bundles (corresponds to the # "Organization Identifier" in Xcode). Code signing will fail if no mobile @@ -68,6 +74,18 @@ assert(custom_toolchain == "" || additional_target_cpus == [], use_ios_simulator = current_cpu == "x86" || current_cpu == "x64" +# If codesigning is enabled, you must configure either a codesigning identity +# or a filter to automatically select the codesigning identity. +if (!use_ios_simulator && ios_enable_code_signing) { + assert(ios_code_signing_identity == "" || + ios_code_signing_identity_description == "", + "You should either specify the precise identity to use with " + + "ios_code_signing_identity or let the code select an identity " + + "automatically (via find_signing_identity.py which uses the " + + "variable ios_code_signing_identity_description to set the " + + "pattern to match the identity to use).") +} + # Initialize additional_toolchains from additional_target_cpus. Assert here # that the list does not contain $target_cpu nor duplicates as this would # cause weird errors during the build. @@ -135,12 +153,15 @@ if (!use_ios_simulator && ios_enable_code_signing) { # Automatically select a codesigning identity if no identity is configured. # This only applies to device builds as simulator builds are not signed. if (ios_code_signing_identity == "") { - ios_code_signing_identity = - exec_script("find_signing_identity.py", - [ - "--matching-pattern", - ios_code_signing_identity_description, - ], - "string") + find_signing_identity_args = [] + if (ios_code_signing_identity_description != "") { + find_signing_identity_args = [ + "--matching-pattern", + ios_code_signing_identity_description, + ] + } + ios_code_signing_identity = exec_script("find_signing_identity.py", + find_signing_identity_args, + "trim string") } } diff --git a/chromium/build/config/ios/rules.gni b/chromium/build/config/ios/rules.gni index 6c9733346e7..0e882464c74 100644 --- a/chromium/build/config/ios/rules.gni +++ b/chromium/build/config/ios/rules.gni @@ -514,13 +514,10 @@ template("ios_info_plist") { # entitlements (must generate a single file as output); cannot be # defined if entitlements_path is set. # -# bundle_extension: -# (optional) bundle extension including the dot, default to ".app". -# # product_type # (optional) string, product type for the generated Xcode project, -# default to "com.apple.product-type.application". Should generally -# not be overridden. +# default to "com.apple.product-type.application". Should only be +# overridden when building an application extension.
# # enable_code_signing # (optional) boolean, control whether code signing is enabled or not, @@ -546,6 +543,10 @@ template("ios_app_bundle") { _output_name = invoker.output_name } + assert( + !defined(invoker.bundle_extension), + "bundle_extension must not be set for ios_app_bundle template for $target_name") + _xcode_product_bundle_id = "$ios_app_bundle_id_prefix.$_output_name" if (defined(invoker.xcode_product_bundle_id)) { _xcode_product_bundle_id = invoker.xcode_product_bundle_id @@ -599,10 +600,6 @@ template("ios_app_bundle") { _default_variant = _variants[0] - if (current_toolchain != default_toolchain) { - not_needed("*") - } - source_set(_arch_executable_source) { forward_variables_from(invoker, "*", @@ -632,6 +629,21 @@ template("ios_app_bundle") { "target_out_dir") + "/$_output_name.xcent" } + _product_type = _ios_xcode_app_bundle_id + if (defined(invoker.product_type)) { + _product_type = invoker.product_type + } + + if (_product_type == _ios_xcode_app_bundle_id) { + _bundle_extension = ".app" + } else if (_product_type == _ios_xcode_appex_bundle_id) { + _bundle_extension = ".appex" + } else { + assert(false, "unknown product_type \"$_product_type\" for $_target_name") + } + + _is_app_bundle = _product_type == _ios_xcode_app_bundle_id + executable(_arch_executable_target) { forward_variables_from(invoker, "*", @@ -681,6 +693,18 @@ template("ios_app_bundle") { "2", ] + # Application extensions may share private frameworks with the application + # itself, so they need to look in both their own bundle and the surrounding + # app bundle for the frameworks. + if (!_is_app_bundle) { + ldflags += [ + "-Xlinker", + "-rpath", + "-Xlinker", + "@executable_path/../../Frameworks", + ] + } + if (use_ios_simulator) { deps += [ ":$_generate_entitlements_target($default_toolchain)" ] @@ -791,23 +815,8 @@ template("ios_app_bundle") { } } - _product_type = _ios_xcode_app_bundle_id - if (defined(invoker.product_type)) { - _product_type = invoker.product_type - } - - _app_bundle_extension = ".app" - _bundle_extension = _app_bundle_extension - if (defined(invoker.bundle_extension)) { - _bundle_extension = invoker.bundle_extension - } - - # Only write PkgInfo for real application, not application extension (they - # have the same product type but a different extension). - _write_pkg_info = _product_type == _ios_xcode_app_bundle_id && - _bundle_extension == _app_bundle_extension - - if (_write_pkg_info) { + # Only write PkgInfo for a real application, not an application extension.
+ if (_is_app_bundle) { _create_pkg_info = target_name + "_pkg_info" action(_create_pkg_info) { forward_variables_from(invoker, [ "testonly" ]) @@ -873,7 +882,7 @@ template("ios_app_bundle") { if (!defined(bundle_deps)) { bundle_deps = [] } - if (_write_pkg_info) { + if (_is_app_bundle) { bundle_deps += [ ":$_bundle_data_pkg_info" ] } bundle_deps += _variant.bundle_deps @@ -906,6 +915,10 @@ template("ios_app_bundle") { } } } + + if (current_toolchain != default_toolchain) { + not_needed("*") + } } set_defaults("ios_app_bundle") { @@ -948,7 +961,6 @@ template("ios_appex_bundle") { "bundle_extension", "product_type", ]) - bundle_extension = ".appex" product_type = _ios_xcode_appex_bundle_id # Add linker flags required for an application extension (determined by diff --git a/chromium/build/config/linux/BUILD.gn b/chromium/build/config/linux/BUILD.gn index 6b8ef4fd075..fda14196f3e 100644 --- a/chromium/build/config/linux/BUILD.gn +++ b/chromium/build/config/linux/BUILD.gn @@ -60,14 +60,6 @@ config("xext") { libs = [ "Xext" ] } -config("xrandr") { - libs = [ "Xrandr" ] -} - -config("xscrnsaver") { - libs = [ "Xss" ] -} - config("xfixes") { libs = [ "Xfixes" ] } diff --git a/chromium/build/config/mac/BUILD.gn b/chromium/build/config/mac/BUILD.gn index de8233bba4a..5369f767267 100644 --- a/chromium/build/config/mac/BUILD.gn +++ b/chromium/build/config/mac/BUILD.gn @@ -24,6 +24,13 @@ config("compiler") { "-arch", "i386", ] + } else if (current_cpu == "arm64") { + common_mac_flags += [ + "-arch", + current_cpu, + ] + } else { + assert(false, "unknown current_cpu $current_cpu") } # This is here so that all files get recompiled after an Xcode update. diff --git a/chromium/build/config/mac/OWNERS b/chromium/build/config/mac/OWNERS index 14747a00255..0ed2e154d83 100644 --- a/chromium/build/config/mac/OWNERS +++ b/chromium/build/config/mac/OWNERS @@ -1,4 +1,2 @@ rsesek@chromium.org sdefresne@chromium.org - -# COMPONENT: Build diff --git a/chromium/build/config/mac/mac_sdk.gni b/chromium/build/config/mac/mac_sdk.gni index c43c00ddaf0..eda9dfd77d8 100644 --- a/chromium/build/config/mac/mac_sdk.gni +++ b/chromium/build/config/mac/mac_sdk.gni @@ -3,6 +3,7 @@ # found in the LICENSE file. import("//build/config/chrome_build.gni") +import("//build/config/gclient_args.gni") import("//build/config/mac/mac_sdk_overrides.gni") import("//build/toolchain/goma.gni") import("//build/toolchain/toolchain.gni") @@ -112,7 +113,13 @@ if (use_system_xcode) { mac_bin_path = find_sdk_lines[1] } } else { - mac_sdk_version = mac_sdk_official_version + if (mac_xcode_version == "default") { + mac_sdk_version = mac_sdk_official_version + } else if (mac_xcode_version == "xcode_12_beta") { + mac_sdk_version = "11.0" + } else { + assert(false, "unknown mac_xcode_version $mac_xcode_version") + } _dev = _hermetic_xcode_path + "/Contents/Developer" _sdk = "MacOSX${mac_sdk_version}.sdk" mac_sdk_path = _dev + "/Platforms/MacOSX.platform/Developer/SDKs/$_sdk" diff --git a/chromium/build/config/mac/prepare_framework_version.py b/chromium/build/config/mac/prepare_framework_version.py index 5e8a53f20ab..db921506987 100644 --- a/chromium/build/config/mac/prepare_framework_version.py +++ b/chromium/build/config/mac/prepare_framework_version.py @@ -31,7 +31,7 @@ def PrepareFrameworkVersion(version_file, framework_root_dir, version): # directory exists. 
dirname = os.path.dirname(version_file) if not os.path.isdir(dirname): - os.makedirs(dirname, 0700) + os.makedirs(dirname, 0o700) with open(version_file, 'w+') as f: f.write(version) diff --git a/chromium/build/config/mac/sdk_info.py b/chromium/build/config/mac/sdk_info.py index 805097a2339..1ad11d3b195 100644 --- a/chromium/build/config/mac/sdk_info.py +++ b/chromium/build/config/mac/sdk_info.py @@ -69,8 +69,8 @@ def FillXcodeVersion(settings, developer_dir): def FillMachineOSBuild(settings): """Fills OS build number into |settings|.""" - machine_os_build = subprocess.check_output(['sw_vers', - '-buildVersion']).strip() + machine_os_build = subprocess.check_output(['sw_vers', '-buildVersion'], + universal_newlines=True).strip() settings['machine_os_build'] = machine_os_build # The reported build number is made up from the kernel major version number, diff --git a/chromium/build/config/win/BUILD.gn b/chromium/build/config/win/BUILD.gn index 7b44f0e4372..c0758b654af 100644 --- a/chromium/build/config/win/BUILD.gn +++ b/chromium/build/config/win/BUILD.gn @@ -82,6 +82,10 @@ config("compiler") { # Don't look for includes in %INCLUDE%. cflags += [ "/X" ] + # Required to make the 19041 SDK compatible with clang-cl. + # See https://crbug.com/1089996 issue #2 for details. + cflags += [ "/D__WRL_ENABLE_FUNCTION_STATICS__" ] + # Tell clang which version of MSVC to emulate. cflags += [ "-fmsc-version=1916" ] @@ -121,9 +125,6 @@ config("compiler") { if (use_clang_diagnostics_format) { cflags += [ "/clang:-fdiagnostics-format=clang" ] } - - # Clang runtime libraries, such as the sanitizer runtimes, live here. - lib_dirs = [ "$clang_base_path/lib/clang/$clang_version/lib/windows" ] } if (use_lld && !use_thin_lto && (is_clang || !use_goma)) { diff --git a/chromium/build/config/win/manifest.gni b/chromium/build/config/win/manifest.gni index b18a4a14129..e2115083fe5 100644 --- a/chromium/build/config/win/manifest.gni +++ b/chromium/build/config/win/manifest.gni @@ -41,6 +41,12 @@ as_invoker_manifest = "//build/win/as_invoker.manifest" # elevate. require_administrator_manifest = "//build/win/require_administrator.manifest" +# Request the segment heap. See https://crbug.com/1014701 for details. +declare_args() { + enable_segment_heap = false +} +segment_heap_manifest = "//build/win/segment_heap.manifest" + # Construct a target to combine the given manifest files into a .rc file. # # Variables for the windows_manifest template: diff --git a/chromium/build/docs/mac_hermetic_toolchain.md b/chromium/build/docs/mac_hermetic_toolchain.md index b1ee13ef670..d5c88deefb3 100644 --- a/chromium/build/docs/mac_hermetic_toolchain.md +++ b/chromium/build/docs/mac_hermetic_toolchain.md @@ -1,44 +1,34 @@ # Mac and iOS hermetic toolchain instructions The following is a short explanation of why we use the hermetic toolchain -and instructions on how to roll a new toolchain. +and instructions on how to roll a new toolchain. This toolchain is only +available to Googlers and infra bots. ## How to roll a new hermetic toolchain. 1. Download a new version of Xcode, and confirm either mac or ios builds properly with this new version. -2. Run the following command: +2. Create a new CIPD package by moving Xcode.app to the `build/` directory, then + follow the instructions in + [build/xcode_binaries.yaml](../xcode_binaries.yaml). - ``` - src/build/package_mac_toolchain.py /path/to/Xcode.app/ [ios|mac] - ``` + The CIPD package creates a subset of the toolchain necessary for a build.
- The script will create a subset of the toolchain necessary for a build, and - upload them to be used by hermetic builds. - - If for some reason this toolchain version has already been uploaded, the - script will ask if we should create sub revision. This can be necessary when - the package script has been updated to compress additional files. - -2. Create a CL with updated [MAC|IOS]_TOOLCHAIN_VERSION and _SUB_REVISION in - src/build/mac_toolchain.py with the version created by the previous command. +2. Create a CL with the updated `MAC_BINARIES_TAG` in + [mac_toolchain.py](../mac_toolchain.py) with the version created by the + previous command. 3. Run the CL through the trybots to confirm the roll works. ## Why we use a hermetic toolchain. Building Chrome Mac currently requires many binaries that come bundled with -Xcode, as well the macOS and iphoneOS SDK [also bundled with Xcode]. Note that -Chrome ships its own version of clang [compiler], but is dependent on Xcode -for these other binaries. - -Chrome should be built against the latest SDK available, but historically, -updating the SDK has been nontrivially difficult. Additionally, bot system -installs can range from Xcode 5 on some bots, to the latest and -greatest. Using a hermetic toolchain has two main benefits: +Xcode, as well as the macOS and iPhoneOS SDK (also bundled with Xcode). Note that +Chrome ships its own version of clang (compiler), but is dependent on Xcode +for these other binaries. Using a hermetic toolchain has two main benefits: -1. Build Chrome with a well-defined toolchain [rather than whatever happens to -be installed on the machine]. +1. Build Chrome with a well-defined toolchain (rather than whatever happens to + be installed on the machine). 2. Easily roll/update the toolchain.
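The roll process above amounts to publishing the Xcode binaries as a versioned CIPD package and pinning its tag in mac_toolchain.py. As a rough sketch of the fetch side, with an invented package name, tag, and install directory (the real values live in build/mac_toolchain.py):

    import subprocess
    import tempfile

    # Hypothetical values; see build/mac_toolchain.py for the real ones.
    PACKAGE = 'example/mac/xcode_binaries'
    TAG = 'xcode_version:11e146'
    INSTALL_DIR = 'build/mac_files/xcode_binaries'

    def install_xcode_binaries():
      # 'cipd ensure' materializes the version named in the ensure file under
      # the given root directory, re-downloading only when the pin changes.
      with tempfile.NamedTemporaryFile('w', suffix='.ensure') as f:
        f.write('%s %s\n' % (PACKAGE, TAG))
        f.flush()
        subprocess.check_call(
            ['cipd', 'ensure', '-root', INSTALL_DIR, '-ensure-file', f.name])

Pinning a package tag in source control is what makes the toolchain hermetic: two checkouts at the same revision resolve to the same toolchain bits, independent of whatever Xcode happens to be installed on the machine.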
diff --git a/chromium/build/dotfile_settings.gni b/chromium/build/dotfile_settings.gni index 74f76771839..de5479fceba 100644 --- a/chromium/build/dotfile_settings.gni +++ b/chromium/build/dotfile_settings.gni @@ -10,6 +10,7 @@ build_dotfile_settings = { "//build/config/android/config.gni", "//build/config/android/internal_rules.gni", "//build/config/android/rules.gni", + "//build/config/chromeos/rules.gni", "//build/config/compiler/BUILD.gn", "//build/config/compiler/pgo/BUILD.gn", "//build/config/gcc/gcc_version.gni", diff --git a/chromium/build/fuchsia/aemu_target.py b/chromium/build/fuchsia/aemu_target.py index 0e3bc8f4ead..e997541ab8c 100644 --- a/chromium/build/fuchsia/aemu_target.py +++ b/chromium/build/fuchsia/aemu_target.py @@ -11,7 +11,9 @@ import logging from common import GetEmuRootForPlatform + class AemuTarget(qemu_target.QemuTarget): + def __init__(self, output_dir, target_cpu, system_log_file, emu_type, cpu_cores, require_kvm, ram_size_mb, enable_graphics, hardware_gpu): @@ -60,7 +62,7 @@ class AemuTarget(qemu_target.QemuTarget): # All args after -fuchsia flag gets passed to QEMU aemu_command.extend([ '-feature', aemu_features, '-window-size', '1024x600', '-gpu', - gpu_target, '-fuchsia' + gpu_target, '-verbose', '-fuchsia' ]) aemu_command.extend(self._BuildQemuConfig()) @@ -73,3 +75,19 @@ class AemuTarget(qemu_target.QemuTarget): '-device', 'ich9-ahci,id=ahci']) logging.info(' '.join(aemu_command)) return aemu_command + + def _GetVulkanIcdFile(self): + return os.path.join(GetEmuRootForPlatform(self._emu_type), 'lib64', + 'vulkan', 'vk_swiftshader_icd.json') + + def _SetEnv(self): + env = os.environ.copy() + aemu_logging_env = { + "ANDROID_EMU_VK_NO_CLEANUP": "1", + "ANDROID_EMUGL_LOG_PRINT": "1", + "ANDROID_EMUGL_VERBOSE": "1", + "VK_ICD_FILENAMES": self._GetVulkanIcdFile(), + "VK_LOADER_DEBUG": "info,error", + } + env.update(aemu_logging_env) + return env diff --git a/chromium/build/fuchsia/common_args.py b/chromium/build/fuchsia/common_args.py index 24544f2c305..855bc1149c3 100644 --- a/chromium/build/fuchsia/common_args.py +++ b/chromium/build/fuchsia/common_args.py @@ -66,6 +66,8 @@ def AddCommonArgs(arg_parser): help='Path to a Fuchsia build output directory. ' 'Equivalent to setting --ssh_config and ' '---os-check=ignore') + common_args.add_argument('--runner-logs-dir', + help='Directory to write test runner logs to.') common_args.add_argument('--system-log-file', help='File to write system logs to. Specify - to ' 'log to stdout.') diff --git a/chromium/build/fuchsia/device_target.py b/chromium/build/fuchsia/device_target.py index 8a5f7d7c5b9..9ba173ba95a 100644 --- a/chromium/build/fuchsia/device_target.py +++ b/chromium/build/fuchsia/device_target.py @@ -141,7 +141,7 @@ class DeviceTarget(target.Target): or waits up to |timeout| seconds and returns False if the device couldn't be found.""" - dev_finder_path = GetHostToolPathFromPlatform('dev_finder') + dev_finder_path = GetHostToolPathFromPlatform('device-finder') if self._node_name: command = [dev_finder_path, 'resolve', @@ -163,13 +163,13 @@ class DeviceTarget(target.Target): return False if self._node_name: - # Handle the result of "dev_finder resolve". + # Handle the result of "device-finder resolve". self._host = output.pop().strip() else: name_host_pairs = [x.strip().split(' ') for x in output] - # Handle the output of "dev_finder list". + # Handle the output of "device-finder list". 
if len(name_host_pairs) > 1: print('More than one device was discovered on the network.') print('Use --node-name <name> to specify the device to use.') diff --git a/chromium/build/fuchsia/emu_target.py b/chromium/build/fuchsia/emu_target.py index 83e10167914..6ccdd832fbd 100644 --- a/chromium/build/fuchsia/emu_target.py +++ b/chromium/build/fuchsia/emu_target.py @@ -8,6 +8,7 @@ import amber_repo import boot_data import logging import os +import runner_logs import subprocess import sys import target @@ -34,6 +35,9 @@ class EmuTarget(target.Target): """Build the command that will be run to start Fuchsia in the emulator.""" pass + def _SetEnv(self): + return os.environ.copy() + # Used by the context manager to ensure that the emulator is killed when # the Python process exits. def __exit__(self, exc_type, exc_val, exc_tb): @@ -52,27 +56,28 @@ class EmuTarget(target.Target): # Zircon sends debug logs to serial port (see kernel.serial=legacy flag # above). Serial port is redirected to a file through emulator stdout. - # Unless a |_system_log_file| is explicitly set, we output the kernel serial - # log to a temporary file, and print that out if we are unable to connect to + # Unless runner_logs are enabled, we output the kernel serial log + # to a temporary file, and print that out if we are unable to connect to # the emulator guest, to make it easier to diagnose connectivity issues. - temporary_system_log_file = None - if self._system_log_file: - stdout = self._system_log_file - stderr = subprocess.STDOUT + temporary_log_file = None + if runner_logs.IsEnabled(): + stdout = runner_logs.FileStreamFor('serial_log') else: - temporary_system_log_file = tempfile.NamedTemporaryFile('w') - stdout = temporary_system_log_file - stderr = sys.stderr + temporary_log_file = tempfile.NamedTemporaryFile('w') + stdout = temporary_log_file - self._emu_process = subprocess.Popen(emu_command, stdin=open(os.devnull), - stdout=stdout, stderr=stderr) + self._emu_process = subprocess.Popen(emu_command, + stdin=open(os.devnull), + stdout=stdout, + stderr=subprocess.STDOUT, + env=self._SetEnv()) try: - self._WaitUntilReady(); + self._WaitUntilReady() except target.FuchsiaTargetException: - if temporary_system_log_file: + if temporary_log_file: logging.info('Kernel logs:\n' + - open(temporary_system_log_file.name, 'r').read()) + open(temporary_log_file.name, 'r').read()) raise def GetAmberRepo(self): @@ -86,15 +91,18 @@ class EmuTarget(target.Target): logging.error('%s did not start' % (self._GetEmulatorName())) return returncode = self._emu_process.poll() - if returncode: - logging.error('%s quit unexpectedly with exit code %d' % - (self._GetEmulatorName(), returncode)) + if returncode == None: + logging.info('Shutting down %s' % (self._GetEmulatorName())) + self._emu_process.kill() elif returncode == 0: logging.info('%s quit unexpectedly without errors' % self._GetEmulatorName()) + elif returncode < 0: + logging.error('%s was terminated by signal %d' % + (self._GetEmulatorName(), -returncode)) else: - logging.info('Shutting down %s' % (self._GetEmulatorName())) - self._emu_process.kill() + logging.error('%s quit unexpectedly with exit code %d' % + (self._GetEmulatorName(), returncode)) def _IsEmuStillRunning(self): if not self._emu_process: diff --git a/chromium/build/fuchsia/linux.sdk.sha1 b/chromium/build/fuchsia/linux.sdk.sha1 index 7552ab1c396..a7cac626a07 100644 --- a/chromium/build/fuchsia/linux.sdk.sha1 +++ b/chromium/build/fuchsia/linux.sdk.sha1 @@ -1 +1 @@ -0.20200514.2.1 +0.20200625.3.1 diff --git
a/chromium/build/fuchsia/mac.sdk.sha1 b/chromium/build/fuchsia/mac.sdk.sha1 index da07900e95d..a7cac626a07 100644 --- a/chromium/build/fuchsia/mac.sdk.sha1 +++ b/chromium/build/fuchsia/mac.sdk.sha1 @@ -1 +1 @@ -0.20200514.1.1 +0.20200625.3.1 diff --git a/chromium/build/fuchsia/net_test_server.py b/chromium/build/fuchsia/net_test_server.py index 94f71635433..60ad78f60fa 100644 --- a/chromium/build/fuchsia/net_test_server.py +++ b/chromium/build/fuchsia/net_test_server.py @@ -43,6 +43,7 @@ class SSHPortForwarder(chrome_test_server_spawner.PortForwarder): '-NT', '-O', 'cancel', '-R', '0:localhost:%d' % host_port] task = self._target.RunCommandPiped([], ssh_args=forwarding_args, + stdout=open(os.devnull, 'w'), stderr=subprocess.PIPE) task.wait() if task.returncode != 0: diff --git a/chromium/build/fuchsia/qemu_image.py b/chromium/build/fuchsia/qemu_image.py index 5126074fe02..ab5e040acbd 100644 --- a/chromium/build/fuchsia/qemu_image.py +++ b/chromium/build/fuchsia/qemu_image.py @@ -18,6 +18,7 @@ TODO(crbug.com/1046861): Remove this workaround when the bug is fixed. import logging import subprocess +import tempfile import time @@ -33,7 +34,9 @@ def _ExecQemuImgWithTimeout(command): """ logging.info('qemu-img starting') - p = subprocess.Popen(command) + command_output_file = tempfile.NamedTemporaryFile('w') + p = subprocess.Popen(command, stdout=command_output_file, + stderr=subprocess.STDOUT) start_sec = time.time() while p.poll() is None and time.time() - start_sec < QEMU_IMG_TIMEOUT_SEC: time.sleep(1) @@ -41,10 +44,17 @@ def _ExecQemuImgWithTimeout(command): logging.info('qemu-img duration: %f' % float(stop_sec - start_sec)) if p.poll() is None: + returncode = None p.kill() - return None + p.wait() + else: + returncode = p.returncode - return p.returncode + log_level = logging.WARN if returncode else logging.DEBUG + for line in open(command_output_file.name, 'r'): + logging.log(log_level, 'qemu-img stdout: ' + line.strip()) + + return returncode def ExecQemuImgWithRetry(command): diff --git a/chromium/build/fuchsia/qemu_target.py b/chromium/build/fuchsia/qemu_target.py index bdc397e797b..96d7465bf9e 100644 --- a/chromium/build/fuchsia/qemu_target.py +++ b/chromium/build/fuchsia/qemu_target.py @@ -55,7 +55,11 @@ class QemuTarget(emu_target.EmuTarget): if kvm_supported and same_arch: return True elif self._require_kvm: - raise FuchsiaTargetException('KVM required but unavailable.') + if same_arch: + raise FuchsiaTargetException('KVM required but unavailable.') + else: + raise FuchsiaTargetException('KVM unavailable when CPU architecture of'\ + ' host is different from that of target.') else: return False @@ -119,7 +123,7 @@ class QemuTarget(emu_target.EmuTarget): if self._target_cpu == 'arm64': kvm_command.append('host') else: - kvm_command.append('host,migratable=no') + kvm_command.append('host,migratable=no,+invtsc') else: logging.warning('Unable to launch %s with KVM acceleration.' 
% (self._emu_type) + diff --git a/chromium/build/fuchsia/remote_cmd.py b/chromium/build/fuchsia/remote_cmd.py index 93ce32ce287..019c2dc9abd 100644 --- a/chromium/build/fuchsia/remote_cmd.py +++ b/chromium/build/fuchsia/remote_cmd.py @@ -5,7 +5,6 @@ import logging import os import subprocess -import sys import threading _SSH = ['ssh'] @@ -57,31 +56,36 @@ class CommandRunner(object): _SSH_LOGGER.debug('ssh exec: ' + ' '.join(ssh_command)) if silent: devnull = open(os.devnull, 'w') - process = subprocess.Popen(ssh_command, stderr=devnull, stdout=devnull) + process = subprocess.Popen(ssh_command, stdout=devnull, stderr=devnull) else: - process = subprocess.Popen(ssh_command) + process = subprocess.Popen(ssh_command, stdout=subprocess.PIPE, + stderr=subprocess.STDOUT) timeout_timer = None if timeout_secs: timeout_timer = threading.Timer(timeout_secs, process.kill) timeout_timer.start() - process.wait() + if not silent: + for line in process.stdout: + print(line) + process.wait() if timeout_timer: timeout_timer.cancel() - if process.returncode == -9: raise Exception('Timeout when executing \"%s\".' % ' '.join(command)) return process.returncode - def RunCommandPiped(self, command = None, ssh_args = None, **kwargs): + def RunCommandPiped(self, command, stdout, stderr, ssh_args = None, **kwargs): """Executes an SSH command on the remote host and returns a process object with access to the command's stdio streams. Does not block. command: A list of strings containing the command and its arguments. + stdout: subprocess stdout. Must not be None. + stderr: subprocess stderr. Must not be None. ssh_args: Arguments that will be passed to SSH. kwargs: A dictionary of parameters to be passed to subprocess.Popen(). The parameters can be used to override stdin and stdout, for @@ -89,14 +93,15 @@ class CommandRunner(object): Returns a Popen object for the command.""" - if not command: - command = [] + if not stdout or not stderr: + raise Exception('Stdout/stderr must be specified explicitly') + if not ssh_args: ssh_args = [] ssh_command = self._GetSshCommandLinePrefix() + ssh_args + ['--'] + command _SSH_LOGGER.debug(' '.join(ssh_command)) - return subprocess.Popen(ssh_command, **kwargs) + return subprocess.Popen(ssh_command, stdout=stdout, stderr=stderr, **kwargs) def RunScp(self, sources, dest, direction, recursive=False): diff --git a/chromium/build/fuchsia/run_package.py b/chromium/build/fuchsia/run_package.py index 62bbd3b8fd8..ed2cca3bf12 100644 --- a/chromium/build/fuchsia/run_package.py +++ b/chromium/build/fuchsia/run_package.py @@ -16,12 +16,10 @@ import re import select import subprocess import sys -import time import threading import uuid -from symbolizer import RunSymbolizer -from symbolizer import SymbolizerFilter +from symbolizer import BuildIdsPaths, RunSymbolizer, SymbolizerFilter FAR = common.GetHostToolPathFromPlatform('far') @@ -34,16 +32,8 @@ def _AttachKernelLogReader(target): logging.info('Attaching kernel logger.') return target.RunCommandPiped(['dlog', '-f'], stdin=open(os.devnull, 'r'), - stdout=subprocess.PIPE) - - -def _BuildIdsPaths(package_paths): - """Generate build ids paths for symbolizer processes.""" - build_ids_paths = map( - lambda package_path: os.path.join( - os.path.dirname(package_path), 'ids.txt'), - package_paths) - return build_ids_paths + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT) class SystemLogReader(object): @@ -77,7 +67,7 @@ class SystemLogReader(object): self._system_log = open(system_log_file,'w', buffering=1) self._symbolizer_proc = 
RunSymbolizer(self._listener_proc.stdout, self._system_log, - _BuildIdsPaths(package_paths)) + BuildIdsPaths(package_paths)) class MergedInputStream(object): @@ -239,7 +229,7 @@ def RunPackage(output_dir, target, package_paths, package_name, # Run the log data through the symbolizer process. output_stream = SymbolizerFilter(output_stream, - _BuildIdsPaths(package_paths)) + BuildIdsPaths(package_paths)) for next_line in output_stream: print(next_line.rstrip()) diff --git a/chromium/build/fuchsia/runner_exceptions.py b/chromium/build/fuchsia/runner_exceptions.py index cedf99bbd7a..03f872e453f 100644 --- a/chromium/build/fuchsia/runner_exceptions.py +++ b/chromium/build/fuchsia/runner_exceptions.py @@ -7,6 +7,7 @@ This makes it easier to query build tables for particular error types as exit codes are visible to queries while exception stack traces are not.""" +import errno import fcntl import logging import os @@ -23,17 +24,15 @@ def _PrintException(value, trace): print(str(value)) -# TODO(crbug.com/1080858): Delete function when the stdout print bug is fixed. -def _LogStdoutBlockingStatus(): - """Log whether sys.stdout is blocking or non-blocking. +def IsStdoutBlocking(): + """Returns True if sys.stdout is blocking or False if non-blocking. - It should be blocking, but there are intermittent IO errors that suggest - that it is set to non-blocking at times during test runs.""" + sys.stdout should always be blocking. Non-blocking is associated with + intermittent IOErrors (crbug.com/1080858). + """ - if fcntl.fcntl(sys.stdout, fcntl.F_GETFD) & os.O_NONBLOCK: - logging.error('sys.stdout is non-blocking') - else: - logging.info('sys.stdout is blocking') + nonblocking = fcntl.fcntl(sys.stdout, fcntl.F_GETFL) & os.O_NONBLOCK + return not nonblocking def HandleExceptionAndReturnExitCode(): @@ -57,19 +56,23 @@ def HandleExceptionAndReturnExitCode(): if type is FuchsiaTargetException: if 'ssh' in str(value).lower(): - print('Error: FuchsiaTargetException: SSH to Fuchsia target failed.') - return 65 + print('Error: FuchsiaTargetException: SSH to Fuchsia target failed.') + return 65 return 64 elif type is IOError: - if value.errno == 11: - print('Info: Python print to sys.stdout probably failed') - _LogStdoutBlockingStatus() - return 73 + if value.errno == errno.EAGAIN: + logging.info('Python print to sys.stdout probably failed') + if not IsStdoutBlocking(): + logging.warn('sys.stdout is non-blocking') + return 73 return 72 elif type is subprocess.CalledProcessError: - if value.cmd[0] == 'scp': + if os.path.basename(value.cmd[0]) == 'scp': print('Error: scp operation failed - %s' % str(value)) return 81 + if os.path.basename(value.cmd[0]) == 'qemu-img': + print('Error: qemu-img fuchsia image generation failed.') + return 82 return 80 else: return 1 diff --git a/chromium/build/fuchsia/runner_logs.py b/chromium/build/fuchsia/runner_logs.py new file mode 100644 index 00000000000..19da4f5a045 --- /dev/null +++ b/chromium/build/fuchsia/runner_logs.py @@ -0,0 +1,96 @@ +# Copyright 2020 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Creates and manages test runner log file objects. + +Provides a context manager object for use in a with statement +and a module level FileStreamFor function for use by clients. 
+""" + +import collections +import multiprocessing +import os + +from symbolizer import RunSymbolizer + +SYMBOLIZED_SUFFIX = '.symbolized' + +_RunnerLogEntry = collections.namedtuple( + '_RunnerLogEntry', ['name', 'log_file', 'path', 'symbolize']) + +# Module singleton variable. +_instance = None + + +class RunnerLogManager(object): + """ Runner logs object for use in a with statement.""" + + def __init__(self, log_dir, build_ids_files): + global _instance + if _instance: + raise Exception('Only one RunnerLogManager can be instantiated') + + self._log_dir = log_dir + self._build_ids_files = build_ids_files + self._runner_logs = [] + + if self._log_dir: + os.makedirs(self._log_dir) + + _instance = self + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + pool = multiprocessing.Pool(4) + for log_entry in self._runner_logs: + pool.apply_async(_FinalizeLog, (log_entry, self._build_ids_files)) + pool.close() + pool.join() + _instance = None + + + def _FileStreamFor(self, name, symbolize): + if any(elem.name == name for elem in self._runner_logs): + raise Exception('RunnerLogManager can only open "%" once' % name) + + path = os.path.join(self._log_dir, name) if self._log_dir else os.devnull + log_file = open(path, 'w') + + self._runner_logs.append(_RunnerLogEntry(name, log_file, path, symbolize)) + + return log_file + + +def _FinalizeLog(log_entry, build_ids_files): + log_entry.log_file.close() + + if log_entry.symbolize: + input_file = open(log_entry.path, 'r') + output_file = open(log_entry.path + SYMBOLIZED_SUFFIX, 'w') + proc = RunSymbolizer(input_file, output_file, build_ids_files) + proc.wait() + output_file.close() + input_file.close() + + +def IsEnabled(): + """Returns True if the RunnerLogManager has been created, or False if not.""" + + return _instance is not None and _instance._log_dir is not None + + +def FileStreamFor(name, symbolize=False): + """Opens a test runner file stream in the test runner log directory. + + If no test runner log directory is specified, output is discarded. + + name: log file name + symbolize: if True, make a symbolized copy of the log after closing it. + + Returns an opened log file object.""" + + return _instance._FileStreamFor(name, symbolize) if IsEnabled() else open( + os.devnull, 'w') diff --git a/chromium/build/fuchsia/sdk-bucket.txt b/chromium/build/fuchsia/sdk-bucket.txt new file mode 100644 index 00000000000..9f860bce1d7 --- /dev/null +++ b/chromium/build/fuchsia/sdk-bucket.txt @@ -0,0 +1 @@ +fuchsia diff --git a/chromium/build/fuchsia/sdk-hash-files.list b/chromium/build/fuchsia/sdk-hash-files.list new file mode 100644 index 00000000000..6f37bcd9f77 --- /dev/null +++ b/chromium/build/fuchsia/sdk-hash-files.list @@ -0,0 +1 @@ +{platform}.sdk.sha1 diff --git a/chromium/build/fuchsia/symbolizer.py b/chromium/build/fuchsia/symbolizer.py index 1432feea3f0..325cc23fc61 100644 --- a/chromium/build/fuchsia/symbolizer.py +++ b/chromium/build/fuchsia/symbolizer.py @@ -14,6 +14,15 @@ from common import GetHostToolPathFromPlatform ARM64_DOCKER_LLVM_SYMBOLIZER_PATH = os.path.join('/', 'usr', 'lib', 'llvm-3.8', 'bin', 'llvm-symbolizer') +def BuildIdsPaths(package_paths): + """Generate build ids paths for symbolizer processes.""" + build_ids_paths = map( + lambda package_path: os.path.join( + os.path.dirname(package_path), 'ids.txt'), + package_paths) + return build_ids_paths + + def RunSymbolizer(input_file, output_file, build_ids_files): """Starts a symbolizer process. 
diff --git a/chromium/build/fuchsia/target.py b/chromium/build/fuchsia/target.py index d8c1741f7d8..254c5fd5789 100644 --- a/chromium/build/fuchsia/target.py +++ b/chromium/build/fuchsia/target.py @@ -8,6 +8,7 @@ import json import logging import os import remote_cmd +import runner_logs import shutil import subprocess import sys @@ -21,7 +22,7 @@ _ATTACH_RETRY_SECONDS = 120 # Amount of time to wait for Amber to complete package installation, as a # mitigation against hangs due to amber/network-related failures. -_INSTALL_TIMEOUT_SECS = 5 * 60 +_INSTALL_TIMEOUT_SECS = 10 * 60 def _GetPackageUri(package_name): @@ -216,9 +217,14 @@ class Target(object): host, port = self._GetEndpoint() end_time = time.time() + _ATTACH_RETRY_SECONDS + ssh_diagnostic_log = runner_logs.FileStreamFor('ssh_diagnostic_log') while time.time() < end_time: runner = remote_cmd.CommandRunner(self._GetSshConfigPath(), host, port) - if runner.RunCommand(['true'], True) == 0: + ssh_proc = runner.RunCommandPiped(['true'], + ssh_args=['-v'], + stdout=ssh_diagnostic_log, + stderr=subprocess.STDOUT) + if ssh_proc.wait() == 0: logging.info('Connected!') self._started = True return True diff --git a/chromium/build/fuchsia/test_runner.py b/chromium/build/fuchsia/test_runner.py index 37992fd2fe1..5033a245c49 100755 --- a/chromium/build/fuchsia/test_runner.py +++ b/chromium/build/fuchsia/test_runner.py @@ -10,6 +10,7 @@ import argparse import json import logging import os +import runner_logs import socket import subprocess import sys @@ -20,6 +21,8 @@ from common_args import AddCommonArgs, ConfigureLogging, GetDeploymentTargetForA from net_test_server import SetupTestServer from run_package import RunPackage, RunPackageArgs, SystemLogReader from runner_exceptions import HandleExceptionAndReturnExitCode +from runner_logs import RunnerLogManager +from symbolizer import BuildIdsPaths DEFAULT_TEST_SERVER_CONCURRENCY = 4 @@ -140,37 +143,38 @@ def main(): child_args.extend(args.child_args) try: - with GetDeploymentTargetForArgs(args) as target: - with SystemLogReader() as system_logger: - target.Start() - - if args.system_log_file and args.system_log_file != '-': - system_logger.Start(target, args.package, args.system_log_file) - - if args.test_launcher_filter_file: - target.PutFile(args.test_launcher_filter_file, TEST_FILTER_PATH, - for_package=args.package_name) - child_args.append('--test-launcher-filter-file=' + TEST_FILTER_PATH) - - test_server = None - if args.enable_test_server: - assert test_concurrency - test_server = SetupTestServer(target, test_concurrency, - args.package_name) - - run_package_args = RunPackageArgs.FromCommonArgs(args) - returncode = RunPackage( - args.output_directory, target, args.package, args.package_name, - child_args, run_package_args) - - if test_server: - test_server.Stop() - - if args.test_launcher_summary_output: - target.GetFile(TEST_RESULT_PATH, args.test_launcher_summary_output, - for_package=args.package_name) - - return returncode + with GetDeploymentTargetForArgs(args) as target, \ + SystemLogReader() as system_logger, \ + RunnerLogManager(args.runner_logs_dir, BuildIdsPaths(args.package)): + target.Start() + + if args.system_log_file and args.system_log_file != '-': + system_logger.Start(target, args.package, args.system_log_file) + + if args.test_launcher_filter_file: + target.PutFile(args.test_launcher_filter_file, TEST_FILTER_PATH, + for_package=args.package_name) + child_args.append('--test-launcher-filter-file=' + TEST_FILTER_PATH) + + test_server = None + if args.enable_test_server: + 
assert test_concurrency + test_server = SetupTestServer(target, test_concurrency, + args.package_name) + + run_package_args = RunPackageArgs.FromCommonArgs(args) + returncode = RunPackage( + args.output_directory, target, args.package, args.package_name, + child_args, run_package_args) + + if test_server: + test_server.Stop() + + if args.test_launcher_summary_output: + target.GetFile(TEST_RESULT_PATH, args.test_launcher_summary_output, + for_package=args.package_name) + + return returncode except: return HandleExceptionAndReturnExitCode() diff --git a/chromium/build/fuchsia/update_sdk.py b/chromium/build/fuchsia/update_sdk.py index 0a7752e4146..6e36f8dcdd7 100755 --- a/chromium/build/fuchsia/update_sdk.py +++ b/chromium/build/fuchsia/update_sdk.py @@ -26,60 +26,56 @@ SDK_SIGNATURE_FILE = '.hash' EXTRA_SDK_HASH_PREFIX = '' SDK_TARBALL_PATH_TEMPLATE = ( - 'gs://fuchsia/development/{sdk_hash}/sdk/{platform}-amd64/gn.tar.gz') + 'gs://{bucket}/development/{sdk_hash}/sdk/{platform}-amd64/gn.tar.gz') -def GetSdkGeneration(hash): +def ReadFile(filename): + with open(os.path.join(os.path.dirname(__file__), filename), 'r') as f: + return f.read() + + +def GetCloudStorageBucket(): + return ReadFile('sdk-bucket.txt').strip() + + +def GetSdkHash(bucket): + hashes = GetSdkHashList() + return max(hashes, key=lambda sdk:GetSdkGeneration(bucket, sdk)) if hashes else None + + +def GetSdkHashList(): + """Read filename entries from sdk-hash-files.list (one per line), substitute + {platform} in each entry if present, and read from each filename.""" + platform = GetHostOsFromPlatform() + filenames = [ + line.strip() for line in ReadFile('sdk-hash-files.list').replace( + '{platform}', platform).splitlines() + ] + sdk_hashes = [ReadFile(filename).strip() for filename in filenames] + return sdk_hashes + + +def GetSdkGeneration(bucket, hash): if not hash: return None + sdk_path = GetSdkTarballPath(bucket, hash) cmd = [ os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'gsutil.py'), 'ls', '-L', - GetSdkTarballForPlatformAndHash(hash) + sdk_path ] logging.debug("Running '%s'", " ".join(cmd)) sdk_details = subprocess.check_output(cmd) m = re.search('Generation:\s*(\d*)', sdk_details) if not m: - return None + raise RuntimeError('Could not find SDK generation for {sdk_path}'.format( + sdk_path=sdk_path)) return int(m.group(1)) -def GetSdkHashForPlatform(): - filename = '{platform}.sdk.sha1'.format(platform = GetHostOsFromPlatform()) - - # Get the hash of the SDK in chromium. - sdk_hash = None - hash_file = os.path.join(os.path.dirname(__file__), filename) - with open(hash_file, 'r') as f: - sdk_hash = f.read().strip() - - # Get the hash of the SDK with the extra prefix. - extra_sdk_hash = None - if EXTRA_SDK_HASH_PREFIX: - extra_hash_file = os.path.join(os.path.dirname(__file__), - EXTRA_SDK_HASH_PREFIX + filename) - with open(extra_hash_file, 'r') as f: - extra_sdk_hash = f.read().strip() - - # If both files are empty, return an error. - if not sdk_hash and not extra_sdk_hash: - logging.error( - 'No SHA1 found in {} or {}'.format(hash_file, extra_hash_file), - file=sys.stderr) - return 1 - - # Return the newer SDK based on the generation number. 
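
The replacement GetSdkHash() above reduces this selection to a max() keyed on the GCS generation number. Equivalent standalone logic, with a stubbed generation lookup standing in for GetSdkGeneration(bucket, hash):

    def newest_hash(hashes, generation_of):
        # Highest generation wins; None when there are no candidate hashes.
        return max(hashes, key=generation_of) if hashes else None

    assert newest_hash(['oldhash', 'newhash'],
                       {'oldhash': 2, 'newhash': 5}.get) == 'newhash'
    assert newest_hash([], None) is None
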
- sdk_generation = GetSdkGeneration(sdk_hash) - extra_sdk_generation = GetSdkGeneration(extra_sdk_hash) - if extra_sdk_generation > sdk_generation: - return extra_sdk_hash - return sdk_hash - - -def GetSdkTarballForPlatformAndHash(sdk_hash): +def GetSdkTarballPath(bucket, sdk_hash): return SDK_TARBALL_PATH_TEMPLATE.format( - sdk_hash=sdk_hash, platform=GetHostOsFromPlatform()) + bucket=bucket, sdk_hash=sdk_hash, platform=GetHostOsFromPlatform()) def GetSdkSignature(sdk_hash, boot_images): @@ -124,7 +120,7 @@ def DownloadAndUnpackFromCloudStorage(url, output_dir): task.stderr.read()) -def DownloadSdkBootImages(sdk_hash, boot_image_names): +def DownloadSdkBootImages(bucket, sdk_hash, boot_image_names): if not boot_image_names: return @@ -151,10 +147,9 @@ def DownloadSdkBootImages(sdk_hash, boot_image_names): logging.info( 'Downloading Fuchsia boot images for %s.%s...' % (device_type, arch)) - images_tarball_url = \ - 'gs://fuchsia/development/{sdk_hash}/images/'\ + images_tarball_url = 'gs://{bucket}/development/{sdk_hash}/images/'\ '{device_type}-{arch}.tgz'.format( - sdk_hash=sdk_hash, device_type=device_type, arch=arch) + bucket=bucket, sdk_hash=sdk_hash, device_type=device_type, arch=arch) DownloadAndUnpackFromCloudStorage(images_tarball_url, image_output_dir) @@ -178,7 +173,8 @@ def main(): except: return 0 - sdk_hash = GetSdkHashForPlatform() + bucket = GetCloudStorageBucket() + sdk_hash = GetSdkHash(bucket) if not sdk_hash: return 1 @@ -193,7 +189,7 @@ def main(): EnsureDirExists(SDK_ROOT) DownloadAndUnpackFromCloudStorage( - GetSdkTarballForPlatformAndHash(sdk_hash), SDK_ROOT) + GetSdkTarballPath(bucket, sdk_hash), SDK_ROOT) # Clean out the boot images directory. if (os.path.exists(IMAGES_ROOT)): @@ -204,7 +200,7 @@ def main(): # Ensure that the boot images are downloaded for this SDK. # If the developer opted into downloading hardware boot images in their # .gclient file, then only the hardware boot images will be downloaded. - DownloadSdkBootImages(sdk_hash, args.boot_images) + DownloadSdkBootImages(bucket, sdk_hash, args.boot_images) except subprocess.CalledProcessError as e: logging.error(("command '%s' failed with status %d.%s"), " ".join(e.cmd), e.returncode, " Details: " + e.output if e.output else "") diff --git a/chromium/build/get_symlink_targets.py b/chromium/build/get_symlink_targets.py new file mode 100755 index 00000000000..3285ff1d930 --- /dev/null +++ b/chromium/build/get_symlink_targets.py @@ -0,0 +1,27 @@ +#!/usr/bin/env python +# Copyright (c) 2019 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Prints the target paths of the given symlinks. + +Prints out each target in the order that the links were passed in. +""" + +import os +import sys + + +def main(): + for link_name in sys.argv[1:]: + if not os.path.islink(link_name): + sys.stderr.write("%s is not a link" % link_name) + return 1 + target = os.readlink(link_name) + if not os.path.isabs(target): + target = os.path.join(os.path.dirname(link_name), target) + print(os.path.realpath(target)) + return 0 + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/chromium/build/gn_helpers.py b/chromium/build/gn_helpers.py index b90c2fbbd71..a240d807a0d 100644 --- a/chromium/build/gn_helpers.py +++ b/chromium/build/gn_helpers.py @@ -4,71 +4,151 @@ """Helper functions useful when writing scripts that integrate with GN. 
-The main functions are ToGNString and FromGNString which convert between +The main functions are ToGNString() and FromGNString(), to convert between serialized GN variables and Python variables. -To use in a random python file in the build: +To use in an arbitrary Python file in the build: import os import sys sys.path.append(os.path.join(os.path.dirname(__file__), - os.pardir, os.pardir, "build")) + os.pardir, os.pardir, 'build')) import gn_helpers Where the sequence of parameters to join is the relative path from your source -file to the build directory.""" +file to the build directory. +""" import os import re import sys +_CHROMIUM_ROOT = os.path.join(os.path.dirname(__file__), os.pardir) + IMPORT_RE = re.compile(r'^import\("//(\S+)"\)') -class GNException(Exception): +class GNError(Exception): pass -def ToGNString(value, allow_dicts = True): - """Returns a stringified GN equivalent of the Python value. - - allow_dicts indicates if this function will allow converting dictionaries - to GN scopes. This is only possible at the top level, you can't nest a - GN scope in a list, so this should be set to False for recursive calls.""" - if isinstance(value, str): - if value.find('\n') >= 0: - raise GNException("Trying to print a string with a newline in it.") - return '"' + \ - value.replace('\\', '\\\\').replace('"', '\\"').replace('$', '\\$') + \ - '"' - - if sys.version_info.major < 3 and isinstance(value, unicode): - return ToGNString(value.encode('utf-8')) - - if isinstance(value, bool): - if value: - return "true" - return "false" - - if isinstance(value, list): - return '[ %s ]' % ', '.join(ToGNString(v) for v in value) - - if isinstance(value, dict): - if not allow_dicts: - raise GNException("Attempting to recursively print a dictionary.") - result = "" - for key in sorted(value): - if not isinstance(key, str) and not isinstance(key, unicode): - raise GNException("Dictionary key is not a string.") - result += "%s = %s\n" % (key, ToGNString(value[key], False)) - return result +# Computes ASCII code of an element of encoded Python 2 str / Python 3 bytes. +_Ord = ord if sys.version_info.major < 3 else lambda c: c + + +def _TranslateToGnChars(s): + for decoded_ch in s.encode('utf-8'): # str in Python 2, bytes in Python 3. + code = _Ord(decoded_ch) # int + if code in (34, 36, 92): # For '"', '$', or '\\'. + yield '\\' + chr(code) + elif 32 <= code < 127: + yield chr(code) + else: + yield '$0x%02X' % code - if isinstance(value, int): - return str(value) - raise GNException("Unsupported type when printing to GN.") +def ToGNString(value, pretty=False): + """Returns a stringified GN equivalent of a Python value. + + Args: + value: The Python value to convert. + pretty: Whether to pretty print. If true, then non-empty lists are rendered + recursively with one item per line, with indents. Otherwise lists are + rendered without new line. + Returns: + The stringified GN equivalent to |value|. + + Raises: + GNError: |value| cannot be printed to GN. + """ + + if sys.version_info.major < 3: + basestring_compat = basestring + else: + basestring_compat = str + + # Emits all output tokens without intervening whitespaces.
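
Before the token generator below, it is worth seeing what _TranslateToGnChars() above produces: '"', '$' and '\' are backslash-escaped, other printable ASCII passes through, and every other byte becomes a $0xXX escape. A standalone equivalent sketch (the helper name gn_escape is mine, not from the patch):

    def gn_escape(s):
        out = []
        for code in bytearray(s.encode('utf-8')):  # ints on Python 2 and 3.
            if code in (34, 36, 92):  # '"', '$', '\'
                out.append('\\' + chr(code))
            elif 32 <= code < 127:
                out.append(chr(code))
            else:
                out.append('$0x%02X' % code)
        return ''.join(out)

    assert gn_escape('a"b') == 'a\\"b'
    assert gn_escape('\t') == '$0x09'
    assert gn_escape(u'\u2713') == '$0xE2$0x9C$0x93'
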
+ def GenerateTokens(v, level): + if isinstance(v, basestring_compat): + yield '"' + ''.join(_TranslateToGnChars(v)) + '"' + + elif isinstance(v, bool): + yield 'true' if v else 'false' + + elif isinstance(v, int): + yield str(v) + + elif isinstance(v, list): + yield '[' + for i, item in enumerate(v): + if i > 0: + yield ',' + for tok in GenerateTokens(item, level + 1): + yield tok + yield ']' + + elif isinstance(v, dict): + if level > 0: + raise GNError('Attempting to recursively print a dictionary.') + for key in sorted(v): + if not isinstance(key, basestring_compat): + raise GNError('Dictionary key is not a string.') + if not key or key[0].isdigit() or not key.replace('_', '').isalnum(): + raise GNError('Dictionary key is not a valid GN identifier.') + yield key # No quotations. + yield '=' + for tok in GenerateTokens(value[key], level + 1): + yield tok + + else: # Not supporting float: Add only when needed. + raise GNError('Unsupported type when printing to GN.') + + can_start = lambda tok: tok and tok not in ',]=' + can_end = lambda tok: tok and tok not in ',[=' + + # Adds whitespaces, trying to keep everything (except dicts) in 1 line. + def PlainGlue(gen): + prev_tok = None + for i, tok in enumerate(gen): + if i > 0: + if can_end(prev_tok) and can_start(tok): + yield '\n' # New dict item. + elif prev_tok == '[' and tok == ']': + yield ' ' # Special case for []. + elif tok != ',': + yield ' ' + yield tok + prev_tok = tok + + # Adds whitespaces so non-empty lists can span multiple lines, with indent. + def PrettyGlue(gen): + prev_tok = None + level = 0 + for i, tok in enumerate(gen): + if i > 0: + if can_end(prev_tok) and can_start(tok): + yield '\n' + ' ' * level # New dict item. + elif tok == '=' or prev_tok in '=': + yield ' ' # Separator before and after '=', on same line. + if tok == ']': + level -= 1 + if int(prev_tok == '[') + int(tok == ']') == 1: # Exclude '[]' case. + yield '\n' + ' ' * level + yield tok + if tok == '[': + level += 1 + if tok == ',': + yield '\n' + ' ' * level + prev_tok = tok + + token_gen = GenerateTokens(value, 0) + ret = ''.join((PrettyGlue if pretty else PlainGlue)(token_gen)) + # Add terminating '\n' for dict |value| or multi-line output. + if isinstance(value, dict) or '\n' in ret: + return ret + '\n' + return ret def FromGNString(input_string): @@ -106,7 +186,8 @@ def FromGNString(input_string): The main use cases for this is for other types, in particular lists. When using string interpolation on a list (as in the top example) the embedded strings will be quoted and escaped according to GN rules so the list can be - re-parsed to get the same result.""" + re-parsed to get the same result. + """ parser = GNValueParser(input_string) return parser.Parse() @@ -120,7 +201,7 @@ def FromGNArgs(input_string): gn assignments, this returns a Python dict, i.e.: - FromGNArgs("foo=true\nbar=1\n") -> { 'foo': True, 'bar': 1 }. + FromGNArgs('foo=true\nbar=1\n') -> { 'foo': True, 'bar': 1 }. Only simple types and lists supported; variables, structs, calls and other, more complicated things are not. @@ -137,7 +218,11 @@ def UnescapeGNString(value): Be careful not to feed with input from a Python parsing function like 'ast' because it will do Python unescaping, which will be incorrect when - fed into the GN unescaper.""" + fed into the GN unescaper. + + Args: + value: Input string to unescape. 
+ """ result = '' i = 0 while i < len(value): @@ -158,7 +243,7 @@ def UnescapeGNString(value): def _IsDigitOrMinus(char): - return char in "-0123456789" + return char in '-0123456789' class GNValueParser(object): @@ -167,10 +252,13 @@ class GNValueParser(object): Normally you would use the wrapper function FromGNValue() below. If you expect input as a specific type, you can also call one of the Parse* - functions directly. All functions throw GNException on invalid input. """ - def __init__(self, string): + functions directly. All functions throw GNError on invalid input. + """ + + def __init__(self, string, checkout_root=_CHROMIUM_ROOT): self.input = string self.cur = 0 + self.checkout_root = checkout_root def IsDone(self): return self.cur == len(self.input) @@ -189,9 +277,8 @@ class GNValueParser(object): continue regex_match = IMPORT_RE.match(line) if not regex_match: - raise GNException('Not a valid import string: %s' % line) - import_path = os.path.join( - os.path.dirname(__file__), os.pardir, regex_match.group(1)) + raise GNError('Not a valid import string: %s' % line) + import_path = os.path.join(self.checkout_root, regex_match.group(1)) with open(import_path) as f: imported_args = f.read() self.input = self.input.replace(line, imported_args) @@ -220,31 +307,37 @@ class GNValueParser(object): def Parse(self): """Converts a string representing a printed GN value to the Python type. - See additional usage notes on FromGNString above. + See additional usage notes on FromGNString() above. - - GN booleans ('true', 'false') will be converted to Python booleans. + * GN booleans ('true', 'false') will be converted to Python booleans. - - GN numbers ('123') will be converted to Python numbers. + * GN numbers ('123') will be converted to Python numbers. - - GN strings (double-quoted as in '"asdf"') will be converted to Python + * GN strings (double-quoted as in '"asdf"') will be converted to Python strings with GN escaping rules. GN string interpolation (embedded variables preceded by $) are not supported and will be returned as literals. - - GN lists ('[1, "asdf", 3]') will be converted to Python lists. + * GN lists ('[1, "asdf", 3]') will be converted to Python lists. - - GN scopes ('{ ... }') are not supported.""" + * GN scopes ('{ ... }') are not supported. + + Raises: + GNError: Parse fails. + """ result = self._ParseAllowTrailing() self.ConsumeWhitespace() if not self.IsDone(): - raise GNException("Trailing input after parsing:\n " + - self.input[self.cur:]) + raise GNError("Trailing input after parsing:\n " + self.input[self.cur:]) return result def ParseArgs(self): """Converts a whitespace-separated list of ident=literals to a dict. - See additional usage notes on FromGNArgs, above. + See additional usage notes on FromGNArgs(), above. + + Raises: + GNError: Parse fails. 
""" d = {} @@ -255,21 +348,22 @@ class GNValueParser(object): ident = self._ParseIdent() self.ConsumeWhitespace() if self.input[self.cur] != '=': - raise GNException("Unexpected token: " + self.input[self.cur:]) + raise GNError("Unexpected token: " + self.input[self.cur:]) self.cur += 1 self.ConsumeWhitespace() val = self._ParseAllowTrailing() self.ConsumeWhitespace() self.ConsumeComment() + self.ConsumeWhitespace() d[ident] = val return d def _ParseAllowTrailing(self): - """Internal version of Parse that doesn't check for trailing stuff.""" + """Internal version of Parse() that doesn't check for trailing stuff.""" self.ConsumeWhitespace() if self.IsDone(): - raise GNException("Expected input to parse.") + raise GNError("Expected input to parse.") next_char = self.input[self.cur] if next_char == '[': @@ -283,14 +377,14 @@ class GNValueParser(object): elif self._ConstantFollows('false'): return False else: - raise GNException("Unexpected token: " + self.input[self.cur:]) + raise GNError("Unexpected token: " + self.input[self.cur:]) def _ParseIdent(self): ident = '' next_char = self.input[self.cur] if not next_char.isalpha() and not next_char=='_': - raise GNException("Expected an identifier: " + self.input[self.cur:]) + raise GNError("Expected an identifier: " + self.input[self.cur:]) ident += next_char self.cur += 1 @@ -306,7 +400,7 @@ class GNValueParser(object): def ParseNumber(self): self.ConsumeWhitespace() if self.IsDone(): - raise GNException('Expected number but got nothing.') + raise GNError('Expected number but got nothing.') begin = self.cur @@ -318,17 +412,17 @@ class GNValueParser(object): number_string = self.input[begin:self.cur] if not len(number_string) or number_string == '-': - raise GNException("Not a valid number.") + raise GNError('Not a valid number.') return int(number_string) def ParseString(self): self.ConsumeWhitespace() if self.IsDone(): - raise GNException('Expected string but got nothing.') + raise GNError('Expected string but got nothing.') if self.input[self.cur] != '"': - raise GNException('Expected string beginning in a " but got:\n ' + - self.input[self.cur:]) + raise GNError('Expected string beginning in a " but got:\n ' + + self.input[self.cur:]) self.cur += 1 # Skip over quote. begin = self.cur @@ -336,12 +430,11 @@ class GNValueParser(object): if self.input[self.cur] == '\\': self.cur += 1 # Skip over the backslash. if self.IsDone(): - raise GNException("String ends in a backslash in:\n " + - self.input) + raise GNError('String ends in a backslash in:\n ' + self.input) self.cur += 1 if self.IsDone(): - raise GNException('Unterminated string:\n ' + self.input[begin:]) + raise GNError('Unterminated string:\n ' + self.input[begin:]) end = self.cur self.cur += 1 # Consume trailing ". @@ -351,16 +444,15 @@ class GNValueParser(object): def ParseList(self): self.ConsumeWhitespace() if self.IsDone(): - raise GNException('Expected list but got nothing.') + raise GNError('Expected list but got nothing.') # Skip over opening '['. 
if self.input[self.cur] != '[': - raise GNException("Expected [ for list but got:\n " + - self.input[self.cur:]) + raise GNError('Expected [ for list but got:\n ' + self.input[self.cur:]) self.cur += 1 self.ConsumeWhitespace() if self.IsDone(): - raise GNException("Unterminated list:\n " + self.input) + raise GNError('Unterminated list:\n ' + self.input) list_result = [] previous_had_trailing_comma = True @@ -370,7 +462,7 @@ class GNValueParser(object): return list_result if not previous_had_trailing_comma: - raise GNException("List items not separated by comma.") + raise GNError('List items not separated by comma.') list_result += [ self._ParseAllowTrailing() ] self.ConsumeWhitespace() @@ -384,13 +476,19 @@ class GNValueParser(object): self.cur += 1 self.ConsumeWhitespace() - raise GNException("Unterminated list:\n " + self.input) + raise GNError('Unterminated list:\n ' + self.input) def _ConstantFollows(self, constant): - """Returns true if the given constant follows immediately at the current - location in the input. If it does, the text is consumed and the function - returns true. Otherwise, returns false and the current position is - unchanged.""" + """Checks and maybe consumes a string constant at current input location. + + Param: + constant: The string constant to check. + + Returns: + True if |constant| follows immediately at the current location in the + input. In this case, the string is consumed as a side effect. Otherwise, + returns False and the current position is unchanged. + """ end = self.cur + len(constant) if end > len(self.input): return False # Not enough room. diff --git a/chromium/build/gn_helpers_unittest.py b/chromium/build/gn_helpers_unittest.py index 99d720b907b..5886577ea2b 100644 --- a/chromium/build/gn_helpers_unittest.py +++ b/chromium/build/gn_helpers_unittest.py @@ -3,6 +3,7 @@ # found in the LICENSE file. 
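
The tests below exercise both layouts side by side; as a quick illustration of the pretty=True mode added above (indentation is one space per nesting level):

    import gn_helpers  # assumes build/ is on sys.path, as described earlier

    print(gn_helpers.ToGNString({'deps': [':a', ':b']}))
    # deps = [ ":a", ":b" ]
    print(gn_helpers.ToGNString({'deps': [':a', ':b']}, pretty=True))
    # deps = [
    #  ":a",
    #  ":b"
    # ]
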
import mock +import sys import textwrap import unittest @@ -11,9 +12,56 @@ import gn_helpers class UnitTest(unittest.TestCase): def test_ToGNString(self): - self.assertEqual( - gn_helpers.ToGNString([1, 'two', [ '"thr$\\', True, False, [] ]]), - '[ 1, "two", [ "\\"thr\\$\\\\", true, false, [ ] ] ]') + test_cases = [ + (42, '42', '42'), ('foo', '"foo"', '"foo"'), (True, 'true', 'true'), + (False, 'false', 'false'), ('', '""', '""'), + ('\\$"$\\', '"\\\\\\$\\"\\$\\\\"', '"\\\\\\$\\"\\$\\\\"'), + (' \t\r\n', '" $0x09$0x0D$0x0A"', '" $0x09$0x0D$0x0A"'), + (u'\u2713', '"$0xE2$0x9C$0x93"', '"$0xE2$0x9C$0x93"'), + ([], '[ ]', '[]'), ([1], '[ 1 ]', '[\n 1\n]\n'), + ([3, 1, 4, 1], '[ 3, 1, 4, 1 ]', '[\n 3,\n 1,\n 4,\n 1\n]\n'), + (['a', True, 2], '[ "a", true, 2 ]', '[\n "a",\n true,\n 2\n]\n'), + ({ + 'single': 'item' + }, 'single = "item"\n', 'single = "item"\n'), + ({ + 'kEy': 137, + '_42A_Zaz_': [False, True] + }, '_42A_Zaz_ = [ false, true ]\nkEy = 137\n', + '_42A_Zaz_ = [\n false,\n true\n]\nkEy = 137\n'), + ([1, 'two', + ['"thr,.$\\', True, False, [], + u'(\u2713)']], '[ 1, "two", [ "\\"thr,.\\$\\\\", true, false, ' + + '[ ], "($0xE2$0x9C$0x93)" ] ]', '''[ + 1, + "two", + [ + "\\"thr,.\\$\\\\", + true, + false, + [], + "($0xE2$0x9C$0x93)" + ] +] +'''), + ({ + 's': 'foo', + 'n': 42, + 'b': True, + 'a': [3, 'x'] + }, 'a = [ 3, "x" ]\nb = true\nn = 42\ns = "foo"\n', + 'a = [\n 3,\n "x"\n]\nb = true\nn = 42\ns = "foo"\n'), + ( + [[[], [[]]], []], + '[ [ [ ], [ [ ] ] ], [ ] ]', + '[\n [\n [],\n [\n []\n ]\n ],\n []\n]\n', + ) + ] + for obj, exp_ugly, exp_pretty in test_cases: + out_ugly = gn_helpers.ToGNString(obj) + self.assertEqual(exp_ugly, out_ugly) + out_pretty = gn_helpers.ToGNString(obj, pretty=True) + self.assertEqual(exp_pretty, out_pretty) def test_UnescapeGNString(self): # Backslash followed by a \, $, or " means the following character without @@ -27,7 +75,7 @@ class UnitTest(unittest.TestCase): gn_helpers.FromGNString('[1, -20, true, false,["as\\"", []]]'), [ 1, -20, True, False, [ 'as"', [] ] ]) - with self.assertRaises(gn_helpers.GNException): + with self.assertRaises(gn_helpers.GNError): parser = gn_helpers.GNValueParser('123 456') parser.Parse() @@ -42,10 +90,10 @@ class UnitTest(unittest.TestCase): parser = gn_helpers.GNValueParser('123') self.assertEqual(parser.ParseNumber(), 123) - with self.assertRaises(gn_helpers.GNException): + with self.assertRaises(gn_helpers.GNError): parser = gn_helpers.GNValueParser('') parser.ParseNumber() - with self.assertRaises(gn_helpers.GNException): + with self.assertRaises(gn_helpers.GNError): parser = gn_helpers.GNValueParser('a123') parser.ParseNumber() @@ -53,13 +101,13 @@ class UnitTest(unittest.TestCase): parser = gn_helpers.GNValueParser('"asdf"') self.assertEqual(parser.ParseString(), 'asdf') - with self.assertRaises(gn_helpers.GNException): + with self.assertRaises(gn_helpers.GNError): parser = gn_helpers.GNValueParser('') # Empty. parser.ParseString() - with self.assertRaises(gn_helpers.GNException): + with self.assertRaises(gn_helpers.GNError): parser = gn_helpers.GNValueParser('asdf') # Unquoted. parser.ParseString() - with self.assertRaises(gn_helpers.GNException): + with self.assertRaises(gn_helpers.GNError): parser = gn_helpers.GNValueParser('"trailing') # Unterminated. parser.ParseString() @@ -67,16 +115,16 @@ class UnitTest(unittest.TestCase): parser = gn_helpers.GNValueParser('[1,]') # Optional end comma OK.
self.assertEqual(parser.ParseList(), [ 1 ]) - with self.assertRaises(gn_helpers.GNException): + with self.assertRaises(gn_helpers.GNError): parser = gn_helpers.GNValueParser('') # Empty. parser.ParseList() - with self.assertRaises(gn_helpers.GNException): + with self.assertRaises(gn_helpers.GNError): parser = gn_helpers.GNValueParser('asdf') # No []. parser.ParseList() - with self.assertRaises(gn_helpers.GNException): + with self.assertRaises(gn_helpers.GNError): parser = gn_helpers.GNValueParser('[1, 2') # Unterminated parser.ParseList() - with self.assertRaises(gn_helpers.GNException): + with self.assertRaises(gn_helpers.GNError): parser = gn_helpers.GNValueParser('[1 2]') # No separating comma. parser.ParseList() @@ -93,10 +141,15 @@ class UnitTest(unittest.TestCase): gn_args_lines = [ '# Top-level comment.', 'foo = true', - 'bar = 1 # In-line comment.', + 'bar = 1 # In-line comment followed by whitespace.', + ' ', + 'baz = false', ] - self.assertEqual(gn_helpers.FromGNArgs('\n'.join(gn_args_lines)), - {'foo': True, 'bar': 1}) + self.assertEqual(gn_helpers.FromGNArgs('\n'.join(gn_args_lines)), { + 'foo': True, + 'bar': 1, + 'baz': False + }) # Lists should work. self.assertEqual(gn_helpers.FromGNArgs('foo=[1, 2, 3]'), @@ -107,15 +160,15 @@ class UnitTest(unittest.TestCase): self.assertEqual(gn_helpers.FromGNArgs(' \n '), {}) # Non-identifiers should raise an exception. - with self.assertRaises(gn_helpers.GNException): + with self.assertRaises(gn_helpers.GNError): gn_helpers.FromGNArgs('123 = true') # References to other variables should raise an exception. - with self.assertRaises(gn_helpers.GNException): + with self.assertRaises(gn_helpers.GNError): gn_helpers.FromGNArgs('foo = bar') # References to functions should raise an exception. - with self.assertRaises(gn_helpers.GNException): + with self.assertRaises(gn_helpers.GNError): gn_helpers.FromGNArgs('foo = exec_script("//build/baz.py")') # Underscores in identifiers should work. @@ -134,7 +187,7 @@ class UnitTest(unittest.TestCase): some_arg2 = "val2" """)) parser.ReplaceImports() - self.assertEquals( + self.assertEqual( parser.input, textwrap.dedent(""" some_arg1 = "val1" @@ -150,9 +203,11 @@ class UnitTest(unittest.TestCase): some_arg2 = "val2" """)) fake_import = 'some_imported_arg = "imported_val"' - with mock.patch('__builtin__.open', mock.mock_open(read_data=fake_import)): + builtin_var = '__builtin__' if sys.version_info.major < 3 else 'builtins' + open_fun = '{}.open'.format(builtin_var) + with mock.patch(open_fun, mock.mock_open(read_data=fake_import)): parser.ReplaceImports() - self.assertEquals( + self.assertEqual( parser.input, textwrap.dedent(""" some_arg1 = "val1" @@ -161,19 +216,19 @@ class UnitTest(unittest.TestCase): """)) # No trailing parenthesis should raise an exception. - with self.assertRaises(gn_helpers.GNException): + with self.assertRaises(gn_helpers.GNError): parser = gn_helpers.GNValueParser( textwrap.dedent('import("//some/args/file.gni"')) parser.ReplaceImports() # No double quotes should raise an exception. - with self.assertRaises(gn_helpers.GNException): + with self.assertRaises(gn_helpers.GNError): parser = gn_helpers.GNValueParser( textwrap.dedent('import(//some/args/file.gni)')) parser.ReplaceImports() # A path that's not source absolute should raise an exception. 
- with self.assertRaises(gn_helpers.GNException): + with self.assertRaises(gn_helpers.GNError): parser = gn_helpers.GNValueParser( textwrap.dedent('import("some/relative/args/file.gni")')) parser.ReplaceImports() diff --git a/chromium/build/install-build-deps.sh b/chromium/build/install-build-deps.sh index b10cbb4ac2d..cfebaa6941f 100755 --- a/chromium/build/install-build-deps.sh +++ b/chromium/build/install-build-deps.sh @@ -97,6 +97,7 @@ fi # Check for lsb_release command in $PATH if ! which lsb_release > /dev/null; then echo "ERROR: lsb_release not found in \$PATH" >&2 + echo "try: sudo apt-get install lsb-release" >&2 exit 1; fi diff --git a/chromium/build/linux/libbrlapi/BUILD.gn b/chromium/build/linux/libbrlapi/BUILD.gn deleted file mode 100644 index 4ee39504552..00000000000 --- a/chromium/build/linux/libbrlapi/BUILD.gn +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import("//tools/generate_library_loader/generate_library_loader.gni") - -generate_library_loader("libbrlapi") { - name = "LibBrlapiLoader" - output_h = "libbrlapi.h" - output_cc = "libbrlapi_loader.cc" - header = "<brlapi.h>" - - functions = [ - "brlapi_getHandleSize", - "brlapi_error_location", - "brlapi_strerror", - "brlapi__acceptKeys", - "brlapi__openConnection", - "brlapi__closeConnection", - "brlapi__getDisplaySize", - "brlapi__enterTtyModeWithPath", - "brlapi__leaveTtyMode", - "brlapi__writeDots", - "brlapi__readKey", - ] -} diff --git a/chromium/build/linux/libncursesw/OWNERS b/chromium/build/linux/libncursesw/OWNERS new file mode 100644 index 00000000000..697dba28c57 --- /dev/null +++ b/chromium/build/linux/libncursesw/OWNERS @@ -0,0 +1,4 @@ +file://ui/accessibility/OWNERS + +# TEAM: chromium-accessibility@chromium.org +# COMPONENT: Internals>Accessibility diff --git a/chromium/build/linux/libudev/BUILD.gn b/chromium/build/linux/libudev/BUILD.gn index 9486a03292d..dcd9f234dbe 100644 --- a/chromium/build/linux/libudev/BUILD.gn +++ b/chromium/build/linux/libudev/BUILD.gn @@ -7,6 +7,7 @@ import("//tools/generate_library_loader/generate_library_loader.gni") libudev_functions = [ "udev_device_get_action", "udev_device_get_devnode", + "udev_device_get_devtype", "udev_device_get_parent", "udev_device_get_parent_with_subsystem_devtype", "udev_device_get_property_value", diff --git a/chromium/build/linux/pkg-config-wrapper b/chromium/build/linux/pkg-config-wrapper deleted file mode 100755 index c4935d7b597..00000000000 --- a/chromium/build/linux/pkg-config-wrapper +++ /dev/null @@ -1,46 +0,0 @@ -#!/bin/bash -# Copyright (c) 2012 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -# This program wraps around pkg-config to generate the correct include and -# library paths when cross-compiling using a sysroot. -# The assumption is that the sysroot contains the .pc files in usr/lib/pkgconfig -# and usr/share/pkgconfig (relative to the sysroot) and that they output paths -# relative to some parent path of the sysroot. -# This assumption is valid for a range of sysroots, in particular: a -# LSB-compliant root filesystem mounted at the sysroot, and a board build -# directory of a Chromium OS chroot. 
- -set -o nounset -set -o errexit - -root="$1" -shift -target_arch="$1" -shift -libpath="$1" -shift - -if [ -z "$root" -o -z "$target_arch" ] -then - echo "usage: $0 /path/to/sysroot target_arch libdir [pkg-config-arguments] package" >&2 - exit 1 -fi - -rewrite=`dirname $0`/rewrite_dirs.py -package=${!#} - -libdir=$root/usr/$libpath/pkgconfig:$root/usr/share/pkgconfig - -set -e -# Some sysroots, like the Chromium OS ones, may generate paths that are not -# relative to the sysroot. For example, -# /path/to/chroot/build/x86-generic/usr/lib/pkgconfig/pkg.pc may have all paths -# relative to /path/to/chroot (i.e. prefix=/build/x86-generic/usr) instead of -# relative to /path/to/chroot/build/x86-generic (i.e prefix=/usr). -# To support this correctly, it's necessary to extract the prefix to strip from -# pkg-config's |prefix| variable. -prefix=`PKG_CONFIG_LIBDIR=$libdir pkg-config --variable=prefix "$package" | sed -e 's|/usr$||'` -result=`PKG_CONFIG_LIBDIR=$libdir pkg-config "$@"` -echo "$result"| $rewrite --sysroot "$root" --strip-prefix "$prefix" diff --git a/chromium/build/linux/sysroot_scripts/generated_package_lists/sid.amd64 b/chromium/build/linux/sysroot_scripts/generated_package_lists/sid.amd64 index 6e45afd689a..12ae2454059 100644 --- a/chromium/build/linux/sysroot_scripts/generated_package_lists/sid.amd64 +++ b/chromium/build/linux/sysroot_scripts/generated_package_lists/sid.amd64 @@ -12,8 +12,6 @@ https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/a/avahi/li https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/a/avahi/libavahi-common3_0.7-4+b1_amd64.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/b/bluez/libbluetooth3_5.50-1+b1_amd64.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/b/bluez/libbluetooth-dev_5.50-1+b1_amd64.deb -https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/b/brltty/libbrlapi0.7_6.0+dfsg-4+b1_amd64.deb -https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/b/brltty/libbrlapi-dev_6.0+dfsg-4+b1_amd64.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/b/brotli/libbrotli1_1.0.7-5+b1_amd64.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/c/cairo/libcairo2_1.16.0-4_amd64.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/c/cairo/libcairo2-dev_1.16.0-4_amd64.deb @@ -261,6 +259,10 @@ https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/m/mesa/mes https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/m/minizip/libminizip1_1.1-8+b1_amd64.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/m/minizip/libminizip-dev_1.1-8+b1_amd64.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/m/mtdev/libmtdev1_1.1.5-1.1_amd64.deb +https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/n/ncurses/libncurses6_6.1+20191019-1_amd64.deb +https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/n/ncurses/libncurses-dev_6.1+20191019-1_amd64.deb +https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/n/ncurses/libncursesw6_6.1+20191019-1_amd64.deb +https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/n/ncurses/libtinfo6_6.1+20191019-1_amd64.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/n/nettle/libhogweed5_3.5.1+really3.5.1-2_amd64.deb 
https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/n/nettle/libnettle7_3.5.1+really3.5.1-2_amd64.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/n/nspr/libnspr4_4.23-1_amd64.deb diff --git a/chromium/build/linux/sysroot_scripts/generated_package_lists/sid.arm b/chromium/build/linux/sysroot_scripts/generated_package_lists/sid.arm index 0ed5f778cb1..5cb6c68bb42 100644 --- a/chromium/build/linux/sysroot_scripts/generated_package_lists/sid.arm +++ b/chromium/build/linux/sysroot_scripts/generated_package_lists/sid.arm @@ -12,8 +12,6 @@ https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/a/avahi/li https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/a/avahi/libavahi-common3_0.7-4+b1_armhf.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/b/bluez/libbluetooth3_5.50-1+b1_armhf.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/b/bluez/libbluetooth-dev_5.50-1+b1_armhf.deb -https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/b/brltty/libbrlapi0.7_6.0+dfsg-4+b1_armhf.deb -https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/b/brltty/libbrlapi-dev_6.0+dfsg-4+b1_armhf.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/b/brotli/libbrotli1_1.0.7-5+b1_armhf.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/c/cairo/libcairo2_1.16.0-4_armhf.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/c/cairo/libcairo2-dev_1.16.0-4_armhf.deb @@ -260,6 +258,10 @@ https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/m/mesa/mes https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/m/minizip/libminizip1_1.1-8+b1_armhf.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/m/minizip/libminizip-dev_1.1-8+b1_armhf.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/m/mtdev/libmtdev1_1.1.5-1.1_armhf.deb +https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/n/ncurses/libncurses6_6.1+20191019-1_armhf.deb +https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/n/ncurses/libncurses-dev_6.1+20191019-1_armhf.deb +https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/n/ncurses/libncursesw6_6.1+20191019-1_armhf.deb +https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/n/ncurses/libtinfo6_6.1+20191019-1_armhf.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/n/nettle/libhogweed5_3.5.1+really3.5.1-2_armhf.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/n/nettle/libnettle7_3.5.1+really3.5.1-2_armhf.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/n/nspr/libnspr4_4.23-1_armhf.deb diff --git a/chromium/build/linux/sysroot_scripts/generated_package_lists/sid.arm64 b/chromium/build/linux/sysroot_scripts/generated_package_lists/sid.arm64 index 6d2a0af3ed3..248bbff003f 100644 --- a/chromium/build/linux/sysroot_scripts/generated_package_lists/sid.arm64 +++ b/chromium/build/linux/sysroot_scripts/generated_package_lists/sid.arm64 @@ -12,8 +12,6 @@ https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/a/avahi/li https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/a/avahi/libavahi-common3_0.7-4+b1_arm64.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/b/bluez/libbluetooth3_5.50-1+b1_arm64.deb 
https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/b/bluez/libbluetooth-dev_5.50-1+b1_arm64.deb -https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/b/brltty/libbrlapi0.7_6.0+dfsg-4+b1_arm64.deb -https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/b/brltty/libbrlapi-dev_6.0+dfsg-4+b1_arm64.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/b/brotli/libbrotli1_1.0.7-5+b1_arm64.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/c/cairo/libcairo2_1.16.0-4_arm64.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/c/cairo/libcairo2-dev_1.16.0-4_arm64.deb @@ -262,6 +260,10 @@ https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/m/mesa/mes https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/m/minizip/libminizip1_1.1-8+b1_arm64.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/m/minizip/libminizip-dev_1.1-8+b1_arm64.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/m/mtdev/libmtdev1_1.1.5-1.1_arm64.deb +https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/n/ncurses/libncurses6_6.1+20191019-1_arm64.deb +https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/n/ncurses/libncurses-dev_6.1+20191019-1_arm64.deb +https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/n/ncurses/libncursesw6_6.1+20191019-1_arm64.deb +https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/n/ncurses/libtinfo6_6.1+20191019-1_arm64.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/n/nettle/libhogweed5_3.5.1+really3.5.1-2_arm64.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/n/nettle/libnettle7_3.5.1+really3.5.1-2_arm64.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/n/nspr/libnspr4_4.23-1_arm64.deb diff --git a/chromium/build/linux/sysroot_scripts/generated_package_lists/sid.armel b/chromium/build/linux/sysroot_scripts/generated_package_lists/sid.armel index 4adb37878a1..edcd7236dd8 100644 --- a/chromium/build/linux/sysroot_scripts/generated_package_lists/sid.armel +++ b/chromium/build/linux/sysroot_scripts/generated_package_lists/sid.armel @@ -12,8 +12,6 @@ https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/a/avahi/li https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/a/avahi/libavahi-common3_0.7-4+b1_armel.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/b/bluez/libbluetooth3_5.50-1+b1_armel.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/b/bluez/libbluetooth-dev_5.50-1+b1_armel.deb -https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/b/brltty/libbrlapi0.7_6.0+dfsg-4+b1_armel.deb -https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/b/brltty/libbrlapi-dev_6.0+dfsg-4+b1_armel.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/b/brotli/libbrotli1_1.0.7-5+b1_armel.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/c/cairo/libcairo2_1.16.0-4_armel.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/c/cairo/libcairo2-dev_1.16.0-4_armel.deb @@ -259,6 +257,10 @@ https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/m/mesa/mes https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/m/minizip/libminizip1_1.1-8+b1_armel.deb 
https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/m/minizip/libminizip-dev_1.1-8+b1_armel.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/m/mtdev/libmtdev1_1.1.5-1.1_armel.deb +https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/n/ncurses/libncurses6_6.1+20191019-1_armel.deb +https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/n/ncurses/libncurses-dev_6.1+20191019-1_armel.deb +https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/n/ncurses/libncursesw6_6.1+20191019-1_armel.deb +https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/n/ncurses/libtinfo6_6.1+20191019-1_armel.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/n/nettle/libhogweed5_3.5.1+really3.5.1-2_armel.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/n/nettle/libnettle7_3.5.1+really3.5.1-2_armel.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/n/nspr/libnspr4_4.23-1_armel.deb diff --git a/chromium/build/linux/sysroot_scripts/generated_package_lists/sid.i386 b/chromium/build/linux/sysroot_scripts/generated_package_lists/sid.i386 index cdb037820a0..79c537dc58d 100644 --- a/chromium/build/linux/sysroot_scripts/generated_package_lists/sid.i386 +++ b/chromium/build/linux/sysroot_scripts/generated_package_lists/sid.i386 @@ -12,8 +12,6 @@ https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/a/avahi/li https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/a/avahi/libavahi-common3_0.7-4+b1_i386.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/b/bluez/libbluetooth3_5.50-1+b1_i386.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/b/bluez/libbluetooth-dev_5.50-1+b1_i386.deb -https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/b/brltty/libbrlapi0.7_6.0+dfsg-4+b1_i386.deb -https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/b/brltty/libbrlapi-dev_6.0+dfsg-4+b1_i386.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/b/brotli/libbrotli1_1.0.7-5+b1_i386.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/c/cairo/libcairo2_1.16.0-4_i386.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/c/cairo/libcairo2-dev_1.16.0-4_i386.deb @@ -259,6 +257,10 @@ https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/m/mesa/mes https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/m/minizip/libminizip1_1.1-8+b1_i386.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/m/minizip/libminizip-dev_1.1-8+b1_i386.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/m/mtdev/libmtdev1_1.1.5-1.1_i386.deb +https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/n/ncurses/libncurses6_6.1+20191019-1_i386.deb +https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/n/ncurses/libncurses-dev_6.1+20191019-1_i386.deb +https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/n/ncurses/libncursesw6_6.1+20191019-1_i386.deb +https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/n/ncurses/libtinfo6_6.1+20191019-1_i386.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/n/nettle/libhogweed5_3.5.1+really3.5.1-2_i386.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/n/nettle/libnettle7_3.5.1+really3.5.1-2_i386.deb 
https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/n/nspr/libnspr4_4.23-1_i386.deb diff --git a/chromium/build/linux/sysroot_scripts/generated_package_lists/sid.mips64el b/chromium/build/linux/sysroot_scripts/generated_package_lists/sid.mips64el index 49022c434f2..fbefbf2361a 100644 --- a/chromium/build/linux/sysroot_scripts/generated_package_lists/sid.mips64el +++ b/chromium/build/linux/sysroot_scripts/generated_package_lists/sid.mips64el @@ -12,8 +12,6 @@ https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/a/avahi/li https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/a/avahi/libavahi-common3_0.7-4+b1_mips64el.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/b/bluez/libbluetooth3_5.50-1+b1_mips64el.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/b/bluez/libbluetooth-dev_5.50-1+b1_mips64el.deb -https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/b/brltty/libbrlapi0.7_6.0+dfsg-4+b1_mips64el.deb -https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/b/brltty/libbrlapi-dev_6.0+dfsg-4+b1_mips64el.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/b/brotli/libbrotli1_1.0.7-5+b1_mips64el.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/c/cairo/libcairo2_1.16.0-4_mips64el.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/c/cairo/libcairo2-dev_1.16.0-4_mips64el.deb @@ -252,6 +250,10 @@ https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/m/mesa/mes https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/m/minizip/libminizip1_1.1-8+b1_mips64el.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/m/minizip/libminizip-dev_1.1-8+b1_mips64el.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/m/mtdev/libmtdev1_1.1.5-1.1_mips64el.deb +https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/n/ncurses/libncurses6_6.1+20191019-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/n/ncurses/libncurses-dev_6.1+20191019-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/n/ncurses/libncursesw6_6.1+20191019-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/n/ncurses/libtinfo6_6.1+20191019-1_mips64el.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/n/nettle/libhogweed5_3.5.1+really3.5.1-2_mips64el.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/n/nettle/libnettle7_3.5.1+really3.5.1-2_mips64el.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/n/nspr/libnspr4_4.23-1_mips64el.deb diff --git a/chromium/build/linux/sysroot_scripts/generated_package_lists/sid.mipsel b/chromium/build/linux/sysroot_scripts/generated_package_lists/sid.mipsel index bf3495f8885..3fedde4decb 100644 --- a/chromium/build/linux/sysroot_scripts/generated_package_lists/sid.mipsel +++ b/chromium/build/linux/sysroot_scripts/generated_package_lists/sid.mipsel @@ -12,8 +12,6 @@ https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/a/avahi/li https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/a/avahi/libavahi-common3_0.7-4+b1_mipsel.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/b/bluez/libbluetooth3_5.50-1+b1_mipsel.deb 
https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/b/bluez/libbluetooth-dev_5.50-1+b1_mipsel.deb -https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/b/brltty/libbrlapi0.7_6.0+dfsg-4+b1_mipsel.deb -https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/b/brltty/libbrlapi-dev_6.0+dfsg-4+b1_mipsel.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/b/brotli/libbrotli1_1.0.7-5+b1_mipsel.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/c/cairo/libcairo2_1.16.0-4_mipsel.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/c/cairo/libcairo2-dev_1.16.0-4_mipsel.deb @@ -252,6 +250,10 @@ https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/m/mesa/mes https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/m/minizip/libminizip1_1.1-8+b1_mipsel.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/m/minizip/libminizip-dev_1.1-8+b1_mipsel.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/m/mtdev/libmtdev1_1.1.5-1.1_mipsel.deb +https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/n/ncurses/libncurses6_6.1+20191019-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/n/ncurses/libncurses-dev_6.1+20191019-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/n/ncurses/libncursesw6_6.1+20191019-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/n/ncurses/libtinfo6_6.1+20191019-1_mipsel.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/n/nettle/libhogweed5_3.5.1+really3.5.1-2_mipsel.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/n/nettle/libnettle7_3.5.1+really3.5.1-2_mipsel.deb https://snapshot.debian.org/archive/debian/20191212T145612Z/pool/main/n/nspr/libnspr4_4.23-1_mipsel.deb diff --git a/chromium/build/linux/sysroot_scripts/install-sysroot.py b/chromium/build/linux/sysroot_scripts/install-sysroot.py index 165551a2948..f8b7906cc55 100755 --- a/chromium/build/linux/sysroot_scripts/install-sysroot.py +++ b/chromium/build/linux/sysroot_scripts/install-sysroot.py @@ -78,8 +78,6 @@ def main(args): parser.add_option('--print-hash', help='Print the hash of the sysroot for the given arch.') options, _ = parser.parse_args(args) - if not sys.platform.startswith('linux'): - return 0 if options.print_hash: arch = options.print_hash diff --git a/chromium/build/linux/sysroot_scripts/sysroot-creator-sid.sh b/chromium/build/linux/sysroot_scripts/sysroot-creator-sid.sh index ebe40f20c6b..7c50db3e347 100755 --- a/chromium/build/linux/sysroot_scripts/sysroot-creator-sid.sh +++ b/chromium/build/linux/sysroot_scripts/sysroot-creator-sid.sh @@ -63,8 +63,6 @@ DEBIAN_PACKAGES="\ libblkid1 libbluetooth-dev libbluetooth3 - libbrlapi-dev - libbrlapi0.7 libbrotli1 libbsd0 libc6 @@ -206,6 +204,9 @@ DEBIAN_PACKAGES="\ libnss-db libnss3 libnss3-dev + libncurses-dev + libncurses6 + libncursesw6 libogg-dev libogg0 libopengl0 @@ -270,6 +271,7 @@ DEBIAN_PACKAGES="\ libtasn1-6 libthai0 libtiff5 + libtinfo6 libudev-dev libudev1 libunbound8 diff --git a/chromium/build/linux/sysroot_scripts/sysroot-creator.sh b/chromium/build/linux/sysroot_scripts/sysroot-creator.sh index f03b46042a3..8626cded112 100644 --- a/chromium/build/linux/sysroot_scripts/sysroot-creator.sh +++ b/chromium/build/linux/sysroot_scripts/sysroot-creator.sh @@ -336,6 +336,23 @@ HacksAndPatchesCommon() { cp 
"${SCRIPT_DIR}/libdbus-1-3-symbols" \ "${INSTALL_ROOT}/debian/libdbus-1-3/DEBIAN/symbols" + # Shared objects depending on libdbus-1.so.3 have unsatisfied undefined + # versioned symbols. To avoid LLD --no-allow-shlib-undefined errors, rewrite + # DT_NEEDED entries from libdbus-1.so.3 to a different string. LLD will + # suppress --no-allow-shlib-undefined diagnostics for such shared objects. + set +e + for f in "${INSTALL_ROOT}/lib/${arch}-${os}"/*.so \ + "${INSTALL_ROOT}/usr/lib/${arch}-${os}"/*.so; do + echo "$f" | grep -q 'libdbus-1.so$' && continue + # In a dependent shared object, the only occurrence of "libdbus-1.so.3" is + # the string referenced by the DT_NEEDED entry. + offset=$(LANG=C grep -abo libdbus-1.so.3 "$f") + [ -n "$offset" ] || continue + echo -n 'libdbus-1.so.0' | dd of="$f" conv=notrunc bs=1 \ + seek="$(echo -n "$offset" | cut -d : -f 1)" status=none + done + set -e + # Glibc 2.27 introduced some new optimizations to several math functions, but # it will be a while before it makes it into all supported distros. Luckily, # glibc maintains ABI compatibility with previous versions, so the old symbols @@ -498,6 +515,8 @@ VerifyLibraryDepsCommon() { grep ': ELF' | sed 's/^\(.*\): .*$/\1/' | xargs readelf -d | \ grep NEEDED | sort | uniq | sed 's/^.*Shared library: \[\(.*\)\]$/\1/g')" local all_libs="$(find ${find_dirs[*]} -printf '%f\n')" + # Ignore missing libdbus-1.so.0 + all_libs+="$(echo -e '\nlibdbus-1.so.0')" local missing_libs="$(grep -vFxf <(echo "${all_libs}") \ <(echo "${needed_libs}"))" if [ ! -z "${missing_libs}" ]; then diff --git a/chromium/build/linux/sysroot_scripts/sysroots.json b/chromium/build/linux/sysroot_scripts/sysroots.json index 366ce23e675..60ae1cf90ed 100644 --- a/chromium/build/linux/sysroot_scripts/sysroots.json +++ b/chromium/build/linux/sysroot_scripts/sysroots.json @@ -1,36 +1,36 @@ { "sid_amd64": { - "Sha1Sum": "52cf2961a3cddc0d46e1a2f7d9bf376fc16a61de", + "Sha1Sum": "d6879d611f3dcf3da8dd92e080029394aa30bc42", "SysrootDir": "debian_sid_amd64-sysroot", "Tarball": "debian_sid_amd64_sysroot.tar.xz" }, "sid_arm": { - "Sha1Sum": "0a1444c9e8e5a9a461a4c2e168cc040e443b201d", + "Sha1Sum": "b7688b64ce3a5b93db21678d1c88cf35da340a39", "SysrootDir": "debian_sid_arm-sysroot", "Tarball": "debian_sid_arm_sysroot.tar.xz" }, "sid_arm64": { - "Sha1Sum": "39c8af09836079013052d813eb3faa84eacc1023", + "Sha1Sum": "96072272c5c3dc906fdce0517368b8a298af7abd", "SysrootDir": "debian_sid_arm64-sysroot", "Tarball": "debian_sid_arm64_sysroot.tar.xz" }, "sid_armel": { - "Sha1Sum": "d737c7ea6d35a1d059dbb4572367717b6aa01ea4", + "Sha1Sum": "220a382df2a9af90328e70cdef598b298781ad27", "SysrootDir": "debian_sid_armel-sysroot", "Tarball": "debian_sid_armel_sysroot.tar.xz" }, "sid_i386": { - "Sha1Sum": "36ed2ee83109acdd44af2e522a5e9e0082b074a6", + "Sha1Sum": "9c31dbb40e1af6ce03336a095ba6ad6e0c961e77", "SysrootDir": "debian_sid_i386-sysroot", "Tarball": "debian_sid_i386_sysroot.tar.xz" }, "sid_mips": { - "Sha1Sum": "5c1c223a5bfc9bfe66b95025abe17153149e5ffb", + "Sha1Sum": "728755d2fb5455d1d3293b6b10704fca859d97dd", "SysrootDir": "debian_sid_mips-sysroot", "Tarball": "debian_sid_mips_sysroot.tar.xz" }, "sid_mips64el": { - "Sha1Sum": "2351c79f5af6eab1f68141049a3929d8f7319e09", + "Sha1Sum": "e9e309d7887cb9baacb3aac603fd50f476891ead", "SysrootDir": "debian_sid_mips64el-sysroot", "Tarball": "debian_sid_mips64el_sysroot.tar.xz" } diff --git a/chromium/build/mac/OWNERS b/chromium/build/mac/OWNERS index a2d7cc837d7..163563f967d 100644 --- a/chromium/build/mac/OWNERS +++ 
b/chromium/build/mac/OWNERS @@ -1,4 +1,2 @@ mark@chromium.org rsesek@chromium.org - -# COMPONENT: Build diff --git a/chromium/build/mac/find_sdk.py b/chromium/build/mac/find_sdk.py index 78314e04dfe..58362bfa244 100755 --- a/chromium/build/mac/find_sdk.py +++ b/chromium/build/mac/find_sdk.py @@ -2,8 +2,7 @@ # Copyright (c) 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. - -"""Prints the lowest locally available SDK version greater than or equal to a +r"""Prints the lowest locally available SDK version greater than or equal to a given minimum sdk version to standard output. If --print_sdk_path is passed, then the script will also print the SDK path. @@ -11,8 +10,10 @@ If --print_bin_path is passed, then the script will also print the path to the toolchain bin dir. Usage: - python find_sdk.py [--print_sdk_path] \ - [--print_bin_path] 10.6 # Ignores SDKs < 10.6 + python find_sdk.py \ + [--print_sdk_path] \ + [--print_bin_path] \ + 10.6 # Ignores SDKs < 10.6 Sample Output: /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.14.sdk @@ -39,7 +40,7 @@ class SdkError(Exception): def parse_version(version_str): """'10.6' => [10, 6]""" - return map(int, re.findall(r'(\d+)', version_str)) + return [int(s) for s in re.findall(r'(\d+)', version_str)] def main(): @@ -72,7 +73,7 @@ def main(): raise SdkError('Install Xcode, launch it, accept the license ' + 'agreement, and run `sudo xcode-select -s /path/to/Xcode.app` ' + 'to continue.') - sdks = [re.findall('^MacOSX(10\.\d+)\.sdk$', s) for s in os.listdir(sdk_dir)] + sdks = [re.findall('^MacOSX(\d+\.\d+)\.sdk$', s) for s in os.listdir(sdk_dir)] sdks = [s[0] for s in sdks if s] # [['10.5'], ['10.6']] => ['10.5', '10.6'] sdks = [s for s in sdks # ['10.5', '10.6'] => ['10.6'] if parse_version(s) >= parse_version(min_sdk_version)] diff --git a/chromium/build/mac/should_use_hermetic_xcode.py b/chromium/build/mac/should_use_hermetic_xcode.py index 450c40383a1..a366eb8fabd 100755 --- a/chromium/build/mac/should_use_hermetic_xcode.py +++ b/chromium/build/mac/should_use_hermetic_xcode.py @@ -16,6 +16,7 @@ Usage: from __future__ import print_function +import argparse import os import sys @@ -31,12 +32,20 @@ def _IsCorpMachine(): def main(): + parser = argparse.ArgumentParser(description='Download hermetic Xcode.') + parser.add_argument('platform') + parser.add_argument('--xcode-version', + choices=('default', 'xcode_12_beta'), + default='default') + args = parser.parse_args() + force_toolchain = os.environ.get('FORCE_MAC_TOOLCHAIN') - if force_toolchain and sys.argv[1] == 'ios': + if force_toolchain and args.platform == 'ios': return "3" - allow_corp = sys.argv[1] == 'mac' and _IsCorpMachine() + allow_corp = args.platform == 'mac' and _IsCorpMachine() if force_toolchain or allow_corp: - if not mac_toolchain.PlatformMeetsHermeticXcodeRequirements(): + if not mac_toolchain.PlatformMeetsHermeticXcodeRequirements( + args.xcode_version): return "2" return "1" else: diff --git a/chromium/build/mac/tweak_info_plist.gni b/chromium/build/mac/tweak_info_plist.gni index 2a79b0d5e31..f1164c8746c 100644 --- a/chromium/build/mac/tweak_info_plist.gni +++ b/chromium/build/mac/tweak_info_plist.gni @@ -31,7 +31,11 @@ template("tweak_info_plist") { _deps = [ ":" + target_name + "_merge_plist" ] action(target_name + "_merge_plist") { - forward_variables_from(invoker, [ "testonly" ]) + forward_variables_from(invoker, + [ + "testonly", + 
"deps", + ]) script = "//build/config/mac/plist_util.py" sources = invoker.info_plists outputs = [ _source_name ] @@ -47,6 +51,9 @@ template("tweak_info_plist") { _source_name = invoker.info_plist _deps = [] + if (defined(invoker.deps)) { + _deps += invoker.deps + } } action(target_name) { diff --git a/chromium/build/mac_toolchain.py b/chromium/build/mac_toolchain.py index 56f640cc712..ff788309051 100755 --- a/chromium/build/mac_toolchain.py +++ b/chromium/build/mac_toolchain.py @@ -19,6 +19,7 @@ the full revision, e.g. 9A235. from __future__ import print_function +import argparse import os import pkg_resources import platform @@ -27,16 +28,24 @@ import shutil import subprocess import sys - -# This contains binaries from Xcode 11.2.1, along with the 10.15 SDKs (aka -# 11B53). To build this package, see comments in build/xcode_binaries.yaml +# To build these packages, see comments in build/xcode_binaries.yaml MAC_BINARIES_LABEL = 'infra_internal/ios/xcode/xcode_binaries/mac-amd64' -MAC_BINARIES_TAG = 'X5ZbqG_UKa-N64_XSBkAwShWPtzskeXhQRfpzc_1KUYC' +MAC_BINARIES_TAG = { + # This contains binaries from Xcode 11.2.1, along with the 10.15 SDKs (aka + # 11B53). + 'default': 'X5ZbqG_UKa-N64_XSBkAwShWPtzskeXhQRfpzc_1KUYC', + # This contains binaries from Xcode (Universal) 12 beta, along with the + # 11 SDK (aka 12A8158a). + 'xcode_12_beta': '_tvvMQXaruqACKkcaZmqHR_7S-S2pHrXgcjTWfbI1qoC', +} # The toolchain will not be downloaded if the minimum OS version is not met. # 17 is the major version number for macOS 10.13. # 9E145 (Xcode 9.3) only runs on 10.13.2 and newer. -MAC_MINIMUM_OS_VERSION = 17 +MAC_MINIMUM_OS_VERSION = { + 'default': [17], # macOS 10.13+ + 'xcode_12_beta': [19, 4], # macOS 10.15.4+ +} BASE_DIR = os.path.abspath(os.path.dirname(__file__)) TOOLCHAIN_ROOT = os.path.join(BASE_DIR, 'mac_files') @@ -49,9 +58,11 @@ TOOLCHAIN_BUILD_DIR = os.path.join(TOOLCHAIN_ROOT, 'Xcode.app') # Note the trailing \n! PARANOID_MODE = '$ParanoidMode CheckIntegrity\n' -def PlatformMeetsHermeticXcodeRequirements(): - major_version = int(platform.release().split('.')[0]) - return major_version >= MAC_MINIMUM_OS_VERSION + +def PlatformMeetsHermeticXcodeRequirements(version): + needed = MAC_MINIMUM_OS_VERSION[version] + major_version = map(int, platform.release().split('.')[:len(needed)]) + return major_version >= needed def _UseHermeticToolchain(): @@ -90,7 +101,7 @@ def PrintError(message): sys.stderr.flush() -def InstallXcodeBinaries(): +def InstallXcodeBinaries(version): """Installs the Xcode binaries needed to build Chrome and accepts the license. This is the replacement for InstallXcode that installs a trimmed down version @@ -107,18 +118,11 @@ def InstallXcodeBinaries(): 'cipd', 'ensure', '-root', binaries_root, '-ensure-file', '-' ] - # Buildbot slaves need to use explicit credentials. LUCI bots should NOT set - # this variable. This is temporary code used to make official Xcode bots - # happy. 
https://crbug.com/986488 - creds = os.environ.get('MAC_TOOLCHAIN_CREDS') - if creds: - args.extend(['--service-account-json', creds]) - p = subprocess.Popen( args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - stdout, stderr = p.communicate( - input=PARANOID_MODE + MAC_BINARIES_LABEL + ' ' + MAC_BINARIES_TAG) + stdout, stderr = p.communicate(input=PARANOID_MODE + MAC_BINARIES_LABEL + + ' ' + MAC_BINARIES_TAG[version]) if p.returncode != 0: print(stdout) print(stderr) @@ -180,20 +184,17 @@ def main(): print('Skipping Mac toolchain installation for mac') return 0 - if not PlatformMeetsHermeticXcodeRequirements(): + parser = argparse.ArgumentParser(description='Download hermetic Xcode.') + parser.add_argument('--xcode-version', + choices=('default', 'xcode_12_beta'), + default='default') + args = parser.parse_args() + + if not PlatformMeetsHermeticXcodeRequirements(args.xcode_version): print('OS version does not support toolchain.') return 0 - # Delete obsolete hermetic full Xcode folder, the build now uses - # build/mac_files/xcode_binaries instead. - if os.path.exists(TOOLCHAIN_BUILD_DIR): - # TODO(thakis): Remove this after it's been here for a few months. - print('Deleting obsolete build/mac_files/Xcode.app...', end='') - sys.stdout.flush() - shutil.rmtree(TOOLCHAIN_BUILD_DIR) - print('done') - - return InstallXcodeBinaries() + return InstallXcodeBinaries(args.xcode_version) if __name__ == '__main__': diff --git a/chromium/build/package_mac_toolchain.py b/chromium/build/package_mac_toolchain.py deleted file mode 100755 index 2c03ef37e3c..00000000000 --- a/chromium/build/package_mac_toolchain.py +++ /dev/null @@ -1,147 +0,0 @@ -#!/usr/bin/env python -# Copyright 2016 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Compress and upload Mac toolchain files. - -Stored in in https://pantheon.corp.google.com/storage/browser/chrome-mac-sdk/. -""" - -from __future__ import print_function - -import argparse -import glob -import os -import plistlib -import re -import subprocess -import sys -import tarfile -import tempfile - - -TOOLCHAIN_URL = "gs://chrome-mac-sdk" - -# It's important to at least remove unused Platform folders to cut down on the -# size of the toolchain folder. There are other various unused folders that -# have been removed through trial and error. If future versions of Xcode become -# problematic it's possible this list is incorrect, and can be reduced to just -# the unused platforms. On the flip side, it's likely more directories can be -# excluded. -DEFAULT_EXCLUDE_FOLDERS = [ -'Contents/Applications', -'Contents/Developer/Documentation', -'Contents/Developer/Library/Xcode/Templates', -'Contents/Developer/Platforms/AppleTVOS.platform', -'Contents/Developer/Platforms/AppleTVSimulator.platform', -'Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/' - 'usr/share/man/', -'Contents/Developer/Platforms/WatchOS.platform', -'Contents/Developer/Platforms/WatchSimulator.platform', -'Contents/Developer/Toolchains/Swift*', -'Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/lib/swift', -'Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/lib/swift-migrator', -'Contents/Resources/Packages/MobileDevice.pkg', -] - -MAC_EXCLUDE_FOLDERS = [ -# The only thing we need in iPhoneOS.platform on mac is: -# \Developer\Library\Xcode\PrivatePlugins -# \Info.Plist. -# This is the cleanest way to get these. 
-'Contents/Developer/Platforms/iPhoneOS.platform/Developer/Library/Frameworks', -'Contents/Developer/Platforms/iPhoneOS.platform/Developer/Library/GPUTools', -'Contents/Developer/Platforms/iPhoneOS.platform/Developer/Library/' - 'GPUToolsPlatform', -'Contents/Developer/Platforms/iPhoneOS.platform/Developer/Library/' - 'PrivateFrameworks', -'Contents/Developer/Platforms/iPhoneOS.platform/Developer/usr', -'Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs', -'Contents/Developer/Platforms/iPhoneOS.platform/DeviceSupport', -'Contents/Developer/Platforms/iPhoneOS.platform/Library', -'Contents/Developer/Platforms/iPhoneOS.platform/usr', - -# iPhoneSimulator has a similar requirement, but the bulk of the binary size is -# in \Developer\SDKs, so only excluding that here. -'Contents/Developer/Platforms/iPhoneSimulator.platform/Developer/SDKs', -] - -IOS_EXCLUDE_FOLDERS = [ -'Contents/Developer/Platforms/iPhoneOS.platform/DeviceSupport/' -'Contents/Developer/Platforms/iPhoneSimulator.platform/Developer/SDKs/' - 'iPhoneSimulator.sdk/Applications/', -'Contents/Developer/Platforms/iPhoneSimulator.platform/Developer/SDKs/' - 'iPhoneSimulator.sdk/System/Library/AccessibilityBundles/', -'Contents/Developer/Platforms/iPhoneSimulator.platform/Developer/SDKs/' - 'iPhoneSimulator.sdk/System/Library/CoreServices/', -'Contents/Developer/Platforms/iPhoneSimulator.platform/Developer/SDKs/' - 'iPhoneSimulator.sdk/System/Library/LinguisticData/', -] - -def main(): - """Compress |target_dir| and upload to |TOOLCHAIN_URL|""" - parser = argparse.ArgumentParser() - parser.add_argument('target_dir', - help="Xcode installation directory.") - parser.add_argument('platform', choices=['ios', 'mac'], - help="Target platform for bundle.") - parser_args = parser.parse_args() - - # Verify this looks like an Xcode directory. - contents_dir = os.path.join(parser_args.target_dir, 'Contents') - plist_file = os.path.join(contents_dir, 'version.plist') - try: - info = plistlib.readPlist(plist_file) - except: - print("Invalid Xcode dir.") - return 0 - build_version = info['ProductBuildVersion'] - - # Look for previous toolchain tgz files with the same |build_version|. - fname = 'toolchain' - if parser_args.platform == 'ios': - fname = 'ios-' + fname - wildcard_filename = '%s/%s-%s-*.tgz' % (TOOLCHAIN_URL, fname, build_version) - p = subprocess.Popen(['gsutil.py', 'ls', wildcard_filename], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) - output = p.communicate()[0] - next_count = 1 - if p.returncode == 0: - next_count = len(output.split('\n')) - sys.stdout.write("%s already exists (%s). " - "Do you want to create another? [y/n] " - % (build_version, next_count - 1)) - - if raw_input().lower() not in set(['yes','y', 'ye']): - print("Skipping duplicate upload.") - return 0 - - os.chdir(parser_args.target_dir) - toolchain_file_name = "%s-%s-%s" % (fname, build_version, next_count) - toolchain_name = tempfile.mktemp(suffix='toolchain.tgz') - - print("Creating %s (%s)." % (toolchain_file_name, toolchain_name)) - os.environ["COPYFILE_DISABLE"] = "1" - os.environ["GZ_OPT"] = "-8" - args = ['tar', '-cvzf', toolchain_name] - exclude_folders = DEFAULT_EXCLUDE_FOLDERS - if parser_args.platform == 'mac': - exclude_folders += MAC_EXCLUDE_FOLDERS - else: - exclude_folders += IOS_EXCLUDE_FOLDERS - args.extend(map('--exclude={0}'.format, exclude_folders)) - args.extend(['.']) - subprocess.check_call(args) - - print("Uploading %s toolchain." 
% toolchain_file_name) - destination_path = '%s/%s.tgz' % (TOOLCHAIN_URL, toolchain_file_name) - subprocess.check_call(['gsutil.py', 'cp', '-n', toolchain_name, - destination_path]) - - print("Done with %s upload." % toolchain_file_name) - return 0 - -if __name__ == '__main__': - sys.exit(main()) diff --git a/chromium/build/print_python_deps.py b/chromium/build/print_python_deps.py index ca32a5bc206..fd29c0972c9 100755 --- a/chromium/build/print_python_deps.py +++ b/chromium/build/print_python_deps.py @@ -22,7 +22,7 @@ import sys _SRC_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)) -def _ComputePythonDependencies(): +def ComputePythonDependencies(): """Gets the paths of imported non-system python modules. A path is assumed to be a "system" import if it is outside of chromium's @@ -179,7 +179,7 @@ def main(): sys.stderr.write('python={}\n'.format(sys.executable)) raise - paths_set = _ComputePythonDependencies() + paths_set = ComputePythonDependencies() for path in options.whitelists: paths_set.update(os.path.abspath(p) for p in _FindPythonInDirectory(path)) diff --git a/chromium/build/protoc_java.py b/chromium/build/protoc_java.py index 62b012e2861..e6147d8b845 100755 --- a/chromium/build/protoc_java.py +++ b/chromium/build/protoc_java.py @@ -37,8 +37,6 @@ def main(argv): parser.add_option("--stamp", help="File to touch on success.") parser.add_option("--nano", help="Use to generate nano protos.", action='store_true') - parser.add_option("--protoc-javalite-plugin-dir", - help="Path to protoc java lite plugin directory.") parser.add_option("--import-dir", action="append", default=[], help="Extra import directory for protos, can be repeated.") options, args = parser.parse_args(argv) @@ -59,21 +57,11 @@ def main(argv): 'store_unknown_fields=true'] out_arg = '--javanano_out=' + ','.join(generator_args) + ':' + temp_dir else: - out_arg = '--javalite_out=' + temp_dir - - custom_env = os.environ.copy() - if options.protoc_javalite_plugin_dir: - # If we are generating lite protos, then the lite plugin needs to be in - # the path when protoc is called. See - # https://github.com/protocolbuffers/protobuf/blob/master/java/lite.md - custom_env['PATH'] = '{}:{}'.format( - os.path.abspath(options.protoc_javalite_plugin_dir), - custom_env['PATH']) + out_arg = '--java_out=lite:' + temp_dir # Generate Java files using protoc. build_utils.CheckOutput( [options.protoc] + proto_path_args + [out_arg] + args, - env=custom_env, # protoc generates superfluous warnings about LITE_RUNTIME deprecation # even though we are using the new non-deprecated method. stderr_filter=lambda output: build_utils.FilterLines( @@ -89,8 +77,7 @@ def main(argv): if options.depfile: assert options.srcjar deps = args + [options.protoc] - build_utils.WriteDepfile(options.depfile, options.srcjar, deps, - add_pydeps=False) + build_utils.WriteDepfile(options.depfile, options.srcjar, deps) if options.stamp: build_utils.Touch(options.stamp) diff --git a/chromium/build/sanitizers/lsan_suppressions.cc b/chromium/build/sanitizers/lsan_suppressions.cc index fbd9b40494e..381f4b7a0d1 100644 --- a/chromium/build/sanitizers/lsan_suppressions.cc +++ b/chromium/build/sanitizers/lsan_suppressions.cc @@ -30,6 +30,8 @@ char kLSanDefaultSuppressions[] = // Leaks in Nvidia's libGL. "leak:libGL.so\n" + "leak:libGLX_nvidia.so\n" + "leak:libnvidia-glcore.so\n" // XRandR has several one time leaks. 
"leak:libxrandr\n" diff --git a/chromium/build/skia_gold_common/.style.yapf b/chromium/build/skia_gold_common/.style.yapf new file mode 100644 index 00000000000..239e0a247f3 --- /dev/null +++ b/chromium/build/skia_gold_common/.style.yapf @@ -0,0 +1,6 @@ +[style] +based_on_style = pep8 + +column_limit = 80 +indent_width = 2 + diff --git a/chromium/build/skia_gold_common/OWNERS b/chromium/build/skia_gold_common/OWNERS new file mode 100644 index 00000000000..71de6595104 --- /dev/null +++ b/chromium/build/skia_gold_common/OWNERS @@ -0,0 +1 @@ +bsheedy@google.com diff --git a/chromium/build/skia_gold_common/PRESUBMIT.py b/chromium/build/skia_gold_common/PRESUBMIT.py new file mode 100644 index 00000000000..41e1bb2f7de --- /dev/null +++ b/chromium/build/skia_gold_common/PRESUBMIT.py @@ -0,0 +1,34 @@ +# Copyright 2020 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Presubmit script for //build/skia_gold_common/. + +See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts +for more details on the presubmit API built into depot_tools. +""" + + +def CommonChecks(input_api, output_api): + output = [] + build_path = input_api.os_path.join(input_api.PresubmitLocalPath(), '..') + skia_gold_env = dict(input_api.environ) + skia_gold_env.update({ + 'PYTHONPATH': build_path, + 'PYTHONDONTWRITEBYTECODE': '1', + }) + output.extend( + input_api.canned_checks.RunUnitTestsInDirectory( + input_api, + output_api, + input_api.PresubmitLocalPath(), [r'^.+_unittest\.py$'], + env=skia_gold_env)) + output.extend(input_api.canned_checks.RunPylint(input_api, output_api)) + return output + + +def CheckChangeOnUpload(input_api, output_api): + return CommonChecks(input_api, output_api) + + +def CheckChangeOnCommit(input_api, output_api): + return CommonChecks(input_api, output_api) diff --git a/chromium/build/skia_gold_common/README.md b/chromium/build/skia_gold_common/README.md new file mode 100644 index 00000000000..ec721117480 --- /dev/null +++ b/chromium/build/skia_gold_common/README.md @@ -0,0 +1,6 @@ +This directory contains Python code used for interacting with the Skia Gold +image diff service. It is used by multiple test harnesses, e.g. +`//build/android/test_runner.py` and +`//content/test/gpu/run_gpu_integration_test.py`. A place such as +`//testing/` would likely be a better location, but causes issues with +V8 since it imports `//build/` but not all of Chromium src. diff --git a/chromium/build/skia_gold_common/__init__.py b/chromium/build/skia_gold_common/__init__.py new file mode 100644 index 00000000000..ae1922e1cc4 --- /dev/null +++ b/chromium/build/skia_gold_common/__init__.py @@ -0,0 +1,3 @@ +# Copyright 2020 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. diff --git a/chromium/build/skia_gold_common/skia_gold_properties.py b/chromium/build/skia_gold_common/skia_gold_properties.py new file mode 100644 index 00000000000..6ee38eedb55 --- /dev/null +++ b/chromium/build/skia_gold_common/skia_gold_properties.py @@ -0,0 +1,139 @@ +# Copyright 2020 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Class for storing Skia Gold comparison properties. 
+ +Examples: +* git revision being tested +* Whether the test is being run locally or on a bot +* What the continuous integration system is +""" + +import logging +import os +import subprocess +import sys + + +class SkiaGoldProperties(object): + def __init__(self, args): + """Abstract class to validate and store properties related to Skia Gold. + + Args: + args: The parsed arguments from an argparse.ArgumentParser. + """ + self._git_revision = None + self._issue = None + self._patchset = None + self._job_id = None + self._local_pixel_tests = None + self._no_luci_auth = None + self._bypass_skia_gold_functionality = None + + # Could in theory be configurable, but hard-coded for now since there's + # no plan to support anything else. + self._code_review_system = 'gerrit' + self._continuous_integration_system = 'buildbucket' + + self._InitializeProperties(args) + + def IsTryjobRun(self): + return self.issue is not None + + @property + def continuous_integration_system(self): + return self._continuous_integration_system + + @property + def code_review_system(self): + return self._code_review_system + + @property + def git_revision(self): + return self._GetGitRevision() + + @property + def issue(self): + return self._issue + + @property + def job_id(self): + return self._job_id + + @property + def local_pixel_tests(self): + return self._IsLocalRun() + + @property + def no_luci_auth(self): + return self._no_luci_auth + + @property + def patchset(self): + return self._patchset + + @property + def bypass_skia_gold_functionality(self): + return self._bypass_skia_gold_functionality + + @staticmethod + def _GetGitOriginMasterHeadSha1(): + raise NotImplementedError() + + def _GetGitRevision(self): + if not self._git_revision: + # Automated tests should always pass the revision, so assume we're on + # a workstation and try to get the local origin/master HEAD. + if not self._IsLocalRun(): + raise RuntimeError( + '--git-revision was not passed when running on a bot') + revision = self._GetGitOriginMasterHeadSha1() + if not revision or len(revision) != 40: + raise RuntimeError( + '--git-revision not passed and unable to determine from git') + self._git_revision = revision + return self._git_revision + + def _IsLocalRun(self): + if self._local_pixel_tests is None: + # Look for the presence of the SWARMING_SERVER environment variable as a + # heuristic to determine whether we're running on a workstation or a bot. + # This should always be set on swarming, but would be strange to be set on + # a workstation. + self._local_pixel_tests = 'SWARMING_SERVER' not in os.environ + if self._local_pixel_tests: + logging.warning( + 'Automatically determined that test is running on a workstation') + else: + logging.warning( + 'Automatically determined that test is running on a bot') + return self._local_pixel_tests + + def _InitializeProperties(self, args): + if hasattr(args, 'local_pixel_tests'): + # If not set, will be automatically determined later if needed. + self._local_pixel_tests = args.local_pixel_tests + + if hasattr(args, 'no_luci_auth'): + self._no_luci_auth = args.no_luci_auth + + if hasattr(args, 'bypass_skia_gold_functionality'): + self._bypass_skia_gold_functionality = args.bypass_skia_gold_functionality + + # Will be automatically determined later if needed. + if not hasattr(args, 'git_revision') or not args.git_revision: + return + self._git_revision = args.git_revision + + # Only expected on tryjob runs. 
+ if not hasattr(args, 'gerrit_issue') or not args.gerrit_issue: + return + self._issue = args.gerrit_issue + if not hasattr(args, 'gerrit_patchset') or not args.gerrit_patchset: + raise RuntimeError( + '--gerrit-issue passed, but --gerrit-patchset not passed.') + self._patchset = args.gerrit_patchset + if not hasattr(args, 'buildbucket_id') or not args.buildbucket_id: + raise RuntimeError( + '--gerrit-issue passed, but --buildbucket-id not passed.') + self._job_id = args.buildbucket_id diff --git a/chromium/build/skia_gold_common/skia_gold_properties_unittest.py b/chromium/build/skia_gold_common/skia_gold_properties_unittest.py new file mode 100755 index 00000000000..348512c7f91 --- /dev/null +++ b/chromium/build/skia_gold_common/skia_gold_properties_unittest.py @@ -0,0 +1,169 @@ +#!/usr/bin/env vpython +# Copyright 2020 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +#pylint: disable=protected-access + +import os +import unittest + +import mock + +from skia_gold_common import skia_gold_properties +from skia_gold_common import unittest_utils + +createSkiaGoldArgs = unittest_utils.createSkiaGoldArgs + + +class SkiaGoldPropertiesInitializationTest(unittest.TestCase): + """Tests that SkiaGoldProperties initializes (or doesn't) when expected.""" + + def verifySkiaGoldProperties(self, instance, expected): + self.assertEqual(instance._local_pixel_tests, + expected.get('local_pixel_tests')) + self.assertEqual(instance._no_luci_auth, expected.get('no_luci_auth')) + self.assertEqual(instance._git_revision, expected.get('git_revision')) + self.assertEqual(instance._issue, expected.get('gerrit_issue')) + self.assertEqual(instance._patchset, expected.get('gerrit_patchset')) + self.assertEqual(instance._job_id, expected.get('buildbucket_id')) + self.assertEqual(instance._bypass_skia_gold_functionality, + expected.get('bypass_skia_gold_functionality')) + + def test_initializeSkiaGoldAttributes_unsetLocal(self): + args = createSkiaGoldArgs() + sgp = skia_gold_properties.SkiaGoldProperties(args) + self.verifySkiaGoldProperties(sgp, {}) + + def test_initializeSkiaGoldAttributes_explicitLocal(self): + args = createSkiaGoldArgs(local_pixel_tests=True) + sgp = skia_gold_properties.SkiaGoldProperties(args) + self.verifySkiaGoldProperties(sgp, {'local_pixel_tests': True}) + + def test_initializeSkiaGoldAttributes_explicitNonLocal(self): + args = createSkiaGoldArgs(local_pixel_tests=False) + sgp = skia_gold_properties.SkiaGoldProperties(args) + self.verifySkiaGoldProperties(sgp, {'local_pixel_tests': False}) + + def test_initializeSkiaGoldAttributes_explicitNoLuciAuth(self): + args = createSkiaGoldArgs(no_luci_auth=True) + sgp = skia_gold_properties.SkiaGoldProperties(args) + self.verifySkiaGoldProperties(sgp, {'no_luci_auth': True}) + + def test_initializeSkiaGoldAttributes_bypassExplicitTrue(self): + args = createSkiaGoldArgs(bypass_skia_gold_functionality=True) + sgp = skia_gold_properties.SkiaGoldProperties(args) + self.verifySkiaGoldProperties(sgp, {'bypass_skia_gold_functionality': True}) + + def test_initializeSkiaGoldAttributes_explicitGitRevision(self): + args = createSkiaGoldArgs(git_revision='a') + sgp = skia_gold_properties.SkiaGoldProperties(args) + self.verifySkiaGoldProperties(sgp, {'git_revision': 'a'}) + + def test_initializeSkiaGoldAttributes_tryjobArgsIgnoredWithoutRevision(self): + args = createSkiaGoldArgs(gerrit_issue=1, + gerrit_patchset=2, + buildbucket_id=3) + sgp = 
skia_gold_properties.SkiaGoldProperties(args) + self.verifySkiaGoldProperties(sgp, {}) + + def test_initializeSkiaGoldAttributes_tryjobArgs(self): + args = createSkiaGoldArgs(git_revision='a', + gerrit_issue=1, + gerrit_patchset=2, + buildbucket_id=3) + sgp = skia_gold_properties.SkiaGoldProperties(args) + self.verifySkiaGoldProperties( + sgp, { + 'git_revision': 'a', + 'gerrit_issue': 1, + 'gerrit_patchset': 2, + 'buildbucket_id': 3 + }) + + def test_initializeSkiaGoldAttributes_tryjobMissingPatchset(self): + args = createSkiaGoldArgs(git_revision='a', + gerrit_issue=1, + buildbucket_id=3) + with self.assertRaises(RuntimeError): + skia_gold_properties.SkiaGoldProperties(args) + + def test_initializeSkiaGoldAttributes_tryjobMissingBuildbucket(self): + args = createSkiaGoldArgs(git_revision='a', + gerrit_issue=1, + gerrit_patchset=2) + with self.assertRaises(RuntimeError): + skia_gold_properties.SkiaGoldProperties(args) + + +class SkiaGoldPropertiesCalculationTest(unittest.TestCase): + """Tests that SkiaGoldProperties properly calculates certain properties.""" + + def testLocalPixelTests_determineTrue(self): + args = createSkiaGoldArgs() + sgp = skia_gold_properties.SkiaGoldProperties(args) + with mock.patch.dict(os.environ, {}, clear=True): + self.assertTrue(sgp.local_pixel_tests) + + def testLocalPixelTests_determineFalse(self): + args = createSkiaGoldArgs() + sgp = skia_gold_properties.SkiaGoldProperties(args) + with mock.patch.dict(os.environ, {'SWARMING_SERVER': ''}, clear=True): + self.assertFalse(sgp.local_pixel_tests) + + def testIsTryjobRun_noIssue(self): + args = createSkiaGoldArgs() + sgp = skia_gold_properties.SkiaGoldProperties(args) + self.assertFalse(sgp.IsTryjobRun()) + + def testIsTryjobRun_issue(self): + args = createSkiaGoldArgs(git_revision='a', + gerrit_issue=1, + gerrit_patchset=2, + buildbucket_id=3) + sgp = skia_gold_properties.SkiaGoldProperties(args) + self.assertTrue(sgp.IsTryjobRun()) + + def testGetGitRevision_revisionSet(self): + args = createSkiaGoldArgs(git_revision='a') + sgp = skia_gold_properties.SkiaGoldProperties(args) + self.assertEqual(sgp.git_revision, 'a') + + def testGetGitRevision_findValidRevision(self): + args = createSkiaGoldArgs(local_pixel_tests=True) + sgp = skia_gold_properties.SkiaGoldProperties(args) + with mock.patch.object(skia_gold_properties.SkiaGoldProperties, + '_GetGitOriginMasterHeadSha1') as patched_head: + expected = 'a' * 40 + patched_head.return_value = expected + self.assertEqual(sgp.git_revision, expected) + # Should be cached. 
+ self.assertEqual(sgp._git_revision, expected) + + def testGetGitRevision_noExplicitOnBot(self): + args = createSkiaGoldArgs(local_pixel_tests=False) + sgp = skia_gold_properties.SkiaGoldProperties(args) + with self.assertRaises(RuntimeError): + _ = sgp.git_revision + + def testGetGitRevision_findEmptyRevision(self): + args = createSkiaGoldArgs(local_pixel_tests=True) + sgp = skia_gold_properties.SkiaGoldProperties(args) + with mock.patch.object(skia_gold_properties.SkiaGoldProperties, + '_GetGitOriginMasterHeadSha1') as patched_head: + patched_head.return_value = '' + with self.assertRaises(RuntimeError): + _ = sgp.git_revision + + def testGetGitRevision_findMalformedRevision(self): + args = createSkiaGoldArgs(local_pixel_tests=True) + sgp = skia_gold_properties.SkiaGoldProperties(args) + with mock.patch.object(skia_gold_properties.SkiaGoldProperties, + '_GetGitOriginMasterHeadSha1') as patched_head: + patched_head.return_value = 'a' * 39 + with self.assertRaises(RuntimeError): + _ = sgp.git_revision + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/chromium/build/skia_gold_common/skia_gold_session.py b/chromium/build/skia_gold_common/skia_gold_session.py new file mode 100644 index 00000000000..fcdc06fbe58 --- /dev/null +++ b/chromium/build/skia_gold_common/skia_gold_session.py @@ -0,0 +1,432 @@ +# Copyright 2020 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Class for interacting with the Skia Gold image diffing service.""" + +import logging +import os +import subprocess +import sys +import tempfile + +CHROMIUM_SRC = os.path.realpath( + os.path.join(os.path.dirname(__file__), '..', '..')) + +GOLDCTL_BINARY = os.path.join(CHROMIUM_SRC, 'tools', 'skia_goldctl') +if sys.platform == 'win32': + GOLDCTL_BINARY = os.path.join(GOLDCTL_BINARY, 'win', 'goldctl') + '.exe' +elif sys.platform == 'darwin': + GOLDCTL_BINARY = os.path.join(GOLDCTL_BINARY, 'mac', 'goldctl') +else: + GOLDCTL_BINARY = os.path.join(GOLDCTL_BINARY, 'linux', 'goldctl') + + +class SkiaGoldSession(object): + class StatusCodes(object): + """Status codes for RunComparison.""" + SUCCESS = 0 + AUTH_FAILURE = 1 + INIT_FAILURE = 2 + COMPARISON_FAILURE_REMOTE = 3 + COMPARISON_FAILURE_LOCAL = 4 + LOCAL_DIFF_FAILURE = 5 + NO_OUTPUT_MANAGER = 6 + + class ComparisonResults(object): + """Struct-like object for storing results of an image comparison.""" + + def __init__(self): + self.triage_link = None + self.triage_link_omission_reason = None + self.local_diff_given_image = None + self.local_diff_closest_image = None + self.local_diff_diff_image = None + + def __init__(self, working_dir, gold_properties, keys_file, corpus, instance): + """Abstract class to handle all aspects of image comparison via Skia Gold. + + A single SkiaGoldSession is valid for a single instance/corpus/keys_file + combination. + + Args: + working_dir: The directory to store config files, etc. + gold_properties: A skia_gold_properties.SkiaGoldProperties instance for + the current test run. + keys_file: A path to a JSON file containing various comparison config data + such as corpus and debug information like the hardware/software + configuration the images will be produced on. + corpus: The corpus that images that will be compared belong to. + instance: The name of the Skia Gold instance to interact with. 
+ """ + self._working_dir = working_dir + self._gold_properties = gold_properties + self._keys_file = keys_file + self._corpus = corpus + self._instance = instance + self._triage_link_file = tempfile.NamedTemporaryFile(suffix='.txt', + dir=working_dir, + delete=False).name + # A map of image name (string) to ComparisonResults for that image. + self._comparison_results = {} + self._authenticated = False + self._initialized = False + + def RunComparison(self, name, png_file, output_manager, use_luci=True): + """Helper method to run all steps to compare a produced image. + + Handles authentication, itnitialization, comparison, and, if necessary, + local diffing. + + Args: + name: The name of the image being compared. + png_file: A path to a PNG file containing the image to be compared. + output_manager: An output manager to use to store diff links. The + argument's type depends on what type a subclasses' _StoreDiffLinks + implementation expects. Can be None even if _StoreDiffLinks expects + a valid input, but will fail if it ever actually needs to be used. + use_luci: If true, authentication will use the service account provided by + the LUCI context. If false, will attempt to use whatever is set up in + gsutil, which is only supported for local runs. + + Returns: + A tuple (status, error). |status| is a value from + SkiaGoldSession.StatusCodes signifying the result of the comparison. + |error| is an error message describing the status if not successful. + """ + auth_rc, auth_stdout = self.Authenticate(use_luci=use_luci) + if auth_rc: + return self.StatusCodes.AUTH_FAILURE, auth_stdout + + init_rc, init_stdout = self.Initialize() + if init_rc: + return self.StatusCodes.INIT_FAILURE, init_stdout + + compare_rc, compare_stdout = self.Compare(name=name, png_file=png_file) + if not compare_rc: + return self.StatusCodes.SUCCESS, None + + logging.error('Gold comparison failed: %s', compare_stdout) + if not self._gold_properties.local_pixel_tests: + return self.StatusCodes.COMPARISON_FAILURE_REMOTE, compare_stdout + + if not output_manager: + return (self.StatusCodes.NO_OUTPUT_MANAGER, + 'No output manager for local diff images') + + diff_rc, diff_stdout = self.Diff(name=name, + png_file=png_file, + output_manager=output_manager) + if diff_rc: + return self.StatusCodes.LOCAL_DIFF_FAILURE, diff_stdout + return self.StatusCodes.COMPARISON_FAILURE_LOCAL, compare_stdout + + def Authenticate(self, use_luci=True): + """Authenticates with Skia Gold for this session. + + Args: + use_luci: If true, authentication will use the service account provided + by the LUCI context. If false, will attempt to use whatever is set up + in gsutil, which is only supported for local runs. + + Returns: + A tuple (return_code, output). |return_code| is the return code of the + authentication process. |output| is the stdout + stderr of the + authentication process. 
+ """ + if self._authenticated: + return 0, None + if self._gold_properties.bypass_skia_gold_functionality: + logging.warning('Not actually authenticating with Gold due to ' + '--bypass-skia-gold-functionality being present.') + return 0, None + + auth_cmd = [GOLDCTL_BINARY, 'auth', '--work-dir', self._working_dir] + if use_luci: + auth_cmd.append('--luci') + elif not self._gold_properties.local_pixel_tests: + raise RuntimeError( + 'Cannot authenticate to Skia Gold with use_luci=False unless running ' + 'local pixel tests') + + rc, stdout = self._RunCmdForRcAndOutput(auth_cmd) + if rc == 0: + self._authenticated = True + return rc, stdout + + def Initialize(self): + """Initializes the working directory if necessary. + + This can technically be skipped if the same information is passed to the + command used for image comparison, but that is less efficient under the + hood. Doing it that way effectively requires an initialization for every + comparison (~250 ms) instead of once at the beginning. + + Returns: + A tuple (return_code, output). |return_code| is the return code of the + initialization process. |output| is the stdout + stderr of the + initialization process. + """ + if self._initialized: + return 0, None + if self._gold_properties.bypass_skia_gold_functionality: + logging.warning('Not actually initializing Gold due to ' + '--bypass-skia-gold-functionality being present.') + return 0, None + + init_cmd = [ + GOLDCTL_BINARY, + 'imgtest', + 'init', + '--passfail', + '--instance', + self._instance, + '--corpus', + self._corpus, + '--keys-file', + self._keys_file, + '--work-dir', + self._working_dir, + '--failure-file', + self._triage_link_file, + '--commit', + self._gold_properties.git_revision, + ] + if self._gold_properties.IsTryjobRun(): + init_cmd.extend([ + '--issue', + str(self._gold_properties.issue), + '--patchset', + str(self._gold_properties.patchset), + '--jobid', + str(self._gold_properties.job_id), + '--crs', + str(self._gold_properties.code_review_system), + '--cis', + str(self._gold_properties.continuous_integration_system), + ]) + + rc, stdout = self._RunCmdForRcAndOutput(init_cmd) + if rc == 0: + self._initialized = True + return rc, stdout + + def Compare(self, name, png_file): + """Compares the given image to images known to Gold. + + Triage links can later be retrieved using GetTriageLink(). + + Args: + name: The name of the image being compared. + png_file: A path to a PNG file containing the image to be compared. + + Returns: + A tuple (return_code, output). |return_code| is the return code of the + comparison process. |output| is the stdout + stderr of the comparison + process. 
+ """ + if self._gold_properties.bypass_skia_gold_functionality: + logging.warning('Not actually comparing with Gold due to ' + '--bypass-skia-gold-functionality being present.') + return 0, None + + compare_cmd = [ + GOLDCTL_BINARY, + 'imgtest', + 'add', + '--test-name', + name, + '--png-file', + png_file, + '--work-dir', + self._working_dir, + ] + if self._gold_properties.local_pixel_tests: + compare_cmd.append('--dryrun') + + self._ClearTriageLinkFile() + rc, stdout = self._RunCmdForRcAndOutput(compare_cmd) + + self._comparison_results[name] = self.ComparisonResults() + if rc == 0: + self._comparison_results[name].triage_link_omission_reason = ( + 'Comparison succeeded, no triage link') + elif self._gold_properties.IsTryjobRun(): + cl_triage_link = ('https://{instance}-gold.skia.org/cl/{crs}/{issue}') + cl_triage_link = cl_triage_link.format( + instance=self._instance, + crs=self._gold_properties.code_review_system, + issue=self._gold_properties.issue) + self._comparison_results[name].triage_link = cl_triage_link + else: + try: + with open(self._triage_link_file) as tlf: + triage_link = tlf.read().strip() + self._comparison_results[name].triage_link = triage_link + except IOError: + self._comparison_results[name].triage_link_omission_reason = ( + 'Failed to read triage link from file') + return rc, stdout + + def Diff(self, name, png_file, output_manager): + """Performs a local image diff against the closest known positive in Gold. + + This is used for running tests on a workstation, where uploading data to + Gold for ingestion is not allowed, and thus the web UI is not available. + + Image links can later be retrieved using Get*ImageLink(). + + Args: + name: The name of the image being compared. + png_file: The path to a PNG file containing the image to be diffed. + output_manager: An output manager to use to store diff links. The + argument's type depends on what type a subclasses' _StoreDiffLinks + implementation expects. + + Returns: + A tuple (return_code, output). |return_code| is the return code of the + diff process. |output| is the stdout + stderr of the diff process. + """ + # Instead of returning that everything is okay and putting in dummy links, + # just fail since this should only be called when running locally and + # --bypass-skia-gold-functionality is only meant for use on the bots. + if self._gold_properties.bypass_skia_gold_functionality: + raise RuntimeError( + '--bypass-skia-gold-functionality is not supported when running ' + 'tests locally.') + + output_dir = self._CreateDiffOutputDir() + diff_cmd = [ + GOLDCTL_BINARY, + 'diff', + '--corpus', + self._corpus, + '--instance', + self._instance, + '--input', + png_file, + '--test', + name, + '--work-dir', + self._working_dir, + '--out-dir', + output_dir, + ] + rc, stdout = self._RunCmdForRcAndOutput(diff_cmd) + self._StoreDiffLinks(name, output_manager, output_dir) + return rc, stdout + + def GetTriageLink(self, name): + """Gets the triage link for the given image. + + Args: + name: The name of the image to retrieve the triage link for. + + Returns: + A string containing the triage link if it is available, or None if it is + not available for some reason. The reason can be retrieved using + GetTriageLinkOmissionReason. + """ + return self._comparison_results.get(name, + self.ComparisonResults()).triage_link + + def GetTriageLinkOmissionReason(self, name): + """Gets the reason why a triage link is not available for an image. + + Args: + name: The name of the image whose triage link does not exist. 
+ + Returns: + A string containing the reason why a triage link is not available. + """ + if name not in self._comparison_results: + return 'No image comparison performed for %s' % name + results = self._comparison_results[name] + # This method should not be called if there is a valid triage link. + assert results.triage_link is None + if results.triage_link_omission_reason: + return results.triage_link_omission_reason + if results.local_diff_given_image: + return 'Gold only used to do a local image diff' + raise RuntimeError( + 'Somehow have a ComparisonResults instance for %s that should not ' + 'exist' % name) + + def GetGivenImageLink(self, name): + """Gets the link to the given image used for local diffing. + + Args: + name: The name of the image that was diffed. + + Returns: + A string containing the link to where the image is saved, or None if it + does not exist. + """ + assert name in self._comparison_results + return self._comparison_results[name].local_diff_given_image + + def GetClosestImageLink(self, name): + """Gets the link to the closest known image used for local diffing. + + Args: + name: The name of the image that was diffed. + + Returns: + A string containing the link to where the image is saved, or None if it + does not exist. + """ + assert name in self._comparison_results + return self._comparison_results[name].local_diff_closest_image + + def GetDiffImageLink(self, name): + """Gets the link to the diff between the given and closest images. + + Args: + name: The name of the image that was diffed. + + Returns: + A string containing the link to where the image is saved, or None if it + does not exist. + """ + assert name in self._comparison_results + return self._comparison_results[name].local_diff_diff_image + + def _ClearTriageLinkFile(self): + """Clears the contents of the triage link file. + + This should be done before every comparison since goldctl appends to the + file instead of overwriting its contents, which results in multiple triage + links getting concatenated together if there are multiple failures. + """ + open(self._triage_link_file, 'w').close() + + def _CreateDiffOutputDir(self): + return tempfile.mkdtemp(dir=self._working_dir) + + def _StoreDiffLinks(self, image_name, output_manager, output_dir): + """Stores the local diff files as links. + + The ComparisonResults entry for |image_name| should have its *_image fields + filled after this unless corresponding images were not found on disk. + + Args: + image_name: A string containing the name of the image that was diffed. + output_manager: An output manager used to surface links to users, + if necessary. The expected argument type depends on each subclass's + implementation of this method. + output_dir: A string containing the path to the directory where diff + output image files were saved. + """ + raise NotImplementedError() + + @staticmethod + def _RunCmdForRcAndOutput(cmd): + """Runs |cmd| and returns its returncode and output. + + Args: + cmd: A list containing the command line to run. + + Returns: + A tuple (rc, output), where |rc| is the returncode of the command and + |output| is the stdout + stderr of the command. + """ + raise NotImplementedError() diff --git a/chromium/build/skia_gold_common/skia_gold_session_manager.py b/chromium/build/skia_gold_common/skia_gold_session_manager.py new file mode 100644 index 00000000000..f4f5f178817 --- /dev/null +++ b/chromium/build/skia_gold_common/skia_gold_session_manager.py @@ -0,0 +1,115 @@ +# Copyright 2020 The Chromium Authors.
All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Class for managing multiple SkiaGoldSessions.""" + +import json +import tempfile + + +class SkiaGoldSessionManager(object): + def __init__(self, working_dir, gold_properties): + """Abstract class to manage one or more skia_gold_session.SkiaGoldSessions. + + A separate session is required for each instance/corpus/keys_file + combination, so this class will lazily create them as necessary. + + Args: + working_dir: The working directory under which each individual + SkiaGoldSessions' working directory will be created. + gold_properties: A SkiaGoldProperties instance that will be used to create + any SkiaGoldSessions. + """ + self._working_dir = working_dir + self._gold_properties = gold_properties + self._sessions = {} + + def GetSkiaGoldSession(self, keys_input, corpus=None, instance=None): + """Gets a SkiaGoldSession for the given arguments. + + Lazily creates one if necessary. + + Args: + keys_input: A way of retrieving various comparison config data such as + corpus and debug information like the hardware/software configuration + the image was produced on. Can be either a dict or a filepath to a + file containing JSON to read. + corpus: A string containing the corpus the session is for. If None, the + corpus will be determined using available information. + instance: The name of the Skia Gold instance to interact with. If None, + will use whatever default the subclass sets. + """ + instance = instance or self._GetDefaultInstance() + keys_dict = _GetKeysAsDict(keys_input) + keys_string = json.dumps(keys_dict, sort_keys=True) + if corpus is None: + corpus = keys_dict.get('source_type', instance) + # Use the string representation of the keys JSON as a proxy for a hash since + # dicts themselves are not hashable. + session = self._sessions.setdefault(instance, + {}).setdefault(corpus, {}).setdefault( + keys_string, None) + if not session: + working_dir = tempfile.mkdtemp(dir=self._working_dir) + keys_file = _GetKeysAsJson(keys_input, working_dir) + session = self._GetSessionClass()(working_dir, self._gold_properties, + keys_file, corpus, instance) + self._sessions[instance][corpus][keys_string] = session + return session + + @staticmethod + def _GetDefaultInstance(): + """Gets the default Skia Gold instance. + + Returns: + A string containing the default instance. + """ + raise NotImplementedError + + @staticmethod + def _GetSessionClass(): + """Gets the SkiaGoldSession class to use for session creation. + + Returns: + A reference to a SkiaGoldSession class. + """ + raise NotImplementedError + + +def _GetKeysAsDict(keys_input): + """Converts |keys_input| into a dictionary. + + Args: + keys_input: A dictionary or a string pointing to a JSON file. The contents + of either should be Skia Gold config data. + + Returns: + A dictionary containing the Skia Gold config data. + """ + if isinstance(keys_input, dict): + return keys_input + assert isinstance(keys_input, str) + with open(keys_input) as f: + return json.load(f) + + +def _GetKeysAsJson(keys_input, session_work_dir): + """Converts |keys_input| into a JSON file on disk. + + Args: + keys_input: A dictionary or a string pointing to a JSON file. The contents + of either should be Skia Gold config data. + + Returns: + A string containing a filepath to a JSON file containing |keys_input|'s + data.
+ """ + if isinstance(keys_input, str): + return keys_input + assert isinstance(keys_input, dict) + keys_file = tempfile.NamedTemporaryFile(suffix='.json', + dir=session_work_dir, + delete=False).name + with open(keys_file, 'w') as f: + json.dump(keys_input, f) + return keys_file diff --git a/chromium/build/skia_gold_common/skia_gold_session_manager_unittest.py b/chromium/build/skia_gold_common/skia_gold_session_manager_unittest.py new file mode 100755 index 00000000000..453ec9a9ae2 --- /dev/null +++ b/chromium/build/skia_gold_common/skia_gold_session_manager_unittest.py @@ -0,0 +1,176 @@ +#!/usr/bin/env vpython +# Copyright 2020 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +#pylint: disable=protected-access + +import json +import os +import tempfile +import unittest + +import mock + +from pyfakefs import fake_filesystem_unittest + +from skia_gold_common import skia_gold_properties +from skia_gold_common import skia_gold_session +from skia_gold_common import skia_gold_session_manager +from skia_gold_common import unittest_utils + +createSkiaGoldArgs = unittest_utils.createSkiaGoldArgs + + +class SkiaGoldSessionManagerGetSessionTest(fake_filesystem_unittest.TestCase): + """Tests the functionality of SkiaGoldSessionManager.GetSkiaGoldSession.""" + + def setUp(self): + self.setUpPyfakefs() + self._working_dir = tempfile.mkdtemp() + self._patcher = mock.patch.object( + skia_gold_session_manager.SkiaGoldSessionManager, '_GetSessionClass') + self._session_class_mock = self._patcher.start() + self._session_class_mock.return_value = skia_gold_session.SkiaGoldSession + self.addCleanup(self._patcher.stop) + + def test_ArgsForwardedToSession(self): + args = createSkiaGoldArgs() + sgp = skia_gold_properties.SkiaGoldProperties(args) + sgsm = skia_gold_session_manager.SkiaGoldSessionManager( + self._working_dir, sgp) + session = sgsm.GetSkiaGoldSession({}, 'corpus', 'instance') + self.assertTrue(session._keys_file.startswith(self._working_dir)) + self.assertEqual(session._corpus, 'corpus') + self.assertEqual(session._instance, 'instance') + # Make sure the session's working directory is a subdirectory of the + # manager's working directory. 
+ self.assertEqual(os.path.dirname(session._working_dir), self._working_dir) + + def test_corpusFromJson(self): + args = createSkiaGoldArgs() + sgp = skia_gold_properties.SkiaGoldProperties(args) + sgsm = skia_gold_session_manager.SkiaGoldSessionManager( + self._working_dir, sgp) + session = sgsm.GetSkiaGoldSession({'source_type': 'foobar'}, None, + 'instance') + self.assertTrue(session._keys_file.startswith(self._working_dir)) + self.assertEqual(session._corpus, 'foobar') + self.assertEqual(session._instance, 'instance') + + def test_corpusDefaultsToInstance(self): + args = createSkiaGoldArgs() + sgp = skia_gold_properties.SkiaGoldProperties(args) + sgsm = skia_gold_session_manager.SkiaGoldSessionManager( + self._working_dir, sgp) + session = sgsm.GetSkiaGoldSession({}, None, 'instance') + self.assertTrue(session._keys_file.startswith(self._working_dir)) + self.assertEqual(session._corpus, 'instance') + self.assertEqual(session._instance, 'instance') + + @mock.patch.object(skia_gold_session_manager.SkiaGoldSessionManager, + '_GetDefaultInstance') + def test_getDefaultInstance(self, default_instance_mock): + default_instance_mock.return_value = 'default' + args = createSkiaGoldArgs() + sgp = skia_gold_properties.SkiaGoldProperties(args) + sgsm = skia_gold_session_manager.SkiaGoldSessionManager( + self._working_dir, sgp) + session = sgsm.GetSkiaGoldSession({}, None, None) + self.assertTrue(session._keys_file.startswith(self._working_dir)) + self.assertEqual(session._corpus, 'default') + self.assertEqual(session._instance, 'default') + + @mock.patch.object(skia_gold_session.SkiaGoldSession, '__init__') + def test_matchingSessionReused(self, session_mock): + session_mock.return_value = None + args = createSkiaGoldArgs() + sgp = skia_gold_properties.SkiaGoldProperties(args) + sgsm = skia_gold_session_manager.SkiaGoldSessionManager( + self._working_dir, sgp) + session1 = sgsm.GetSkiaGoldSession({}, 'corpus', 'instance') + session2 = sgsm.GetSkiaGoldSession({}, 'corpus', 'instance') + self.assertEqual(session1, session2) + # For some reason, session_mock.assert_called_once() always passes, + # so check the call count directly. 
+ self.assertEqual(session_mock.call_count, 1) + + @mock.patch.object(skia_gold_session.SkiaGoldSession, '__init__') + def test_separateSessionsFromKeys(self, session_mock): + session_mock.return_value = None + args = createSkiaGoldArgs() + sgp = skia_gold_properties.SkiaGoldProperties(args) + sgsm = skia_gold_session_manager.SkiaGoldSessionManager( + self._working_dir, sgp) + session1 = sgsm.GetSkiaGoldSession({}, 'corpus', 'instance') + session2 = sgsm.GetSkiaGoldSession({'something_different': 1}, 'corpus', + 'instance') + self.assertNotEqual(session1, session2) + self.assertEqual(session_mock.call_count, 2) + + @mock.patch.object(skia_gold_session.SkiaGoldSession, '__init__') + def test_separateSessionsFromCorpus(self, session_mock): + session_mock.return_value = None + args = createSkiaGoldArgs() + sgp = skia_gold_properties.SkiaGoldProperties(args) + sgsm = skia_gold_session_manager.SkiaGoldSessionManager( + self._working_dir, sgp) + session1 = sgsm.GetSkiaGoldSession({}, 'corpus1', 'instance') + session2 = sgsm.GetSkiaGoldSession({}, 'corpus2', 'instance') + self.assertNotEqual(session1, session2) + self.assertEqual(session_mock.call_count, 2) + + @mock.patch.object(skia_gold_session.SkiaGoldSession, '__init__') + def test_separateSessionsFromInstance(self, session_mock): + session_mock.return_value = None + args = createSkiaGoldArgs() + sgp = skia_gold_properties.SkiaGoldProperties(args) + self._working_dir = tempfile.mkdtemp() + sgsm = skia_gold_session_manager.SkiaGoldSessionManager( + self._working_dir, sgp) + session1 = sgsm.GetSkiaGoldSession({}, 'corpus', 'instance1') + session2 = sgsm.GetSkiaGoldSession({}, 'corpus', 'instance2') + self.assertNotEqual(session1, session2) + self.assertEqual(session_mock.call_count, 2) + + +class SkiaGoldSessionManagerKeyConversionTest(fake_filesystem_unittest.TestCase + ): + def setUp(self): + self.setUpPyfakefs() + self._working_dir = tempfile.mkdtemp() + + def test_getKeysAsDict(self): + keys_dict = {'foo': 'bar'} + keys_file_contents = {'bar': 'baz'} + keys_file = tempfile.NamedTemporaryFile(delete=False).name + with open(keys_file, 'w') as f: + json.dump(keys_file_contents, f) + + self.assertEqual(skia_gold_session_manager._GetKeysAsDict(keys_dict), + keys_dict) + self.assertEqual(skia_gold_session_manager._GetKeysAsDict(keys_file), + keys_file_contents) + with self.assertRaises(AssertionError): + skia_gold_session_manager._GetKeysAsDict(1) + + def test_getKeysAsJson(self): + keys_dict = {'foo': 'bar'} + keys_file_contents = {'bar': 'baz'} + keys_file = tempfile.NamedTemporaryFile(delete=False).name + with open(keys_file, 'w') as f: + json.dump(keys_file_contents, f) + + self.assertEqual(skia_gold_session_manager._GetKeysAsJson(keys_file, None), + keys_file) + keys_dict_as_json = skia_gold_session_manager._GetKeysAsJson( + keys_dict, self._working_dir) + self.assertTrue(keys_dict_as_json.startswith(self._working_dir)) + with open(keys_dict_as_json) as f: + self.assertEqual(json.load(f), keys_dict) + with self.assertRaises(AssertionError): + skia_gold_session_manager._GetKeysAsJson(1, None) + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/chromium/build/skia_gold_common/skia_gold_session_unittest.py b/chromium/build/skia_gold_common/skia_gold_session_unittest.py new file mode 100755 index 00000000000..65e353b72f1 --- /dev/null +++ b/chromium/build/skia_gold_common/skia_gold_session_unittest.py @@ -0,0 +1,649 @@ +#!/usr/bin/env vpython +# Copyright 2020 The Chromium Authors. All rights reserved. 
+# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +#pylint: disable=protected-access + +import json +import os +import tempfile +import unittest + +import mock + +from pyfakefs import fake_filesystem_unittest + +from skia_gold_common import skia_gold_properties +from skia_gold_common import skia_gold_session +from skia_gold_common import unittest_utils + +createSkiaGoldArgs = unittest_utils.createSkiaGoldArgs + + +def assertArgWith(test, arg_list, arg, value): + i = arg_list.index(arg) + test.assertEqual(arg_list[i + 1], value) + + +class SkiaGoldSessionRunComparisonTest(fake_filesystem_unittest.TestCase): + """Tests the functionality of SkiaGoldSession.RunComparison.""" + + def setUp(self): + self.setUpPyfakefs() + self._working_dir = tempfile.mkdtemp() + + @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Diff') + @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Compare') + @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Initialize') + @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Authenticate') + def test_comparisonSuccess(self, auth_mock, init_mock, compare_mock, + diff_mock): + auth_mock.return_value = (0, None) + init_mock.return_value = (0, None) + compare_mock.return_value = (0, None) + keys_file = os.path.join(self._working_dir, 'keys.json') + with open(os.path.join(self._working_dir, 'keys.json'), 'w') as f: + json.dump({}, f) + session = skia_gold_session.SkiaGoldSession(self._working_dir, None, + keys_file, None, None) + status, _ = session.RunComparison(None, None, None) + self.assertEqual(status, + skia_gold_session.SkiaGoldSession.StatusCodes.SUCCESS) + self.assertEqual(auth_mock.call_count, 1) + self.assertEqual(init_mock.call_count, 1) + self.assertEqual(compare_mock.call_count, 1) + self.assertEqual(diff_mock.call_count, 0) + + @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Diff') + @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Compare') + @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Initialize') + @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Authenticate') + def test_authFailure(self, auth_mock, init_mock, compare_mock, diff_mock): + auth_mock.return_value = (1, 'Auth failed') + session = skia_gold_session.SkiaGoldSession(self._working_dir, None, None, + None, None) + status, error = session.RunComparison(None, None, None) + self.assertEqual(status, + skia_gold_session.SkiaGoldSession.StatusCodes.AUTH_FAILURE) + self.assertEqual(error, 'Auth failed') + self.assertEqual(auth_mock.call_count, 1) + self.assertEqual(init_mock.call_count, 0) + self.assertEqual(compare_mock.call_count, 0) + self.assertEqual(diff_mock.call_count, 0) + + @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Diff') + @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Compare') + @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Initialize') + @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Authenticate') + def test_initFailure(self, auth_mock, init_mock, compare_mock, diff_mock): + auth_mock.return_value = (0, None) + init_mock.return_value = (1, 'Init failed') + session = skia_gold_session.SkiaGoldSession(self._working_dir, None, None, + None, None) + status, error = session.RunComparison(None, None, None) + self.assertEqual(status, + skia_gold_session.SkiaGoldSession.StatusCodes.INIT_FAILURE) + self.assertEqual(error, 'Init failed') + self.assertEqual(auth_mock.call_count, 1) + self.assertEqual(init_mock.call_count, 1) + 
self.assertEqual(compare_mock.call_count, 0) + self.assertEqual(diff_mock.call_count, 0) + + @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Diff') + @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Compare') + @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Initialize') + @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Authenticate') + def test_compareFailureRemote(self, auth_mock, init_mock, compare_mock, + diff_mock): + auth_mock.return_value = (0, None) + init_mock.return_value = (0, None) + compare_mock.return_value = (1, 'Compare failed') + args = createSkiaGoldArgs(local_pixel_tests=False) + sgp = skia_gold_properties.SkiaGoldProperties(args) + keys_file = os.path.join(self._working_dir, 'keys.json') + with open(os.path.join(self._working_dir, 'keys.json'), 'w') as f: + json.dump({}, f) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + keys_file, None, None) + status, error = session.RunComparison(None, None, None) + self.assertEqual( + status, + skia_gold_session.SkiaGoldSession.StatusCodes.COMPARISON_FAILURE_REMOTE) + self.assertEqual(error, 'Compare failed') + self.assertEqual(auth_mock.call_count, 1) + self.assertEqual(init_mock.call_count, 1) + self.assertEqual(compare_mock.call_count, 1) + self.assertEqual(diff_mock.call_count, 0) + + @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Diff') + @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Compare') + @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Initialize') + @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Authenticate') + def test_compareFailureLocal(self, auth_mock, init_mock, compare_mock, + diff_mock): + auth_mock.return_value = (0, None) + init_mock.return_value = (0, None) + compare_mock.return_value = (1, 'Compare failed') + diff_mock.return_value = (0, None) + args = createSkiaGoldArgs(local_pixel_tests=True) + sgp = skia_gold_properties.SkiaGoldProperties(args) + keys_file = os.path.join(self._working_dir, 'keys.json') + with open(os.path.join(self._working_dir, 'keys.json'), 'w') as f: + json.dump({}, f) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + keys_file, None, None) + status, error = session.RunComparison(None, None, + 'Definitely an output manager') + self.assertEqual( + status, + skia_gold_session.SkiaGoldSession.StatusCodes.COMPARISON_FAILURE_LOCAL) + self.assertEqual(error, 'Compare failed') + self.assertEqual(auth_mock.call_count, 1) + self.assertEqual(init_mock.call_count, 1) + self.assertEqual(compare_mock.call_count, 1) + self.assertEqual(diff_mock.call_count, 1) + + @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Diff') + @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Compare') + @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Initialize') + @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Authenticate') + def test_diffFailure(self, auth_mock, init_mock, compare_mock, diff_mock): + auth_mock.return_value = (0, None) + init_mock.return_value = (0, None) + compare_mock.return_value = (1, 'Compare failed') + diff_mock.return_value = (1, 'Diff failed') + args = createSkiaGoldArgs(local_pixel_tests=True) + sgp = skia_gold_properties.SkiaGoldProperties(args) + keys_file = os.path.join(self._working_dir, 'keys.json') + with open(os.path.join(self._working_dir, 'keys.json'), 'w') as f: + json.dump({}, f) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + keys_file, None, None) + status, error = session.RunComparison(None, None, + 'Definitely an output 
manager') + self.assertEqual( + status, + skia_gold_session.SkiaGoldSession.StatusCodes.LOCAL_DIFF_FAILURE) + self.assertEqual(error, 'Diff failed') + self.assertEqual(auth_mock.call_count, 1) + self.assertEqual(init_mock.call_count, 1) + self.assertEqual(compare_mock.call_count, 1) + self.assertEqual(diff_mock.call_count, 1) + + @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Diff') + @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Compare') + @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Initialize') + @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Authenticate') + def test_noOutputManagerLocal(self, auth_mock, init_mock, compare_mock, + diff_mock): + auth_mock.return_value = (0, None) + init_mock.return_value = (0, None) + compare_mock.return_value = (1, 'Compare failed') + diff_mock.return_value = (0, None) + args = createSkiaGoldArgs(local_pixel_tests=True) + sgp = skia_gold_properties.SkiaGoldProperties(args) + keys_file = os.path.join(self._working_dir, 'keys.json') + with open(os.path.join(self._working_dir, 'keys.json'), 'w') as f: + json.dump({}, f) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + keys_file, None, None) + status, error = session.RunComparison(None, None, None) + self.assertEqual( + status, skia_gold_session.SkiaGoldSession.StatusCodes.NO_OUTPUT_MANAGER) + self.assertEqual(error, 'No output manager for local diff images') + self.assertEqual(auth_mock.call_count, 1) + self.assertEqual(compare_mock.call_count, 1) + self.assertEqual(diff_mock.call_count, 0) + + +class SkiaGoldSessionAuthenticateTest(fake_filesystem_unittest.TestCase): + """Tests the functionality of SkiaGoldSession.Authenticate.""" + + def setUp(self): + self.setUpPyfakefs() + self._working_dir = tempfile.mkdtemp() + + @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') + def test_commandOutputReturned(self, cmd_mock): + cmd_mock.return_value = (1, 'Something bad :(') + args = createSkiaGoldArgs(git_revision='a') + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, None, + None, None) + rc, stdout = session.Authenticate() + self.assertEqual(cmd_mock.call_count, 1) + self.assertEqual(rc, 1) + self.assertEqual(stdout, 'Something bad :(') + + @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') + def test_bypassSkiaGoldFunctionality(self, cmd_mock): + cmd_mock.return_value = (None, None) + args = createSkiaGoldArgs(git_revision='a', + bypass_skia_gold_functionality=True) + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, None, + None, None) + rc, _ = session.Authenticate() + self.assertEqual(rc, 0) + cmd_mock.assert_not_called() + + @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') + def test_shortCircuitAlreadyAuthenticated(self, cmd_mock): + cmd_mock.return_value = (None, None) + args = createSkiaGoldArgs(git_revision='a') + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, None, + None, None) + session._authenticated = True + rc, _ = session.Authenticate() + self.assertEqual(rc, 0) + cmd_mock.assert_not_called() + + @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') + def test_successSetsShortCircuit(self, cmd_mock): + cmd_mock.return_value = (0, None) + args = createSkiaGoldArgs(git_revision='a') + sgp = 
skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, None, + None, None) + self.assertFalse(session._authenticated) + rc, _ = session.Authenticate() + self.assertEqual(rc, 0) + self.assertTrue(session._authenticated) + cmd_mock.assert_called_once() + + @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') + def test_failureDoesNotSetShortCircuit(self, cmd_mock): + cmd_mock.return_value = (1, None) + args = createSkiaGoldArgs(git_revision='a') + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, None, + None, None) + self.assertFalse(session._authenticated) + rc, _ = session.Authenticate() + self.assertEqual(rc, 1) + self.assertFalse(session._authenticated) + cmd_mock.assert_called_once() + + @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') + def test_commandWithUseLuciTrue(self, cmd_mock): + cmd_mock.return_value = (None, None) + args = createSkiaGoldArgs(git_revision='a') + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, None, + None, None) + session.Authenticate(use_luci=True) + self.assertIn('--luci', cmd_mock.call_args[0][0]) + + @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') + def test_commandWithUseLuciFalse(self, cmd_mock): + cmd_mock.return_value = (None, None) + args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=True) + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, None, + None, None) + session.Authenticate(use_luci=False) + self.assertNotIn('--luci', cmd_mock.call_args[0][0]) + + @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') + def test_commandWithUseLuciFalseNotLocal(self, cmd_mock): + cmd_mock.return_value = (None, None) + args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=False) + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, None, + None, None) + with self.assertRaises(RuntimeError): + session.Authenticate(use_luci=False) + + @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') + def test_commandCommonArgs(self, cmd_mock): + cmd_mock.return_value = (None, None) + args = createSkiaGoldArgs(git_revision='a') + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, None, + None, None) + session.Authenticate() + call_args = cmd_mock.call_args[0][0] + self.assertIn('auth', call_args) + assertArgWith(self, call_args, '--work-dir', self._working_dir) + + +class SkiaGoldSessionInitializeTest(fake_filesystem_unittest.TestCase): + """Tests the functionality of SkiaGoldSession.Initialize.""" + + def setUp(self): + self.setUpPyfakefs() + self._working_dir = tempfile.mkdtemp() + + @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') + def test_bypassSkiaGoldFunctionality(self, cmd_mock): + cmd_mock.return_value = (None, None) + args = createSkiaGoldArgs(git_revision='a', + bypass_skia_gold_functionality=True) + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, None, + None, None) + rc, _ = session.Initialize() + self.assertEqual(rc, 0) + cmd_mock.assert_not_called() + + 
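Every suite in this file stubs out SkiaGoldSession._RunCmdForRcAndOutput with a mock, since the base class deliberately leaves it as a NotImplementedError hook. For orientation, here is a minimal sketch of what a concrete override could look like, assuming plain subprocess semantics; SubprocessSkiaGoldSession is a hypothetical name, and the real Chromium subclasses (which live outside this patch) may differ:

import subprocess

from skia_gold_common import skia_gold_session


class SubprocessSkiaGoldSession(skia_gold_session.SkiaGoldSession):

  @staticmethod
  def _RunCmdForRcAndOutput(cmd):
    # Merge stderr into stdout so callers get a single output blob, matching
    # the (rc, output) contract documented on the base class.
    proc = subprocess.Popen(cmd,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT,
                            universal_newlines=True)
    output, _ = proc.communicate()
    return proc.returncode, output
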
@mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') + def test_shortCircuitAlreadyInitialized(self, cmd_mock): + cmd_mock.return_value = (None, None) + args = createSkiaGoldArgs(git_revision='a') + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, None, + None, None) + session._initialized = True + rc, _ = session.Initialize() + self.assertEqual(rc, 0) + cmd_mock.assert_not_called() + + @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') + def test_successSetsShortCircuit(self, cmd_mock): + cmd_mock.return_value = (0, None) + args = createSkiaGoldArgs(git_revision='a') + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, None, + None, None) + self.assertFalse(session._initialized) + rc, _ = session.Initialize() + self.assertEqual(rc, 0) + self.assertTrue(session._initialized) + cmd_mock.assert_called_once() + + @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') + def test_failureDoesNotSetShortCircuit(self, cmd_mock): + cmd_mock.return_value = (1, None) + args = createSkiaGoldArgs(git_revision='a') + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, None, + None, None) + self.assertFalse(session._initialized) + rc, _ = session.Initialize() + self.assertEqual(rc, 1) + self.assertFalse(session._initialized) + cmd_mock.assert_called_once() + + @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') + def test_commandCommonArgs(self, cmd_mock): + cmd_mock.return_value = (None, None) + args = createSkiaGoldArgs(git_revision='a') + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, + sgp, + 'keys_file', + 'corpus', + instance='instance') + session.Initialize() + call_args = cmd_mock.call_args[0][0] + self.assertIn('imgtest', call_args) + self.assertIn('init', call_args) + self.assertIn('--passfail', call_args) + assertArgWith(self, call_args, '--instance', 'instance') + assertArgWith(self, call_args, '--corpus', 'corpus') + assertArgWith(self, call_args, '--keys-file', 'keys_file') + assertArgWith(self, call_args, '--work-dir', self._working_dir) + assertArgWith(self, call_args, '--failure-file', session._triage_link_file) + assertArgWith(self, call_args, '--commit', 'a') + + @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') + def test_commandTryjobArgs(self, cmd_mock): + cmd_mock.return_value = (None, None) + args = createSkiaGoldArgs(git_revision='a', + gerrit_issue=1, + gerrit_patchset=2, + buildbucket_id=3) + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, None, + None, None) + session.Initialize() + call_args = cmd_mock.call_args[0][0] + assertArgWith(self, call_args, '--issue', '1') + assertArgWith(self, call_args, '--patchset', '2') + assertArgWith(self, call_args, '--jobid', '3') + assertArgWith(self, call_args, '--crs', 'gerrit') + assertArgWith(self, call_args, '--cis', 'buildbucket') + + @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') + def test_commandTryjobArgsMissing(self, cmd_mock): + cmd_mock.return_value = (None, None) + args = createSkiaGoldArgs(git_revision='a') + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = 
skia_gold_session.SkiaGoldSession(self._working_dir, sgp, None, + None, None) + session.Initialize() + call_args = cmd_mock.call_args[0][0] + self.assertNotIn('--issue', call_args) + self.assertNotIn('--patchset', call_args) + self.assertNotIn('--jobid', call_args) + self.assertNotIn('--crs', call_args) + self.assertNotIn('--cis', call_args) + + +class SkiaGoldSessionCompareTest(fake_filesystem_unittest.TestCase): + """Tests the functionality of SkiaGoldSession.Compare.""" + + def setUp(self): + self.setUpPyfakefs() + self._working_dir = tempfile.mkdtemp() + + @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') + def test_commandOutputReturned(self, cmd_mock): + cmd_mock.return_value = (1, 'Something bad :(') + args = createSkiaGoldArgs(git_revision='a') + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, None, + None, None) + rc, stdout = session.Compare(None, None) + self.assertEqual(cmd_mock.call_count, 1) + self.assertEqual(rc, 1) + self.assertEqual(stdout, 'Something bad :(') + + @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') + def test_bypassSkiaGoldFunctionality(self, cmd_mock): + cmd_mock.return_value = (None, None) + args = createSkiaGoldArgs(git_revision='a', + bypass_skia_gold_functionality=True) + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, None, + None, None) + rc, _ = session.Compare(None, None) + self.assertEqual(rc, 0) + cmd_mock.assert_not_called() + + @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') + def test_commandWithLocalPixelTestsTrue(self, cmd_mock): + cmd_mock.return_value = (None, None) + args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=True) + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, None, + None, None) + session.Compare(None, None) + self.assertIn('--dryrun', cmd_mock.call_args[0][0]) + + @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') + def test_commandWithLocalPixelTestsFalse(self, cmd_mock): + cmd_mock.return_value = (None, None) + args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=False) + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, None, + None, None) + session.Compare(None, None) + self.assertNotIn('--dryrun', cmd_mock.call_args[0][0]) + + @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') + def test_commandCommonArgs(self, cmd_mock): + cmd_mock.return_value = (None, None) + args = createSkiaGoldArgs(git_revision='a') + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, + sgp, + 'keys_file', + 'corpus', + instance='instance') + session.Compare('name', 'png_file') + call_args = cmd_mock.call_args[0][0] + self.assertIn('imgtest', call_args) + self.assertIn('add', call_args) + assertArgWith(self, call_args, '--test-name', 'name') + assertArgWith(self, call_args, '--png-file', 'png_file') + assertArgWith(self, call_args, '--work-dir', self._working_dir) + + @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') + def test_noLinkOnSuccess(self, cmd_mock): + cmd_mock.return_value = (0, None) + args = createSkiaGoldArgs(git_revision='a') + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = 
skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + 'keys_file', None, None) + rc, _ = session.Compare('name', 'png_file') + self.assertEqual(rc, 0) + self.assertEqual(session._comparison_results['name'].triage_link, None) + self.assertNotEqual( + session._comparison_results['name'].triage_link_omission_reason, None) + + @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') + def test_clLinkOnTrybot(self, cmd_mock): + cmd_mock.return_value = (1, None) + args = createSkiaGoldArgs(git_revision='a', + gerrit_issue=1, + gerrit_patchset=2, + buildbucket_id=3) + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, + sgp, + 'keys_file', + None, + instance='instance') + rc, _ = session.Compare('name', 'png_file') + self.assertEqual(rc, 1) + self.assertNotEqual(session._comparison_results['name'].triage_link, None) + self.assertEqual(session._comparison_results['name'].triage_link, + 'https://instance-gold.skia.org/cl/gerrit/1') + self.assertEqual( + session._comparison_results['name'].triage_link_omission_reason, None) + + @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') + def test_individualLinkOnCi(self, cmd_mock): + args = createSkiaGoldArgs(git_revision='a') + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + 'keys_file', None, None) + + def WriteTriageLinkFile(_): + with open(session._triage_link_file, 'w') as f: + f.write('foobar') + return (1, None) + + cmd_mock.side_effect = WriteTriageLinkFile + rc, _ = session.Compare('name', 'png_file') + self.assertEqual(rc, 1) + self.assertNotEqual(session._comparison_results['name'].triage_link, None) + self.assertEqual(session._comparison_results['name'].triage_link, 'foobar') + self.assertEqual( + session._comparison_results['name'].triage_link_omission_reason, None) + + @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') + def test_validOmissionOnIoError(self, cmd_mock): + cmd_mock.return_value = (1, None) + args = createSkiaGoldArgs(git_revision='a') + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + 'keys_file', None, None) + + def DeleteTriageLinkFile(_): + os.remove(session._triage_link_file) + return (1, None) + + cmd_mock.side_effect = DeleteTriageLinkFile + rc, _ = session.Compare('name', 'png_file') + self.assertEqual(rc, 1) + self.assertEqual(session._comparison_results['name'].triage_link, None) + self.assertNotEqual( + session._comparison_results['name'].triage_link_omission_reason, None) + self.assertIn( + 'Failed to read', + session._comparison_results['name'].triage_link_omission_reason) + + +class SkiaGoldSessionDiffTest(fake_filesystem_unittest.TestCase): + """Tests the functionality of SkiaGoldSession.Diff.""" + + def setUp(self): + self.setUpPyfakefs() + self._working_dir = tempfile.mkdtemp() + + @mock.patch.object(skia_gold_session.SkiaGoldSession, '_StoreDiffLinks') + @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') + def test_commandOutputReturned(self, cmd_mock, _): + cmd_mock.return_value = (1, 'Something bad :(') + args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=False) + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, None, + None, None) + rc, stdout = session.Diff(None, None, None) + 
self.assertEqual(cmd_mock.call_count, 1) + self.assertEqual(rc, 1) + self.assertEqual(stdout, 'Something bad :(') + + @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') + def test_bypassSkiaGoldFunctionality(self, cmd_mock): + cmd_mock.return_value = (None, None) + args = createSkiaGoldArgs(git_revision='a', + bypass_skia_gold_functionality=True) + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, None, + None, None) + with self.assertRaises(RuntimeError): + session.Diff(None, None, None) + + +class SkiaGoldSessionTriageLinkOmissionTest(fake_filesystem_unittest.TestCase): + """Tests the functionality of SkiaGoldSession.GetTriageLinkOmissionReason.""" + + def setUp(self): + self.setUpPyfakefs() + self._working_dir = tempfile.mkdtemp() + + def _CreateSession(self): + session = skia_gold_session.SkiaGoldSession(self._working_dir, None, None, + None, None) + session._comparison_results = { + 'foo': skia_gold_session.SkiaGoldSession.ComparisonResults(), + } + return session + + def test_noComparison(self): + session = self._CreateSession() + session._comparison_results = {} + reason = session.GetTriageLinkOmissionReason('foo') + self.assertEqual(reason, 'No image comparison performed for foo') + + def test_validReason(self): + session = self._CreateSession() + session._comparison_results['foo'].triage_link_omission_reason = 'bar' + reason = session.GetTriageLinkOmissionReason('foo') + self.assertEqual(reason, 'bar') + + def test_onlyLocal(self): + session = self._CreateSession() + session._comparison_results['foo'].local_diff_given_image = 'bar' + reason = session.GetTriageLinkOmissionReason('foo') + self.assertEqual(reason, 'Gold only used to do a local image diff') + + def test_onlyWithoutTriageLink(self): + session = self._CreateSession() + session._comparison_results['foo'].triage_link = 'bar' + with self.assertRaises(AssertionError): + session.GetTriageLinkOmissionReason('foo') + + def test_resultsShouldNotExist(self): + session = self._CreateSession() + with self.assertRaises(RuntimeError): + session.GetTriageLinkOmissionReason('foo') + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/chromium/build/skia_gold_common/unittest_utils.py b/chromium/build/skia_gold_common/unittest_utils.py new file mode 100644 index 00000000000..e57498442e5 --- /dev/null +++ b/chromium/build/skia_gold_common/unittest_utils.py @@ -0,0 +1,28 @@ +# Copyright 2020 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+"""Utility methods for Skia Gold functionality unittests.""" + +import collections + +_SkiaGoldArgs = collections.namedtuple('_SkiaGoldArgs', [ + 'local_pixel_tests', + 'no_luci_auth', + 'git_revision', + 'gerrit_issue', + 'gerrit_patchset', + 'buildbucket_id', + 'bypass_skia_gold_functionality', +]) + + +def createSkiaGoldArgs(local_pixel_tests=None, + no_luci_auth=None, + git_revision=None, + gerrit_issue=None, + gerrit_patchset=None, + buildbucket_id=None, + bypass_skia_gold_functionality=None): + return _SkiaGoldArgs(local_pixel_tests, no_luci_auth, git_revision, + gerrit_issue, gerrit_patchset, buildbucket_id, + bypass_skia_gold_functionality) diff --git a/chromium/build/toolchain/OWNERS b/chromium/build/toolchain/OWNERS index 0a8dcda56c5..37c7730cbcf 100644 --- a/chromium/build/toolchain/OWNERS +++ b/chromium/build/toolchain/OWNERS @@ -1,4 +1,5 @@ dpranke@chromium.org +dpranke@google.com scottmg@chromium.org # Clang Static Analyzer. diff --git a/chromium/build/toolchain/gcc_solink_wrapper.py b/chromium/build/toolchain/gcc_solink_wrapper.py index 5bb7b9513f0..66b7f0cad6c 100755 --- a/chromium/build/toolchain/gcc_solink_wrapper.py +++ b/chromium/build/toolchain/gcc_solink_wrapper.py @@ -12,6 +12,7 @@ does not have a POSIX-like shell (e.g. Windows). import argparse import os +import shlex import subprocess import sys @@ -22,7 +23,10 @@ def CollectSONAME(args): """Replaces: readelf -d $sofile | grep SONAME""" toc = '' readelf = subprocess.Popen(wrapper_utils.CommandToRun( - [args.readelf, '-d', args.sofile]), stdout=subprocess.PIPE, bufsize=-1) + [args.readelf, '-d', args.sofile]), + stdout=subprocess.PIPE, + bufsize=-1, + universal_newlines=True) for line in readelf.stdout: if 'SONAME' in line: toc += line @@ -32,11 +36,11 @@ def CollectSONAME(args): def CollectDynSym(args): """Replaces: nm --format=posix -g -D -p $sofile | cut -f1-2 -d' '""" toc = '' - nm = subprocess.Popen( - wrapper_utils.CommandToRun( - [args.nm, '--format=posix', '-g', '-D', '-p', args.sofile]), - stdout=subprocess.PIPE, - bufsize=-1) + nm = subprocess.Popen(wrapper_utils.CommandToRun( + [args.nm, '--format=posix', '-g', '-D', '-p', args.sofile]), + stdout=subprocess.PIPE, + bufsize=-1, + universal_newlines=True) for line in nm.stdout: toc += ' '.join(line.split(' ', 2)[:2]) + '\n' return nm.wait(), toc @@ -59,6 +63,23 @@ def UpdateTOC(tocfile, toc): open(tocfile, 'w').write(toc) +def CollectInputs(out, args): + for x in args: + if x.startswith('@'): + with open(x[1:]) as rsp: + CollectInputs(out, shlex.split(rsp.read())) + elif not x.startswith('-') and (x.endswith('.o') or x.endswith('.a')): + out.write(x) + out.write('\n') + + +def InterceptFlag(flag, command): + ret = flag in command + if ret: + command.remove(flag) + return ret + + def main(): parser = argparse.ArgumentParser(description=__doc__) parser.add_argument('--readelf', @@ -96,21 +117,10 @@ def main(): fast_env = dict(os.environ) fast_env['LC_ALL'] = 'C' - # Extract the --link-only argument, which goes for a ride through ldflags into - # the command, but is meant to be intercepted by this wrapper script (not - # passed to the linker). https://crbug.com/954311 tracks finding a better way - # to plumb this argument. - link_only = '--link-only' in args.command - if link_only: - args.command.remove('--link-only') - - # First, run the actual link. 
- command = wrapper_utils.CommandToRun(args.command) - result = wrapper_utils.RunLinkWithOptionalMapFile(command, env=fast_env, - map_file=args.map_file) - - if result != 0: - return result + # Extract flags passed through ldflags but meant for this script. + # https://crbug.com/954311 tracks finding a better way to plumb these. + link_only = InterceptFlag('--link-only', args.command) + collect_inputs_only = InterceptFlag('--collect-inputs-only', args.command) # If only linking, we are likely generating a partitioned .so that will be # split apart later. In that case: @@ -125,13 +135,29 @@ def main(): # tools would need to be updated to handle and/or not complain about # partitioned libraries. Instead, to keep Ninja happy, simply create dummy # files for the TOC and stripped lib. - if link_only: - with open(args.output, 'w'): - pass - with open(args.tocfile, 'w'): - pass + if link_only or collect_inputs_only: + open(args.output, 'w').close() + open(args.tocfile, 'w').close() + + # Instead of linking, records all inputs to a file. This is used by + # enable_resource_whitelist_generation in order to avoid needing to + # link (which is slow) to build the resources whitelist. + if collect_inputs_only: + with open(args.sofile, 'w') as f: + CollectInputs(f, args.command) + if args.map_file: + open(args.map_file, 'w').close() return 0 + # First, run the actual link. + command = wrapper_utils.CommandToRun(args.command) + result = wrapper_utils.RunLinkWithOptionalMapFile(command, + env=fast_env, + map_file=args.map_file) + + if result != 0 or link_only: + return result + # Next, generate the contents of the TOC file. result, toc = CollectTOC(args) if result != 0: diff --git a/chromium/build/toolchain/gcc_toolchain.gni b/chromium/build/toolchain/gcc_toolchain.gni index 775c35c8780..1c7bf7e7ab8 100644 --- a/chromium/build/toolchain/gcc_toolchain.gni +++ b/chromium/build/toolchain/gcc_toolchain.gni @@ -171,7 +171,7 @@ template("gcc_toolchain") { # Chrome OS builders. So we pass in an explicit value. 
goma_ld = rebase_path("//tools/clang/scripts/goma_ld.py", root_build_dir) + - " --gomacc ${goma_path} --jobs 100 -- " + " --gomacc ${goma_path} --jobs 200 -- " } else { goma_ld = "" not_needed([ "goma_path" ]) diff --git a/chromium/build/toolchain/mac/BUILD.gn b/chromium/build/toolchain/mac/BUILD.gn index add57c71bbe..8fdada0f022 100644 --- a/chromium/build/toolchain/mac/BUILD.gn +++ b/chromium/build/toolchain/mac/BUILD.gn @@ -477,6 +477,13 @@ mac_toolchain("clang_arm") { } } +mac_toolchain("clang_arm64") { + toolchain_args = { + current_cpu = "arm64" + current_os = "mac" + } +} + mac_toolchain("clang_x64") { toolchain_args = { current_cpu = "x64" diff --git a/chromium/build/toolchain/mac/linker_driver.py b/chromium/build/toolchain/mac/linker_driver.py index e5170505853..453259a40d5 100755 --- a/chromium/build/toolchain/mac/linker_driver.py +++ b/chromium/build/toolchain/mac/linker_driver.py @@ -278,8 +278,8 @@ _LINKER_DRIVER_ACTIONS = [ ('dsymutilpath,', SetDsymutilPath), ('dsym,', RunDsymUtil), ('unstripped,', RunSaveUnstripped), - ('strip,', RunStrip), ('strippath,', SetStripPath), + ('strip,', RunStrip), ] diff --git a/chromium/build/toolchain/toolchain.gni b/chromium/build/toolchain/toolchain.gni index 552ceb67e65..80c2e7b5e4a 100644 --- a/chromium/build/toolchain/toolchain.gni +++ b/chromium/build/toolchain/toolchain.gni @@ -31,15 +31,15 @@ if (generate_linker_map) { is_official_build, "Linker map files should only be generated when is_official_build = true") assert(current_os == "android" || current_os == "linux" || - target_os == "android" || target_os == "linux", - "Linker map files should only be generated for Android and Linux") + target_os == "android" || target_os == "linux" || + target_os == "chromeos", + "Linker map files should only be generated for Android, Linux, " + + "or ChromeOS.") } declare_args() { - if (is_clang) { - # Clang compiler version. Clang files are placed at version-dependent paths. - clang_version = "11.0.0" - } + # Clang compiler version. Clang files are placed at version-dependent paths. + clang_version = "11.0.0" } # Check target_os here instead of is_ios as this file is loaded for secondary diff --git a/chromium/build/toolchain/win/BUILD.gn b/chromium/build/toolchain/win/BUILD.gn index 0c356e16296..be0893059aa 100644 --- a/chromium/build/toolchain/win/BUILD.gn +++ b/chromium/build/toolchain/win/BUILD.gn @@ -446,12 +446,20 @@ template("win_toolchains") { environment = "environment." + toolchain_arch prefix = rebase_path("$clang_base_path/bin", root_build_dir) cl = "${clang_prefix}$prefix/${clang_cl}" + _clang_lib_dir = + rebase_path("$clang_base_path/lib/clang/$clang_version/lib/windows", + root_build_dir) if (host_os == "win") { # Flip the slashes so that copy/paste of the command works. cl = string_replace(cl, "/", "\\") + + # And to match the other -libpath flags. 
+ _clang_lib_dir = string_replace(_clang_lib_dir, "/", "\\") } + sys_include_flags = "${win_toolchain_data.include_flags_imsvc}" - sys_lib_flags = "${win_toolchain_data.libpath_flags}" + sys_lib_flags = + "-libpath:$_clang_lib_dir ${win_toolchain_data.libpath_flags}" toolchain_args = { if (defined(invoker.toolchain_args)) { diff --git a/chromium/build/toolchain/win/midl.gni b/chromium/build/toolchain/win/midl.gni index ee7dc479e88..72e79ebaa35 100644 --- a/chromium/build/toolchain/win/midl.gni +++ b/chromium/build/toolchain/win/midl.gni @@ -123,6 +123,8 @@ template("midl") { dlldata_file, interface_identifier_file, proxy_file, + rebase_path("//third_party/llvm-build/Release+Asserts/bin/clang-cl.exe", + root_build_dir), "{{source}}", "/char", "signed", diff --git a/chromium/build/toolchain/win/midl.py b/chromium/build/toolchain/win/midl.py index 6d6fab05a50..5483173e58c 100644 --- a/chromium/build/toolchain/win/midl.py +++ b/chromium/build/toolchain/win/midl.py @@ -173,8 +173,8 @@ def overwrite_cls_guid(h_file, iid_file, tlb_file, dynamic_guid): overwrite_cls_guid_tlb(tlb_file, dynamic_guid) -def main(arch, gendir, outdir, dynamic_guid, tlb, h, dlldata, iid, proxy, idl, - *flags): +def main(arch, gendir, outdir, dynamic_guid, tlb, h, dlldata, iid, proxy, clang, + idl, *flags): # Copy checked-in outputs to final location. source = gendir if os.path.isdir(os.path.join(source, os.path.basename(idl))): @@ -204,6 +204,10 @@ def main(arch, gendir, outdir, dynamic_guid, tlb, h, dlldata, iid, proxy, idl, env_pairs = open(arch).read()[:-2].split('\0') env_dict = dict([item.split('=', 1) for item in env_pairs]) + # Extract the /D options and send them to the preprocessor. + preprocessor_options = '-E -nologo -Wno-nonportable-include-path' + preprocessor_options += ''.join( + [' ' + flag for flag in flags if flag.startswith('/D')]) args = ['midl', '/nologo'] + list(flags) + [ '/out', tmp_dir, '/tlb', tlb, @@ -211,6 +215,8 @@ def main(arch, gendir, outdir, dynamic_guid, tlb, h, dlldata, iid, proxy, idl, '/dlldata', dlldata, '/iid', iid, '/proxy', proxy, + '/cpp_cmd', clang, + '/cpp_opt', preprocessor_options, idl] try: popen = subprocess.Popen(args, shell=True, env=env_dict, diff --git a/chromium/build/toolchain/win/setup_toolchain.py b/chromium/build/toolchain/win/setup_toolchain.py index 9c936c69d68..1a7c3d74b5c 100644 --- a/chromium/build/toolchain/win/setup_toolchain.py +++ b/chromium/build/toolchain/win/setup_toolchain.py @@ -153,10 +153,14 @@ def _LoadToolchainEnv(cpu, sdk_dir, target_store): if (cpu != 'x64'): # x64 is default target CPU thus any other CPU requires a target set cpu_arg += '_' + cpu - args = [script_path, cpu_arg] + args = [script_path, cpu_arg, ] # Store target must come before any SDK version declaration if (target_store): - args.append(['store']) + args.append('store') + # Explicitly specifying the SDK version to build with to avoid accidentally + # building with a new and untested SDK. This should stay in sync with the + # packaged toolchain in build/vs_toolchain.py. 
+ args.append('10.0.19041.0') variables = _LoadEnvFromBat(args) return _ExtractImportantEnvironment(variables) diff --git a/chromium/build/util/LASTCHANGE b/chromium/build/util/LASTCHANGE index 0cd32ed4c93..c6189029d20 100644 --- a/chromium/build/util/LASTCHANGE +++ b/chromium/build/util/LASTCHANGE @@ -1 +1 @@ -LASTCHANGE=80c974bf7990b9735a8e885046fc5c9b1da4796c-refs/branch-heads/4147@{#1132} +LASTCHANGE=59840fa678c084e98201a428c7db996326e0c749-refs/branch-heads/4183@{#1935} diff --git a/chromium/build/util/LASTCHANGE.committime b/chromium/build/util/LASTCHANGE.committime index 4537d7c2887..5bb262b8388 100644 --- a/chromium/build/util/LASTCHANGE.committime +++ b/chromium/build/util/LASTCHANGE.committime @@ -1 +1 @@ -1600724114
\ No newline at end of file +1601597474
\ No newline at end of file diff --git a/chromium/build/vs_toolchain.py b/chromium/build/vs_toolchain.py index f3557ad32ee..e60770e9b4e 100755 --- a/chromium/build/vs_toolchain.py +++ b/chromium/build/vs_toolchain.py @@ -164,7 +164,8 @@ def GetVisualStudioVersion(): '/Microsoft Visual Studio/%s' % version) if path and any( os.path.exists(os.path.join(path, edition)) - for edition in ('Enterprise', 'Professional', 'Community', 'Preview')): + for edition in ('Enterprise', 'Professional', 'Community', 'Preview', + 'BuildTools')): available_versions.append(version) break @@ -199,6 +200,9 @@ def DetectVisualStudioPath(): version_as_year), os.path.expandvars('%ProgramFiles(x86)%' + '/Microsoft Visual Studio/%s/Preview' % + version_as_year), + os.path.expandvars('%ProgramFiles(x86)%' + + '/Microsoft Visual Studio/%s/BuildTools' % version_as_year)): if path and os.path.exists(path): return path @@ -245,7 +249,8 @@ def _SortByHighestVersionNumberFirst(list_of_str_versions): list_of_str_versions.sort(key=to_number_sequence, reverse=True) -def _CopyUCRTRuntime(target_dir, source_dir, target_cpu, dll_pattern, suffix): + +def _CopyUCRTRuntime(target_dir, source_dir, target_cpu, suffix): """Copy both the msvcp and vccorlib runtime DLLs, only if the target doesn't exist, but the target directory does exist.""" if target_cpu == 'arm64': @@ -263,8 +268,11 @@ def _CopyUCRTRuntime(target_dir, source_dir, target_cpu, dll_pattern, suffix): .format(MSVC_TOOLSET_VERSION[GetVisualStudioVersion()]) source_dir = os.path.join(vc_redist_root, 'debug_nonredist', 'arm64', vc_toolset_dir) - for file_part in ('msvcp', 'vccorlib', 'vcruntime'): - dll = dll_pattern % file_part + file_parts = ('msvcp140', 'vccorlib140', 'vcruntime140') + if target_cpu == 'x64' and GetVisualStudioVersion() != '2017': + file_parts = file_parts + ('vcruntime140_1', ) + for file_part in file_parts: + dll = file_part + suffix target = os.path.join(target_dir, dll) source = os.path.join(source_dir, dll) _CopyRuntimeImpl(target, source) @@ -350,8 +358,7 @@ def _CopyRuntime(target_dir, source_dir, target_cpu, debug): directory does exist. Handles VS 2015, 2017 and 2019.""" suffix = 'd.dll' if debug else '.dll' # VS 2015, 2017 and 2019 use the same CRT DLLs. - _CopyUCRTRuntime(target_dir, source_dir, target_cpu, '%s140' + suffix, - suffix) + _CopyUCRTRuntime(target_dir, source_dir, target_cpu, suffix) def CopyDlls(target_dir, configuration, target_cpu): @@ -402,16 +409,19 @@ def _CopyDebugger(target_dir, target_cpu): # List of debug files that should be copied, the first element of the tuple is # the name of the file and the second indicates if it's optional. debug_files = [('dbghelp.dll', False), ('dbgcore.dll', True)] + # The UCRT is not a redistributable component on arm64. + if target_cpu != 'arm64': + debug_files.extend([('api-ms-win-downlevel-kernel32-l2-1-0.dll', False), + ('api-ms-win-eventing-provider-l1-1-0.dll', False)]) for debug_file, is_optional in debug_files: full_path = os.path.join(win_sdk_dir, 'Debuggers', target_cpu, debug_file) if not os.path.exists(full_path): if is_optional: continue else: - # TODO(crbug.com/773476): remove version requirement. raise Exception('%s not found in "%s"\r\nYou must install the ' '"Debugging Tools for Windows" feature from the Windows' - ' 10 SDK.' + ' 10 SDK, the 10.0.19041.0 version.' 
% (debug_file, full_path)) target_path = os.path.join(target_dir, debug_file) _CopyRuntimeImpl(target_path, full_path) @@ -431,12 +441,11 @@ def _GetDesiredVsToolchainHashes(): * //docs/windows_build_instructions.md mentions of VS or Windows SDK. Keeps the document consistent with the toolchain version. """ - # VS 2019 Update 9 (16.3.29324.140) with 10.0.18362 SDK, 10.0.17763 version of - # Debuggers, and 10.0.17134 version of d3dcompiler_47.dll, with ARM64 - # libraries and UWP support. + # VS 2019 16.6.1 with 10.0.19041 SDK, and 10.0.17134 version of + # d3dcompiler_47.dll, with ARM64 libraries and UWP support. # See go/chromium-msvc-toolchain for instructions about how to update the # toolchain. - toolchain_hash = '9ff60e43ba91947baca460d0ca3b1b980c3a2c23' + toolchain_hash = 'a687d8e2e4114d9015eb550e1b156af21381faac' # Third parties that do not have access to the canonical toolchain can map # canonical toolchain version to their own toolchain versions. toolchain_hash_mapping_key = 'GYP_MSVS_HASH_%s' % toolchain_hash diff --git a/chromium/build/win/BUILD.gn b/chromium/build/win/BUILD.gn index 9be769fd075..d449f592519 100644 --- a/chromium/build/win/BUILD.gn +++ b/chromium/build/win/BUILD.gn @@ -79,6 +79,7 @@ if (is_win) { "$root_out_dir/cdb/api-ms-win-crt-string-l1-1-0.dll", "$root_out_dir/cdb/api-ms-win-crt-time-l1-1-0.dll", "$root_out_dir/cdb/api-ms-win-crt-utility-l1-1-0.dll", + "$root_out_dir/cdb/api-ms-win-downlevel-kernel32-l2-1-0.dll", "$root_out_dir/cdb/api-ms-win-eventing-provider-l1-1-0.dll", "$root_out_dir/cdb/ucrtbase.dll", ] @@ -108,6 +109,9 @@ if (is_win) { "$root_out_dir/vccorlib140${vcrt_suffix}.dll", "$root_out_dir/vcruntime140${vcrt_suffix}.dll", ] + if (current_cpu == "x64") { + data += [ "$root_out_dir/vcruntime140_1${vcrt_suffix}.dll" ] + } if (is_debug) { data += [ "$root_out_dir/ucrtbased.dll" ] } @@ -159,6 +163,8 @@ if (is_win) { "$root_out_dir/api-ms-win-crt-string-l1-1-0.dll", "$root_out_dir/api-ms-win-crt-time-l1-1-0.dll", "$root_out_dir/api-ms-win-crt-utility-l1-1-0.dll", + "$root_out_dir/api-ms-win-downlevel-kernel32-l2-1-0.dll", + "$root_out_dir/api-ms-win-eventing-provider-l1-1-0.dll", ] if (!is_debug) { data += [ "$root_out_dir/ucrtbase.dll" ] diff --git a/chromium/build/win/segment_heap.manifest b/chromium/build/win/segment_heap.manifest new file mode 100644 index 00000000000..fc930a4d563 --- /dev/null +++ b/chromium/build/win/segment_heap.manifest @@ -0,0 +1,8 @@ +<?xml version="1.0" encoding="UTF-8" standalone="yes"?> +<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0"> + <application xmlns="urn:schemas-microsoft-com:asm.v3"> + <windowsSettings> + <heapType xmlns="http://schemas.microsoft.com/SMI/2020/WindowsSettings">SegmentHeap</heapType> + </windowsSettings> + </application> +</assembly>
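Taken together, the new skia_gold_common modules form a small framework: SkiaGoldProperties carries the build/CL metadata, SkiaGoldSessionManager lazily creates and caches sessions keyed by (instance, corpus, keys), and SkiaGoldSession drives goldctl. A sketch of how a test harness might wire them up; ExampleSessionManager, 'example-gold', and 'example-corpus' are illustrative names that do not appear in this patch:

import tempfile

from skia_gold_common import skia_gold_properties
from skia_gold_common import skia_gold_session
from skia_gold_common import skia_gold_session_manager
from skia_gold_common import unittest_utils


class ExampleSessionManager(skia_gold_session_manager.SkiaGoldSessionManager):
  # Both overrides are mandatory; the base class raises NotImplementedError.

  @staticmethod
  def _GetDefaultInstance():
    return 'example-gold'

  @staticmethod
  def _GetSessionClass():
    return skia_gold_session.SkiaGoldSession


args = unittest_utils.createSkiaGoldArgs(git_revision='a')
sgp = skia_gold_properties.SkiaGoldProperties(args)
sgsm = ExampleSessionManager(tempfile.mkdtemp(), sgp)
# Identical (keys, corpus, instance) combinations return the same cached
# session; the corpus falls back to keys['source_type'], then the instance.
session = sgsm.GetSkiaGoldSession({'source_type': 'example-corpus'})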
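The status codes exercised by SkiaGoldSessionRunComparisonTest above imply the dispatch a caller performs after RunComparison. A sketch reusing |session| from the previous example, with 'name' and 'img.png' as placeholder inputs; passing None as the output manager is, per the tests, only rejected when a local diff actually has to be stored:

codes = skia_gold_session.SkiaGoldSession.StatusCodes
status, error = session.RunComparison('name', 'img.png', None)
if status == codes.SUCCESS:
  pass  # The image matched a known-good baseline.
elif status == codes.COMPARISON_FAILURE_REMOTE:
  # On the bots, surface the triage link, or the reason it is missing.
  link = session.GetTriageLink('name')
  print(link or session.GetTriageLinkOmissionReason('name'))
elif status == codes.COMPARISON_FAILURE_LOCAL:
  # Locally there is no web UI; point developers at the stored images.
  print(session.GetDiffImageLink('name'))
else:
  print('Gold failure (%s): %s' % (status, error))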
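Separately, the --collect-inputs-only path added to gcc_solink_wrapper.py above hinges on CollectInputs expanding @response-files recursively. A self-contained illustration; the function body is copied from the wrapper, while the .rsp contents are made up:

import shlex
import sys
import tempfile


def CollectInputs(out, args):
  # As in gcc_solink_wrapper.py: expand @rsp files recursively and record
  # anything that looks like an object file or static archive.
  for x in args:
    if x.startswith('@'):
      with open(x[1:]) as rsp:
        CollectInputs(out, shlex.split(rsp.read()))
    elif not x.startswith('-') and (x.endswith('.o') or x.endswith('.a')):
      out.write(x)
      out.write('\n')


with tempfile.NamedTemporaryFile(mode='w', suffix='.rsp', delete=False) as f:
  f.write('foo.o bar.a -Wl,--gc-sections')
CollectInputs(sys.stdout, ['-shared', '@' + f.name, 'baz.o'])
# Prints foo.o, bar.a, and baz.o, one per line; flags never make the list.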