summaryrefslogtreecommitdiff
path: root/chromium/build/android/gyp/compile_resources.py
diff options
context:
space:
mode:
Diffstat (limited to 'chromium/build/android/gyp/compile_resources.py')
-rwxr-xr-xchromium/build/android/gyp/compile_resources.py157
1 files changed, 76 insertions, 81 deletions
diff --git a/chromium/build/android/gyp/compile_resources.py b/chromium/build/android/gyp/compile_resources.py
index 2ca4ec781c8..eece2eb3fea 100755
--- a/chromium/build/android/gyp/compile_resources.py
+++ b/chromium/build/android/gyp/compile_resources.py
@@ -18,7 +18,6 @@ import contextlib
import filecmp
import hashlib
import logging
-import multiprocessing.dummy
import os
import re
import shutil
@@ -26,7 +25,6 @@ import subprocess
import sys
import tempfile
import textwrap
-import time
import zipfile
from xml.etree import ElementTree
@@ -34,9 +32,11 @@ from util import build_utils
from util import diff_utils
from util import manifest_utils
from util import md5_check
+from util import parallel
from util import protoresources
from util import resource_utils
+
# Pngs that we shouldn't convert to webp. Please add rationale when updating.
_PNG_WEBP_EXCLUSION_PATTERN = re.compile('|'.join([
# Crashes on Galaxy S5 running L (https://crbug.com/807059).
@@ -546,68 +546,64 @@ def _CreateKeepPredicate(resource_exclusion_regex,
build_utils.MatchesGlob(path, resource_exclusion_exceptions))
-def _ConvertToWebP(webp_binary, png_paths, path_info, webp_cache_dir):
- pool = multiprocessing.dummy.Pool(10)
+def _ComputeSha1(path):
+ with open(path, 'rb') as f:
+ data = f.read()
+ return hashlib.sha1(data).hexdigest()
- build_utils.MakeDirectory(webp_cache_dir)
- cwebp_version = subprocess.check_output([webp_binary, '-version']).rstrip()
- cwebp_arguments = ['-mt', '-quiet', '-m', '6', '-q', '100', '-lossless']
+def _ConvertToWebPSingle(png_path, cwebp_binary, cwebp_version, webp_cache_dir):
+ sha1_hash = _ComputeSha1(png_path)
- sha1_time = [0]
- cwebp_time = [0]
- cache_hits = [0]
+ # The set of arguments that will appear in the cache key.
+ quality_args = ['-m', '6', '-q', '100', '-lossless']
- def cal_sha1(png_path):
- start = time.time()
- with open(png_path, 'rb') as f:
- png_content = f.read()
+ webp_cache_path = os.path.join(
+ webp_cache_dir, '{}-{}-{}'.format(sha1_hash, cwebp_version,
+ ''.join(quality_args)))
+ # No need to add the .webp extension. Android can load images fine without it.
+ webp_path = os.path.splitext(png_path)[0]
- sha1_hex = hashlib.sha1(png_content).hexdigest()
- sha1_time[0] += time.time() - start
- return sha1_hex
+ cache_hit = os.path.exists(webp_cache_path)
+ if cache_hit:
+ os.link(webp_cache_path, webp_path)
+ else:
+ # We place the generated webp image at webp_path, instead of in the
+ # webp_cache_dir, to avoid concurrency issues.
+ args = [cwebp_binary, png_path, '-o', webp_path, '-quiet'] + quality_args
+ subprocess.check_call(args)
- def get_converted_image(png_path):
- sha1_hash = cal_sha1(png_path)
+ try:
+ os.link(webp_path, webp_cache_path)
+ except OSError:
+ # Because of concurrent runs, a webp image may already exist in
+ # webp_cache_path.
+ pass
- webp_cache_path = os.path.join(
- webp_cache_dir, '{}-{}-{}'.format(sha1_hash, cwebp_version,
- ''.join(cwebp_arguments)))
- # No need to add an extension, android can load images fine without them.
- webp_path = os.path.splitext(png_path)[0]
+ os.remove(png_path)
+ original_dir = os.path.dirname(os.path.dirname(png_path))
+ rename_tuple = (os.path.relpath(png_path, original_dir),
+ os.path.relpath(webp_path, original_dir))
+ return rename_tuple, cache_hit
- if os.path.exists(webp_cache_path):
- cache_hits[0] += 1
- os.link(webp_cache_path, webp_path)
- else:
- # We place the generated webp image to webp_path, instead of in the
- # webp_cache_dir to avoid concurrency issues.
- start = time.time()
- args = [webp_binary, png_path] + cwebp_arguments + ['-o', webp_path]
- subprocess.check_call(args)
- cwebp_time[0] += time.time() - start
-
- try:
- os.link(webp_path, webp_cache_path)
- except OSError:
- # Because of concurrent run, a webp image may already exists in
- # webp_cache_path.
- pass
-
- os.remove(png_path)
- original_dir = os.path.dirname(os.path.dirname(png_path))
- path_info.RegisterRename(
- os.path.relpath(png_path, original_dir),
- os.path.relpath(webp_path, original_dir))
-
- png_paths = [f for f in png_paths if not _PNG_WEBP_EXCLUSION_PATTERN.match(f)]
- try:
- pool.map(get_converted_image, png_paths)
- finally:
- pool.close()
- pool.join()
- logging.debug('png->webp: cache: %d/%d sha1 time: %.1fms cwebp time: %.1fms',
- cache_hits[0], len(png_paths), sha1_time[0], cwebp_time[0])
+
+def _ConvertToWebP(cwebp_binary, png_paths, path_info, webp_cache_dir):
+ cwebp_version = subprocess.check_output([cwebp_binary, '-version']).rstrip()
+ shard_args = [(f, ) for f in png_paths
+ if not _PNG_WEBP_EXCLUSION_PATTERN.match(f)]
+
+ build_utils.MakeDirectory(webp_cache_dir)
+ results = parallel.BulkForkAndCall(_ConvertToWebPSingle,
+ shard_args,
+ cwebp_binary=cwebp_binary,
+ cwebp_version=cwebp_version,
+ webp_cache_dir=webp_cache_dir)
+ total_cache_hits = 0
+ for rename_tuple, cache_hit in results:
+ path_info.RegisterRename(*rename_tuple)
+ total_cache_hits += int(cache_hit)
+
+ logging.debug('png->webp cache: %d/%d', total_cache_hits, len(shard_args))
def _RemoveImageExtensions(directory, path_info):
@@ -627,10 +623,9 @@ def _RemoveImageExtensions(directory, path_info):
os.path.relpath(path_no_extension, directory))
-def _CompileSingleDep(args):
- index, dep_path, aapt2_path, partials_dir, exclusion_rules = args
- basename = os.path.basename(dep_path)
- unique_name = '{}_{}'.format(index, basename)
+def _CompileSingleDep(index, dep_subdir, keep_predicate, aapt2_path,
+ partials_dir):
+ unique_name = '{}_{}'.format(index, os.path.basename(dep_subdir))
partial_path = os.path.join(partials_dir, '{}.zip'.format(unique_name))
compile_command = [
@@ -639,7 +634,7 @@ def _CompileSingleDep(args):
# TODO(wnwen): Turn this on once aapt2 forces 9-patch to be crunched.
# '--no-crunch',
'--dir',
- dep_path,
+ dep_subdir,
'-o',
partial_path
]
@@ -654,33 +649,16 @@ def _CompileSingleDep(args):
# Filtering these files is expensive, so only apply filters to the partials
# that have been explicitly targeted.
- keep_predicate = _CreateValuesKeepPredicate(exclusion_rules, dep_path)
if keep_predicate:
- logging.debug('Applying .arsc filtering to %s', dep_path)
+ logging.debug('Applying .arsc filtering to %s', dep_subdir)
protoresources.StripUnwantedResources(partial_path, keep_predicate)
return partial_path
-def _CompileDeps(aapt2_path, dep_subdirs, temp_dir, exclusion_rules):
- partials_dir = os.path.join(temp_dir, 'partials')
- build_utils.MakeDirectory(partials_dir)
-
- def iter_params():
- for i, dep_path in enumerate(dep_subdirs):
- yield i, dep_path, aapt2_path, partials_dir, exclusion_rules
-
- pool = multiprocessing.dummy.Pool(10)
- try:
- return pool.map(_CompileSingleDep, iter_params())
- finally:
- pool.close()
- pool.join()
-
-
-def _CreateValuesKeepPredicate(exclusion_rules, dep_path):
+def _CreateValuesKeepPredicate(exclusion_rules, dep_subdir):
patterns = [
x[1] for x in exclusion_rules
- if build_utils.MatchesGlob(dep_path, [x[0]])
+ if build_utils.MatchesGlob(dep_subdir, [x[0]])
]
if not patterns:
return None
@@ -689,6 +667,23 @@ def _CreateValuesKeepPredicate(exclusion_rules, dep_path):
return lambda x: not any(r.search(x) for r in regexes)
+def _CompileDeps(aapt2_path, dep_subdirs, temp_dir, exclusion_rules):
+ partials_dir = os.path.join(temp_dir, 'partials')
+ build_utils.MakeDirectory(partials_dir)
+
+ job_params = [(i, dep_subdir,
+ _CreateValuesKeepPredicate(exclusion_rules, dep_subdir))
+ for i, dep_subdir in enumerate(dep_subdirs)]
+
+ # Filtering is slow, so ensure jobs with keep_predicate are started first.
+ job_params.sort(key=lambda x: not x[2])
+ return list(
+ parallel.BulkForkAndCall(_CompileSingleDep,
+ job_params,
+ aapt2_path=aapt2_path,
+ partials_dir=partials_dir))
+
+
def _CreateResourceInfoFile(path_info, info_path, dependencies_res_zips):
for zip_file in dependencies_res_zips:
zip_info_file_path = zip_file + '.info'