summaryrefslogtreecommitdiff
path: root/chromium/tools
diff options
context:
space:
mode:
authorAllan Sandfeld Jensen <allan.jensen@qt.io>2017-04-05 17:15:33 +0200
committerAllan Sandfeld Jensen <allan.jensen@qt.io>2017-04-11 07:47:18 +0000
commit7324afb043a0b1e623d8e8eb906cdc53bdeb4685 (patch)
treea3fe2d74ea9c9e142c390dac4ca0e219382ace46 /chromium/tools
parent6a4cabb866f66d4128a97cdc6d9d08ce074f1247 (diff)
downloadqtwebengine-chromium-7324afb043a0b1e623d8e8eb906cdc53bdeb4685.tar.gz
BASELINE: Update Chromium to 58.0.3029.54
Change-Id: I67f57065a7afdc8e4614adb5c0230281428df4d1 Reviewed-by: Peter Varga <pvarga@inf.u-szeged.hu>
Diffstat (limited to 'chromium/tools')
-rw-r--r--chromium/tools/accessibility/OWNERS3
-rw-r--r--chromium/tools/accessibility/nvda/OWNERS3
-rw-r--r--chromium/tools/android/memconsumer/BUILD.gn2
-rw-r--r--chromium/tools/bash-completion22
-rw-r--r--chromium/tools/binary_size/OWNERS2
-rwxr-xr-xchromium/tools/checklicenses/checklicenses.py5
-rwxr-xr-xchromium/tools/checkperms/checkperms.py1
-rwxr-xr-xchromium/tools/checkteamtags/checkteamtags.py101
-rw-r--r--chromium/tools/checkteamtags/checkteamtags_test.py97
-rwxr-xr-xchromium/tools/checkteamtags/extract_components.py91
-rw-r--r--chromium/tools/checkteamtags/extract_components_test.py38
-rw-r--r--chromium/tools/checkteamtags/owners_file_tags.py36
-rw-r--r--chromium/tools/chrome_proxy/common/chrome_proxy_measurements.py11
-rw-r--r--chromium/tools/chrome_proxy/integration_tests/chrome_proxy_measurements.py14
-rw-r--r--chromium/tools/chrome_proxy/integration_tests/chrome_proxy_metrics.py28
-rw-r--r--chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/block_once.py2
-rw-r--r--chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/pass_through.py4
-rw-r--r--chromium/tools/chrome_proxy/live_tests/chrome_proxy_measurements.py6
-rw-r--r--chromium/tools/chrome_proxy/webdriver/bypass.py107
-rw-r--r--chromium/tools/chrome_proxy/webdriver/common.py174
-rw-r--r--chromium/tools/chrome_proxy/webdriver/compression_regression.py219
-rw-r--r--chromium/tools/chrome_proxy/webdriver/decorator_smoke.py24
-rw-r--r--chromium/tools/chrome_proxy/webdriver/examples.py2
-rw-r--r--chromium/tools/chrome_proxy/webdriver/fallback.py40
-rw-r--r--chromium/tools/chrome_proxy/webdriver/html5.py30
-rw-r--r--chromium/tools/chrome_proxy/webdriver/lite_page.py115
-rw-r--r--chromium/tools/chrome_proxy/webdriver/lofi.py112
-rw-r--r--chromium/tools/chrome_proxy/webdriver/safebrowsing.py33
-rw-r--r--chromium/tools/chrome_proxy/webdriver/smoke.py15
-rw-r--r--chromium/tools/chrome_proxy/webdriver/video.py134
-rw-r--r--chromium/tools/clang/OWNERS2
-rw-r--r--chromium/tools/clang/blink_gc_plugin/BlinkGCPlugin.cpp4
-rw-r--r--chromium/tools/clang/blink_gc_plugin/BlinkGCPluginConsumer.cpp9
-rw-r--r--chromium/tools/clang/blink_gc_plugin/BlinkGCPluginOptions.h5
-rw-r--r--chromium/tools/clang/blink_gc_plugin/CheckTraceVisitor.cpp25
-rw-r--r--chromium/tools/clang/blink_gc_plugin/CheckTraceVisitor.h3
-rw-r--r--chromium/tools/clang/blink_gc_plugin/Config.cpp6
-rw-r--r--chromium/tools/clang/blink_gc_plugin/Config.h20
-rw-r--r--chromium/tools/clang/blink_gc_plugin/DiagnosticsReporter.cpp18
-rw-r--r--chromium/tools/clang/blink_gc_plugin/DiagnosticsReporter.h3
-rw-r--r--chromium/tools/clang/blink_gc_plugin/Edge.h92
-rw-r--r--chromium/tools/clang/blink_gc_plugin/RecordInfo.cpp8
-rw-r--r--chromium/tools/clang/plugins/FindBadConstructsConsumer.cpp12
-rw-r--r--chromium/tools/clang/rewrite_to_chrome_style/EditTracker.cpp25
-rw-r--r--chromium/tools/clang/rewrite_to_chrome_style/EditTracker.h14
-rw-r--r--chromium/tools/clang/rewrite_to_chrome_style/OWNERS2
-rw-r--r--chromium/tools/clang/rewrite_to_chrome_style/RewriteToChromeStyle.cpp201
-rwxr-xr-xchromium/tools/clang/scripts/blink_gc_plugin_flags.py38
-rwxr-xr-xchromium/tools/clang/scripts/generate_win_compdb.py9
-rwxr-xr-xchromium/tools/clang/scripts/package.py9
-rwxr-xr-xchromium/tools/clang/scripts/update.py28
-rw-r--r--chromium/tools/clang/value_cleanup/ListValueRewriter.cpp32
-rw-r--r--chromium/tools/clang_format_merge_driver/OWNERS2
-rw-r--r--chromium/tools/cr/OWNERS2
-rwxr-xr-xchromium/tools/cygprofile/profile_android_startup.py3
-rw-r--r--chromium/tools/determinism/deterministic_build_whitelist.pyl6
-rw-r--r--chromium/tools/gn/analyzer.cc12
-rw-r--r--chromium/tools/gn/args.cc42
-rw-r--r--chromium/tools/gn/args.h25
-rwxr-xr-xchromium/tools/gn/bootstrap/bootstrap.py19
-rw-r--r--chromium/tools/gn/command_args.cc113
-rw-r--r--chromium/tools/gn/docs/cross_compiles.md2
-rw-r--r--chromium/tools/gn/docs/reference.md54
-rw-r--r--chromium/tools/gn/functions.cc2
-rw-r--r--chromium/tools/gn/import_manager.cc2
-rw-r--r--chromium/tools/gn/misc/vim/autoload/gn.vim26
-rw-r--r--chromium/tools/gn/misc/vim/ftplugin/gn.vim12
-rw-r--r--chromium/tools/gn/misc/vim/syntax/gn.vim4
-rw-r--r--chromium/tools/gn/parser.cc2
-rw-r--r--chromium/tools/gn/setup.cc36
-rw-r--r--chromium/tools/gn/setup.h4
-rw-r--r--chromium/tools/gn/visual_studio_writer.cc2
-rwxr-xr-xchromium/tools/grit/grit/format/gen_predetermined_ids.py157
-rwxr-xr-xchromium/tools/grit/grit/format/gen_predetermined_ids_unittest.py43
-rwxr-xr-xchromium/tools/grit/grit/format/html_inline.py168
-rwxr-xr-xchromium/tools/grit/grit/format/html_inline_unittest.py54
-rwxr-xr-xchromium/tools/grit/grit/format/rc_header.py39
-rwxr-xr-xchromium/tools/grit/grit/format/rc_header_unittest.py56
-rwxr-xr-xchromium/tools/grit/grit/format/resource_map.py2
-rwxr-xr-xchromium/tools/grit/grit/format/resource_map_unittest.py8
-rwxr-xr-xchromium/tools/grit/grit/grd_reader.py8
-rwxr-xr-xchromium/tools/grit/grit/tool/build.py17
-rwxr-xr-xchromium/tools/grit/grit/tool/xmb.py12
-rwxr-xr-xchromium/tools/grit/grit/tool/xmb_unittest.py20
-rw-r--r--chromium/tools/grit/grit_rule.gni17
-rw-r--r--chromium/tools/gritsettings/README.md36
-rw-r--r--chromium/tools/gritsettings/resource_ids44
-rw-r--r--chromium/tools/gritsettings/startup_resources_mac.txt266
-rw-r--r--chromium/tools/gritsettings/startup_resources_win.txt223
-rw-r--r--chromium/tools/idl_parser/OWNERS2
-rwxr-xr-xchromium/tools/idl_parser/idl_lexer.py22
-rwxr-xr-xchromium/tools/idl_parser/idl_parser.py48
-rwxr-xr-xchromium/tools/idl_parser/idl_ppapi_lexer.py4
-rwxr-xr-xchromium/tools/idl_parser/idl_ppapi_parser.py12
-rw-r--r--chromium/tools/idl_parser/test_lexer/keywords.in2
-rw-r--r--chromium/tools/idl_parser/test_parser/dictionary_web.idl6
-rw-r--r--chromium/tools/idl_parser/test_parser/exception_web.idl4
-rw-r--r--chromium/tools/idl_parser/test_parser/interface_web.idl79
-rw-r--r--chromium/tools/idl_parser/test_parser/typedef_web.idl18
-rw-r--r--chromium/tools/ipc_fuzzer/OWNERS2
-rw-r--r--chromium/tools/ipc_fuzzer/fuzzer/fuzzer.cc29
-rw-r--r--chromium/tools/ipc_fuzzer/message_lib/BUILD.gn2
-rw-r--r--chromium/tools/ipc_fuzzer/message_replay/replay_process.cc2
-rw-r--r--chromium/tools/json_schema_compiler/cc_generator.py2
-rw-r--r--chromium/tools/json_schema_compiler/cpp_bundle_generator.py21
-rw-r--r--chromium/tools/json_schema_compiler/feature_compiler.py4
-rwxr-xr-xchromium/tools/json_schema_compiler/idl_schema.py15
-rwxr-xr-xchromium/tools/json_schema_compiler/idl_schema_test.py7
-rw-r--r--chromium/tools/json_schema_compiler/js_externs_generator.py49
-rwxr-xr-xchromium/tools/json_schema_compiler/js_externs_generator_test.py49
-rw-r--r--chromium/tools/json_schema_compiler/js_util.py12
-rw-r--r--chromium/tools/json_schema_compiler/model.py6
-rwxr-xr-xchromium/tools/licenses.py78
-rw-r--r--chromium/tools/luci-go/linux64/isolate.sha12
-rw-r--r--chromium/tools/luci-go/mac64/isolate.sha12
-rw-r--r--chromium/tools/luci-go/win64/isolate.exe.sha12
-rw-r--r--chromium/tools/mb/docs/design_spec.md4
-rwxr-xr-xchromium/tools/mb/mb.py15
-rw-r--r--chromium/tools/mb/mb_config.pyl105
-rwxr-xr-xchromium/tools/nocompile_driver.py67
-rwxr-xr-xchromium/tools/origin_trials/generate_token.py2
-rw-r--r--chromium/tools/origin_trials/third_party/ed25519/OWNERS3
-rwxr-xr-xchromium/tools/protoc_wrapper/protoc_wrapper.py8
-rwxr-xr-xchromium/tools/resource_prefetch_predictor/generate_test_data.py7
-rwxr-xr-xchromium/tools/resource_prefetch_predictor/prefetch_benchmark.py16
-rwxr-xr-xchromium/tools/roll_webgl_conformance.py27
-rw-r--r--chromium/tools/security/OWNERS2
-rw-r--r--chromium/tools/traffic_annotation/DEPS4
-rw-r--r--chromium/tools/traffic_annotation/OWNERS2
-rw-r--r--chromium/tools/traffic_annotation/sample_traffic_annotation.cc72
-rw-r--r--chromium/tools/traffic_annotation/traffic_annotation.proto185
-rw-r--r--chromium/tools/valgrind/memcheck/suppressions.txt8
-rw-r--r--chromium/tools/vim/OWNERS2
-rw-r--r--chromium/tools/vim/ninja_output.py19
-rw-r--r--chromium/tools/win/DebugVisualizers/chrome.natvis2
-rw-r--r--chromium/tools/win/ShowThreadNames/ReadMe.txt61
-rw-r--r--chromium/tools/win/ShowThreadNames/ShowThreadNames.cc142
-rw-r--r--chromium/tools/win/ShowThreadNames/ShowThreadNames.sln28
-rw-r--r--chromium/tools/win/ShowThreadNames/ShowThreadNames.vcxproj118
-rw-r--r--chromium/tools/win/chromeexts/BUILD.gn8
-rw-r--r--chromium/tools/win/chromeexts/chrome_exts_command.cc56
-rw-r--r--chromium/tools/win/chromeexts/chrome_exts_command.h76
-rw-r--r--chromium/tools/win/chromeexts/chromeexts.cc60
-rw-r--r--chromium/tools/win/chromeexts/chromeexts.def3
-rw-r--r--chromium/tools/win/chromeexts/commands/hwnd_command.cc68
-rw-r--r--chromium/tools/win/chromeexts/commands/hwnd_command.h30
-rwxr-xr-xchromium/tools/win/static_initializers/build.bat3
-rw-r--r--chromium/tools/win/static_initializers/static_initializers.cc3
148 files changed, 4543 insertions, 720 deletions
diff --git a/chromium/tools/accessibility/OWNERS b/chromium/tools/accessibility/OWNERS
index 11e8fd837ee..bdb9178c0a6 100644
--- a/chromium/tools/accessibility/OWNERS
+++ b/chromium/tools/accessibility/OWNERS
@@ -1,2 +1,5 @@
dmazzoni@chromium.org
dtseng@chromium.org
+
+# TEAM: chromium-accessibility@chromium.org
+# COMPONENT: UI>Accessibility
diff --git a/chromium/tools/accessibility/nvda/OWNERS b/chromium/tools/accessibility/nvda/OWNERS
index c50d5b87619..2f05ed92783 100644
--- a/chromium/tools/accessibility/nvda/OWNERS
+++ b/chromium/tools/accessibility/nvda/OWNERS
@@ -1,3 +1,6 @@
aboxhall@chromium.org
dmazzoni@chromium.org
dtseng@chromium.org
+
+# TEAM: chromium-accessibility@chromium.org
+# COMPONENT: UI>Accessibility
diff --git a/chromium/tools/android/memconsumer/BUILD.gn b/chromium/tools/android/memconsumer/BUILD.gn
index 70a4447d217..da69770b03c 100644
--- a/chromium/tools/android/memconsumer/BUILD.gn
+++ b/chromium/tools/android/memconsumer/BUILD.gn
@@ -30,7 +30,7 @@ shared_library("libmemconsumer") {
"memconsumer_hook.cc",
]
libs = [ "log" ]
- configs -= [ "//build/config/android:hide_native_jni_exports" ]
+ configs -= [ "//build/config/android:hide_all_but_jni_onload" ]
configs -= [ "//build/config/compiler:chromium_code" ]
configs += [ "//build/config/compiler:no_chromium_code" ]
diff --git a/chromium/tools/bash-completion b/chromium/tools/bash-completion
index 8cc3c73a8d8..2c5c00f5586 100644
--- a/chromium/tools/bash-completion
+++ b/chromium/tools/bash-completion
@@ -19,8 +19,17 @@
# Layout test switches:
# $ third_party/WebKit/Tools/Scripts/run-webkit-tests --additional-driver-f<tab>
# $ .../run-webkit-tests --additional-driver-flag=--site-per-pro<tab>
+#
+# Blink webkit-patch sub-commands:
+# $ third_party/WebKit/Tools/Scripts/webkit-patch reb<tab>
-chrome_source=$(cd $(dirname $BASH_SOURCE)/.. && pwd)
+if [ -n "$BASH_SOURCE" ]; then
+ # The $BASH_SOURCE variable returns path of current script in bash.
+ chrome_source=$(cd $(dirname $BASH_SOURCE)/.. && pwd)
+else
+ # This is here for other similar shells, e.g. zsh.
+ chrome_source=$(cd $(dirname $0)/.. && pwd)
+fi
_chrome_flag() {
local cur targets
@@ -71,6 +80,16 @@ _layout_test_flag() {
return 0
}
+_webkit_patch_flag() {
+ local cur targets webkit_scripts_dir
+ cur="${COMP_WORDS[COMP_CWORD]}"
+ webkit_scripts_dir=$chrome_source/third_party/WebKit/Tools/Scripts
+ targets=$($webkit_scripts_dir/webkit-patch help | grep '^ [a-z]' | \
+ awk '{ print $1 }')
+ COMPREPLY=($(compgen -W "$targets" -- "$cur"))
+ return 0
+}
+
complete -F _chrome_flag google-chrome
complete -F _chrome_flag chrome
if [ $(uname) = "Darwin" ]
@@ -86,3 +105,4 @@ for gtest_test_executable in $(
done
complete -F _layout_test_flag run-webkit-tests
+complete -F _webkit_patch_flag webkit-patch
diff --git a/chromium/tools/binary_size/OWNERS b/chromium/tools/binary_size/OWNERS
index c598cde507c..d86822896b4 100644
--- a/chromium/tools/binary_size/OWNERS
+++ b/chromium/tools/binary_size/OWNERS
@@ -1,3 +1,5 @@
andrewhayden@chromium.org
bratell@opera.com
primiano@chromium.org
+
+# COMPONENT: Tools
diff --git a/chromium/tools/checklicenses/checklicenses.py b/chromium/tools/checklicenses/checklicenses.py
index cfb09bc7915..3d27b0f2a06 100755
--- a/chromium/tools/checklicenses/checklicenses.py
+++ b/chromium/tools/checklicenses/checklicenses.py
@@ -231,11 +231,6 @@ PATH_SPECIFIC_WHITELISTED_LICENSES = {
'UNKNOWN',
],
- # https://bugs.chromium.org/p/chromium/issues/detail?id=655755
- 'third_party/dpkg-dev': [
- 'GPL (v2 or later)',
- ],
-
'third_party/devscripts': [
'GPL (v2 or later)',
],
diff --git a/chromium/tools/checkperms/checkperms.py b/chromium/tools/checkperms/checkperms.py
index 8e759744bb6..9960558ecf8 100755
--- a/chromium/tools/checkperms/checkperms.py
+++ b/chromium/tools/checkperms/checkperms.py
@@ -182,7 +182,6 @@ IGNORED_PATHS = (
'__init__.py',
'out/',
# TODO(maruel): Fix these.
- 'third_party/bintrees/',
'third_party/devscripts/licensecheck.pl.vanilla',
'third_party/hyphen/',
'third_party/lcov-1.9/contrib/galaxy/conglomerate_functions.pl',
diff --git a/chromium/tools/checkteamtags/checkteamtags.py b/chromium/tools/checkteamtags/checkteamtags.py
index cfad8d1a934..c06c354b3e9 100755
--- a/chromium/tools/checkteamtags/checkteamtags.py
+++ b/chromium/tools/checkteamtags/checkteamtags.py
@@ -11,17 +11,102 @@ import logging
import optparse
import os
import sys
+import urllib2
+from collections import defaultdict
-def check_owners(root, owners_path):
- """Component and Team check in OWNERS files. crbug.com/667954"""
+from owners_file_tags import parse
+
+
+DEFAULT_MAPPING_URL = \
+ 'https://storage.googleapis.com/chromium-owners/component_map.json'
+
+
+def rel_and_full_paths(root, owners_path):
if root:
full_path = os.path.join(root, owners_path)
rel_path = owners_path
else:
full_path = os.path.abspath(owners_path)
rel_path = os.path.relpath(owners_path)
+ return rel_path, full_path
+
+
+def validate_mappings(options, args):
+ """Ensure team/component mapping remains consistent after patch.
+
+ The main purpose of this check is to prevent new and edited OWNERS files
+ introduce multiple teams for the same component.
+ Args:
+ options: Command line options from optparse
+ args: List of paths to affected OWNERS files
+ """
+ mappings_file = json.load(urllib2.urlopen(options.current_mapping_url))
+
+ # Convert dir -> component, component -> team to dir -> (team, component)
+ current_mappings = {}
+ for dir_name in mappings_file['dir-to-component'].keys():
+ component = mappings_file['dir-to-component'].get(dir_name)
+ if component:
+ team = mappings_file['component-to-team'].get(component)
+ else:
+ team = None
+ current_mappings[dir_name] = (team, component)
+
+ # Extract dir -> (team, component) for affected files
+ affected = {}
+ deleted = []
+ for f in args:
+ rel, full = rel_and_full_paths(options.root, f)
+ if os.path.exists(full):
+ affected[os.path.dirname(rel)] = parse(full)
+ else:
+ deleted.append(os.path.dirname(rel))
+ for d in deleted:
+ current_mappings.pop(d, None)
+ current_mappings.update(affected)
+
+ #Ensure internal consistency of modified mappings.
+ new_dir_to_component = {}
+ new_component_to_team = {}
+ team_to_dir = defaultdict(list)
+ errors = {}
+ for dir_name, tags in current_mappings.iteritems():
+ team, component = tags
+ if component:
+ new_dir_to_component[dir_name] = component
+ if team:
+ team_to_dir[team].append(dir_name)
+ if component and team:
+ if new_component_to_team.setdefault(component, team) != team:
+ if component not in errors:
+ errors[component] = set([new_component_to_team[component], team])
+ else:
+ errors[component].add(team)
+
+ result = []
+ for component, teams in errors.iteritems():
+ error_message = 'The component "%s" has more than one team: ' % component
+ team_details = []
+ for team in teams:
+ team_details.append('%(team)s is used in %(paths)s' % {
+ 'team': team,
+ 'paths': ', '.join(team_to_dir[team]),
+ })
+ error_message += '; '.join(team_details)
+ result.append({
+ 'error': error_message,
+ 'full_path':
+ ' '.join(['%s/OWNERS' % d
+ for d, c in new_dir_to_component.iteritems()
+ if c == component and d in affected.keys()])
+ })
+ return result
+
+
+def check_owners(rel_path, full_path):
+ """Component and Team check in OWNERS files. crbug.com/667954"""
def result_dict(error):
return {
'error': error,
@@ -29,6 +114,9 @@ def check_owners(root, owners_path):
'rel_path': rel_path,
}
+ if not os.path.exists(full_path):
+ return
+
with open(full_path) as f:
owners_file_lines = f.readlines()
@@ -89,13 +177,20 @@ Examples:
action='store_true',
default=False,
help='Prints the bare filename triggering the checks')
+ parser.add_option(
+ '--current_mapping_url', default=DEFAULT_MAPPING_URL,
+ help='URL for existing dir/component and component/team mapping')
parser.add_option('--json', help='Path to JSON output file')
options, args = parser.parse_args()
levels = [logging.ERROR, logging.INFO, logging.DEBUG]
logging.basicConfig(level=levels[min(len(levels) - 1, options.verbose)])
- errors = filter(None, [check_owners(options.root, f) for f in args])
+ errors = filter(None, [check_owners(*rel_and_full_paths(options.root, f))
+ for f in args])
+
+ if not errors:
+ errors += validate_mappings(options, args) or []
if options.json:
with open(options.json, 'w') as f:
diff --git a/chromium/tools/checkteamtags/checkteamtags_test.py b/chromium/tools/checkteamtags/checkteamtags_test.py
index b99f66430ed..5dc0c4a984a 100644
--- a/chromium/tools/checkteamtags/checkteamtags_test.py
+++ b/chromium/tools/checkteamtags/checkteamtags_test.py
@@ -2,6 +2,7 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+import json
import os
import sys
import unittest
@@ -16,7 +17,8 @@ import mock
def mock_file(lines):
inner_mock = mock.MagicMock()
- inner_attrs = {'readlines.return_value': lines}
+ inner_attrs = {'readlines.return_value': lines,
+ '__iter__.return_value': lines}
inner_mock.configure_mock(**inner_attrs)
return_val = mock.MagicMock()
@@ -24,6 +26,33 @@ def mock_file(lines):
return_val.configure_mock(**attrs)
return return_val
+
+DEFAULT_MAPPING = {
+ 'dir-to-component': {},
+ 'component-to-team': {},
+}
+
+def mock_url_open(data=None):
+ """Simulate the result of fetching the cloud location of the mapping.
+
+ i.e. https://storage.googleapis.com/chromium-owners/component_map.json
+ """
+ if data is None:
+ data = DEFAULT_MAPPING
+
+ class _MockJsonResponse(object):
+ def __init__(self, data):
+ self.data = data
+
+ def read(self):
+ return json.dumps(self.data)
+
+ def inner(url):
+ if url.endswith('.json'):
+ return _MockJsonResponse(data)
+ return inner
+
+
NO_TAGS = """
mock@chromium.org
""".splitlines()
@@ -75,45 +104,109 @@ mock@chromium.org
open_name = 'checkteamtags.open'
-@mock.patch('sys.argv', ['checkteamtags', '--bare' ,'OWNERS'])
@mock.patch('sys.stdout', mock.MagicMock())
+@mock.patch('os.path.exists', mock.MagicMock())
class CheckTeamTagsTest(unittest.TestCase):
+ @mock.patch('urllib2.urlopen', mock_url_open())
+ @mock.patch('sys.argv', ['checkteamtags', '--bare' ,'OWNERS'])
def testNoTags(self):
with mock.patch(open_name, create=True) as mock_open:
mock_open.return_value = mock_file(NO_TAGS)
self.assertEqual(0, checkteamtags.main())
+ @mock.patch('urllib2.urlopen', mock_url_open())
+ @mock.patch('sys.argv', ['checkteamtags', '--bare' ,'OWNERS'])
def testMultipleComponentTags(self):
with mock.patch(open_name, create=True) as mock_open:
mock_open.return_value = mock_file(MULTIPLE_COMPONENT_TAGS)
self.assertEqual(1, checkteamtags.main())
+ @mock.patch('urllib2.urlopen', mock_url_open())
+ @mock.patch('sys.argv', ['checkteamtags', '--bare' ,'OWNERS'])
def testMultipleComponentsInTag(self):
with mock.patch(open_name, create=True) as mock_open:
mock_open.return_value = mock_file(MULTIPLE_COMPONENTS_IN_TAG)
self.assertEqual(1, checkteamtags.main())
+ @mock.patch('urllib2.urlopen', mock_url_open())
+ @mock.patch('sys.argv', ['checkteamtags', '--bare' ,'OWNERS'])
def testMissingComponent(self):
with mock.patch(open_name, create=True) as mock_open:
mock_open.return_value = mock_file(MISSING_COMPONENT)
self.assertEqual(1, checkteamtags.main())
+ @mock.patch('urllib2.urlopen', mock_url_open())
+ @mock.patch('sys.argv', ['checkteamtags', '--bare' ,'OWNERS'])
def testMultipleTeamTags(self):
with mock.patch(open_name, create=True) as mock_open:
mock_open.return_value = mock_file(MULTIPLE_TEAM_TAGS)
self.assertEqual(1, checkteamtags.main())
+ @mock.patch('urllib2.urlopen', mock_url_open())
+ @mock.patch('sys.argv', ['checkteamtags', '--bare' ,'OWNERS'])
def testMultipleTeamsInTag(self):
with mock.patch(open_name, create=True) as mock_open:
mock_open.return_value = mock_file(MULTIPLE_TEAMS_IN_TAG)
self.assertEqual(1, checkteamtags.main())
+ @mock.patch('urllib2.urlopen', mock_url_open())
+ @mock.patch('sys.argv', ['checkteamtags', '--bare' ,'OWNERS'])
def testMissingTeam(self):
with mock.patch(open_name, create=True) as mock_open:
mock_open.return_value = mock_file(MISSING_TEAM)
self.assertEqual(1, checkteamtags.main())
+ @mock.patch('urllib2.urlopen', mock_url_open())
+ @mock.patch('sys.argv', ['checkteamtags', '--bare' ,'OWNERS'])
def testBasic(self):
with mock.patch(open_name, create=True) as mock_open:
mock_open.return_value = mock_file(BASIC)
self.assertEqual(0, checkteamtags.main())
+
+ @mock.patch('urllib2.urlopen', mock_url_open({
+ 'dir-to-component': {
+ 'some/dir': 'V8>mock_component',
+ },
+ 'component-to-team': {
+ 'V8>mock_component': 'some-other-team@chromium.org',
+ },
+ }))
+ @mock.patch('sys.argv', ['checkteamtags', '--bare', 'fakepath/OWNERS'])
+ def testMappingFail(self):
+ with mock.patch(open_name, create=True) as mock_open:
+ mock_open.return_value = mock_file(BASIC)
+ with mock.patch('owners_file_tags.open', create=True) as mock_open_2:
+ mock_open_2.return_value = mock_file(BASIC)
+ self.assertEqual(1, checkteamtags.main())
+
+ @mock.patch('urllib2.urlopen', mock_url_open({
+ 'dir-to-component': {
+ 'some/dir': 'V8>mock_component',
+ },
+ 'component-to-team': {
+ 'V8>mock_component': 'some-other-team@chromium.org',
+ },
+ }))
+ @mock.patch('sys.argv', ['checkteamtags', '--bare', 'some/dir/OWNERS'])
+ def testMappingPassRename(self):
+ with mock.patch(open_name, create=True) as mock_open:
+ mock_open.return_value = mock_file(BASIC)
+ with mock.patch('owners_file_tags.open', create=True) as mock_open_2:
+ mock_open_2.return_value = mock_file(BASIC)
+ self.assertEqual(0, checkteamtags.main())
+
+ @mock.patch('urllib2.urlopen', mock_url_open({
+ 'dir-to-component': {
+ 'some/dir/': 'V8>mock_component',
+ },
+ 'component-to-team': {
+ 'V8>mock_component': 'some-team@chromium.org',
+ },
+ }))
+ @mock.patch('sys.argv', ['checkteamtags', '--bare', 'other/dir/OWNERS'])
+ def testMappingPassNew(self):
+ with mock.patch(open_name, create=True) as mock_open:
+ mock_open.return_value = mock_file(BASIC)
+ with mock.patch('owners_file_tags.open', create=True) as mock_open_2:
+ mock_open_2.return_value = mock_file(BASIC)
+ self.assertEqual(0, checkteamtags.main())
diff --git a/chromium/tools/checkteamtags/extract_components.py b/chromium/tools/checkteamtags/extract_components.py
index a96fa03928e..e944894eb7d 100755
--- a/chromium/tools/checkteamtags/extract_components.py
+++ b/chromium/tools/checkteamtags/extract_components.py
@@ -44,6 +44,86 @@ def write_results(filename, data):
f.write(data)
+def display_stat(stats, root, options):
+ """"Display coverage statistic.
+
+ The following three values are always displayed:
+ - The total number of OWNERS files under directory root and its sub-
+ directories.
+ - The number of OWNERS files (and its percentage of the total) that have
+ component information but no team information.
+ - The number of OWNERS files (and its percentage of the total) that have
+ both component and team information.
+
+ Optionally, if options.stat_coverage or options.complete_coverage are given,
+ the same information will be shown for each depth level.
+ (up to the level given by options.stat_coverage, if any).
+
+ Args:
+ stats (dict): Tha statistics in dictionary form as produced by the
+ owners_file_tags module.
+ root (str): The root directory from which the depth level is calculated.
+ options (optparse.Values): The command line options as returned by
+ optparse.
+ """
+ file_total = stats['OWNERS-count']
+ print ("%d OWNERS files in total." % file_total)
+ file_with_component = stats['OWNERS-with-component-only-count']
+ file_pct_with_component = "N/A"
+ if file_total > 0:
+ file_pct_with_component = "{0:.2f}".format(
+ 100.0 * file_with_component / file_total)
+ print '%(file_with_component)d (%(file_pct_with_component)s%%) OWNERS '\
+ 'files have COMPONENT' % {
+ 'file_with_component': file_with_component,
+ 'file_pct_with_component': file_pct_with_component}
+ file_with_team_component = stats['OWNERS-with-team-and-component-count']
+ file_pct_with_team_component = "N/A"
+ if file_total > 0:
+ file_pct_with_team_component = "{0:.2f}".format(
+ 100.0 * file_with_team_component / file_total)
+ print '%(file_with_team_component)d (%(file_pct_with_team_component)s%%) '\
+ 'OWNERS files have TEAM and COMPONENT' % {
+ 'file_with_team_component': file_with_team_component,
+ 'file_pct_with_team_component': file_pct_with_team_component}
+
+ print ("\nUnder directory %s " % root)
+ # number of depth to display, default is max depth under root
+ num_output_depth = len(stats['OWNERS-count-by-depth'])
+ if (options.stat_coverage > 0
+ and options.stat_coverage < num_output_depth):
+ num_output_depth = options.stat_coverage
+
+ for depth in range(0, num_output_depth):
+ file_total_by_depth = stats['OWNERS-count-by-depth'][depth]
+ file_with_component_by_depth =\
+ stats['OWNERS-with-component-only-count-by-depth'][depth]
+ file_pct_with_component_by_depth = "N/A"
+ if file_total_by_depth > 0:
+ file_pct_with_component_by_depth = "{0:.2f}".format(
+ 100.0 * file_with_component_by_depth / file_total_by_depth)
+ file_with_team_component_by_depth =\
+ stats['OWNERS-with-team-and-component-count-by-depth'][depth]
+ file_pct_with_team_component_by_depth = "N/A"
+ if file_total_by_depth > 0:
+ file_pct_with_team_component_by_depth = "{0:.2f}".format(
+ 100.0 * file_with_team_component_by_depth / file_total_by_depth)
+ print '%(file_total_by_depth)d OWNERS files at depth %(depth)d'% {
+ 'file_total_by_depth': file_total_by_depth, 'depth': depth}
+ print 'have COMPONENT: %(file_with_component_by_depth)d, '\
+ 'percentage: %(file_pct_with_component_by_depth)s%%' % {
+ 'file_with_component_by_depth':
+ file_with_component_by_depth,
+ 'file_pct_with_component_by_depth':
+ file_pct_with_component_by_depth}
+ print 'have COMPONENT and TEAM: %(file_with_team_component_by_depth)d,'\
+ 'percentage: %(file_pct_with_team_component_by_depth)s%%' % {
+ 'file_with_team_component_by_depth':
+ file_with_team_component_by_depth,
+ 'file_pct_with_team_component_by_depth':
+ file_pct_with_team_component_by_depth}
+
+
def main(argv):
usage = """Usage: python %prog [options] [<root_dir>]
root_dir specifies the topmost directory to traverse looking for OWNERS
@@ -56,6 +136,8 @@ Examples:
python %prog -v /b/build/src
python %prog -w /b/build/src
python %prog -o ~/components.json /b/build/src
+ python %prog -c /b/build/src
+ python %prog -s 3 /b/build/src
"""
parser = optparse.OptionParser(usage=usage)
parser.add_option('-w', '--write', action='store_true',
@@ -67,13 +149,17 @@ Examples:
parser.add_option('-o', '--output_file', help='Specify file to write the '
'mappings to instead of the default: <CWD>/'
'component_map.json (implies -w)')
+ parser.add_option('-c', '--complete_coverage', action='store_true',
+ help='Print complete coverage statistic')
+ parser.add_option('-s', '--stat_coverage', type="int",
+ help='Specify directory depth to display coverage stats')
options, args = parser.parse_args(argv[1:])
if args:
root = args[0]
else:
root = _DEFAULT_SRC_LOCATION
- mappings, warnings, errors = aggregate_components_from_owners(root)
+ mappings, warnings, errors, stats = aggregate_components_from_owners(root)
if options.verbose:
for w in warnings:
print w
@@ -81,6 +167,9 @@ Examples:
for e in errors:
print e
+ if options.stat_coverage or options.complete_coverage:
+ display_stat(stats, root, options)
+
mappings['AAA-README']= _README
mapping_file_contents = json.dumps(mappings, sort_keys=True, indent=2)
if options.write or options.output_file:
diff --git a/chromium/tools/checkteamtags/extract_components_test.py b/chromium/tools/checkteamtags/extract_components_test.py
index b9111659f4f..0f99f4eab14 100644
--- a/chromium/tools/checkteamtags/extract_components_test.py
+++ b/chromium/tools/checkteamtags/extract_components_test.py
@@ -124,3 +124,41 @@ class ExtractComponentsTest(unittest.TestCase):
output = saved_output.getvalue()
self.assertIn('src/OWNERS has no COMPONENT tag', output)
+ @mock_file_tree({
+ 'src': 'boss@chromium.org\n',
+ 'src/dummydir1': 'dummy@chromium.org\n'
+ '# TEAM: dummy-team@chromium.org\n'
+ '# COMPONENT: Dummy>Component',
+ 'src/dummydir2': 'dummy2@chromium.org\n'
+ '# COMPONENT: Dummy>Component',
+ 'src/dummydir1/innerdir1': 'dummy@chromium.org\n'
+ '# TEAM: dummy-specialist-team@chromium.org\n'
+ '# COMPONENT: Dummy>Component>Subcomponent'})
+ def testCoverage(self):
+ saved_output = StringIO()
+ with mock.patch('sys.stdout', saved_output):
+ extract_components.main(['%prog', '-s 2'])
+ output = saved_output.getvalue()
+ self.assertIn('4 OWNERS files in total.', output)
+ self.assertIn('3 (75.00%) OWNERS files have COMPONENT', output)
+ self.assertIn('2 (50.00%) OWNERS files have TEAM and COMPONENT', output)
+
+ @mock_file_tree({
+ 'src': 'boss@chromium.org\n',
+ 'src/dummydir1': 'dummy@chromium.org\n'
+ '# TEAM: dummy-team@chromium.org\n'
+ '# COMPONENT: Dummy>Component',
+ 'src/dummydir2': 'dummy2@chromium.org\n'
+ '# COMPONENT: Dummy>Component',
+ 'src/dummydir1/innerdir1': 'dummy@chromium.org\n'
+ '# TEAM: dummy-specialist-team@chromium.org\n'
+ '# COMPONENT: Dummy>Component>Subcomponent'})
+ def testCompleteCoverage(self):
+ saved_output = StringIO()
+ with mock.patch('sys.stdout', saved_output):
+ extract_components.main(['%prog', '-c'])
+ output = saved_output.getvalue()
+ self.assertIn('4 OWNERS files in total.', output)
+ self.assertIn('3 (75.00%) OWNERS files have COMPONENT', output)
+ self.assertIn('2 (50.00%) OWNERS files have TEAM and COMPONENT', output)
+ self.assertIn('4 OWNERS files at depth 0', output)
diff --git a/chromium/tools/checkteamtags/owners_file_tags.py b/chromium/tools/checkteamtags/owners_file_tags.py
index fc8d1f57ebc..d2e3fe968e1 100644
--- a/chromium/tools/checkteamtags/owners_file_tags.py
+++ b/chromium/tools/checkteamtags/owners_file_tags.py
@@ -39,11 +39,26 @@ def aggregate_components_from_owners(root):
root (str): the path to the src directory.
Returns:
- A pair (data, warnings) where data is a dict of the form
+ A tuple (data, warnings, errors, stats) where data is a dict of the form
{'component-to-team': {'Component1': 'team1@chr...', ...},
'dir-to-component': {'/path/to/1': 'Component1', ...}}
- and warnings is a list of strings.
+ , warnings is a list of strings, stats is a dict of form
+ {'OWNERS-count': total number of OWNERS files,
+ 'OWNERS-with-component-only-count': number of OWNERS files with # COMPONENT,
+ 'OWNERS-with-team-and-component-count': number of
+ OWNERS files with both TEAM and COMPONENT,
+ 'OWNERS-count-by-depth': {directory depth: number of OWNERS},
+ 'OWNERS-with-component-only-count-by-depth': {directory depth: number
+ of OWNERS have COMPONENT at this depth},
+ 'OWNERS-with-team-and-component-count-by-depth':{directory depth: ...}}
"""
+ stats = {}
+ num_total = 0
+ num_with_component = 0
+ num_with_team_component = 0
+ num_total_by_depth = defaultdict(int)
+ num_with_component_by_depth = defaultdict(int)
+ num_with_team_component_by_depth = defaultdict(int)
warnings = []
component_to_team = defaultdict(set)
dir_to_component = {}
@@ -51,19 +66,34 @@ def aggregate_components_from_owners(root):
# Proofing against windows casing oddities.
owners_file_names = [f for f in files if f.upper() == 'OWNERS']
if owners_file_names:
+ file_depth = dirname[len(root) + len(os.path.sep):].count(os.path.sep)
+ num_total += 1
+ num_total_by_depth[file_depth] += 1
owners_full_path = os.path.join(dirname, owners_file_names[0])
owners_rel_path = os.path.relpath(owners_full_path, root)
team, component = parse(owners_full_path)
if component:
+ num_with_component += 1
+ num_with_component_by_depth[file_depth] += 1
dir_to_component[os.path.relpath(dirname, root)] = component
if team:
+ num_with_team_component += 1
+ num_with_team_component_by_depth[file_depth] += 1
component_to_team[component].add(team)
else:
warnings.append('%s has no COMPONENT tag' % owners_rel_path)
mappings = {'component-to-team': component_to_team,
'dir-to-component': dir_to_component}
errors = validate_one_team_per_component(mappings)
- return unwrap(mappings), warnings, errors
+ stats = {'OWNERS-count': num_total,
+ 'OWNERS-with-component-only-count': num_with_component,
+ 'OWNERS-with-team-and-component-count': num_with_team_component,
+ 'OWNERS-count-by-depth': num_total_by_depth,
+ 'OWNERS-with-component-only-count-by-depth':
+ num_with_component_by_depth,
+ 'OWNERS-with-team-and-component-count-by-depth':
+ num_with_team_component_by_depth}
+ return unwrap(mappings), warnings, errors, stats
def validate_one_team_per_component(m):
diff --git a/chromium/tools/chrome_proxy/common/chrome_proxy_measurements.py b/chromium/tools/chrome_proxy/common/chrome_proxy_measurements.py
index 36b06143f14..b62920831eb 100644
--- a/chromium/tools/chrome_proxy/common/chrome_proxy_measurements.py
+++ b/chromium/tools/chrome_proxy/common/chrome_proxy_measurements.py
@@ -51,14 +51,16 @@ def WaitForViaHeader(tab, url="http://check.googlezip.net/test.html"):
'</body></html>'))
# Ensure the page has finished loading before attempting the DRP check.
- tab.WaitForJavaScriptExpression('performance.timing.loadEventEnd', 60)
+ tab.WaitForJavaScriptCondition('performance.timing.loadEventEnd', timeout=60)
expected_via_header = metrics.CHROME_PROXY_VIA_HEADER
if ChromeProxyValidation.extra_via_header:
expected_via_header = ChromeProxyValidation.extra_via_header
- tab.WaitForJavaScriptExpression(
- 'PollDRPCheck("%s", "%s")' % (url, expected_via_header), 60)
+ tab.WaitForJavaScriptCondition(
+ 'PollDRPCheck({{ url }}, {{ via_header }})',
+ url=url, via_header=expected_via_header,
+ timeout=60)
class ChromeProxyValidation(legacy_page_test.LegacyPageTest):
@@ -100,7 +102,8 @@ class ChromeProxyValidation(legacy_page_test.LegacyPageTest):
def ValidateAndMeasurePage(self, page, tab, results):
self._page = page
# Wait for the load event.
- tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300)
+ tab.WaitForJavaScriptCondition(
+ 'performance.timing.loadEventStart', timeout=300)
assert self._metrics
self._metrics.Stop(page, tab)
if ChromeProxyValidation.extra_via_header:
diff --git a/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_measurements.py b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_measurements.py
index 6754a60fae6..7b07d760fe4 100644
--- a/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_measurements.py
+++ b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_measurements.py
@@ -47,7 +47,8 @@ class ChromeProxyDataSaving(legacy_page_test.LegacyPageTest):
def ValidateAndMeasurePage(self, page, tab, results):
# Wait for the load event.
- tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300)
+ tab.WaitForJavaScriptCondition(
+ 'performance.timing.loadEventStart', timeout=300)
self._metrics.Stop(page, tab)
self._metrics.AddResultsForDataSaving(tab, results)
@@ -123,7 +124,7 @@ class ChromeProxyCorsBypass(ChromeProxyValidation):
def ValidateAndMeasurePage(self, page, tab, results):
# The test page sets window.xhrRequestCompleted to true when the XHR fetch
# finishes.
- tab.WaitForJavaScriptExpression('window.xhrRequestCompleted', 300)
+ tab.WaitForJavaScriptCondition('window.xhrRequestCompleted', timeout=300)
super(ChromeProxyCorsBypass,
self).ValidateAndMeasurePage(page, tab, results)
@@ -501,7 +502,8 @@ class ChromeProxyHTTPToDirectFallback(ChromeProxyValidation):
# cause a proxy fallback, and have this test run starting from the HTTP
# fallback proxy.
tab.Navigate(_TEST_SERVER_DEFAULT_URL)
- tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300)
+ tab.WaitForJavaScriptCondition(
+ 'performance.timing.loadEventStart', timeout=300)
def AddResults(self, tab, results):
self._metrics.AddResultsForHTTPToDirectFallback(tab, results, _TEST_SERVER)
@@ -602,7 +604,8 @@ class ChromeProxyQuicSmoke(legacy_page_test.LegacyPageTest):
def ValidateAndMeasurePage(self, page, tab, results):
# Wait for the load event.
- tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300)
+ tab.WaitForJavaScriptCondition(
+ 'performance.timing.loadEventStart', timeout=300)
self._metrics.Stop(page, tab)
page_to_metrics = {
'header validation': [self._metrics.AddResultsForHeaderValidation],
@@ -800,6 +803,7 @@ class ChromeProxyQuicTransaction(legacy_page_test.LegacyPageTest):
def ValidateAndMeasurePage(self, page, tab, results):
# Wait for the load event.
- tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300)
+ tab.WaitForJavaScriptCondition(
+ 'performance.timing.loadEventStart', timeout=300)
self._metrics.Stop(page, tab)
self._metrics.AddResultsForQuicTransaction(tab, results)
diff --git a/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_metrics.py b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_metrics.py
index 7651298ead0..004b2285c58 100644
--- a/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_metrics.py
+++ b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_metrics.py
@@ -431,13 +431,13 @@ class ChromeProxyMetric(network_metrics.NetworkMetric):
def AddResultsForHTML5Test(self, tab, results):
# Wait for the number of "points" of HTML5 compatibility to appear to verify
# the HTML5 elements have loaded successfully.
- tab.WaitForJavaScriptExpression(
- 'document.getElementsByClassName("pointsPanel")', 15)
+ tab.WaitForJavaScriptCondition(
+ 'document.getElementsByClassName("pointsPanel")', timeout=15)
def AddResultsForYouTube(self, tab, results):
# Wait for the video to begin playing.
- tab.WaitForJavaScriptExpression(
- 'window.playerState == YT.PlayerState.PLAYING', 30)
+ tab.WaitForJavaScriptCondition(
+ 'window.playerState == YT.PlayerState.PLAYING', timeout=30)
def AddResultsForBypass(self, tab, results, url_pattern=""):
bypass_count = 0
@@ -671,7 +671,8 @@ class ChromeProxyMetric(network_metrics.NetworkMetric):
before_metrics = ChromeProxyMetric()
before_metrics.Start(results.current_page, tab)
tab.Navigate('http://chromeproxy-test.appspot.com/default')
- tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 10)
+ tab.WaitForJavaScriptCondition(
+ 'performance.timing.loadEventStart', timeout=10)
before_metrics.Stop(results.current_page, tab)
for resp in before_metrics.IterResponses(tab):
@@ -696,7 +697,8 @@ class ChromeProxyMetric(network_metrics.NetworkMetric):
after_metrics = ChromeProxyMetric()
after_metrics.Start(results.current_page, tab)
tab.Navigate('http://chromeproxy-test.appspot.com/default')
- tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 10)
+ tab.WaitForJavaScriptCondition(
+ 'performance.timing.loadEventStart', timeout=10)
after_metrics.Stop(results.current_page, tab)
for resp in after_metrics.IterResponses(tab):
@@ -746,7 +748,8 @@ class ChromeProxyMetric(network_metrics.NetworkMetric):
before_metrics = ChromeProxyMetric()
before_metrics.Start(results.current_page, tab)
tab.Navigate('http://chromeproxy-test.appspot.com/default')
- tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 10)
+ tab.WaitForJavaScriptCondition(
+ 'performance.timing.loadEventStart', timeout=10)
before_metrics.Stop(results.current_page, tab)
for resp in before_metrics.IterResponses(tab):
@@ -773,7 +776,8 @@ class ChromeProxyMetric(network_metrics.NetworkMetric):
after_metrics = ChromeProxyMetric()
after_metrics.Start(results.current_page, tab)
tab.Navigate('http://chromeproxy-test.appspot.com/default')
- tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 10)
+ tab.WaitForJavaScriptCondition(
+ 'performance.timing.loadEventStart', timeout=10)
after_metrics.Stop(results.current_page, tab)
for resp in after_metrics.IterResponses(tab):
@@ -984,13 +988,15 @@ class ChromeProxyVideoMetric(network_metrics.NetworkMetric):
super(ChromeProxyVideoMetric, self).Start(page, tab)
def Stop(self, page, tab):
- tab.WaitForJavaScriptExpression('window.__chromeProxyVideoLoaded', 30)
+ tab.WaitForJavaScriptCondition(
+ 'window.__chromeProxyVideoLoaded', timeout=30)
m = tab.EvaluateJavaScript('window.__chromeProxyVideoMetrics')
# Now wait for the video to stop playing.
# Give it 2x the total duration to account for buffering.
waitTime = 2 * m['video_duration']
- tab.WaitForJavaScriptExpression('window.__chromeProxyVideoEnded', waitTime)
+ tab.WaitForJavaScriptCondition(
+ 'window.__chromeProxyVideoEnded', timeout=waitTime)
# Load the final metrics.
m = tab.EvaluateJavaScript('window.__chromeProxyVideoMetrics')
@@ -1068,7 +1074,7 @@ class ChromeProxyInstrumentedVideoMetric(Metric):
def Stop(self, page, tab):
waitTime = tab.EvaluateJavaScript('test.waitTime')
- tab.WaitForJavaScriptExpression('test.metrics.complete', waitTime)
+ tab.WaitForJavaScriptCondition('test.metrics.complete', timeout=waitTime)
super(ChromeProxyInstrumentedVideoMetric, self).Stop(page, tab)
def AddResults(self, tab, results):
diff --git a/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/block_once.py b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/block_once.py
index 30def663457..395ca19dcf2 100644
--- a/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/block_once.py
+++ b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/block_once.py
@@ -36,7 +36,7 @@ class BlockOncePage(page_module.Page):
})();
''')
action_runner.WaitForJavaScriptCondition(
- "window.post_request_completed == true", timeout_in_seconds=30)
+ "window.post_request_completed == true", timeout=30)
class BlockOnceStorySet(story.StorySet):
diff --git a/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/pass_through.py b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/pass_through.py
index c19cfc3e73e..cc1d267d32f 100644
--- a/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/pass_through.py
+++ b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/pass_through.py
@@ -20,10 +20,10 @@ class PassThroughPage(page_module.Page):
action_runner.ExecuteJavaScript('''
(function() {
var request = new XMLHttpRequest();
- request.open("GET", "%s");
+ request.open("GET", {{ url }});
request.setRequestHeader("Chrome-Proxy-Accept-Transform", "identity");
request.send(null);
- })();''' % (self.url))
+ })();''', url=self.url)
action_runner.Wait(1)
diff --git a/chromium/tools/chrome_proxy/live_tests/chrome_proxy_measurements.py b/chromium/tools/chrome_proxy/live_tests/chrome_proxy_measurements.py
index 4b9fe6756ad..126eb89bdb7 100644
--- a/chromium/tools/chrome_proxy/live_tests/chrome_proxy_measurements.py
+++ b/chromium/tools/chrome_proxy/live_tests/chrome_proxy_measurements.py
@@ -22,7 +22,8 @@ class ChromeProxyLatencyBase(legacy_page_test.LegacyPageTest):
def ValidateAndMeasurePage(self, page, tab, results):
# Wait for the load event.
- tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300)
+ tab.WaitForJavaScriptCondition(
+ 'performance.timing.loadEventStart', timeout=300)
self._metrics.Stop(page, tab)
self._metrics.AddResultsForLatency(tab, results)
@@ -61,7 +62,8 @@ class ChromeProxyDataSavingBase(legacy_page_test.LegacyPageTest):
def ValidateAndMeasurePage(self, page, tab, results):
# Wait for the load event.
- tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300)
+ tab.WaitForJavaScriptCondition(
+ 'performance.timing.loadEventStart', timeout=300)
self._metrics.Stop(page, tab)
self._metrics.AddResultsForDataSaving(tab, results)
diff --git a/chromium/tools/chrome_proxy/webdriver/bypass.py b/chromium/tools/chrome_proxy/webdriver/bypass.py
new file mode 100644
index 00000000000..3b7798ae6c1
--- /dev/null
+++ b/chromium/tools/chrome_proxy/webdriver/bypass.py
@@ -0,0 +1,107 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import common
+from common import TestDriver
+from common import IntegrationTest
+
+
+class Bypass(IntegrationTest):
+
+ # Ensure Chrome does not use Data Saver for block-once, but does use Data
+ # Saver for a subsequent request.
+ def testBlockOnce(self):
+ with TestDriver() as t:
+ t.AddChromeArg('--enable-spdy-proxy-auth')
+ t.LoadURL('http://check.googlezip.net/blocksingle/')
+ responses = t.GetHTTPResponses()
+ self.assertEqual(2, len(responses))
+ for response in responses:
+ if response.url == "http://check.googlezip.net/image.png":
+ self.assertHasChromeProxyViaHeader(response)
+ else:
+ self.assertNotHasChromeProxyViaHeader(response)
+
+ # Ensure Chrome does not use Data Saver for block=0, which uses the default
+ # proxy retry delay.
+ def testBypass(self):
+ with TestDriver() as t:
+ t.AddChromeArg('--enable-spdy-proxy-auth')
+ t.LoadURL('http://check.googlezip.net/block/')
+ for response in t.GetHTTPResponses():
+ self.assertNotHasChromeProxyViaHeader(response)
+
+ # Load another page and check that Data Saver is not used.
+ t.LoadURL('http://check.googlezip.net/test.html')
+ for response in t.GetHTTPResponses():
+ self.assertNotHasChromeProxyViaHeader(response)
+
+ # Ensure Chrome does not use Data Saver for HTTPS requests.
+ def testHttpsBypass(self):
+ with TestDriver() as t:
+ t.AddChromeArg('--enable-spdy-proxy-auth')
+
+ # Load HTTP page and check that Data Saver is used.
+ t.LoadURL('http://check.googlezip.net/test.html')
+ responses = t.GetHTTPResponses()
+ self.assertEqual(2, len(responses))
+ for response in responses:
+ self.assertHasChromeProxyViaHeader(response)
+
+ # Load HTTPS page and check that Data Saver is not used.
+ t.LoadURL('https://check.googlezip.net/test.html')
+ responses = t.GetHTTPResponses()
+ self.assertEqual(2, len(responses))
+ for response in responses:
+ self.assertNotHasChromeProxyViaHeader(response)
+
+ # Verify that CORS requests receive a block-once from the data reduction
+ # proxy by checking that those requests are retried without data reduction
+ # proxy.
+ def testCorsBypass(self):
+ with TestDriver() as test_driver:
+ test_driver.AddChromeArg('--enable-spdy-proxy-auth')
+ test_driver.LoadURL('http://www.gstatic.com/chrome/googlezip/cors/')
+
+ # Navigate to a different page to verify that later requests are not
+ # blocked.
+ test_driver.LoadURL('http://check.googlezip.net/test.html')
+
+ cors_requests = 0
+ same_origin_requests = 0
+ for response in test_driver.GetHTTPResponses():
+ # The origin header implies that |response| is a CORS request.
+ if ('origin' not in response.request_headers):
+ self.assertHasChromeProxyViaHeader(response)
+ same_origin_requests = same_origin_requests + 1
+ else:
+ self.assertNotHasChromeProxyViaHeader(response)
+ cors_requests = cors_requests + 1
+ # Verify that both CORS and same origin requests were seen.
+ self.assertNotEqual(0, same_origin_requests)
+ self.assertNotEqual(0, cors_requests)
+
+ # Verify that when an origin times out using Data Saver, the request is
+ # fetched directly and data saver is bypassed only for one request.
+ def testOriginTimeoutBlockOnce(self):
+ with TestDriver() as test_driver:
+ test_driver.AddChromeArg('--enable-spdy-proxy-auth')
+
+ # Load URL that times out when the proxy server tries to access it.
+ test_driver.LoadURL('http://chromeproxy-test.appspot.com/blackhole')
+ responses = test_driver.GetHTTPResponses()
+ self.assertNotEqual(0, len(responses))
+ for response in responses:
+ self.assertNotHasChromeProxyViaHeader(response)
+
+ # Load HTTP page and check that Data Saver is used.
+ test_driver.LoadURL('http://check.googlezip.net/test.html')
+ responses = test_driver.GetHTTPResponses()
+ self.assertNotEqual(0, len(responses))
+ for response in responses:
+ self.assertHasChromeProxyViaHeader(response)
+
+
+if __name__ == '__main__':
+ IntegrationTest.RunAllTests()
diff --git a/chromium/tools/chrome_proxy/webdriver/common.py b/chromium/tools/chrome_proxy/webdriver/common.py
index 6417d8a1ea8..57bab09efb6 100644
--- a/chromium/tools/chrome_proxy/webdriver/common.py
+++ b/chromium/tools/chrome_proxy/webdriver/common.py
@@ -127,6 +127,8 @@ class TestDriver:
_driver: A reference to the driver object from the Chrome Driver library.
_chrome_args: A set of string arguments to start Chrome with.
_url: The string URL that Chrome will navigate to for this test.
+ _has_logs: Boolean flag set when a page is loaded and cleared when logs are
+ fetched.
"""
def __init__(self):
@@ -135,6 +137,7 @@ class TestDriver:
self._chrome_args = set()
self._url = ''
self._logger = GetLogger(name='TestDriver')
+ self._has_logs = False
def __enter__(self):
return self
@@ -283,6 +286,7 @@ class TestDriver:
self._logger.debug('Set page load timeout to %f seconds', timeout)
self._driver.get(self._url)
self._logger.debug('Loaded page %s', url)
+ self._has_logs = True
def ExecuteJavascript(self, script, timeout=30):
"""Executes the given javascript in the browser's current page in an
@@ -323,9 +327,18 @@ class TestDriver:
"""
return self.ExecuteJavascript("return " + script, timeout)
- def GetHistogram(self, histogram):
+ def GetHistogram(self, histogram, timeout=30):
+ """Gets a Chrome histogram as a dictionary object.
+
+ Args:
+ histogram: the name of the histogram to fetch
+ timeout: timeout for the underlying Javascript query.
+
+ Returns:
+ A dictionary object containing information about the histogram.
+ """
js_query = 'statsCollectionController.getBrowserHistogram("%s")' % histogram
- string_response = self.ExecuteJavascriptStatement(js_query)
+ string_response = self.ExecuteJavascriptStatement(js_query, timeout)
self._logger.debug('Got %s histogram=%s', histogram, string_response)
return json.loads(string_response)
@@ -355,7 +368,8 @@ class TestDriver:
return result
def GetPerformanceLogs(self, method_filter=r'Network\.responseReceived'):
- """Returns all logged Performance events from Chrome.
+ """Returns all logged Performance events from Chrome. Raises an Exception if
+ no pages have been loaded since the last time this function was called.
Args:
method_filter: A regex expression to match the method of logged events
@@ -364,6 +378,8 @@ class TestDriver:
Performance logs as a list of dicts, since the last time this function was
called.
"""
+ if not self._has_logs:
+ raise Exception('No pages loaded since last Network log query!')
all_messages = []
for log in self._driver.execute('getLog', {'type': 'performance'})['value']:
message = json.loads(log['message'])['message']
@@ -372,14 +388,37 @@ class TestDriver:
all_messages.append(message)
self._logger.info('Got %d performance logs with filter method=%s',
len(all_messages), method_filter)
+ self._has_logs = False
return all_messages
+ def SleepUntilHistogramHasEntry(self, histogram_name, sleep_intervals=10):
+ """Polls for the given histogram in intervals of up to 6 seconds each (a
+ 5 second script timeout plus a 1 second sleep); the default of 10
+ intervals therefore allows up to 60 seconds before giving up.
+
+ Args:
+ histogram_name: The name of the histogram to wait for
+ sleep_intervals: The number of polling intervals, each polling cycle takes
+ no more than 6 seconds.
+ Returns:
+ Whether the histogram exists
+ """
+ histogram = {}
+ while(not histogram and sleep_intervals > 0):
+ histogram = self.GetHistogram(histogram_name, 5)
+ if (not histogram):
+ time.sleep(1)
+ sleep_intervals -= 1
+
+ return bool(histogram)
+
def GetHTTPResponses(self, include_favicon=False, skip_domainless_pages=True):
"""Parses the Performance Logs and returns a list of HTTPResponse objects.
Use caution when calling this function multiple times. Only responses
since the last time this function was called are returned (or since Chrome
- started, whichever is later).
+ started, whichever is later). An Exception will be raised if no page was
+ loaded since the last time this function was called.
Args:
include_favicon: A bool that if True will include responses for favicons.
@@ -539,6 +578,65 @@ class IntegrationTest(unittest.TestCase):
self.assertNotIn(expected_via_header,
http_response.response_headers['via'])
+ def checkLoFiResponse(self, http_response, expected_lo_fi):
+ """Asserts that if expected the response headers contain the Lo-Fi directive
+ then the request headers do too. Also checks that the content size is less
+ than 100 if |expected_lo_fi|. Otherwise, checks that the response and
+ request headers don't contain the Lo-Fi directive and the content size is
+ greater than 100.
+
+ Args:
+ http_response: The HTTPResponse object to check.
+ expected_lo_fi: Whether the response should be Lo-Fi.
+
+ Returns:
+ Whether the response was Lo-Fi.
+ """
+
+ if (expected_lo_fi):
+ self.assertHasChromeProxyViaHeader(http_response)
+ content_length = http_response.response_headers['content-length']
+ cpat_request = http_response.request_headers[
+ 'chrome-proxy-accept-transform']
+ cpct_response = http_response.response_headers[
+ 'chrome-proxy-content-transform']
+ if ('empty-image' in cpct_response):
+ self.assertIn('empty-image', cpat_request)
+ self.assertTrue(int(content_length) < 100)
+ return True;
+ return False;
+ else:
+ self.assertNotIn('chrome-proxy-accept-transform',
+ http_response.request_headers)
+ self.assertNotIn('chrome-proxy-content-transform',
+ http_response.response_headers)
+ content_length = http_response.response_headers['content-length']
+ self.assertTrue(int(content_length) > 100)
+ return False;
+
+ def checkLitePageResponse(self, http_response):
+ """Asserts that if the response headers contain the Lite Page directive then
+ the request headers do too.
+
+ Args:
+ http_response: The HTTPResponse object to check.
+
+ Returns:
+ Whether the response was a Lite Page.
+ """
+
+ self.assertHasChromeProxyViaHeader(http_response)
+ if ('chrome-proxy-content-transform' not in http_response.response_headers):
+ return False;
+ cpct_response = http_response.response_headers[
+ 'chrome-proxy-content-transform']
+ cpat_request = http_response.request_headers[
+ 'chrome-proxy-accept-transform']
+ if ('lite-page' in cpct_response):
+ self.assertIn('lite-page', cpat_request)
+ return True;
+ return False;
+
@staticmethod
def RunAllTests(run_all_tests=False):
"""A simple helper method to run all tests using unittest.main().
@@ -572,3 +670,71 @@ class IntegrationTest(unittest.TestCase):
testRunner = unittest.runner.TextTestRunner(verbosity=2,
failfast=flags.failfast, buffer=(not flags.disable_buffer))
testRunner.run(tests)
+
+# Platform-specific decorators.
+# These decorators can be used to only run a test function for certain platforms
+# by annotating the function with them.
+
+def AndroidOnly(func):
+ def wrapper(*args, **kwargs):
+ if ParseFlags().android:
+ func(*args, **kwargs)
+ else:
+ args[0].skipTest('This test runs on Android only.')
+ return wrapper
+
+def NotAndroid(func):
+ def wrapper(*args, **kwargs):
+ if not ParseFlags().android:
+ func(*args, **kwargs)
+ else:
+ args[0].skipTest('This test does not run on Android.')
+ return wrapper
+
+def WindowsOnly(func):
+ def wrapper(*args, **kwargs):
+ if sys.platform == 'win32':
+ func(*args, **kwargs)
+ else:
+ args[0].skipTest('This test runs on Windows only.')
+ return wrapper
+
+def NotWindows(func):
+ def wrapper(*args, **kwargs):
+ if sys.platform != 'win32':
+ func(*args, **kwargs)
+ else:
+ args[0].skipTest('This test does not run on Windows.')
+ return wrapper
+
+def LinuxOnly(func):
+ def wrapper(*args, **kwargs):
+ if sys.platform.startswith('linux'):
+ func(*args, **kwargs)
+ else:
+ args[0].skipTest('This test runs on Linux only.')
+ return wrapper
+
+def NotLinux(func):
+ def wrapper(*args, **kwargs):
+ if not sys.platform.startswith('linux'):
+ func(*args, **kwargs)
+ else:
+ args[0].skipTest('This test does not run on Linux.')
+ return wrapper
+
+def MacOnly(func):
+ def wrapper(*args, **kwargs):
+ if sys.platform == 'darwin':
+ func(*args, **kwargs)
+ else:
+ args[0].skipTest('This test runs on Mac OS only.')
+ return wrapper
+
+def NotMac(func):
+ def wrapper(*args, **kwargs):
+ if sys.platform != 'darwin':
+ func(*args, **kwargs)
+ else:
+ args[0].skipTest('This test does not run on Mac OS.')
+ return wrapper
diff --git a/chromium/tools/chrome_proxy/webdriver/compression_regression.py b/chromium/tools/chrome_proxy/webdriver/compression_regression.py
new file mode 100644
index 00000000000..f6b1ee63efe
--- /dev/null
+++ b/chromium/tools/chrome_proxy/webdriver/compression_regression.py
@@ -0,0 +1,219 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import datetime
+import json
+import math
+import subprocess
+import time
+
+import common
+from common import TestDriver
+from common import IntegrationTest
+
+# The maximum number of data points that will be saved.
+MAX_DATA_POINTS = 365
+
+# The persistent storage for compression data is kept in Google Storage with
+# this bucket name.
+BUCKET = 'chrome_proxy_compression'
+
+# The data file name in the Google Storage bucket, above. The data file is also
+# saved locally under the same name.
+DATA_FILE = 'compression_data.json'
+
+class CompressionRegression(IntegrationTest):
+ """This class is responsible for alerting the Chrome Proxy team to regression
+ in the compression metrics of the proxy. At present, this class will simply
+ gather data and save it to a Google Storage bucket. Once enough data has been
+ gathered to form a reasonable model, alerting will be added to check for
+ regression.
+
+ Before running the test, this class will fetch the JSON data file from Google
+ Storage in a subprocess and store it on the local disk with the same file
+ name. The data is then read from that file. After running the test, if the
+ data has changed the file will be uploaded back to Google Storage.
+
+ The JSON data object and data dict object used widely in this class has the
+ following structure:
+ {
+ "2017-02-28": {
+ "html": 0.314,
+ "jpg": 0.1337,
+ "png": 0.1234,
+ "mp4": 0.9876
+ }
+ }
+ where keys are date stamps in the form "YYYY-MM-DD", and each key in the child
+ object is the resource type with its compression value.
+
+ Also frequently referenced is the compression_average dict object, which
+ contains the compression data just now gathered from Chrome in
+ getCurrentCompressionMetrics(). That object has the following structure:
+ {
+ "test/html": 0.314,
+ "image/jpg": 0.1337,
+ "image/png": 0.1234,
+ "video/mp4": 0.9876
+ }
+ where keys are the content type with its compression value.
+
+ Due to the complexity of several methods in this class, a number of local
+ unit tests can be found at the bottom of this file.
+
+ Please note that while this test uses the IntegrationTest framework, it is
+ classified as a regression test.
+ """
+
+ def testCompression(self):
+ """This function is the main test function for regression compression
+ checking and facilitates the test with all of the helper functions'
+ behavior.
+ """
+ compression_average = self.getCurrentCompressionMetrics()
+ self.fetchFromGoogleStorage()
+ data = {}
+ with open(DATA_FILE, 'r') as data_fp:
+ data = json.load(data_fp)
+ if self.updateDataObject(compression_average, data):
+ with open(DATA_FILE, 'w') as data_fp:
+ json.dump(data, data_fp)
+ self.uploadToGoogleStorage()
+
+ def getCurrentCompressionMetrics(self):
+ """This function uses the ChromeDriver framework to open Chrome and navigate
+ to a number of static resources of different types, like jpg, png, mp4, gif,
+ html. Multiple resources of a single type are supported. This function will
+ check that each resource was fetched via the Chrome Proxy, and then compute
+ the compression as a percentage from the Content-Length and
+ X-Original-Content-Length headers where compression = 1 - (cl / xocl). The
+ function will then return the average compression for each of the resource
+ types.
+
+ Returns:
+ a dict object mapping resource type to compression
+ """
+ def AddToCompression(compression, key, value):
+ if key in compression:
+ compression[key].append(value)
+ else:
+ compression[key] = [value]
+ with TestDriver() as t:
+ t.AddChromeArg('--enable-spdy-proxy-auth')
+ t.AddChromeArg('--data-reduction-proxy-server-experiments-disabled')
+ t.LoadURL('http://check.googlezip.net/metrics/local.gif')
+ t.LoadURL('http://check.googlezip.net/metrics/local.png')
+ t.LoadURL('http://check.googlezip.net/metrics/local.jpg')
+ t.LoadURL(
+ 'http://check.googlezip.net/cacheable/video/buck_bunny_tiny.html')
+ compression = {}
+ for response in t.GetHTTPResponses():
+ # Check that the response was proxied.
+ self.assertHasChromeProxyViaHeader(response)
+ # Compute compression metrics.
+ cl = response.response_headers['content-length']
+ ocl = response.response_headers['x-original-content-length']
+ content_type = response.response_headers['content-type']
+ compression_rate = 1.0 - (float(cl) / float(ocl))
+ if 'html' in response.response_headers['content-type']:
+ AddToCompression(compression, 'html', compression_rate)
+ else:
+ resource = response.url[response.url.rfind('/'):]
+ AddToCompression(compression, resource[resource.rfind('.') + 1:],
+ compression_rate)
+ # Compute the average compression for each resource type.
+ compression_average = {}
+ for resource_type in compression:
+ compression_average[resource_type] = (sum(compression[resource_type]) /
+ float(len(compression[resource_type])))
+ return compression_average
+
+ def updateDataObject(self, compression_average, data,
+ today=datetime.date.today()):
+ """This function handles the updating of the data object when new data is
+ available. Given the existing data object, the results of the
+ getCurrentCompressionMetrics() func, and a date object, it will check if
+ data exists for today. If it does, the method will do nothing and return
+ False. Otherwise, it will update the data object with the compression data.
+ If needed, it will also find the least recent entry in the data object and
+ remove it.
+
+ Args:
+ compression_average: the compression data from
+ getCurrentCompressionMetrics()
+ data: the current data object, a dict
+ today: a date object, specifiable here for testing purposes.
+ Returns:
+ True iff the data object was changed
+ """
+ datestamp = today.strftime('%Y-%m-%d')
+ # Check if this data has already been recorded.
+ if datestamp in data:
+ return False
+ # Append new data, removing the least recent if needed.
+ data[datestamp] = compression_average
+ if len(data) > MAX_DATA_POINTS:
+ min_date = None
+ for date_str in data:
+ date = datetime.date(*[int(d) for d in date_str.split('-')])
+ if min_date == None or date < min_date:
+ min_date = date
+ del data[min_date.strftime('%Y-%m-%d')]
+ return True
+
+ def uploadToGoogleStorage(self):
+ """This function uses the gsutil command to upload the local data file to
+ Google Storage.
+ """
+ gs_location = 'gs://%s/%s' % (BUCKET, DATA_FILE)
+ cmd = ['gsutil', 'cp', DATA_FILE, gs_location]
+ proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ stdout, stderr = proc.communicate()
+ if proc.returncode:
+ raise Exception('Uploading to Google Storage failed! output: %s %s' %
+ (stdout, stderr))
+
+ def fetchFromGoogleStorage(self):
+ """This function uses the gsutil command to fetch the local data file from
+ Google Storage.
+ """
+ gs_location = 'gs://%s/%s' % (BUCKET, DATA_FILE)
+ cmd = ['gsutil', 'cp', gs_location, DATA_FILE]
+ proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ stdout, stderr = proc.communicate()
+ if proc.returncode:
+ raise Exception('Fetching from Google Storage failed! output: %s %s' %
+ (stdout, stderr))
+
+ def test0UpdateDataObject_NoUpdate(self):
+ """This unit test asserts that the updateDataObject() function doesn't
+ update the data object when today is already contained in the data object.
+ """
+ data = { '2017-02-06': {'hello': 'world'}}
+ new_data = {'Benoit': 'Mandelbrot'}
+ test_day = datetime.date(2017, 2, 6)
+ changed = self.updateDataObject(new_data, data, today=test_day)
+ self.assertFalse(changed, "No data should have been recorded!")
+
+ def test0UpdateDataObject_Update(self):
+ """This unit test asserts that the updateDataObject() function updates the
+ data object when there is new data available, also removing the least recent
+ data point.
+ """
+ start_date = datetime.date(2017, 2, 6)
+ data = {}
+ for i in range(MAX_DATA_POINTS):
+ date_obj = start_date + datetime.timedelta(days=i)
+ datestamp = date_obj.strftime('%Y-%m-%d')
+ data[datestamp] = {'hello': 'world'}
+ new_data = {'Benoit': 'Mandelbrot'}
+ test_day = datetime.date(2017, 2, 6) + datetime.timedelta(
+ days=(MAX_DATA_POINTS))
+ changed = self.updateDataObject(new_data, data, today=test_day)
+ self.assertTrue(changed, "Data should have been recorded!")
+ self.assertNotIn('2017-02-06', data)
+ self.assertIn(test_day.strftime('%Y-%m-%d'), data)
+
+if __name__ == '__main__':
+ IntegrationTest.RunAllTests()
diff --git a/chromium/tools/chrome_proxy/webdriver/decorator_smoke.py b/chromium/tools/chrome_proxy/webdriver/decorator_smoke.py
new file mode 100644
index 00000000000..e9a6c75eef2
--- /dev/null
+++ b/chromium/tools/chrome_proxy/webdriver/decorator_smoke.py
@@ -0,0 +1,24 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from common import AndroidOnly
+from common import ParseFlags
+from common import IntegrationTest
+
+
+class DecoratorSmokeTest(IntegrationTest):
+
+ def AndroidOnlyFunction(self):
+ # This function should never be called.
+ self.fail()
+
+ @AndroidOnly
+ def testDecorator(self):
+ # This test should always result as 'skipped' or pass if --android given.
+ if not ParseFlags().android:
+ self.AndroidOnlyFunction()
+
+
+if __name__ == '__main__':
+ IntegrationTest.RunAllTests()
diff --git a/chromium/tools/chrome_proxy/webdriver/examples.py b/chromium/tools/chrome_proxy/webdriver/examples.py
index 31d37bfab06..fcf7ae27a40 100644
--- a/chromium/tools/chrome_proxy/webdriver/examples.py
+++ b/chromium/tools/chrome_proxy/webdriver/examples.py
@@ -10,7 +10,7 @@ from common import TestDriver
from common import IntegrationTest
-class SimpleSmoke(IntegrationTest):
+class Examples(IntegrationTest):
# Simple example integration test.
def testCheckPageWithProxy(self):
diff --git a/chromium/tools/chrome_proxy/webdriver/fallback.py b/chromium/tools/chrome_proxy/webdriver/fallback.py
new file mode 100644
index 00000000000..4de3956fc74
--- /dev/null
+++ b/chromium/tools/chrome_proxy/webdriver/fallback.py
@@ -0,0 +1,40 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import common
+from common import TestDriver
+from common import IntegrationTest
+
+class Fallback(IntegrationTest):
+
+  # Ensure that when a carrier blocks using the secure proxy, requests fall
+  # back to the HTTP proxy server.
+ def testSecureProxyProbeFallback(self):
+ with TestDriver() as test_driver:
+ test_driver.AddChromeArg('--enable-spdy-proxy-auth')
+
+ # Set the secure proxy check URL to the google.com favicon, which will be
+ # interpreted as a secure proxy check failure since the response body is
+ # not "OK". The google.com favicon is used because it will load reliably
+ # fast, and there have been problems with chromeproxy-test.appspot.com
+ # being slow and causing tests to flake.
+ test_driver.AddChromeArg(
+ '--data-reduction-proxy-secure-proxy-check-url='
+ 'http://www.google.com/favicon.ico')
+
+ # Start chrome to begin the secure proxy check
+ test_driver.LoadURL('http://www.google.com/favicon.ico')
+
+ self.assertTrue(
+ test_driver.SleepUntilHistogramHasEntry("DataReductionProxy.ProbeURL"))
+
+ test_driver.LoadURL('http://check.googlezip.net/test.html')
+ responses = test_driver.GetHTTPResponses()
+ self.assertNotEqual(0, len(responses))
+ for response in responses:
+ self.assertHasChromeProxyViaHeader(response)
+ self.assertEqual(u'http/1.1', response.protocol)
+
+if __name__ == '__main__':
+ IntegrationTest.RunAllTests()
diff --git a/chromium/tools/chrome_proxy/webdriver/html5.py b/chromium/tools/chrome_proxy/webdriver/html5.py
new file mode 100644
index 00000000000..036b019e715
--- /dev/null
+++ b/chromium/tools/chrome_proxy/webdriver/html5.py
@@ -0,0 +1,30 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import common
+from common import TestDriver
+from common import IntegrationTest
+
+
+class HTML5(IntegrationTest):
+
+  # This test site has a div with class="pointsPanel" that is rendered if the
+  # browser is capable of using HTML5.
+ def testHTML5(self):
+ with TestDriver() as t:
+ t.AddChromeArg('--enable-spdy-proxy-auth')
+ t.LoadURL('http://html5test.com/')
+ t.WaitForJavascriptExpression(
+ 'document.getElementsByClassName("pointsPanel")', 15)
+ checked_main_page = False
+ for response in t.GetHTTPResponses():
+ # Site has a lot on it, just check the main page.
+ if (response.url == 'http://html5test.com/'
+ or response.url == 'http://html5test.com/index.html'):
+ self.assertHasChromeProxyViaHeader(response)
+ checked_main_page = True
+ if not checked_main_page:
+ self.fail("Did not check any page!")
+if __name__ == '__main__':
+ IntegrationTest.RunAllTests()
diff --git a/chromium/tools/chrome_proxy/webdriver/lite_page.py b/chromium/tools/chrome_proxy/webdriver/lite_page.py
new file mode 100644
index 00000000000..a833813ab74
--- /dev/null
+++ b/chromium/tools/chrome_proxy/webdriver/lite_page.py
@@ -0,0 +1,115 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import common
+from common import TestDriver
+from common import IntegrationTest
+
+
+class LitePage(IntegrationTest):
+
+ # Checks that a Lite Page is served and that the ignore_preview_blacklist
+ # experiment is being used.
+ def testLitePage(self):
+ with TestDriver() as test_driver:
+ test_driver.AddChromeArg('--enable-spdy-proxy-auth')
+ test_driver.AddChromeArg('--data-reduction-proxy-lo-fi=always-on')
+ test_driver.AddChromeArg('--enable-data-reduction-proxy-lite-page')
+
+ test_driver.LoadURL('http://check.googlezip.net/test.html')
+
+ lite_page_responses = 0
+ for response in test_driver.GetHTTPResponses():
+ # Skip CSI requests when validating Lite Page headers. CSI requests
+ # aren't expected to have LoFi headers.
+ if '/csi?' in response.url:
+ continue
+ if response.url.startswith('data:'):
+ continue
+ self.assertIn('exp=ignore_preview_blacklist',
+ response.request_headers['chrome-proxy'])
+ if (self.checkLitePageResponse(response)):
+ lite_page_responses = lite_page_responses + 1
+
+ # Verify that a Lite Page response for the main frame was seen.
+ self.assertEqual(1, lite_page_responses)
+
+ # Checks that Lo-Fi images are used when the user is in the
+ # DataCompressionProxyLitePageFallback field trial and a Lite Page is not
+ # served.
+ def testLitePageFallback(self):
+ with TestDriver() as test_driver:
+ test_driver.AddChromeArg('--enable-spdy-proxy-auth')
+ test_driver.AddChromeArg('--force-fieldtrials='
+ 'DataCompressionProxyLoFi/Enabled_Preview/'
+ 'DataCompressionProxyLitePageFallback/Enabled')
+ test_driver.AddChromeArg('--force-fieldtrial-params='
+ 'DataCompressionProxyLoFi.Enabled_Preview:'
+ 'effective_connection_type/4G')
+ test_driver.AddChromeArg('--force-net-effective-connection-type=2g')
+
+ test_driver.LoadURL('http://check.googlezip.net/lite-page-fallback')
+
+ lite_page_requests = 0
+ lo_fi_responses = 0
+ for response in test_driver.GetHTTPResponses():
+ if not response.request_headers:
+ continue
+
+ cpat_request = response.request_headers['chrome-proxy-accept-transform']
+ if ('lite-page' in cpat_request):
+ lite_page_requests = lite_page_requests + 1
+ self.assertFalse(self.checkLitePageResponse(response))
+
+ if not response.url.endswith('png'):
+ continue
+
+ if (self.checkLoFiResponse(response, True)):
+ lo_fi_responses = lo_fi_responses + 1
+
+ # Verify that a Lite Page was requested and that the page fell back to
+ # Lo-Fi images.
+ self.assertEqual(1, lite_page_requests)
+ self.assertEqual(1, lo_fi_responses)
+
+ # Checks that Lo-Fi images are not used when the user is not in the
+ # DataCompressionProxyLitePageFallback field trial and a Lite Page is not
+ # served.
+ def testLitePageNoFallback(self):
+ with TestDriver() as test_driver:
+ test_driver.AddChromeArg('--enable-spdy-proxy-auth')
+ # Lite Pages must be enabled via the field trial because the Lite Page
+ # flag always falls back to Lo-Fi.
+ test_driver.AddChromeArg('--force-fieldtrials='
+ 'DataCompressionProxyLoFi/Enabled_Preview')
+ test_driver.AddChromeArg('--force-fieldtrial-params='
+ 'DataCompressionProxyLoFi.Enabled_Preview:'
+ 'effective_connection_type/4G')
+ test_driver.AddChromeArg('--force-net-effective-connection-type=2g')
+
+ test_driver.LoadURL('http://check.googlezip.net/lite-page-fallback')
+
+ lite_page_requests = 0
+ for response in test_driver.GetHTTPResponses():
+ if not response.request_headers:
+ continue
+
+ if ('chrome-proxy-accept-transform' in response.request_headers):
+ cpat_request = response.request_headers[
+ 'chrome-proxy-accept-transform']
+ if ('lite-page' in cpat_request):
+ lite_page_requests = lite_page_requests + 1
+ self.assertFalse(self.checkLitePageResponse(response))
+
+ if not response.url.endswith('png'):
+ continue
+
+ self.checkLoFiResponse(response, False)
+
+      # Verify that a Lite Page was requested and that the page did not fall
+      # back to Lo-Fi images.
+ self.assertEqual(1, lite_page_requests)
+
+if __name__ == '__main__':
+ IntegrationTest.RunAllTests()
diff --git a/chromium/tools/chrome_proxy/webdriver/lofi.py b/chromium/tools/chrome_proxy/webdriver/lofi.py
new file mode 100644
index 00000000000..fca1f8566c3
--- /dev/null
+++ b/chromium/tools/chrome_proxy/webdriver/lofi.py
@@ -0,0 +1,112 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import common
+from common import TestDriver
+from common import IntegrationTest
+
+
+class LoFi(IntegrationTest):
+
+ # Checks that the compressed image is below a certain threshold.
+ # The test page is uncacheable otherwise a cached page may be served that
+ # doesn't have the correct via headers.
+ def testLoFi(self):
+ with TestDriver() as test_driver:
+ test_driver.AddChromeArg('--enable-spdy-proxy-auth')
+ test_driver.AddChromeArg('--data-reduction-proxy-lo-fi=always-on')
+ # Disable server experiments such as tamper detection.
+ test_driver.AddChromeArg('--data-reduction-proxy-server-experiments-'
+ 'disabled')
+
+ test_driver.LoadURL('http://check.googlezip.net/static/index.html')
+
+ lofi_responses = 0
+ for response in test_driver.GetHTTPResponses():
+ if not response.url.endswith('png'):
+ continue
+ if not response.request_headers:
+ continue
+ if (self.checkLoFiResponse(response, True)):
+ lofi_responses = lofi_responses + 1
+
+ # Verify that Lo-Fi responses were seen.
+ self.assertNotEqual(0, lofi_responses)
+
+ # Checks that Lo-Fi placeholder images are not loaded from cache on page
+ # reloads when Lo-Fi mode is disabled or data reduction proxy is disabled.
+ # First a test page is opened with Lo-Fi and chrome proxy enabled. This allows
+ # Chrome to cache the Lo-Fi placeholder image. The browser is restarted with
+ # chrome proxy disabled and the same test page is loaded. This second page
+ # load should not pick the Lo-Fi placeholder from cache and original image
+ # should be loaded. Finally, the browser is restarted with chrome proxy
+ # enabled and Lo-Fi disabled and the same test page is loaded. This third page
+ # load should not pick the Lo-Fi placeholder from cache and original image
+ # should be loaded.
+ def testLoFiCacheBypass(self):
+ with TestDriver() as test_driver:
+ # First page load, enable Lo-Fi and chrome proxy. Disable server
+ # experiments such as tamper detection. This test should be run with
+ # --profile-type=default command line for the same user profile and cache
+ # to be used across the two page loads.
+ test_driver.AddChromeArg('--enable-spdy-proxy-auth')
+ test_driver.AddChromeArg('--data-reduction-proxy-lo-fi=always-on')
+ test_driver.AddChromeArg('--profile-type=default')
+ test_driver.AddChromeArg('--data-reduction-proxy-server-experiments-'
+ 'disabled')
+
+ test_driver.LoadURL('http://check.googlezip.net/cacheable/test.html')
+
+ lofi_responses = 0
+ for response in test_driver.GetHTTPResponses():
+ if not response.url.endswith('png'):
+ continue
+ if not response.request_headers:
+ continue
+ if (self.checkLoFiResponse(response, True)):
+ lofi_responses = lofi_responses + 1
+
+ # Verify that Lo-Fi responses were seen.
+ self.assertNotEqual(0, lofi_responses)
+
+ # Second page load with the chrome proxy off.
+ test_driver._StopDriver()
+ test_driver.RemoveChromeArg('--enable-spdy-proxy-auth')
+ test_driver.LoadURL('http://check.googlezip.net/cacheable/test.html')
+
+ responses = 0
+ for response in test_driver.GetHTTPResponses():
+ if not response.url.endswith('png'):
+ continue
+ if not response.request_headers:
+ continue
+ responses = responses + 1
+ self.assertNotHasChromeProxyViaHeader(response)
+ self.checkLoFiResponse(response, False)
+
+ # Verify that responses were seen.
+ self.assertNotEqual(0, responses)
+
+ # Third page load with the chrome proxy on and Lo-Fi off.
+ test_driver._StopDriver()
+ test_driver.AddChromeArg('--enable-spdy-proxy-auth')
+ test_driver.RemoveChromeArg('--data-reduction-proxy-lo-fi=always-on')
+ test_driver.AddChromeArg('--data-reduction-proxy-lo-fi=disabled')
+ test_driver.LoadURL('http://check.googlezip.net/cacheable/test.html')
+
+ responses = 0
+ for response in test_driver.GetHTTPResponses():
+ if not response.url.endswith('png'):
+ continue
+ if not response.request_headers:
+ continue
+ responses = responses + 1
+ self.assertHasChromeProxyViaHeader(response)
+ self.checkLoFiResponse(response, False)
+
+ # Verify that responses were seen.
+ self.assertNotEqual(0, responses)
+
+if __name__ == '__main__':
+ IntegrationTest.RunAllTests()
diff --git a/chromium/tools/chrome_proxy/webdriver/safebrowsing.py b/chromium/tools/chrome_proxy/webdriver/safebrowsing.py
new file mode 100644
index 00000000000..cdfe2ec055b
--- /dev/null
+++ b/chromium/tools/chrome_proxy/webdriver/safebrowsing.py
@@ -0,0 +1,33 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import common
+from common import TestDriver
+from common import IntegrationTest
+from common import AndroidOnly
+from common import NotAndroid
+
+
+class SafeBrowsing(IntegrationTest):
+
+ @AndroidOnly
+ def testSafeBrowsingOn(self):
+ with TestDriver() as t:
+ t.AddChromeArg('--enable-spdy-proxy-auth')
+ t.LoadURL('http://testsafebrowsing.appspot.com/s/malware.html')
+ responses = t.GetHTTPResponses()
+ self.assertEqual(0, len(responses))
+
+ @NotAndroid
+ def testSafeBrowsingOff(self):
+ with TestDriver() as t:
+ t.AddChromeArg('--enable-spdy-proxy-auth')
+ t.LoadURL('http://testsafebrowsing.appspot.com/s/malware.html')
+ responses = t.GetHTTPResponses()
+ self.assertEqual(1, len(responses))
+ for response in responses:
+ self.assertHasChromeProxyViaHeader(response)
+
+if __name__ == '__main__':
+ IntegrationTest.RunAllTests()
diff --git a/chromium/tools/chrome_proxy/webdriver/smoke.py b/chromium/tools/chrome_proxy/webdriver/smoke.py
index 1f36d3a97f9..b54dd7fc224 100644
--- a/chromium/tools/chrome_proxy/webdriver/smoke.py
+++ b/chromium/tools/chrome_proxy/webdriver/smoke.py
@@ -7,7 +7,7 @@ from common import TestDriver
from common import IntegrationTest
-class Incognito(IntegrationTest):
+class Smoke(IntegrationTest):
# Ensure Chrome does not use DataSaver in Incognito mode.
def testCheckPageWithIncognito(self):
@@ -18,5 +18,18 @@ class Incognito(IntegrationTest):
for response in t.GetHTTPResponses():
self.assertNotHasChromeProxyViaHeader(response)
+ # Ensure Chrome uses DataSaver with QUIC enabled.
+ def testCheckPageWithQuicProxy(self):
+ with TestDriver() as t:
+ t.AddChromeArg('--enable-spdy-proxy-auth')
+ t.AddChromeArg('--enable-quic')
+ t.AddChromeArg('--data-reduction-proxy-http-proxies=https://proxy.googlezip.net:443')
+ t.AddChromeArg('--force-fieldtrials=DataReductionProxyUseQuic/Enabled')
+ t.LoadURL('http://check.googlezip.net/test.html')
+ responses = t.GetHTTPResponses()
+ self.assertEqual(2, len(responses))
+ for response in responses:
+ self.assertHasChromeProxyViaHeader(response)
+
if __name__ == '__main__':
IntegrationTest.RunAllTests()
diff --git a/chromium/tools/chrome_proxy/webdriver/video.py b/chromium/tools/chrome_proxy/webdriver/video.py
new file mode 100644
index 00000000000..c080b8b0c86
--- /dev/null
+++ b/chromium/tools/chrome_proxy/webdriver/video.py
@@ -0,0 +1,134 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import time
+
+import common
+from common import TestDriver
+from common import IntegrationTest
+from common import NotAndroid
+
+
+class Video(IntegrationTest):
+
+ # Check videos are proxied.
+ def testCheckVideoHasViaHeader(self):
+ with TestDriver() as t:
+ t.AddChromeArg('--enable-spdy-proxy-auth')
+ t.LoadURL(
+ 'http://check.googlezip.net/cacheable/video/buck_bunny_tiny.html')
+ for response in t.GetHTTPResponses():
+ self.assertHasChromeProxyViaHeader(response)
+
+ # Videos fetched via an XHR request should not be proxied.
+ def testNoCompressionOnXHR(self):
+ with TestDriver() as t:
+ t.AddChromeArg('--enable-spdy-proxy-auth')
+ # The test will actually use Javascript, so use a site that won't have any
+ # resources on it that could interfere.
+ t.LoadURL('http://check.googlezip.net/connect')
+ t.ExecuteJavascript(
+ 'var xhr = new XMLHttpRequest();'
+ 'xhr.open("GET", "/cacheable/video/data/buck_bunny_tiny.mp4", false);'
+ 'xhr.send();'
+ 'return;'
+ )
+ saw_video_response = False
+ for response in t.GetHTTPResponses():
+ if 'video' in response.response_headers['content-type']:
+ self.assertNotHasChromeProxyViaHeader(response)
+ saw_video_response = True
+ else:
+ self.assertHasChromeProxyViaHeader(response)
+ self.assertTrue(saw_video_response, 'No video request seen in test!')
+
+ # Check the compressed video has the same frame count, width, height, and
+ # duration as uncompressed.
+ def testVideoMetrics(self):
+ expected = {
+ 'duration': 3.124,
+ 'webkitDecodedFrameCount': 54.0,
+ 'videoWidth': 1280.0,
+ 'videoHeight': 720.0
+ }
+ with TestDriver() as t:
+ t.AddChromeArg('--enable-spdy-proxy-auth')
+ t.LoadURL('http://check.googlezip.net/cacheable/video/buck_bunny_tiny.html')
+ # Check request was proxied and we got a compressed video back.
+ for response in t.GetHTTPResponses():
+ self.assertHasChromeProxyViaHeader(response)
+ if ('content-type' in response.response_headers
+ and 'video' in response.response_headers['content-type']):
+ self.assertEqual('video/webm',
+ response.response_headers['content-type'])
+ t.ExecuteJavascriptStatement(
+ 'document.querySelectorAll("video")[0].play()')
+ # Wait for the video to finish playing, plus some headroom.
+ time.sleep(5)
+ # Check each metric against its expected value.
+ for metric in expected:
+ actual = float(t.ExecuteJavascriptStatement(
+ 'document.querySelectorAll("video")[0].%s' % metric))
+ self.assertAlmostEqual(expected[metric], actual, msg="Compressed video "
+ "metric doesn't match expected! Metric=%s Expected=%f Actual=%f"
+ % (metric, expected[metric], actual), places=None, delta=0.001)
+
+ # Check the frames of a compressed video.
+ def testVideoFrames(self):
+ self.instrumentedVideoTest('http://check.googlezip.net/cacheable/video/buck_bunny_640x360_24fps_video.html')
+
+ # Check the audio volume of a compressed video.
+ def testVideoAudio(self):
+ self.instrumentedVideoTest('http://check.googlezip.net/cacheable/video/buck_bunny_640x360_24fps_audio.html')
+
+ def instrumentedVideoTest(self, url):
+ """Run an instrumented video test. The given page is reloaded up to some
+ maximum number of times until a compressed video is seen by ChromeDriver by
+ inspecting the network logs. Once that happens, test.ready is set and that
+ will signal the Javascript test on the page to begin. Once it is complete,
+ check the results.
+ """
+ # The maximum number of times to attempt to reload the page for a compressed
+ # video.
+ max_attempts = 10
+ with TestDriver() as t:
+ t.AddChromeArg('--enable-spdy-proxy-auth')
+ loaded_compressed_video = False
+ attempts = 0
+ while not loaded_compressed_video and attempts < max_attempts:
+ t.LoadURL(url)
+ attempts += 1
+ for resp in t.GetHTTPResponses():
+ if ('content-type' in resp.response_headers
+ and resp.response_headers['content-type'] == 'video/webm'):
+ loaded_compressed_video = True
+ self.assertHasChromeProxyViaHeader(resp)
+ else:
+ # Take a breath before requesting again.
+ time.sleep(1)
+ if attempts >= max_attempts:
+ self.fail('Could not get a compressed video after %d tries' % attempts)
+ t.ExecuteJavascriptStatement('test.ready = true')
+ wait_time = int(t.ExecuteJavascriptStatement('test.waitTime'))
+ t.WaitForJavascriptExpression('test.metrics.complete', wait_time)
+ metrics = t.ExecuteJavascriptStatement('test.metrics')
+ if not metrics['complete']:
+ raise Exception('Test not complete after %d seconds.' % wait_time)
+ if metrics['failed']:
+ raise Exception('Test failed!')
+
+ # Make sure YouTube autoplays.
+ @NotAndroid
+ def testYoutube(self):
+ with TestDriver() as t:
+ t.AddChromeArg('--enable-spdy-proxy-auth')
+ t.LoadURL('http://data-saver-test.appspot.com/youtube')
+ t.WaitForJavascriptExpression(
+ 'window.playerState == YT.PlayerState.PLAYING', 30)
+ for response in t.GetHTTPResponses():
+ if not response.url.startswith('https'):
+ self.assertHasChromeProxyViaHeader(response)
+
+if __name__ == '__main__':
+ IntegrationTest.RunAllTests()
diff --git a/chromium/tools/clang/OWNERS b/chromium/tools/clang/OWNERS
index d0e3cf46145..4c7cbe4ffaa 100644
--- a/chromium/tools/clang/OWNERS
+++ b/chromium/tools/clang/OWNERS
@@ -3,3 +3,5 @@ thakis@chromium.org
# Only for clang tooling.
dcheng@chromium.org
+
+# COMPONENT: Tools
diff --git a/chromium/tools/clang/blink_gc_plugin/BlinkGCPlugin.cpp b/chromium/tools/clang/blink_gc_plugin/BlinkGCPlugin.cpp
index d263a8cc5ee..43b064ccdcf 100644
--- a/chromium/tools/clang/blink_gc_plugin/BlinkGCPlugin.cpp
+++ b/chromium/tools/clang/blink_gc_plugin/BlinkGCPlugin.cpp
@@ -33,6 +33,10 @@ class BlinkGCPluginAction : public PluginASTAction {
for (const auto& arg : args) {
if (arg == "dump-graph") {
options_.dump_graph = true;
+ } else if (arg == "warn-stack-allocated-trace-method") {
+ // TODO(sof): after next roll, remove this option to round out
+ // crbug.com/689874
+ continue;
} else if (arg == "warn-unneeded-finalizer") {
options_.warn_unneeded_finalizer = true;
} else if (arg == "use-chromium-style-naming") {
diff --git a/chromium/tools/clang/blink_gc_plugin/BlinkGCPluginConsumer.cpp b/chromium/tools/clang/blink_gc_plugin/BlinkGCPluginConsumer.cpp
index c3e277a7ae1..771d3f86acb 100644
--- a/chromium/tools/clang/blink_gc_plugin/BlinkGCPluginConsumer.cpp
+++ b/chromium/tools/clang/blink_gc_plugin/BlinkGCPluginConsumer.cpp
@@ -181,6 +181,8 @@ void BlinkGCPluginConsumer::CheckClass(RecordInfo* info) {
return;
if (CXXMethodDecl* trace = info->GetTraceMethod()) {
+ if (info->IsStackAllocated())
+ reporter_.TraceMethodForStackAllocatedClass(info, trace);
if (trace->isPure())
reporter_.ClassDeclaresPureVirtualTrace(info, trace);
} else if (info->RequiresTraceMethod()) {
@@ -528,7 +530,6 @@ void BlinkGCPluginConsumer::CheckTraceOrDispatchMethod(
CXXMethodDecl* method) {
Config::TraceMethodType trace_type = Config::GetTraceMethodType(method);
if (trace_type == Config::TRACE_AFTER_DISPATCH_METHOD ||
- trace_type == Config::TRACE_AFTER_DISPATCH_IMPL_METHOD ||
!parent->GetTraceDispatchMethod()) {
CheckTraceMethod(parent, method, trace_type);
}
@@ -549,12 +550,6 @@ void BlinkGCPluginConsumer::CheckTraceMethod(
CheckTraceVisitor visitor(trace, parent, &cache_);
visitor.TraverseCXXMethodDecl(trace);
- // Skip reporting if this trace method is a just delegate to
- // traceImpl (or traceAfterDispatchImpl) method. We will report on
- // CheckTraceMethod on traceImpl method.
- if (visitor.delegates_to_traceimpl())
- return;
-
for (auto& base : parent->GetBases())
if (!base.second.IsProperlyTraced())
reporter_.BaseRequiresTracing(parent, trace, base.first);
diff --git a/chromium/tools/clang/blink_gc_plugin/BlinkGCPluginOptions.h b/chromium/tools/clang/blink_gc_plugin/BlinkGCPluginOptions.h
index baaa2fff6f1..fee044b7b1a 100644
--- a/chromium/tools/clang/blink_gc_plugin/BlinkGCPluginOptions.h
+++ b/chromium/tools/clang/blink_gc_plugin/BlinkGCPluginOptions.h
@@ -11,9 +11,14 @@
struct BlinkGCPluginOptions {
bool dump_graph = false;
+
+  // If |true|, emit warning for class types which derive from
+  // GarbageCollectedFinalized<> when just GarbageCollected<> will do.
bool warn_unneeded_finalizer = false;
+
// TODO(https://crbug.com/675879): Clean up after the Blink rename.
bool use_chromium_style_naming = false;
+
std::set<std::string> ignored_classes;
std::set<std::string> checked_namespaces;
std::vector<std::string> ignored_directories;
diff --git a/chromium/tools/clang/blink_gc_plugin/CheckTraceVisitor.cpp b/chromium/tools/clang/blink_gc_plugin/CheckTraceVisitor.cpp
index c996ca7dd14..8f6639eae41 100644
--- a/chromium/tools/clang/blink_gc_plugin/CheckTraceVisitor.cpp
+++ b/chromium/tools/clang/blink_gc_plugin/CheckTraceVisitor.cpp
@@ -13,15 +13,7 @@ using namespace clang;
CheckTraceVisitor::CheckTraceVisitor(CXXMethodDecl* trace,
RecordInfo* info,
RecordCache* cache)
- : trace_(trace),
- info_(info),
- cache_(cache),
- delegates_to_traceimpl_(false) {
-}
-
-bool CheckTraceVisitor::delegates_to_traceimpl() const {
- return delegates_to_traceimpl_;
-}
+ : trace_(trace), info_(info), cache_(cache) {}
bool CheckTraceVisitor::VisitMemberExpr(MemberExpr* member) {
// In weak callbacks, consider any occurrence as a correct usage.
@@ -72,8 +64,6 @@ bool CheckTraceVisitor::VisitCallExpr(CallExpr* call) {
CXXRecordDecl* decl = base->getPointeeType()->getAsCXXRecordDecl();
if (decl)
CheckTraceFieldCall(expr->getMemberName().getAsString(), decl, arg);
- if (Config::IsTraceImplName(expr->getMemberName().getAsString()))
- delegates_to_traceimpl_ = true;
return true;
}
@@ -81,10 +71,6 @@ bool CheckTraceVisitor::VisitCallExpr(CallExpr* call) {
if (CheckTraceFieldMemberCall(expr) || CheckRegisterWeakMembers(expr))
return true;
- if (Config::IsTraceImplName(expr->getMethodDecl()->getNameAsString())) {
- delegates_to_traceimpl_ = true;
- return true;
- }
}
CheckTraceBaseCall(call);
@@ -92,10 +78,6 @@ bool CheckTraceVisitor::VisitCallExpr(CallExpr* call) {
}
bool CheckTraceVisitor::IsTraceCallName(const std::string& name) {
- if (trace_->getName() == kTraceImplName)
- return name == kTraceName;
- if (trace_->getName() == kTraceAfterDispatchImplName)
- return name == kTraceAfterDispatchName;
// Currently, a manually dispatched class cannot have mixin bases (having
// one would add a vtable which we explicitly check against). This means
// that we can only make calls to a trace method of the same name. Revisit
@@ -232,8 +214,7 @@ bool CheckTraceVisitor::CheckTraceBaseCall(CallExpr* call) {
dyn_cast<UnresolvedMemberExpr>(call->getCallee())) {
// Callee part may become unresolved if the type of the argument
// ("visitor") is a template parameter and the called function is
- // overloaded (i.e. trace(Visitor*) and
- // trace(InlinedGlobalMarkingVisitor)).
+ // overloaded.
//
// Here, we try to find a function that looks like trace() from the
// candidate overloaded functions, and if we find one, we assume it is
@@ -351,6 +332,8 @@ bool CheckTraceVisitor::CheckRegisterWeakMembers(CXXMemberCallExpr* call) {
nested_visitor.TraverseStmt(callback->getBody());
}
}
+ // TODO: mark all WeakMember<>s as traced even if
+ // the body isn't available?
}
}
return true;
diff --git a/chromium/tools/clang/blink_gc_plugin/CheckTraceVisitor.h b/chromium/tools/clang/blink_gc_plugin/CheckTraceVisitor.h
index 580a6fba99b..e1afd9b602e 100644
--- a/chromium/tools/clang/blink_gc_plugin/CheckTraceVisitor.h
+++ b/chromium/tools/clang/blink_gc_plugin/CheckTraceVisitor.h
@@ -23,8 +23,6 @@ class CheckTraceVisitor : public clang::RecursiveASTVisitor<CheckTraceVisitor> {
RecordInfo* info,
RecordCache* cache);
- bool delegates_to_traceimpl() const;
-
bool VisitMemberExpr(clang::MemberExpr* member);
bool VisitCallExpr(clang::CallExpr* call);
@@ -53,7 +51,6 @@ class CheckTraceVisitor : public clang::RecursiveASTVisitor<CheckTraceVisitor> {
clang::CXXMethodDecl* trace_;
RecordInfo* info_;
RecordCache* cache_;
- bool delegates_to_traceimpl_;
};
#endif // TOOLS_BLINK_GC_PLUGIN_CHECK_TRACE_VISITOR_H_
diff --git a/chromium/tools/clang/blink_gc_plugin/Config.cpp b/chromium/tools/clang/blink_gc_plugin/Config.cpp
index bb32ad449d3..fa00782cc6b 100644
--- a/chromium/tools/clang/blink_gc_plugin/Config.cpp
+++ b/chromium/tools/clang/blink_gc_plugin/Config.cpp
@@ -14,10 +14,8 @@ using namespace clang;
namespace legacy {
const char kCreateName[] = "create";
const char kTraceName[] = "trace";
-const char kTraceImplName[] = "traceImpl";
const char kFinalizeName[] = "finalizeGarbageCollectedObject";
const char kTraceAfterDispatchName[] = "traceAfterDispatch";
-const char kTraceAfterDispatchImplName[] = "traceAfterDispatchImpl";
const char kRegisterWeakMembersName[] = "registerWeakMembers";
const char kAdjustAndMarkName[] = "adjustAndMark";
const char kIsHeapObjectAliveName[] = "isHeapObjectAlive";
@@ -26,10 +24,8 @@ const char kIsHeapObjectAliveName[] = "isHeapObjectAlive";
const char kNewOperatorName[] = "operator new";
const char* kCreateName = "Create";
const char* kTraceName = "Trace";
-const char* kTraceImplName = "TraceImpl";
const char* kFinalizeName = "FinalizeGarbageCollectedObject";
const char* kTraceAfterDispatchName = "TraceAfterDispatch";
-const char* kTraceAfterDispatchImplName = "TraceAfterDispatchImpl";
const char* kRegisterWeakMembersName = "RegisterWeakMembers";
const char kHeapAllocatorName[] = "HeapAllocator";
const char kTraceIfNeededName[] = "TraceIfNeeded";
@@ -46,10 +42,8 @@ const char kReverseIteratorName[] = "reverse_iterator";
void Config::UseLegacyNames() {
kCreateName = legacy::kCreateName;
kTraceName = legacy::kTraceName;
- kTraceImplName = legacy::kTraceImplName;
kFinalizeName = legacy::kFinalizeName;
kTraceAfterDispatchName = legacy::kTraceAfterDispatchName;
- kTraceAfterDispatchImplName = legacy::kTraceAfterDispatchImplName;
kRegisterWeakMembersName = legacy::kRegisterWeakMembersName;
kAdjustAndMarkName = legacy::kAdjustAndMarkName;
kIsHeapObjectAliveName = legacy::kIsHeapObjectAliveName;
diff --git a/chromium/tools/clang/blink_gc_plugin/Config.h b/chromium/tools/clang/blink_gc_plugin/Config.h
index 2ab933f382d..f0c4aeca33b 100644
--- a/chromium/tools/clang/blink_gc_plugin/Config.h
+++ b/chromium/tools/clang/blink_gc_plugin/Config.h
@@ -20,10 +20,8 @@
extern const char kNewOperatorName[];
extern const char* kCreateName;
extern const char* kTraceName;
-extern const char* kTraceImplName;
extern const char* kFinalizeName;
extern const char* kTraceAfterDispatchName;
-extern const char* kTraceAfterDispatchImplName;
extern const char* kRegisterWeakMembersName;
extern const char kHeapAllocatorName[];
extern const char kTraceIfNeededName[];
@@ -219,8 +217,6 @@ class Config {
NOT_TRACE_METHOD,
TRACE_METHOD,
TRACE_AFTER_DISPATCH_METHOD,
- TRACE_IMPL_METHOD,
- TRACE_AFTER_DISPATCH_IMPL_METHOD
};
static TraceMethodType GetTraceMethodType(const clang::FunctionDecl* method) {
@@ -228,15 +224,11 @@ class Config {
return NOT_TRACE_METHOD;
const std::string& name = method->getNameAsString();
- if (name != kTraceName && name != kTraceAfterDispatchName &&
- name != kTraceImplName && name != kTraceAfterDispatchImplName)
+ if (name != kTraceName && name != kTraceAfterDispatchName)
return NOT_TRACE_METHOD;
const clang::QualType& formal_type = method->getParamDecl(0)->getType();
- if (name == kTraceImplName || name == kTraceAfterDispatchImplName) {
- if (!IsVisitorDispatcherType(formal_type))
- return NOT_TRACE_METHOD;
- } else if (!IsVisitorPtrType(formal_type)) {
+ if (!IsVisitorPtrType(formal_type)) {
return NOT_TRACE_METHOD;
}
@@ -244,10 +236,6 @@ class Config {
return TRACE_METHOD;
if (name == kTraceAfterDispatchName)
return TRACE_AFTER_DISPATCH_METHOD;
- if (name == kTraceImplName)
- return TRACE_IMPL_METHOD;
- if (name == kTraceAfterDispatchImplName)
- return TRACE_AFTER_DISPATCH_IMPL_METHOD;
assert(false && "Should not reach here");
return NOT_TRACE_METHOD;
@@ -257,10 +245,6 @@ class Config {
return GetTraceMethodType(method) != NOT_TRACE_METHOD;
}
- static bool IsTraceImplName(const std::string& name) {
- return name == kTraceImplName || name == kTraceAfterDispatchImplName;
- }
-
static bool StartsWith(const std::string& str, const std::string& prefix) {
if (prefix.size() > str.size())
return false;
diff --git a/chromium/tools/clang/blink_gc_plugin/DiagnosticsReporter.cpp b/chromium/tools/clang/blink_gc_plugin/DiagnosticsReporter.cpp
index 1ae842573ff..95d5595beb5 100644
--- a/chromium/tools/clang/blink_gc_plugin/DiagnosticsReporter.cpp
+++ b/chromium/tools/clang/blink_gc_plugin/DiagnosticsReporter.cpp
@@ -150,6 +150,10 @@ const char kBaseClassMustDeclareVirtualTrace[] =
const char kIteratorToGCManagedCollectionNote[] =
"[blink-gc] Iterator field %0 to a GC managed collection declared here:";
+const char kTraceMethodOfStackAllocatedParentNote[] =
+ "[blink-gc] The stack allocated class %0 provides an unnecessary "
+ "trace method:";
+
} // namespace
DiagnosticBuilder DiagnosticsReporter::ReportDiagnostic(
@@ -210,6 +214,10 @@ DiagnosticsReporter::DiagnosticsReporter(
getErrorLevel(), kLeftMostBaseMustBePolymorphic);
diag_base_class_must_declare_virtual_trace_ = diagnostic_.getCustomDiagID(
getErrorLevel(), kBaseClassMustDeclareVirtualTrace);
+ diag_iterator_to_gc_managed_collection_note_ = diagnostic_.getCustomDiagID(
+ getErrorLevel(), kIteratorToGCManagedCollectionNote);
+ diag_trace_method_of_stack_allocated_parent_ = diagnostic_.getCustomDiagID(
+ getErrorLevel(), kTraceMethodOfStackAllocatedParentNote);
// Register note messages.
diag_base_requires_tracing_note_ = diagnostic_.getCustomDiagID(
@@ -256,8 +264,6 @@ DiagnosticsReporter::DiagnosticsReporter(
DiagnosticsEngine::Note, kOverriddenNonVirtualTraceNote);
diag_manual_dispatch_method_note_ = diagnostic_.getCustomDiagID(
DiagnosticsEngine::Note, kManualDispatchMethodNote);
- diag_iterator_to_gc_managed_collection_note_ = diagnostic_.getCustomDiagID(
- DiagnosticsEngine::Note, kIteratorToGCManagedCollectionNote);
}
bool DiagnosticsReporter::hasErrorOccurred() const
@@ -499,6 +505,14 @@ void DiagnosticsReporter::BaseClassMustDeclareVirtualTrace(
<< base << derived->record();
}
+void DiagnosticsReporter::TraceMethodForStackAllocatedClass(
+ RecordInfo* info,
+ CXXMethodDecl* trace) {
+ ReportDiagnostic(trace->getLocStart(),
+ diag_trace_method_of_stack_allocated_parent_)
+ << info->record();
+}
+
void DiagnosticsReporter::NoteManualDispatchMethod(CXXMethodDecl* dispatch) {
ReportDiagnostic(dispatch->getLocStart(),
diag_manual_dispatch_method_note_)
diff --git a/chromium/tools/clang/blink_gc_plugin/DiagnosticsReporter.h b/chromium/tools/clang/blink_gc_plugin/DiagnosticsReporter.h
index ddcfbfd7c8d..6a32f70ec6f 100644
--- a/chromium/tools/clang/blink_gc_plugin/DiagnosticsReporter.h
+++ b/chromium/tools/clang/blink_gc_plugin/DiagnosticsReporter.h
@@ -62,6 +62,8 @@ class DiagnosticsReporter {
clang::CXXRecordDecl* base);
void BaseClassMustDeclareVirtualTrace(RecordInfo* derived,
clang::CXXRecordDecl* base);
+ void TraceMethodForStackAllocatedClass(RecordInfo* parent,
+ clang::CXXMethodDecl* trace);
void NoteManualDispatchMethod(clang::CXXMethodDecl* dispatch);
void NoteBaseRequiresTracing(BasePoint* base);
@@ -136,6 +138,7 @@ class DiagnosticsReporter {
unsigned diag_overridden_non_virtual_trace_note_;
unsigned diag_manual_dispatch_method_note_;
unsigned diag_iterator_to_gc_managed_collection_note_;
+ unsigned diag_trace_method_of_stack_allocated_parent_;
};
#endif // TOOLS_BLINK_GC_PLUGIN_DIAGNOSTICS_REPORTER_H_
diff --git a/chromium/tools/clang/blink_gc_plugin/Edge.h b/chromium/tools/clang/blink_gc_plugin/Edge.h
index d7af335dc63..164112ffdb3 100644
--- a/chromium/tools/clang/blink_gc_plugin/Edge.h
+++ b/chromium/tools/clang/blink_gc_plugin/Edge.h
@@ -143,13 +143,13 @@ class RawPtr : public PtrEdge {
{
}
- bool IsRawPtr() { return true; }
- LivenessKind Kind() { return kWeak; }
- bool NeedsFinalization() { return false; }
- TracingStatus NeedsTracing(NeedsTracingOption) {
+ bool IsRawPtr() override { return true; }
+ LivenessKind Kind() override { return kWeak; }
+ bool NeedsFinalization() override { return false; }
+ TracingStatus NeedsTracing(NeedsTracingOption) override {
return TracingStatus::Illegal();
}
- void Accept(EdgeVisitor* visitor) { visitor->VisitRawPtr(this); }
+ void Accept(EdgeVisitor* visitor) override { visitor->VisitRawPtr(this); }
bool HasReferenceType() { return is_ref_type_; }
private:
@@ -159,83 +159,83 @@ class RawPtr : public PtrEdge {
class RefPtr : public PtrEdge {
public:
explicit RefPtr(Edge* ptr) : PtrEdge(ptr) { }
- bool IsRefPtr() { return true; }
- LivenessKind Kind() { return kStrong; }
- bool NeedsFinalization() { return true; }
- TracingStatus NeedsTracing(NeedsTracingOption) {
+ bool IsRefPtr() override { return true; }
+ LivenessKind Kind() override { return kStrong; }
+ bool NeedsFinalization() override { return true; }
+ TracingStatus NeedsTracing(NeedsTracingOption) override {
return TracingStatus::Illegal();
}
- void Accept(EdgeVisitor* visitor) { visitor->VisitRefPtr(this); }
+ void Accept(EdgeVisitor* visitor) override { visitor->VisitRefPtr(this); }
};
class OwnPtr : public PtrEdge {
public:
explicit OwnPtr(Edge* ptr) : PtrEdge(ptr) { }
- bool IsOwnPtr() { return true; }
- LivenessKind Kind() { return kStrong; }
- bool NeedsFinalization() { return true; }
- TracingStatus NeedsTracing(NeedsTracingOption) {
+ bool IsOwnPtr() override { return true; }
+ LivenessKind Kind() override { return kStrong; }
+ bool NeedsFinalization() override { return true; }
+ TracingStatus NeedsTracing(NeedsTracingOption) override {
return TracingStatus::Illegal();
}
- void Accept(EdgeVisitor* visitor) { visitor->VisitOwnPtr(this); }
+ void Accept(EdgeVisitor* visitor) override { visitor->VisitOwnPtr(this); }
};
class UniquePtr : public PtrEdge {
public:
explicit UniquePtr(Edge* ptr) : PtrEdge(ptr) { }
- bool IsUniquePtr() { return true; }
- LivenessKind Kind() { return kStrong; }
- bool NeedsFinalization() { return true; }
- TracingStatus NeedsTracing(NeedsTracingOption) {
+ bool IsUniquePtr() override { return true; }
+ LivenessKind Kind() override { return kStrong; }
+ bool NeedsFinalization() override { return true; }
+ TracingStatus NeedsTracing(NeedsTracingOption) override {
return TracingStatus::Illegal();
}
- void Accept(EdgeVisitor* visitor) { visitor->VisitUniquePtr(this); }
+ void Accept(EdgeVisitor* visitor) override { visitor->VisitUniquePtr(this); }
};
class Member : public PtrEdge {
public:
explicit Member(Edge* ptr) : PtrEdge(ptr) { }
- bool IsMember() { return true; }
- LivenessKind Kind() { return kStrong; }
- bool NeedsFinalization() { return false; }
- TracingStatus NeedsTracing(NeedsTracingOption) {
+ bool IsMember() override { return true; }
+ LivenessKind Kind() override { return kStrong; }
+ bool NeedsFinalization() override { return false; }
+ TracingStatus NeedsTracing(NeedsTracingOption) override {
return TracingStatus::Needed();
}
- void Accept(EdgeVisitor* visitor) { visitor->VisitMember(this); }
+ void Accept(EdgeVisitor* visitor) override { visitor->VisitMember(this); }
};
class WeakMember : public PtrEdge {
public:
explicit WeakMember(Edge* ptr) : PtrEdge(ptr) { }
- bool IsWeakMember() { return true; }
- LivenessKind Kind() { return kWeak; }
- bool NeedsFinalization() { return false; }
- TracingStatus NeedsTracing(NeedsTracingOption) {
+ bool IsWeakMember() override { return true; }
+ LivenessKind Kind() override { return kWeak; }
+ bool NeedsFinalization() override { return false; }
+ TracingStatus NeedsTracing(NeedsTracingOption) override {
return TracingStatus::Needed();
}
- void Accept(EdgeVisitor* visitor) { visitor->VisitWeakMember(this); }
+ void Accept(EdgeVisitor* visitor) override { visitor->VisitWeakMember(this); }
};
class Persistent : public PtrEdge {
public:
explicit Persistent(Edge* ptr) : PtrEdge(ptr) { }
- LivenessKind Kind() { return kRoot; }
- bool NeedsFinalization() { return true; }
- TracingStatus NeedsTracing(NeedsTracingOption) {
+ LivenessKind Kind() override { return kRoot; }
+ bool NeedsFinalization() override { return true; }
+ TracingStatus NeedsTracing(NeedsTracingOption) override {
return TracingStatus::Unneeded();
}
- void Accept(EdgeVisitor* visitor) { visitor->VisitPersistent(this); }
+ void Accept(EdgeVisitor* visitor) override { visitor->VisitPersistent(this); }
};
class CrossThreadPersistent : public PtrEdge {
public:
explicit CrossThreadPersistent(Edge* ptr) : PtrEdge(ptr) { }
- LivenessKind Kind() { return kRoot; }
- bool NeedsFinalization() { return true; }
- TracingStatus NeedsTracing(NeedsTracingOption) {
+ LivenessKind Kind() override { return kRoot; }
+ bool NeedsFinalization() override { return true; }
+ TracingStatus NeedsTracing(NeedsTracingOption) override {
return TracingStatus::Illegal();
}
- void Accept(EdgeVisitor* visitor) {
+ void Accept(EdgeVisitor* visitor) override {
visitor->VisitCrossThreadPersistent(this);
}
};
@@ -253,18 +253,18 @@ class Collection : public Edge {
delete *it;
}
}
- bool IsCollection() { return true; }
- LivenessKind Kind() { return is_root_ ? kRoot : kStrong; }
+ bool IsCollection() override { return true; }
+ LivenessKind Kind() override { return is_root_ ? kRoot : kStrong; }
bool on_heap() { return on_heap_; }
bool is_root() { return is_root_; }
Members& members() { return members_; }
- void Accept(EdgeVisitor* visitor) { visitor->VisitCollection(this); }
+ void Accept(EdgeVisitor* visitor) override { visitor->VisitCollection(this); }
void AcceptMembers(EdgeVisitor* visitor) {
for (Members::iterator it = members_.begin(); it != members_.end(); ++it)
(*it)->Accept(visitor);
}
- bool NeedsFinalization();
- TracingStatus NeedsTracing(NeedsTracingOption) {
+ bool NeedsFinalization() override;
+ TracingStatus NeedsTracing(NeedsTracingOption) override {
if (is_root_)
return TracingStatus::Unneeded();
if (on_heap_)
@@ -292,10 +292,10 @@ class Iterator : public Edge {
: info_(info), on_heap_(on_heap), is_unsafe_(is_unsafe) {}
~Iterator() {}
- void Accept(EdgeVisitor* visitor) { visitor->VisitIterator(this); }
+ void Accept(EdgeVisitor* visitor) override { visitor->VisitIterator(this); }
LivenessKind Kind() override { return kStrong; }
- bool NeedsFinalization() { return false; }
- TracingStatus NeedsTracing(NeedsTracingOption) {
+ bool NeedsFinalization() override { return false; }
+ TracingStatus NeedsTracing(NeedsTracingOption) override {
if (on_heap_)
return TracingStatus::Needed();
return TracingStatus::Unneeded();
diff --git a/chromium/tools/clang/blink_gc_plugin/RecordInfo.cpp b/chromium/tools/clang/blink_gc_plugin/RecordInfo.cpp
index fe211ac8b9c..419ed7ae764 100644
--- a/chromium/tools/clang/blink_gc_plugin/RecordInfo.cpp
+++ b/chromium/tools/clang/blink_gc_plugin/RecordInfo.cpp
@@ -438,7 +438,6 @@ void RecordInfo::DetermineTracingMethods() {
if (Config::IsGCBase(name_))
return;
CXXMethodDecl* trace = nullptr;
- CXXMethodDecl* trace_impl = nullptr;
CXXMethodDecl* trace_after_dispatch = nullptr;
bool has_adjust_and_mark = false;
bool has_is_heap_object_alive = false;
@@ -459,11 +458,6 @@ void RecordInfo::DetermineTracingMethods() {
case Config::TRACE_AFTER_DISPATCH_METHOD:
trace_after_dispatch = method;
break;
- case Config::TRACE_IMPL_METHOD:
- trace_impl = method;
- break;
- case Config::TRACE_AFTER_DISPATCH_IMPL_METHOD:
- break;
case Config::NOT_TRACE_METHOD:
if (method->getNameAsString() == kFinalizeName) {
finalize_dispatch_method_ = method;
@@ -481,7 +475,7 @@ void RecordInfo::DetermineTracingMethods() {
has_adjust_and_mark && has_is_heap_object_alive ? kTrue : kFalse;
if (trace_after_dispatch) {
trace_method_ = trace_after_dispatch;
- trace_dispatch_method_ = trace_impl ? trace_impl : trace;
+ trace_dispatch_method_ = trace;
} else {
// TODO: Can we never have a dispatch method called trace without the same
// class defining a traceAfterDispatch method?
diff --git a/chromium/tools/clang/plugins/FindBadConstructsConsumer.cpp b/chromium/tools/clang/plugins/FindBadConstructsConsumer.cpp
index 636261b3db3..427fa472a9e 100644
--- a/chromium/tools/clang/plugins/FindBadConstructsConsumer.cpp
+++ b/chromium/tools/clang/plugins/FindBadConstructsConsumer.cpp
@@ -112,13 +112,14 @@ std::set<FunctionDecl*> GetLateParsedFunctionDecls(TranslationUnitDecl* decl) {
return v.late_parsed_decls;
}
-std::string GetAutoReplacementTypeAsString(QualType type) {
+std::string GetAutoReplacementTypeAsString(QualType type,
+ StorageClass storage_class) {
QualType non_reference_type = type.getNonReferenceType();
if (!non_reference_type->isPointerType())
- return "auto";
+ return storage_class == SC_Static ? "static auto" : "auto";
- std::string result =
- GetAutoReplacementTypeAsString(non_reference_type->getPointeeType());
+ std::string result = GetAutoReplacementTypeAsString(
+ non_reference_type->getPointeeType(), storage_class);
result += "*";
if (non_reference_type.isLocalConstQualified())
result += " const";
@@ -1016,7 +1017,8 @@ void FindBadConstructsConsumer::CheckVarDecl(clang::VarDecl* var_decl) {
diag_auto_deduced_to_a_pointer_type_)
<< FixItHint::CreateReplacement(
range,
- GetAutoReplacementTypeAsString(var_decl->getType()));
+ GetAutoReplacementTypeAsString(
+ var_decl->getType(), var_decl->getStorageClass()));
}
}
}
diff --git a/chromium/tools/clang/rewrite_to_chrome_style/EditTracker.cpp b/chromium/tools/clang/rewrite_to_chrome_style/EditTracker.cpp
index cd8228ebdf5..600fb02470e 100644
--- a/chromium/tools/clang/rewrite_to_chrome_style/EditTracker.cpp
+++ b/chromium/tools/clang/rewrite_to_chrome_style/EditTracker.cpp
@@ -9,6 +9,27 @@
#include "llvm/Support/Path.h"
#include "llvm/Support/raw_ostream.h"
+namespace {
+
+const char* GetTag(RenameCategory category) {
+ switch (category) {
+ case RenameCategory::kEnumValue:
+ return "enum";
+ case RenameCategory::kField:
+ return "var";
+ case RenameCategory::kFunction:
+ return "func";
+ case RenameCategory::kUnresolved:
+ return "unresolved";
+ case RenameCategory::kVariable:
+ return "var";
+ }
+}
+
+} // namespace
+
+EditTracker::EditTracker(RenameCategory category) : category_(category) {}
+
void EditTracker::Add(const clang::SourceManager& source_manager,
clang::SourceLocation location,
llvm::StringRef original_text,
@@ -30,8 +51,8 @@ void EditTracker::Add(const clang::SourceManager& source_manager,
result.first->getValue().filenames.try_emplace(filename);
}
-void EditTracker::SerializeTo(llvm::StringRef tag,
- llvm::raw_ostream& output) const {
+void EditTracker::SerializeTo(llvm::raw_ostream& output) const {
+ const char* tag = GetTag(category_);
for (const auto& edit : tracked_edits_) {
for (const auto& filename : edit.getValue().filenames) {
output << filename.getKey() << ":" << tag << ":" << edit.getKey() << ":"
diff --git a/chromium/tools/clang/rewrite_to_chrome_style/EditTracker.h b/chromium/tools/clang/rewrite_to_chrome_style/EditTracker.h
index ef5e30145ad..0d8bf0c115e 100644
--- a/chromium/tools/clang/rewrite_to_chrome_style/EditTracker.h
+++ b/chromium/tools/clang/rewrite_to_chrome_style/EditTracker.h
@@ -22,11 +22,19 @@ struct EditInfo {
llvm::StringSet<> filenames;
};
+enum class RenameCategory {
+ kEnumValue,
+ kField,
+ kFunction,
+ kUnresolved,
+ kVariable,
+};
+
// Simple class that tracks the edits made by path. Used to dump the databaes
// used by the Blink rebase helper.
class EditTracker {
public:
- EditTracker() = default;
+ explicit EditTracker(RenameCategory category);
void Add(const clang::SourceManager& source_manager,
clang::SourceLocation location,
@@ -36,7 +44,7 @@ class EditTracker {
// Serializes the tracked edits to |output|. Emits:
// <filename>:<tag>:<original text>:<new text>
// for each distinct filename for each tracked edit.
- void SerializeTo(llvm::StringRef tag, llvm::raw_ostream& output) const;
+ void SerializeTo(llvm::raw_ostream& output) const;
private:
EditTracker(const EditTracker&) = delete;
@@ -44,6 +52,8 @@ class EditTracker {
// The string key is the original text.
llvm::StringMap<EditInfo> tracked_edits_;
+
+ RenameCategory category_;
};
#endif // #define TOOLS_CLANG_REWRITE_TO_CHROME_STYLE_EDIT_TRACKER_H_
diff --git a/chromium/tools/clang/rewrite_to_chrome_style/OWNERS b/chromium/tools/clang/rewrite_to_chrome_style/OWNERS
index cc9ab9fe0e2..b2ca4271d78 100644
--- a/chromium/tools/clang/rewrite_to_chrome_style/OWNERS
+++ b/chromium/tools/clang/rewrite_to_chrome_style/OWNERS
@@ -1,3 +1,5 @@
danakj@chromium.org
dcheng@chromium.org
lukasza@chromium.org
+
+# COMPONENT: Tools
diff --git a/chromium/tools/clang/rewrite_to_chrome_style/RewriteToChromeStyle.cpp b/chromium/tools/clang/rewrite_to_chrome_style/RewriteToChromeStyle.cpp
index 5a0d6442044..9f94e9b6a51 100644
--- a/chromium/tools/clang/rewrite_to_chrome_style/RewriteToChromeStyle.cpp
+++ b/chromium/tools/clang/rewrite_to_chrome_style/RewriteToChromeStyle.cpp
@@ -51,6 +51,8 @@ namespace {
const char kBlinkFieldPrefix[] = "m_";
const char kBlinkStaticMemberPrefix[] = "s_";
const char kGeneratedFileRegex[] = "^gen/|/gen/";
+const char kGeneratedFileExclusionRegex[] =
+ "(^gen/|/gen/).*/ComputedStyleBase\\.h$";
const char kGMockMethodNamePrefix[] = "gmock_";
const char kMethodBlocklistParamName[] = "method-blocklist";
@@ -88,6 +90,13 @@ AST_MATCHER_P(clang::FunctionTemplateDecl,
return InnerMatcher.matches(*Node.getTemplatedDecl(), Finder, Builder);
}
+AST_MATCHER_P(clang::Decl,
+ hasCanonicalDecl,
+ clang::ast_matchers::internal::Matcher<clang::Decl>,
+ InnerMatcher) {
+ return InnerMatcher.matches(*Node.getCanonicalDecl(), Finder, Builder);
+}
+
// Matches a CXXMethodDecl of a method declared via MOCK_METHODx macro if such
// method mocks a method matched by the InnerMatcher. For example if "foo"
// matcher matches "interfaceMethod", then mocksMethod(foo()) will match
@@ -106,9 +115,6 @@ AST_MATCHER_P(clang::CXXMethodDecl,
llvm::StringRef mocked_method_name =
method_name.substr(strlen(kGMockMethodNamePrefix));
for (const auto& potentially_mocked_method : Node.getParent()->methods()) {
- if (!potentially_mocked_method->isVirtual())
- continue;
-
clang::DeclarationName decl_name = potentially_mocked_method->getDeclName();
if (!decl_name.isIdentifier() ||
potentially_mocked_method->getName() != mocked_method_name)
@@ -131,6 +137,9 @@ class MethodBlocklist {
}
bool Contains(const clang::FunctionDecl& method) const {
+ if (!method.getDeclName().isIdentifier())
+ return false;
+
auto it = method_to_class_to_args_.find(method.getName());
if (it == method_to_class_to_args_.end())
return false;
@@ -142,6 +151,8 @@ class MethodBlocklist {
clang::dyn_cast<clang::NamedDecl>(method.getDeclContext());
if (!method_context)
return false;
+ if (!method_context->getDeclName().isIdentifier())
+ return false;
const llvm::StringMap<std::set<unsigned>>& class_to_args = it->second;
auto it2 = class_to_args.find(method_context->getName());
@@ -435,7 +446,7 @@ bool IsBlacklistedInstanceMethodName(llvm::StringRef name) {
// https://crbug.com/672902: Should not rewrite names that mimick methods
// from std library.
- "back", "empty", "erase", "front", "insert",
+ "back", "empty", "erase", "front", "insert", "length", "size",
};
for (const auto& b : kBlacklistedNames) {
if (name == b)
@@ -450,11 +461,17 @@ bool IsBlacklistedMethodName(llvm::StringRef name) {
}
bool IsBlacklistedFunction(const clang::FunctionDecl& decl) {
+ if (!decl.getDeclName().isIdentifier())
+ return false;
+
clang::StringRef name = decl.getName();
return IsBlacklistedFunctionName(name) || IsBlacklistedFreeFunctionName(name);
}
bool IsBlacklistedMethod(const clang::CXXMethodDecl& decl) {
+ if (!decl.getDeclName().isIdentifier())
+ return false;
+
clang::StringRef name = decl.getName();
if (IsBlacklistedFunctionName(name))
return true;
@@ -484,6 +501,50 @@ AST_MATCHER(clang::CXXMethodDecl, isBlacklistedMethod) {
return IsBlacklistedMethod(Node);
}
+bool IsKnownTraitName(clang::StringRef name) {
+ // This set of names is globally a type trait throughout chromium.
+ return name == "safeToCompareToEmptyOrDeleted";
+}
+
+AST_MATCHER(clang::VarDecl, isKnownTraitName) {
+ return Node.getDeclName().isIdentifier() && IsKnownTraitName(Node.getName());
+}
+
+AST_MATCHER(clang::Decl, isDeclInGeneratedFile) {
+ // This matcher mimics the built-in isExpansionInFileMatching matcher from
+ // llvm/tools/clang/include/clang/ASTMatchers/ASTMatchers.h, except:
+ // - It special cases some files (e.g. doesn't skip renaming of identifiers
+ // from gen/blink/core/ComputedStyleBase.h)
+
+ const clang::SourceManager& source_manager =
+ Node.getASTContext().getSourceManager();
+
+ // TODO(lukasza): Consider using getSpellingLoc below.
+ // The built-in isExpansionInFileMatching matcher uses getExpansionLoc below.
+ // We could consider using getSpellingLoc (which properly handles things like
+ // SETTINGS_GETTERS_AND_SETTERS macro which is defined in generated code
+ // (gen/blink/core/SettingsMacros.h), but expanded in non-generated code
+ // (third_party/WebKit/Source/core/frame/Settings.h).
+ clang::SourceLocation loc =
+ source_manager.getExpansionLoc(Node.getLocStart());
+
+ // TODO(lukasza): jump out of scratch space if token concatenation was used.
+ if (loc.isInvalid())
+ return false;
+
+ const clang::FileEntry* file_entry =
+ source_manager.getFileEntryForID(source_manager.getFileID(loc));
+ if (!file_entry)
+ return false;
+
+ static llvm::Regex exclusion_regex(kGeneratedFileExclusionRegex);
+ if (exclusion_regex.match(file_entry->getName()))
+ return false;
+
+ static llvm::Regex generated_file_regex(kGeneratedFileRegex);
+ return generated_file_regex.match(file_entry->getName());
+}
+
// Helper to convert from a camelCaseName to camel_case_name. It uses some
// heuristics to try to handle acronyms in camel case names correctly.
std::string CamelCaseToUnderscoreCase(StringRef input) {
@@ -638,17 +699,18 @@ bool ShouldPrefixFunctionName(const std::string& old_method_name) {
"frame",
"frameBlameContext",
"frontend",
+ "gridCell",
"hash",
"heapObjectHeader",
"iconURL",
"image",
"inputMethodController",
"inputType",
+ "interpolationTypes",
"layout",
"layoutBlock",
"layoutObject",
"layoutSize",
- "length",
"lineCap",
"lineEndings",
"lineJoin",
@@ -660,15 +722,20 @@ bool ShouldPrefixFunctionName(const std::string& old_method_name) {
"name",
"navigationType",
"node",
+ "notificationManager",
"outcome",
"pagePopup",
"paintWorklet",
"path",
+ "position",
"processingInstruction",
"readyState",
"relList",
+ "referrer",
+ "referrerPolicy",
"resource",
"response",
+ "restrictedKeyMap",
"sandboxSupport",
"screenInfo",
"screenOrientationController",
@@ -676,6 +743,7 @@ bool ShouldPrefixFunctionName(const std::string& old_method_name) {
"selectionInFlatTree",
"settings",
"signalingState",
+ "snapshotById",
"state",
"string",
"styleSheet",
@@ -687,8 +755,10 @@ bool ShouldPrefixFunctionName(const std::string& old_method_name) {
"thread",
"timing",
"topLevelBlameContext",
+ "type",
"vector",
"visibleSelection",
+ "visibleSelectionInFlatTree",
"webFrame",
"widget",
"wordBoundaries",
@@ -703,7 +773,8 @@ bool ShouldPrefixFunctionName(const std::string& old_method_name) {
}
AST_MATCHER(clang::FunctionDecl, shouldPrefixFunctionName) {
- return ShouldPrefixFunctionName(Node.getName().str());
+ return Node.getDeclName().isIdentifier() &&
+ ShouldPrefixFunctionName(Node.getName().str());
}
bool GetNameForDecl(const clang::FunctionDecl& decl,
@@ -828,6 +899,13 @@ bool GetNameForDecl(const clang::VarDecl& decl,
return false;
}
+ // This is a type trait that appears in consumers of WTF as well as inside
+ // WTF. We want it to be named in this_style_of_case accordingly.
+ if (IsKnownTraitName(original_name)) {
+ name = CamelCaseToUnderscoreCase(original_name);
+ return true;
+ }
+
// static class members match against VarDecls. Blink style dictates that
// these should be prefixed with `s_`, so strip that off. Also check for `m_`
// and strip that off too, for code that accidentally uses the wrong prefix.
@@ -998,8 +1076,9 @@ struct TargetNodeTraits<clang::UnresolvedUsingValueDecl> {
template <typename TargetNode>
class RewriterBase : public MatchFinder::MatchCallback {
public:
- explicit RewriterBase(std::set<Replacement>* replacements)
- : replacements_(replacements) {}
+ explicit RewriterBase(std::set<Replacement>* replacements,
+ RenameCategory category)
+ : replacements_(replacements), edit_tracker_(category) {}
const TargetNode& GetTargetNode(const MatchFinder::MatchResult& result) {
const TargetNode* target_node = result.Nodes.getNodeAs<TargetNode>(
@@ -1085,23 +1164,57 @@ class RewriterBase : public MatchFinder::MatchCallback {
edit_tracker_.Add(*result.SourceManager, loc, old_name, new_name);
}
- const EditTracker& edit_tracker() const { return edit_tracker_; }
+ const EditTracker* edit_tracker() const { return &edit_tracker_; }
private:
std::set<Replacement>* const replacements_;
EditTracker edit_tracker_;
};
+template <typename DeclNode>
+RenameCategory GetCategory();
+template <>
+RenameCategory GetCategory<clang::FieldDecl>() {
+ return RenameCategory::kField;
+}
+template <>
+RenameCategory GetCategory<clang::VarDecl>() {
+ return RenameCategory::kVariable;
+}
+template <>
+RenameCategory GetCategory<clang::FunctionDecl>() {
+ return RenameCategory::kFunction;
+}
+template <>
+RenameCategory GetCategory<clang::CXXMethodDecl>() {
+ return RenameCategory::kFunction;
+}
+template <>
+RenameCategory GetCategory<clang::EnumConstantDecl>() {
+ return RenameCategory::kEnumValue;
+}
+template <>
+RenameCategory GetCategory<clang::NamedDecl>() {
+ return RenameCategory::kUnresolved;
+}
+template <>
+RenameCategory GetCategory<clang::UsingDecl>() {
+ return RenameCategory::kUnresolved;
+}
+
template <typename DeclNode, typename TargetNode>
class DeclRewriterBase : public RewriterBase<TargetNode> {
public:
using Base = RewriterBase<TargetNode>;
explicit DeclRewriterBase(std::set<Replacement>* replacements)
- : Base(replacements) {}
+ : Base(replacements, GetCategory<DeclNode>()) {}
void run(const MatchFinder::MatchResult& result) override {
const DeclNode* decl = result.Nodes.getNodeAs<DeclNode>("decl");
+ if (!decl->getDeclName().isIdentifier())
+ return;
+
assert(decl);
llvm::StringRef old_name = decl->getName();
@@ -1175,28 +1288,29 @@ class GMockMemberRewriter
// Find location of the gmock_##MockedMethod identifier.
clang::SourceLocation target_loc = Base::GetTargetLoc(result);
- // Find location of EXPECT_CALL macro invocation.
+ // Find location of EXPECT_CALL or ON_CALL macro invocation.
clang::SourceLocation macro_call_loc =
result.SourceManager->getExpansionLoc(target_loc);
// Map |macro_call_loc| to argument location (location of the method name
// that needs renaming).
- auto it = expect_call_to_2nd_arg.find(macro_call_loc);
- if (it == expect_call_to_2nd_arg.end())
+ auto it = gmock_macro_call_to_2nd_arg.find(macro_call_loc);
+ if (it == gmock_macro_call_to_2nd_arg.end())
return clang::SourceLocation();
return it->second;
}
private:
- std::map<clang::SourceLocation, clang::SourceLocation> expect_call_to_2nd_arg;
+ std::map<clang::SourceLocation, clang::SourceLocation>
+ gmock_macro_call_to_2nd_arg;
- // Called from PPCallbacks with the locations of EXPECT_CALL macro invocation:
- // Example:
+ // Called from PPCallbacks with the locations of EXPECT_CALL and ON_CALL macro
+ // invocation. Example:
// EXPECT_CALL(my_mock, myMethod(123, 456));
// ^- expansion_loc ^- actual_arg_loc
- void RecordExpectCallMacroInvocation(clang::SourceLocation expansion_loc,
- clang::SourceLocation second_arg_loc) {
- expect_call_to_2nd_arg[expansion_loc] = second_arg_loc;
+ void RecordGMockMacroInvocation(clang::SourceLocation expansion_loc,
+ clang::SourceLocation second_arg_loc) {
+ gmock_macro_call_to_2nd_arg[expansion_loc] = second_arg_loc;
}
class PPCallbacks : public clang::PPCallbacks {
@@ -1211,7 +1325,7 @@ class GMockMemberRewriter
if (!id)
return;
- if (id->getName() != "EXPECT_CALL")
+ if (id->getName() != "EXPECT_CALL" && id->getName() != "ON_CALL")
return;
if (def.getMacroInfo()->getNumArgs() != 2)
@@ -1221,7 +1335,7 @@ class GMockMemberRewriter
// is in testing/gmock/include/gmock/gmock-spec-builders.h but I don't
// know how to get clang::SourceManager to call getFileName.
- rewriter_->RecordExpectCallMacroInvocation(
+ rewriter_->RecordGMockMacroInvocation(
name.getLocation(), args->getUnexpArgument(1)->getLocation());
}
@@ -1281,7 +1395,7 @@ class UnresolvedRewriterBase : public RewriterBase<TargetNode> {
using Base = RewriterBase<TargetNode>;
explicit UnresolvedRewriterBase(std::set<Replacement>* replacements)
- : RewriterBase<TargetNode>(replacements) {}
+ : RewriterBase<TargetNode>(replacements, RenameCategory::kUnresolved) {}
void run(const MatchFinder::MatchResult& result) override {
const TargetNode& node = Base::GetTargetNode(result);
@@ -1439,7 +1553,7 @@ int main(int argc, const char* argv[]) {
auto in_blink_namespace = decl(
anyOf(decl_under_blink_namespace, decl_has_qualifier_to_blink_namespace,
hasAncestor(decl_has_qualifier_to_blink_namespace)),
- unless(isExpansionInFileMatching(kGeneratedFileRegex)));
+ unless(hasCanonicalDecl(isDeclInGeneratedFile())));
// Field, variable, and enum declarations ========
// Given
@@ -1458,6 +1572,8 @@ int main(int argc, const char* argv[]) {
has(cxxMethodDecl(isUserProvided(), isInstanceMethod()))))));
auto var_decl_matcher =
id("decl", varDecl(in_blink_namespace, unless(is_type_trait_value)));
+ // For known trait names, rename every instance anywhere in the codebase.
+ auto type_trait_decl_matcher = id("decl", varDecl(isKnownTraitName()));
auto enum_member_decl_matcher =
id("decl", enumConstantDecl(in_blink_namespace));
@@ -1466,6 +1582,7 @@ int main(int argc, const char* argv[]) {
VarDeclRewriter var_decl_rewriter(&replacements);
match_finder.addMatcher(var_decl_matcher, &var_decl_rewriter);
+ match_finder.addMatcher(type_trait_decl_matcher, &var_decl_rewriter);
EnumConstantDeclRewriter enum_member_decl_rewriter(&replacements);
match_finder.addMatcher(enum_member_decl_matcher, &enum_member_decl_rewriter);
@@ -1488,6 +1605,8 @@ int main(int argc, const char* argv[]) {
// there's nothing interesting to rewrite in those either.
unless(hasAncestor(functionDecl(isDefaulted())))));
auto decl_ref_matcher = id("expr", declRefExpr(to(var_decl_matcher)));
+ auto type_trait_ref_matcher =
+ id("expr", declRefExpr(to(type_trait_decl_matcher)));
auto enum_member_ref_matcher =
id("expr", declRefExpr(to(enum_member_decl_matcher)));
@@ -1496,6 +1615,7 @@ int main(int argc, const char* argv[]) {
DeclRefRewriter decl_ref_rewriter(&replacements);
match_finder.addMatcher(decl_ref_matcher, &decl_ref_rewriter);
+ match_finder.addMatcher(type_trait_ref_matcher, &decl_ref_rewriter);
EnumConstantDeclRefRewriter enum_member_ref_rewriter(&replacements);
match_finder.addMatcher(enum_member_ref_matcher, &enum_member_ref_rewriter);
@@ -1773,7 +1893,9 @@ int main(int argc, const char* argv[]) {
// GMock calls lookup ========
// Given
// EXPECT_CALL(obj, myMethod(...))
- // will match obj.gmock_myMethod(...) call generated by the macro
+ // or
+ // ON_CALL(obj, myMethod(...))
+ // will match obj.gmock_myMethod(...) call generated by the macros
// (but only if it mocks a Blink method).
auto gmock_member_matcher =
id("expr", memberExpr(hasDeclaration(
@@ -1791,13 +1913,32 @@ int main(int argc, const char* argv[]) {
return result;
// Supplemental data for the Blink rename rebase helper.
- // TODO(dcheng): There's a lot of match rewriters missing from this list.
+ std::vector<const EditTracker*> all_edit_trackers{
+ field_decl_rewriter.edit_tracker(),
+ var_decl_rewriter.edit_tracker(),
+ enum_member_decl_rewriter.edit_tracker(),
+ member_rewriter.edit_tracker(),
+ decl_ref_rewriter.edit_tracker(),
+ enum_member_ref_rewriter.edit_tracker(),
+ member_ref_rewriter.edit_tracker(),
+ function_decl_rewriter.edit_tracker(),
+ function_ref_rewriter.edit_tracker(),
+ method_decl_rewriter.edit_tracker(),
+ method_ref_rewriter.edit_tracker(),
+ method_member_rewriter.edit_tracker(),
+ constructor_initializer_rewriter.edit_tracker(),
+ unresolved_lookup_rewriter.edit_tracker(),
+ unresolved_member_rewriter.edit_tracker(),
+ unresolved_dependent_member_rewriter.edit_tracker(),
+ unresolved_using_value_decl_rewriter.edit_tracker(),
+ using_decl_rewriter.edit_tracker(),
+ dependent_scope_decl_ref_expr_rewriter.edit_tracker(),
+ cxx_dependent_scope_member_expr_rewriter.edit_tracker(),
+ gmock_member_rewriter.edit_tracker(),
+ };
llvm::outs() << "==== BEGIN TRACKED EDITS ====\n";
- field_decl_rewriter.edit_tracker().SerializeTo("var", llvm::outs());
- var_decl_rewriter.edit_tracker().SerializeTo("var", llvm::outs());
- enum_member_decl_rewriter.edit_tracker().SerializeTo("enu", llvm::outs());
- function_decl_rewriter.edit_tracker().SerializeTo("fun", llvm::outs());
- method_decl_rewriter.edit_tracker().SerializeTo("fun", llvm::outs());
+ for (const EditTracker* edit_tracker : all_edit_trackers)
+ edit_tracker->SerializeTo(llvm::outs());
llvm::outs() << "==== END TRACKED EDITS ====\n";
// Serialization format is documented in tools/clang/scripts/run_tool.py
diff --git a/chromium/tools/clang/scripts/blink_gc_plugin_flags.py b/chromium/tools/clang/scripts/blink_gc_plugin_flags.py
deleted file mode 100755
index 0fce3b3558b..00000000000
--- a/chromium/tools/clang/scripts/blink_gc_plugin_flags.py
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This script returns the flags that should be passed to clang.
-
-# TODO(sof): the script can be removed when/once gyp support is retired;
-# unused with gn.
-
-import os
-import sys
-
-THIS_DIR = os.path.abspath(os.path.dirname(__file__))
-SRC_DIR = os.path.join(THIS_DIR, '..', '..', '..')
-CLANG_LIB_PATH = os.path.normpath(os.path.join(
- SRC_DIR, 'third_party', 'llvm-build', 'Release+Asserts', 'lib'))
-
-FLAGS = '-Xclang -add-plugin -Xclang blink-gc-plugin'
-PREFIX= ' -Xclang -plugin-arg-blink-gc-plugin -Xclang '
-
-for arg in sys.argv[1:]:
- if arg == 'dump-graph=1':
- FLAGS += PREFIX + 'dump-graph'
- elif arg == 'warn-unneeded-finalizer=1':
- FLAGS += PREFIX + 'warn-unneeded-finalizer'
- elif arg.startswith('custom_clang_lib_path='):
- CLANG_LIB_PATH = arg[len('custom_clang_lib_path='):]
- elif arg == 'target_os=ios':
- sys.stderr.write('error: blink is unsupported on iOS\n')
- sys.exit(1)
-
-if not sys.platform in ['win32', 'cygwin']:
- LIBSUFFIX = 'dylib' if sys.platform == 'darwin' else 'so'
- FLAGS = ('-Xclang -load -Xclang "%s/libBlinkGCPlugin.%s" ' + FLAGS) % \
- (CLANG_LIB_PATH, LIBSUFFIX)
-
-print FLAGS
diff --git a/chromium/tools/clang/scripts/generate_win_compdb.py b/chromium/tools/clang/scripts/generate_win_compdb.py
index a7a7ba647d8..6edd5938cb3 100755
--- a/chromium/tools/clang/scripts/generate_win_compdb.py
+++ b/chromium/tools/clang/scripts/generate_win_compdb.py
@@ -48,15 +48,6 @@ def _ProcessEntry(e):
except IOError:
pass
- # TODO(dcheng): This should be implemented in Clang tooling.
- # http://llvm.org/bugs/show_bug.cgi?id=19687
- # Finally, use slashes instead of backslashes to avoid bad escaping by the
- # tooling. This should really only matter for command, but we do it for all
- # keys for consistency.
- e['directory'] = e['directory'].replace('\\', '/')
- e['command'] = e['command'].replace('\\', '/')
- e['file'] = e['file'].replace('\\', '/')
-
return e
diff --git a/chromium/tools/clang/scripts/package.py b/chromium/tools/clang/scripts/package.py
index 17181098f97..780bf5f1dab 100755
--- a/chromium/tools/clang/scripts/package.py
+++ b/chromium/tools/clang/scripts/package.py
@@ -87,7 +87,7 @@ def RunGsutil(args):
def GsutilArchiveExists(archive_name, platform):
gsutil_args = ['-q', 'stat',
- 'gs://chromium-browser-clang/%s/%s.tgz' %
+ 'gs://chromium-browser-clang-staging/%s/%s.tgz' %
(platform, archive_name)]
return RunGsutil(gsutil_args) == 0
@@ -97,7 +97,7 @@ def MaybeUpload(args, archive_name, platform):
# so -n option to gsutil is used. It will warn, if the upload was aborted.
gsutil_args = ['cp', '-n', '-a', 'public-read',
'%s.tgz' % archive_name,
- 'gs://chromium-browser-clang/%s/%s.tgz' %
+ 'gs://chromium-browser-clang-staging/%s/%s.tgz' %
(platform, archive_name)]
if args.upload:
print 'Uploading %s to Google Cloud Storage...' % archive_name
@@ -141,9 +141,10 @@ def main():
GsutilArchiveExists(golddir, platform)):
print ('Desired toolchain revision %s is already available '
'in Google Cloud Storage:') % expected_stamp
- print 'gs://chromium-browser-clang/%s/%s.tgz' % (platform, pdir)
+ print 'gs://chromium-browser-clang-staging/%s/%s.tgz' % (platform, pdir)
if sys.platform.startswith('linux'):
- print 'gs://chromium-browser-clang/%s/%s.tgz' % (platform, golddir)
+ print 'gs://chromium-browser-clang-staging/%s/%s.tgz' % (platform,
+ golddir)
return 0
with open('buildlog.txt', 'w') as log:
diff --git a/chromium/tools/clang/scripts/update.py b/chromium/tools/clang/scripts/update.py
index dc7a439b298..2dfef6f4efd 100755
--- a/chromium/tools/clang/scripts/update.py
+++ b/chromium/tools/clang/scripts/update.py
@@ -27,14 +27,14 @@ import zipfile
# Do NOT CHANGE this if you don't know what you're doing -- see
# https://chromium.googlesource.com/chromium/src/+/master/docs/updating_clang.md
# Reverting problematic clang rolls is safe, though.
-CLANG_REVISION = '289944'
+CLANG_REVISION = '296320'
use_head_revision = 'LLVM_FORCE_HEAD_REVISION' in os.environ
if use_head_revision:
CLANG_REVISION = 'HEAD'
# This is incremented when pushing a new build of Clang at the same revision.
-CLANG_SUB_REVISION=2
+CLANG_SUB_REVISION=1
PACKAGE_VERSION = "%s-%s" % (CLANG_REVISION, CLANG_SUB_REVISION)
@@ -71,7 +71,7 @@ BINUTILS_BIN_DIR = os.path.join(BINUTILS_DIR, BINUTILS_DIR,
'Linux_x64', 'Release', 'bin')
BFD_PLUGINS_DIR = os.path.join(BINUTILS_DIR, 'Linux_x64', 'Release',
'lib', 'bfd-plugins')
-VERSION = '4.0.0'
+VERSION = '5.0.0'
ANDROID_NDK_DIR = os.path.join(
CHROMIUM_DIR, 'third_party', 'android_tools', 'ndk')
@@ -336,7 +336,7 @@ def AddGnuWinToPath():
return
gnuwin_dir = os.path.join(LLVM_BUILD_TOOLS_DIR, 'gnuwin')
- GNUWIN_VERSION = '5'
+ GNUWIN_VERSION = '6'
GNUWIN_STAMP = os.path.join(gnuwin_dir, 'stamp')
if ReadStampFile(GNUWIN_STAMP) == GNUWIN_VERSION:
print 'GNU Win tools already up to date.'
@@ -862,29 +862,11 @@ def main():
args.lto_gold_plugin = False
if args.if_needed:
- is_clang_required = False
- # clang is always used on Mac and Linux.
- if sys.platform == 'darwin' or sys.platform.startswith('linux'):
- is_clang_required = True
- # clang requested via $GYP_DEFINES.
- if re.search(r'\b(clang|asan|lsan|msan|tsan)=1',
- os.environ.get('GYP_DEFINES', '')):
- is_clang_required = True
- # clang previously downloaded, keep it up to date.
- # If you don't want this, delete third_party/llvm-build on your machine.
- if os.path.isdir(LLVM_BUILD_DIR):
- is_clang_required = True
- if not is_clang_required:
- return 0
+ # TODO(thakis): Can probably remove this and --if-needed altogether.
if re.search(r'\b(make_clang_dir)=', os.environ.get('GYP_DEFINES', '')):
print 'Skipping Clang update (make_clang_dir= was set in GYP_DEFINES).'
return 0
- if use_head_revision:
- # TODO(hans): Trunk was updated; remove after the next roll.
- global VERSION
- VERSION = '5.0.0'
-
global CLANG_REVISION, PACKAGE_VERSION
if args.print_revision:
if use_head_revision or args.llvm_force_head_revision:
diff --git a/chromium/tools/clang/value_cleanup/ListValueRewriter.cpp b/chromium/tools/clang/value_cleanup/ListValueRewriter.cpp
index f32f818d85c..85b1d28b8e0 100644
--- a/chromium/tools/clang/value_cleanup/ListValueRewriter.cpp
+++ b/chromium/tools/clang/value_cleanup/ListValueRewriter.cpp
@@ -351,25 +351,25 @@ void ListValueRewriter::RegisterMatchers(MatchFinder* match_finder) {
callee(cxxMethodDecl(hasName("::base::ListValue::Append"))),
argumentCountIs(1));
- // base::ListValue::Append(new base::FundamentalValue(bool))
+ // base::ListValue::Append(new base::Value(bool))
// => base::ListValue::AppendBoolean()
match_finder->addMatcher(
id("callExpr",
cxxMemberCallExpr(
is_list_append,
hasArgument(
- 0, ignoringParenImpCasts(id(
- "newExpr",
- cxxNewExpr(has(cxxConstructExpr(
- hasDeclaration(cxxMethodDecl(hasName(
- "::base::FundamentalValue::FundamentalValue"))),
- argumentCountIs(1),
- hasArgument(
- 0, id("argExpr",
- expr(hasType(booleanType())))))))))))),
+ 0, ignoringParenImpCasts(
+ id("newExpr",
+ cxxNewExpr(has(cxxConstructExpr(
+ hasDeclaration(cxxMethodDecl(
+ hasName("::base::Value::FundamentalValue"))),
+ argumentCountIs(1),
+ hasArgument(
+ 0, id("argExpr",
+ expr(hasType(booleanType())))))))))))),
&append_boolean_callback_);
- // base::ListValue::Append(new base::FundamentalValue(int))
+ // base::ListValue::Append(new base::Value(int))
// => base::ListValue::AppendInteger()
match_finder->addMatcher(
id("callExpr",
@@ -380,8 +380,8 @@ void ListValueRewriter::RegisterMatchers(MatchFinder* match_finder) {
ignoringParenImpCasts(id(
"newExpr",
cxxNewExpr(has(cxxConstructExpr(
- hasDeclaration(cxxMethodDecl(hasName(
- "::base::FundamentalValue::FundamentalValue"))),
+ hasDeclaration(cxxMethodDecl(
+ hasName("::base::Value::FundamentalValue"))),
argumentCountIs(1),
hasArgument(0, id("argExpr",
expr(hasType(isInteger()),
@@ -389,7 +389,7 @@ void ListValueRewriter::RegisterMatchers(MatchFinder* match_finder) {
booleanType()))))))))))))),
&append_integer_callback_);
- // base::ListValue::Append(new base::FundamentalValue(double))
+ // base::ListValue::Append(new base::Value(double))
// => base::ListValue::AppendDouble()
match_finder->addMatcher(
id("callExpr",
@@ -399,8 +399,8 @@ void ListValueRewriter::RegisterMatchers(MatchFinder* match_finder) {
0, ignoringParenImpCasts(id(
"newExpr",
cxxNewExpr(has(cxxConstructExpr(
- hasDeclaration(cxxMethodDecl(hasName(
- "::base::FundamentalValue::FundamentalValue"))),
+ hasDeclaration(cxxMethodDecl(
+ hasName("::base::Value::FundamentalValue"))),
argumentCountIs(1),
hasArgument(
0, id("argExpr",
diff --git a/chromium/tools/clang_format_merge_driver/OWNERS b/chromium/tools/clang_format_merge_driver/OWNERS
index 092d09f4e5d..7011ca31994 100644
--- a/chromium/tools/clang_format_merge_driver/OWNERS
+++ b/chromium/tools/clang_format_merge_driver/OWNERS
@@ -1,2 +1,4 @@
dcheng@chromium.org
thakis@chromium.org
+
+# COMPONENT: Tools
diff --git a/chromium/tools/cr/OWNERS b/chromium/tools/cr/OWNERS
index fa6851a195a..852aadf447c 100644
--- a/chromium/tools/cr/OWNERS
+++ b/chromium/tools/cr/OWNERS
@@ -1,3 +1,5 @@
miguelg@chromium.org
petrcermak@chromium.org
skyostil@chromium.org
+
+# COMPONENT: Tools
diff --git a/chromium/tools/cygprofile/profile_android_startup.py b/chromium/tools/cygprofile/profile_android_startup.py
index b4d011f185d..96d1b4c63b2 100755
--- a/chromium/tools/cygprofile/profile_android_startup.py
+++ b/chromium/tools/cygprofile/profile_android_startup.py
@@ -178,7 +178,7 @@ class WprManager(object):
self._flag_changer = flag_changer.FlagChanger(
self._device, self._cmdline_file)
self._flag_changer.AddFlags([
- '--host-resolver-rules="MAP * 127.0.0.1,EXCLUDE localhost"',
+ '--host-resolver-rules=MAP * 127.0.0.1,EXCLUDE localhost',
'--testing-fixed-http-port=%s' % device_http,
'--testing-fixed-https-port=%s' % device_https])
@@ -416,4 +416,3 @@ def main():
if __name__ == '__main__':
sys.exit(main())
-
diff --git a/chromium/tools/determinism/deterministic_build_whitelist.pyl b/chromium/tools/determinism/deterministic_build_whitelist.pyl
index 0ecda9648f1..6dcc7890478 100644
--- a/chromium/tools/determinism/deterministic_build_whitelist.pyl
+++ b/chromium/tools/determinism/deterministic_build_whitelist.pyl
@@ -17,15 +17,10 @@
{
# https://crbug.com/383340
'android': [
- 'flatc',
],
# https://crbug.com/330263
'linux': [
- 'flatc',
- # https://crbug.com/654989
- 'ppapi_nacl_tests_pnacl_newlib_x32_nonsfi.nexe',
- 'ppapi_nacl_tests_pnacl_newlib_x64.nexe',
],
# https://crbug.com/330262
@@ -82,7 +77,6 @@
'ffmpeg_regression_tests',
'filesystem_service_unittests',
'filter_fuzz_stub',
- 'flatc',
'gcm_unit_tests',
'generate_barcode_video',
'generate_timecode_audio',
diff --git a/chromium/tools/gn/analyzer.cc b/chromium/tools/gn/analyzer.cc
index 06f93363b5b..559287ef20b 100644
--- a/chromium/tools/gn/analyzer.cc
+++ b/chromium/tools/gn/analyzer.cc
@@ -51,13 +51,13 @@ struct Outputs {
LabelSet LabelsFor(const TargetSet& targets) {
LabelSet labels;
- for (const auto& target : targets)
+ for (auto* target : targets)
labels.insert(target->label());
return labels;
}
bool AnyBuildFilesWereModified(const SourceFileSet& source_files) {
- for (const auto& file : source_files) {
+ for (auto* file : source_files) {
if (base::EndsWith(file->value(), ".gn", base::CompareCase::SENSITIVE) ||
base::EndsWith(file->value(), ".gni", base::CompareCase::SENSITIVE))
return true;
@@ -287,7 +287,7 @@ std::string Analyzer::Analyze(const std::string& input, Err* err) const {
TargetSet compile_targets = TargetsFor(inputs.compile_labels);
if (inputs.compile_included_all) {
- for (auto& root : roots_)
+ for (auto* root : roots_)
compile_targets.insert(root);
}
TargetSet filtered_targets = Filter(compile_targets);
@@ -307,10 +307,10 @@ std::string Analyzer::Analyze(const std::string& input, Err* err) const {
TargetSet Analyzer::AllAffectedTargets(
const SourceFileSet& source_files) const {
TargetSet direct_matches;
- for (const auto& source_file : source_files)
+ for (auto* source_file : source_files)
AddTargetsDirectlyReferringToFileTo(source_file, &direct_matches);
TargetSet all_matches;
- for (const auto& match : direct_matches)
+ for (auto* match : direct_matches)
AddAllRefsTo(match, &all_matches);
return all_matches;
}
@@ -392,7 +392,7 @@ bool Analyzer::TargetRefersToFile(const Target* target,
void Analyzer::AddTargetsDirectlyReferringToFileTo(const SourceFile* file,
TargetSet* matches) const {
- for (const auto& target : all_targets_) {
+ for (auto* target : all_targets_) {
// Only handles targets in the default toolchain.
if ((target->label().GetToolchainLabel() == default_toolchain_) &&
TargetRefersToFile(target, file))
diff --git a/chromium/tools/gn/args.cc b/chromium/tools/gn/args.cc
index f81ffbe09a3..3bb40b41574 100644
--- a/chromium/tools/gn/args.cc
+++ b/chromium/tools/gn/args.cc
@@ -26,6 +26,9 @@ How build arguments are set
- target_cpu
- target_os
+ Next, project-specific overrides are applied. These are specified inside
+ the default_args variable of //.gn. See "gn help dotfile" for more.
+
If specified, arguments from the --args command line flag are used. If that
flag is not specified, args from previous builds in the build directory will
be used (this is in the file args.gn in the build directory).
@@ -81,6 +84,21 @@ void RemoveDeclaredOverrides(const Scope::KeyValueMap& declared_arguments,
} // namespace
+Args::ValueWithOverride::ValueWithOverride()
+ : default_value(),
+ has_override(false),
+ override_value() {
+}
+
+Args::ValueWithOverride::ValueWithOverride(const Value& def_val)
+ : default_value(def_val),
+ has_override(false),
+ override_value() {
+}
+
+Args::ValueWithOverride::~ValueWithOverride() {
+}
+
Args::Args() {
}
@@ -121,11 +139,6 @@ const Value* Args::GetArgOverride(const char* name) const {
return &found->second;
}
-Scope::KeyValueMap Args::GetAllOverrides() const {
- base::AutoLock lock(lock_);
- return all_overrides_;
-}
-
void Args::SetupRootScope(Scope* dest,
const Scope::KeyValueMap& toolchain_overrides) const {
base::AutoLock lock(lock_);
@@ -251,12 +264,27 @@ bool Args::VerifyAllOverridesUsed(Err* err) const {
return false;
}
-void Args::MergeDeclaredArguments(Scope::KeyValueMap* dest) const {
+Args::ValueWithOverrideMap Args::GetAllArguments() const {
+ ValueWithOverrideMap result;
+
base::AutoLock lock(lock_);
+
+ // Default values.
for (const auto& map_pair : declared_arguments_per_toolchain_) {
for (const auto& arg : map_pair.second)
- (*dest)[arg.first] = arg.second;
+ result.insert(std::make_pair(arg.first, ValueWithOverride(arg.second)));
}
+
+ // Merge in overrides.
+ for (const auto& over : overrides_) {
+ auto found = result.find(over.first);
+ if (found != result.end()) {
+ found->second.has_override = true;
+ found->second.override_value = over.second;
+ }
+ }
+
+ return result;
}
void Args::SetSystemVarsLocked(Scope* dest) const {
diff --git a/chromium/tools/gn/args.h b/chromium/tools/gn/args.h
index 9df5712d9f3..79fe93f1e06 100644
--- a/chromium/tools/gn/args.h
+++ b/chromium/tools/gn/args.h
@@ -5,6 +5,8 @@
#ifndef TOOLS_GN_ARGS_H_
#define TOOLS_GN_ARGS_H_
+#include <map>
+
#include "base/containers/hash_tables.h"
#include "base/macros.h"
#include "base/synchronization/lock.h"
@@ -23,6 +25,18 @@ extern const char kBuildArgs_Help[];
// the argument was unused.
class Args {
public:
+ struct ValueWithOverride {
+ ValueWithOverride();
+ ValueWithOverride(const Value& def_val);
+ ~ValueWithOverride();
+
+ Value default_value; // Default value given in declare_args.
+
+ bool has_override; // True indicates override_value is valid.
+ Value override_value; // From .gn or the current build's "gn args".
+ };
+ using ValueWithOverrideMap = std::map<base::StringPiece, ValueWithOverride>;
+
Args();
Args(const Args& other);
~Args();
@@ -36,9 +50,6 @@ class Args {
// argument is set.
const Value* GetArgOverride(const char* name) const;
- // Gets all overrides set on the build.
- Scope::KeyValueMap GetAllOverrides() const;
-
// Sets up the root scope for a toolchain. This applies the default system
// flags and saves the toolchain overrides so they can be applied to
// declare_args blocks that appear when loading files in that toolchain.
@@ -62,10 +73,10 @@ class Args {
// arguments. If there are, this returns false and sets the error.
bool VerifyAllOverridesUsed(Err* err) const;
- // Adds all declared arguments to the given output list. If the values exist
- // in the list already, their values will be overwriten, but other values
- // already in the list will remain.
- void MergeDeclaredArguments(Scope::KeyValueMap* dest) const;
+ // Returns information about all arguements, both defaults and overrides.
+ // This is used for the help system which is not performance critical. Use a
+ // map instead of a hash map so the arguements are sorted alphabetically.
+ ValueWithOverrideMap GetAllArguments() const;
private:
using ArgumentsPerToolchain =
diff --git a/chromium/tools/gn/bootstrap/bootstrap.py b/chromium/tools/gn/bootstrap/bootstrap.py
index c3642e8f487..38cfb117d29 100755
--- a/chromium/tools/gn/bootstrap/bootstrap.py
+++ b/chromium/tools/gn/bootstrap/bootstrap.py
@@ -154,6 +154,13 @@ def write_buildflag_header_manually(root_gen_dir, header, flags):
os.remove(temp_path)
+def write_build_date_header(root_gen_dir):
+ check_call([
+ os.path.join(SRC_ROOT, 'build', 'write_build_date_header.py'),
+ os.path.join(root_gen_dir, 'base/generated_build_date.h'),
+ 'default',
+ ])
+
def build_gn_with_ninja_manually(tempdir, options):
root_gen_dir = os.path.join(tempdir, 'gen')
mkdir_p(root_gen_dir)
@@ -167,6 +174,8 @@ def build_gn_with_ninja_manually(tempdir, options):
'ENABLE_MEMORY_TASK_PROFILER': 'false'
})
+ write_build_date_header(root_gen_dir)
+
if is_mac:
# //base/build_time.cc needs base/generated_build_date.h,
# and this file is only included for Mac builds.
@@ -374,6 +383,7 @@ def write_gn_ninja(path, root_gen_dir, options):
'base/at_exit.cc',
'base/base_paths.cc',
'base/base_switches.cc',
+ 'base/build_time.cc',
'base/callback_internal.cc',
'base/command_line.cc',
'base/debug/activity_tracker.cc',
@@ -382,6 +392,7 @@ def write_gn_ninja(path, root_gen_dir, options):
'base/debug/stack_trace.cc',
'base/debug/task_annotator.cc',
'base/environment.cc',
+ 'base/feature_list.cc',
'base/files/file.cc',
'base/files/file_descriptor_watcher_posix.cc',
'base/files/file_enumerator.cc',
@@ -413,6 +424,8 @@ def write_gn_ninja(path, root_gen_dir, options):
'base/message_loop/message_pump.cc',
'base/message_loop/message_pump_default.cc',
'base/metrics/bucket_ranges.cc',
+ 'base/metrics/field_trial.cc',
+ 'base/metrics/field_trial_param_associator.cc',
'base/metrics/histogram.cc',
'base/metrics/histogram_base.cc',
'base/metrics/histogram_samples.cc',
@@ -428,12 +441,14 @@ def write_gn_ninja(path, root_gen_dir, options):
'base/pending_task.cc',
'base/pickle.cc',
'base/process/kill.cc',
+ 'base/process/memory.cc',
'base/process/process_handle.cc',
'base/process/process_iterator.cc',
'base/process/process_metrics.cc',
'base/profiler/scoped_profile.cc',
'base/profiler/scoped_tracker.cc',
'base/profiler/tracked_time.cc',
+ 'base/rand_util.cc',
'base/run_loop.cc',
'base/sequence_token.cc',
'base/sequence_checker_impl.cc',
@@ -501,6 +516,7 @@ def write_gn_ninja(path, root_gen_dir, options):
'base/trace_event/memory_allocator_dump_guid.cc',
'base/trace_event/memory_dump_manager.cc',
'base/trace_event/memory_dump_request_args.cc',
+ 'base/trace_event/memory_dump_scheduler.cc',
'base/trace_event/memory_dump_session_state.cc',
'base/trace_event/memory_infra_background_whitelist.cc',
'base/trace_event/process_memory_dump.cc',
@@ -533,11 +549,13 @@ def write_gn_ninja(path, root_gen_dir, options):
'base/files/memory_mapped_file_posix.cc',
'base/message_loop/message_pump_libevent.cc',
'base/posix/file_descriptor_shuffle.cc',
+ 'base/posix/global_descriptors.cc',
'base/posix/safe_strerror.cc',
'base/process/kill_posix.cc',
'base/process/process_handle_posix.cc',
'base/process/process_metrics_posix.cc',
'base/process/process_posix.cc',
+ 'base/rand_util_posix.cc',
'base/strings/string16.cc',
'base/synchronization/condition_variable_posix.cc',
'base/synchronization/lock_impl_posix.cc',
@@ -589,6 +607,7 @@ def write_gn_ninja(path, root_gen_dir, options):
'base/memory/shared_memory_posix.cc',
'base/nix/xdg_util.cc',
'base/process/internal_linux.cc',
+ 'base/process/memory_linux.cc',
'base/process/process_handle_linux.cc',
'base/process/process_iterator_linux.cc',
'base/process/process_linux.cc',
diff --git a/chromium/tools/gn/command_args.cc b/chromium/tools/gn/command_args.cc
index 1b81135be3d..98283f29814 100644
--- a/chromium/tools/gn/command_args.cc
+++ b/chromium/tools/gn/command_args.cc
@@ -102,62 +102,91 @@ void GetContextForValue(const Value& value,
}
}
-void PrintArgHelp(const base::StringPiece& name, const Value& value) {
- OutputString(name.as_string(), DECORATION_YELLOW);
- OutputString(" Default = " + value.ToString(true) + "\n");
-
+// Prints the value and origin for a default value. Default values always list
+// an origin and if there is no origin, print a message about it being
+// internally set. Overrides can't be internally set so the location handling
+// is a bit different.
+//
+// The default value also contains the docstring.
+void PrintDefaultValueInfo(base::StringPiece name, const Value& value) {
+ OutputString(value.ToString(true) + "\n");
if (value.origin()) {
std::string location, comment;
GetContextForValue(value, &location, &comment);
- OutputString(" " + location + "\n" + comment);
+ OutputString(" From " + location + "\n");
+ if (!comment.empty())
+ OutputString("\n" + comment);
} else {
- OutputString(" (Internally set; try `gn help " + name.as_string() +
+ OutputString(" (Internally set; try `gn help " + name.as_string() +
"`.)\n");
}
}
+// Override value is null if there is no override.
+void PrintArgHelp(const base::StringPiece& name,
+ const Args::ValueWithOverride& val) {
+ OutputString(name.as_string(), DECORATION_YELLOW);
+ OutputString("\n");
+
+ if (val.has_override) {
+ // Override present, print both it and the default.
+ OutputString(" Current value = " + val.override_value.ToString(true) +
+ "\n");
+ if (val.override_value.origin()) {
+ std::string location, comment;
+ GetContextForValue(val.override_value, &location, &comment);
+ OutputString(" From " + location + "\n");
+ }
+ OutputString(" Overridden from the default = ");
+ PrintDefaultValueInfo(name, val.default_value);
+ } else {
+ // No override.
+ OutputString(" Current value (from the default) = ");
+ PrintDefaultValueInfo(name, val.default_value);
+ }
+}
+
int ListArgs(const std::string& build_dir) {
Setup* setup = new Setup;
if (!setup->DoSetup(build_dir, false) || !setup->Run())
return 1;
- Scope::KeyValueMap build_args;
- setup->build_settings().build_args().MergeDeclaredArguments(&build_args);
-
- // Find all of the arguments we care about. Use a regular map so they're
- // sorted nicely when we write them out.
- std::map<base::StringPiece, Value> sorted_args;
+ Args::ValueWithOverrideMap args =
+ setup->build_settings().build_args().GetAllArguments();
std::string list_value =
base::CommandLine::ForCurrentProcess()->GetSwitchValueASCII(kSwitchList);
- if (list_value.empty()) {
- // List all values.
- for (const auto& arg : build_args)
- sorted_args.insert(arg);
- } else {
+ if (!list_value.empty()) {
// List just the one specified as the parameter to --list.
- Scope::KeyValueMap::const_iterator found_arg = build_args.find(list_value);
- if (found_arg == build_args.end()) {
+ auto found = args.find(list_value);
+ if (found == args.end()) {
Err(Location(), "Unknown build argument.",
"You asked for \"" + list_value + "\" which I didn't find in any "
"build file\nassociated with this build.").PrintToStdout();
return 1;
}
- sorted_args.insert(*found_arg);
+
+ // Delete everything from the map except the one requested.
+ Args::ValueWithOverrideMap::value_type preserved = *found;
+ args.clear();
+ args.insert(preserved);
}
if (base::CommandLine::ForCurrentProcess()->HasSwitch(kSwitchShort)) {
- // Short key=value output.
- for (const auto& arg : sorted_args) {
+ // Short <key>=<current_value> output.
+ for (const auto& arg : args) {
OutputString(arg.first.as_string());
OutputString(" = ");
- OutputString(arg.second.ToString(true));
+ if (arg.second.has_override)
+ OutputString(arg.second.override_value.ToString(true));
+ else
+ OutputString(arg.second.default_value.ToString(true));
OutputString("\n");
}
return 0;
}
// Long output.
- for (const auto& arg : sorted_args) {
+ for (const auto& arg : args) {
PrintArgHelp(arg.first, arg.second);
OutputString("\n");
}
@@ -199,9 +228,9 @@ bool RunEditor(const base::FilePath& file_to_edit) {
#else // POSIX
bool RunEditor(const base::FilePath& file_to_edit) {
- const char* editor_ptr = getenv("VISUAL");
+ const char* editor_ptr = getenv("GN_EDITOR");
if (!editor_ptr)
- editor_ptr = getenv("GN_EDITOR");
+ editor_ptr = getenv("VISUAL");
if (!editor_ptr)
editor_ptr = getenv("EDITOR");
if (!editor_ptr)
@@ -284,15 +313,19 @@ extern const char kArgs_Help[] =
build arguments work.
Usage
+
gn args <out_dir>
- Open the arguments for the given build directory in an editor (as
- specified by the EDITOR environment variable). If the given build
- directory doesn't exist, it will be created and an empty args file will
- be opened in the editor. You would type something like this into that
- file:
+ Open the arguments for the given build directory in an editor. If the
+ given build directory doesn't exist, it will be created and an empty args
+ file will be opened in the editor. You would type something like this
+ into that file:
enable_doom_melon=false
os="android"
+ To find your editor on Posix, GN will search the environment variables in
+ order: GN_EDITOR, VISUAL, and EDITOR. On Windows GN will open the command
+ associated with .txt files.
+
Note: you can edit the build args manually by editing the file "args.gn"
in the build directory and then running "gn gen <out_dir>".
@@ -301,20 +334,12 @@ Usage
an exact_arg is specified for the list flag, just that one build
argument.
- The output will list the declaration location, default value, and comment
- preceeding the declaration. If --short is specified, only the names and
- values will be printed.
-
- If the out_dir is specified, the build configuration will be taken from
- that build directory. The reason this is needed is that the definition of
- some arguments is dependent on the build configuration, so setting some
- values might add, remove, or change the default values for other
- arguments. Specifying your exact configuration allows the proper
- arguments to be displayed.
+ The output will list the declaration location, current value for the
+ build, default value (if different than the current value), and comment
+ preceeding the declaration.
- Instead of specifying the out_dir, you can also use the command-line flag
- to specify the build configuration:
- --args=<exact list of args to use>
+ If --short is specified, only the names and current values will be
+ printed.
Examples
diff --git a/chromium/tools/gn/docs/cross_compiles.md b/chromium/tools/gn/docs/cross_compiles.md
index 2d7e8a3c81d..52f95596bc6 100644
--- a/chromium/tools/gn/docs/cross_compiles.md
+++ b/chromium/tools/gn/docs/cross_compiles.md
@@ -49,7 +49,7 @@ gn gen out/Default --args='target_os="android"'
(We don't have to specify target\_cpu because of the conditionals
mentioned above).
-And, to do a 64-bit MIPS ChromeOS cross-compile:
+And, to do a 64-bit MIPS Chrome OS cross-compile:
```
gn gen out/Default --args='target_os="chromeos" target_cpu="mips64el"'
diff --git a/chromium/tools/gn/docs/reference.md b/chromium/tools/gn/docs/reference.md
index d27db443e34..97a2c866fc8 100644
--- a/chromium/tools/gn/docs/reference.md
+++ b/chromium/tools/gn/docs/reference.md
@@ -325,16 +325,20 @@
```
### **Usage**
+
```
gn args <out_dir>
- Open the arguments for the given build directory in an editor (as
- specified by the EDITOR environment variable). If the given build
- directory doesn't exist, it will be created and an empty args file will
- be opened in the editor. You would type something like this into that
- file:
+ Open the arguments for the given build directory in an editor. If the
+ given build directory doesn't exist, it will be created and an empty args
+ file will be opened in the editor. You would type something like this
+ into that file:
enable_doom_melon=false
os="android"
+ To find your editor on Posix, GN will search the environment variables in
+ order: GN_EDITOR, VISUAL, and EDITOR. On Windows GN will open the command
+ associated with .txt files.
+
Note: you can edit the build args manually by editing the file "args.gn"
in the build directory and then running "gn gen <out_dir>".
@@ -343,20 +347,12 @@
an exact_arg is specified for the list flag, just that one build
argument.
- The output will list the declaration location, default value, and comment
- preceeding the declaration. If --short is specified, only the names and
- values will be printed.
+ The output will list the declaration location, current value for the
+ build, default value (if different than the current value), and comment
+ preceeding the declaration.
- If the out_dir is specified, the build configuration will be taken from
- that build directory. The reason this is needed is that the definition of
- some arguments is dependent on the build configuration, so setting some
- values might add, remove, or change the default values for other
- arguments. Specifying your exact configuration allows the proper
- arguments to be displayed.
-
- Instead of specifying the out_dir, you can also use the command-line flag
- to specify the build configuration:
- --args=<exact list of args to use>
+ If --short is specified, only the names and current values will be
+ printed.
```
@@ -1686,7 +1682,7 @@
}
If you want to override the (default disabled) Doom Melon:
- gn --args="enable_doom_melon=true enable_teleporter=false"
+ gn --args="enable_doom_melon=true enable_teleporter=true"
This also sets the teleporter, but it's already defaulted to on so it will
have no effect.
@@ -3897,7 +3893,7 @@
This should be set to the most specific value possible. So, "android" or
"chromeos" should be used instead of "linux" where applicable, even though
- Android and ChromeOS are both Linux variants. This can mean that one needs to
+ Android and Chrome OS are both Linux variants. This can mean that one needs to
write
if (target_os == "android" || target_os == "linux") {
@@ -5730,6 +5726,9 @@
- target_cpu
- target_os
+ Next, project-specific overrides are applied. These are specified inside
+ the default_args variable of //.gn. See "gn help dotfile" for more.
+
If specified, arguments from the --args command line flag are used. If that
flag is not specified, args from previous builds in the build directory will
be used (this is in the file args.gn in the build directory).
@@ -5845,6 +5844,15 @@
The secondary source root must be inside the main source tree.
+ default_args [optional]
+ Scope containing the default overrides for declared arguments. These
+ overrides take precedence over the default values specified in the
+ declare_args() block, but can be overriden using --args or the
+ args.gn file.
+
+ This is intended to be used when subprojects declare arguments with
+ default values that need to be changed for whatever reason.
+
```
### **Example .gn file contents**
@@ -5861,6 +5869,12 @@
secondary_source = "//build/config/temporary_buildfiles/"
+ default_args = {
+ # Default to release builds for this project.
+ is_debug = false
+ is_component_build = false
+ }
+
```
## **Build graph and execution overview**
diff --git a/chromium/tools/gn/functions.cc b/chromium/tools/gn/functions.cc
index 41be19b5206..272cd9430bc 100644
--- a/chromium/tools/gn/functions.cc
+++ b/chromium/tools/gn/functions.cc
@@ -429,7 +429,7 @@ Example
}
If you want to override the (default disabled) Doom Melon:
- gn --args="enable_doom_melon=true enable_teleporter=false"
+ gn --args="enable_doom_melon=true enable_teleporter=true"
This also sets the teleporter, but it's already defaulted to on so it will
have no effect.
)";
diff --git a/chromium/tools/gn/import_manager.cc b/chromium/tools/gn/import_manager.cc
index 0213c3367a4..c9a44d4eaf4 100644
--- a/chromium/tools/gn/import_manager.cc
+++ b/chromium/tools/gn/import_manager.cc
@@ -111,7 +111,7 @@ bool ImportManager::DoImport(const SourceFile& file,
base::TimeDelta::FromMilliseconds(20);
if (TracingEnabled() &&
import_block_end - import_block_begin > kImportBlockTraceThreshold) {
- auto import_block_trace =
+ auto* import_block_trace =
new TraceItem(TraceItem::TRACE_IMPORT_BLOCK, file.value(),
base::PlatformThread::CurrentId());
import_block_trace->set_begin(import_block_begin);
diff --git a/chromium/tools/gn/misc/vim/autoload/gn.vim b/chromium/tools/gn/misc/vim/autoload/gn.vim
new file mode 100644
index 00000000000..5573efc0af3
--- /dev/null
+++ b/chromium/tools/gn/misc/vim/autoload/gn.vim
@@ -0,0 +1,26 @@
+" Copyright 2017 The Chromium Authors. All rights reserved.
+" Use of this source code is governed by a BSD-style license that can be
+" found in the LICENSE file.
+
+function! gn#TranslateToBuildFile(name) abort
+ " Strip '//' prefix
+ let l:new_path = substitute(a:name, '\v^//', '', '')
+
+ " Strip the build target name (necessary if 'isfname' contains ':')
+ let l:new_path = substitute(l:new_path, '\v:.*$', '', '')
+
+ " Append 'BUILD.gn', only if this is a directory and not a file
+ " Prefer using maktaba if it's available, but fallback to an alternative
+ if exists('*maktaba#path#Basename')
+ " Check if the last part of the path appears to be a file
+ if maktaba#path#Basename(l:new_path) !~# '\V.'
+ let l:new_path = maktaba#path#Join([l:new_path, 'BUILD.gn'])
+ endif
+ else
+ " This will break if 'autochdir' is enabled
+ if isdirectory(l:new_path)
+ let l:new_path = substitute(l:new_path, '\v/?$', '/BUILD.gn', '')
+ endif
+ endif
+ return l:new_path
+endfunction
diff --git a/chromium/tools/gn/misc/vim/ftplugin/gn.vim b/chromium/tools/gn/misc/vim/ftplugin/gn.vim
new file mode 100644
index 00000000000..ede251dfe99
--- /dev/null
+++ b/chromium/tools/gn/misc/vim/ftplugin/gn.vim
@@ -0,0 +1,12 @@
+" Copyright 2017 The Chromium Authors. All rights reserved.
+" Use of this source code is governed by a BSD-style license that can be
+" found in the LICENSE file.
+
+if exists('b:did_ftplugin')
+ finish
+endif
+let b:did_ftplugin = 1
+
+setlocal includeexpr=gn#TranslateToBuildFile(v:fname)
+
+setlocal commentstring=#\ %s
diff --git a/chromium/tools/gn/misc/vim/syntax/gn.vim b/chromium/tools/gn/misc/vim/syntax/gn.vim
index 55f18524c93..0fbd6e016b8 100644
--- a/chromium/tools/gn/misc/vim/syntax/gn.vim
+++ b/chromium/tools/gn/misc/vim/syntax/gn.vim
@@ -52,8 +52,10 @@ syn keyword gnVariable visibility
hi def link gnVariable Keyword
" Strings
-syn region gnString start=+L\="+ skip=+\\\\\|\\"+ end=+"+ contains=@Spell
+syn region gnString start=+L\="+ skip=+\\\\\|\\"+ end=+"+ contains=@Spell,gnTargetName
+syn match gnTargetName '\v:[^"]+' contained
hi def link gnString String
+hi def link gnTargetName Special
" Comments
syn keyword gnTodo contained TODO FIXME XXX BUG NOTE
diff --git a/chromium/tools/gn/parser.cc b/chromium/tools/gn/parser.cc
index 447dfcf5b0f..7e2c7f4338d 100644
--- a/chromium/tools/gn/parser.cc
+++ b/chromium/tools/gn/parser.cc
@@ -824,7 +824,7 @@ void Parser::AssignComments(ParseNode* file) {
CHECK_EQ(node, file) << "Only expected on top file node";
continue;
}
- const Location& start = node->GetRange().begin();
+ const Location start = node->GetRange().begin();
while (cur_comment < static_cast<int>(line_comment_tokens_.size())) {
if (start.byte() >= line_comment_tokens_[cur_comment].location().byte()) {
const_cast<ParseNode*>(node)->comments_mutable()->append_before(
diff --git a/chromium/tools/gn/setup.cc b/chromium/tools/gn/setup.cc
index 06062e9e5ab..5c4e9842fa4 100644
--- a/chromium/tools/gn/setup.cc
+++ b/chromium/tools/gn/setup.cc
@@ -106,6 +106,15 @@ Variables
The secondary source root must be inside the main source tree.
+ default_args [optional]
+ Scope containing the default overrides for declared arguments. These
+ overrides take precedence over the default values specified in the
+ declare_args() block, but can be overriden using --args or the
+ args.gn file.
+
+ This is intended to be used when subprojects declare arguments with
+ default values that need to be changed for whatever reason.
+
Example .gn file contents
buildconfig = "//build/config/BUILDCONFIG.gn"
@@ -118,6 +127,12 @@ Example .gn file contents
root = "//:root"
secondary_source = "//build/config/temporary_buildfiles/"
+
+ default_args = {
+ # Default to release builds for this project.
+ is_debug = false
+ is_component_build = false
+ }
)";
namespace {
@@ -273,6 +288,7 @@ Setup::Setup()
check_public_headers_(false),
dotfile_settings_(&build_settings_, std::string()),
dotfile_scope_(&dotfile_settings_),
+ default_args_(nullptr),
fill_arguments_(true) {
dotfile_settings_.set_toolchain_label(Label());
@@ -316,6 +332,14 @@ bool Setup::DoSetup(const std::string& build_dir, bool force_create) {
return false;
}
+ // Apply project-specific default (if specified).
+ // Must happen before FillArguments().
+ if (default_args_) {
+ Scope::KeyValueMap overrides;
+ default_args_->GetCurrentScopeValues(&overrides);
+ build_settings_.build_args().AddArgOverrides(overrides);
+ }
+
if (fill_arguments_) {
if (!FillArguments(*cmdline))
return false;
@@ -754,5 +778,17 @@ bool Setup::FillOtherConfig(const base::CommandLine& cmdline) {
build_settings_.set_exec_script_whitelist(std::move(whitelist));
}
+ // Fill optional default_args.
+ const Value* default_args_value =
+ dotfile_scope_.GetValue("default_args", true);
+ if (default_args_value) {
+ if (!default_args_value->VerifyTypeIs(Value::SCOPE, &err)) {
+ err.PrintToStdout();
+ return false;
+ }
+
+ default_args_ = default_args_value->scope_value();
+ }
+
return true;
}
diff --git a/chromium/tools/gn/setup.h b/chromium/tools/gn/setup.h
index 45335bf5b0e..d9a77d063a3 100644
--- a/chromium/tools/gn/setup.h
+++ b/chromium/tools/gn/setup.h
@@ -150,6 +150,10 @@ class Setup {
std::vector<Token> dotfile_tokens_;
std::unique_ptr<ParseNode> dotfile_root_;
+ // Default overrides, specified in the dotfile.
+ // Owned by the Value (if it exists) in the dotfile_scope_.
+ const Scope* default_args_;
+
// Set to true when we should populate the build arguments from the command
// line or build argument file. See setter above.
bool fill_arguments_;
diff --git a/chromium/tools/gn/visual_studio_writer.cc b/chromium/tools/gn/visual_studio_writer.cc
index 76c38dca2b0..6e0b1bd835d 100644
--- a/chromium/tools/gn/visual_studio_writer.cc
+++ b/chromium/tools/gn/visual_studio_writer.cc
@@ -76,7 +76,7 @@ const char kProjectVersionVs2015[] = "14.0"; // Visual Studio 2015
const char kVersionStringVs2013[] = "Visual Studio 2013"; // Visual Studio 2013
const char kVersionStringVs2015[] = "Visual Studio 2015"; // Visual Studio 2015
const char kWindowsKitsVersion[] = "10"; // Windows 10 SDK
-const char kWindowsKitsIncludeVersion[] = "10.0.10586.0"; // Windows 10 SDK
+const char kWindowsKitsIncludeVersion[] = "10.0.14393.0"; // Windows 10 SDK
const char kGuidTypeProject[] = "{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}";
const char kGuidTypeFolder[] = "{2150E333-8FDC-42A3-9474-1A3956D46DE8}";
diff --git a/chromium/tools/grit/grit/format/gen_predetermined_ids.py b/chromium/tools/grit/grit/format/gen_predetermined_ids.py
new file mode 100755
index 00000000000..eb6afa86256
--- /dev/null
+++ b/chromium/tools/grit/grit/format/gen_predetermined_ids.py
@@ -0,0 +1,157 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+A tool to generate a predetermined resource ids file that can be used as an
+input to grit via the -p option. This is meant to be run manually every once in
+a while and its output checked in. See tools/gritsettings/README.md for details.
+"""
+
+import fnmatch
+import os
+import re
+import sys
+
+# Regular expressions for parsing the #define macro format. Separate regular
+# expressions are used for parsing lines with pragma (for builds with
+# enable_resource_whitelist_generation flag) in windows and non-windows, and for
+# lines without pragma, For example,
+# Without generate whitelist flag:
+# #define IDS_FOO_MESSAGE 1234
+# With generate whitelist flag in non-windows:
+# #define IDS_FOO_MESSAGE _Pragma("whitelisted_resource_1234") 1234
+# With generate whitelist flag in windows:
+# #define IDS_FOO_MESSAGE __pragma(message("whitelisted_resource_1234")) 1234
+RESOURCE_EXTRACT_REGEX = re.compile('^#define (\S*) (\d*)$', re.MULTILINE)
+RESOURCE_EXTRACT_REGEX_PRAGMA = re.compile(
+ '^#define (\S*) _Pragma\("whitelisted_resource_\d*"\) (\d*)$',
+ re.MULTILINE)
+RESOURCE_EXTRACT_REGEX_PRAGMA_WINDOWS = re.compile(
+ '^#define (\S*) __pragma\(message\("whitelisted_resource_\d*"\)\) (\d*)$',
+ re.MULTILINE)
+
+ORDERED_RESOURCE_IDS_REGEX = re.compile('^Resource=(\d*)$', re.MULTILINE)
+
+
+def _GetResourceNameIdPairsIter(string_to_scan):
+ """Gets an iterator of the resource name and id pairs of the given string.
+
+ Scans the input string for lines of the form "#define NAME ID" and returns
+ an iterator over all matching (NAME, ID) pairs.
+
+ Args:
+ string_to_scan: The input string to scan.
+
+ Yields:
+ A tuple of name and id.
+ """
+ for match in RESOURCE_EXTRACT_REGEX.finditer(string_to_scan):
+ yield match.group(1, 2)
+ for match in RESOURCE_EXTRACT_REGEX_PRAGMA.finditer(string_to_scan):
+ yield match.group(1, 2)
+ for match in RESOURCE_EXTRACT_REGEX_PRAGMA_WINDOWS.finditer(string_to_scan):
+ yield match.group(1, 2)
+
+
+def _ReadOrderedResourceIds(path):
+ """Reads ordered resource ids from the given file.
+
+ The resources are expected to be of the format produced by running Chrome
+ with --print-resource-ids command line.
+
+ Args:
+ path: File path to read resource ids from.
+
+ Returns:
+ An array of ordered resource ids.
+ """
+ ordered_resource_ids = []
+ with open(path, "r") as f:
+ for match in ORDERED_RESOURCE_IDS_REGEX.finditer(f.read()):
+ ordered_resource_ids.append(int(match.group(1)))
+ return ordered_resource_ids
+
+
+def GenerateResourceMapping(original_resources, ordered_resource_ids):
+ """Generates a resource mapping from the ordered ids and the original mapping.
+
+ The returned dict will assign new ids to ordered_resource_ids numerically
+ increasing from 101.
+
+ Args:
+ original_resources: A dict of original resource ids to resource names.
+ ordered_resource_ids: An array of ordered resource ids.
+
+ Returns:
+ A dict of resource ids to resource names.
+ """
+ output_resource_map = {}
+ # 101 is used as the starting value since other parts of GRIT require it to be
+ # the minimum (e.g. rc_header.py) based on Windows resource numbering.
+ next_id = 101
+ for original_id in ordered_resource_ids:
+ resource_name = original_resources[original_id]
+ output_resource_map[next_id] = resource_name
+ next_id += 1
+ return output_resource_map
+
+
+def ReadResourceIdsFromFile(file, original_resources):
+ """Reads resource ids from a GRIT-produced header file.
+
+ Args:
+ file: File to a GRIT-produced header file to read from.
+ original_resources: Dict of resource ids to resource names to add to.
+ """
+ for resource_name, resource_id in _GetResourceNameIdPairsIter(file.read()):
+ original_resources[int(resource_id)] = resource_name
+
+
+def _ReadOriginalResourceIds(out_dir):
+ """Reads resource ids from GRIT header files in the specified directory.
+
+ Args:
+ out_dir: A Chrome build output directory (e.g. out/gn) to scan.
+
+ Returns:
+ A dict of resource ids to resource names.
+ """
+ original_resources = {}
+ for root, dirnames, filenames in os.walk(out_dir + '/gen'):
+ for filename in filenames:
+ if filename.endswith(('_resources.h', '_settings.h', '_strings.h')):
+ with open(os.path.join(root, filename), "r") as f:
+ ReadResourceIdsFromFile(f, original_resources)
+ return original_resources
+
+
+def _GeneratePredeterminedIdsFile(ordered_resources_file, out_dir):
+ """Generates a predetermined ids file.
+
+ Args:
+ ordered_resources_file: File path to read ordered resource ids from.
+ out_dir: A Chrome build output directory (e.g. out/gn) to scan.
+
+ Returns:
+ A dict of resource ids to resource names.
+ """
+ original_resources = _ReadOriginalResourceIds(out_dir)
+ ordered_resource_ids = _ReadOrderedResourceIds(ordered_resources_file)
+ output_resource_map = GenerateResourceMapping(original_resources,
+ ordered_resource_ids)
+ for res_id in sorted(output_resource_map.keys()):
+ print "{} {}".format(output_resource_map[res_id], res_id)
+
+
+def main(argv):
+ if len(argv) != 2:
+ print("usage: gen_predetermined_ids.py <ordered_resources_file> <out_dir>")
+ sys.exit(1)
+ ordered_resources_file, out_dir = argv[0], argv[1]
+ _GeneratePredeterminedIdsFile(ordered_resources_file, out_dir)
+
+
+if '__main__' == __name__:
+ main(sys.argv[1:])
diff --git a/chromium/tools/grit/grit/format/gen_predetermined_ids_unittest.py b/chromium/tools/grit/grit/format/gen_predetermined_ids_unittest.py
new file mode 100755
index 00000000000..472a09d331f
--- /dev/null
+++ b/chromium/tools/grit/grit/format/gen_predetermined_ids_unittest.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+'''Unit tests for the gen_predetermined_ids module.'''
+
+import os
+import sys
+if __name__ == '__main__':
+ sys.path.append(os.path.join(os.path.dirname(__file__), '../..'))
+
+import StringIO
+import unittest
+
+from grit.format import gen_predetermined_ids
+
+class GenPredeterminedIdsUnittest(unittest.TestCase):
+ def testGenerateResourceMapping(self):
+ original_resources = {200: 'A', 201: 'B', 300: 'C', 350: 'D', 370: 'E'}
+ ordered_resource_ids = [300, 201, 370]
+ mapping = gen_predetermined_ids.GenerateResourceMapping(
+ original_resources, ordered_resource_ids)
+ self.assertEqual({101: 'C', 102: 'B', 103: 'E'}, mapping)
+
+ def testReadResourceIdsFromFile(self):
+ f = StringIO.StringIO('''
+// This file is automatically generated by GRIT. Do not edit.
+
+#pragma once
+
+#define IDS_BOOKMARKS_NO_ITEMS 12500
+#define IDS_BOOKMARK_BAR_IMPORT_LINK _Pragma("whitelisted_resource_12501") 12501
+#define IDS_BOOKMARK_X __pragma(message("whitelisted_resource_12502")) 12502
+''')
+ resources = {}
+ gen_predetermined_ids.ReadResourceIdsFromFile(f, resources)
+ self.assertEqual({12500: 'IDS_BOOKMARKS_NO_ITEMS',
+ 12501: 'IDS_BOOKMARK_BAR_IMPORT_LINK',
+ 12502: 'IDS_BOOKMARK_X'}, resources)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/chromium/tools/grit/grit/format/html_inline.py b/chromium/tools/grit/grit/format/html_inline.py
index 9e5ec6a81e4..3c8e21be466 100755
--- a/chromium/tools/grit/grit/format/html_inline.py
+++ b/chromium/tools/grit/grit/format/html_inline.py
@@ -48,7 +48,23 @@ _INCLUDE_RE = lazy_re.compile(
'(\s*</include>)?',
re.DOTALL)
_SRC_RE = lazy_re.compile(
- r'<(?!script)(?:[^>]+?\s)src=(?P<quote>")(?!\[\[|{{)(?P<filename>[^"\']*)\1',
+ r'<(?!script)(?:[^>]+?\s)src="(?!\[\[|{{)(?P<filename>[^"\']*)"',
+ re.MULTILINE)
+# This re matches '<img srcset="..."'
+_SRCSET_RE = lazy_re.compile(
+ r'<img\b(?:[^>]*?\s)srcset="(?!\[\[|{{)(?P<srcset>[^"\']*)"',
+ re.MULTILINE)
+# This re is for splitting srcset value string into "image candidate strings".
+# Notes:
+# - HTML 5.2 states that URL cannot start with comma.
+# - the "descriptor" is either "width descriptor" or "pixel density descriptor".
+# The first one consists of "valid non-negative integer + letter 'x'",
+# the second one is formed of "positive valid floating-point number +
+# letter 'w'". As a reasonable compromise, we match a list of characters
+# that form both of them.
+# Matches for example "img2.png 2x" or "img9.png 11E-2w".
+_SRCSET_ENTRY_RE = lazy_re.compile(
+ r'\s*(?P<url>[^,]\S+)\s+(?P<descriptor>[\deE.-]+[wx])\s*',
re.MULTILINE)
_ICON_RE = lazy_re.compile(
r'<link rel="icon"\s(?:[^>]+?\s)?'
@@ -69,20 +85,17 @@ def GetDistribution():
distribution = distribution[1:].lower()
return distribution
+def ConvertFileToDataURL(filename, base_path, distribution, inlined_files,
+ names_only):
+ """Convert filename to inlined data URI.
-def SrcInlineAsDataURL(
- src_match, base_path, distribution, inlined_files, names_only=False,
- filename_expansion_function=None):
- """regex replace function.
-
- Takes a regex match for src="filename", attempts to read the file
- at 'filename' and returns the src attribute with the file inlined
- as a data URI. If it finds DIST_SUBSTR string in file name, replaces
- it with distribution.
+ Takes a filename from ether "src" or "srcset", and attempts to read the file
+ at 'filename'. Returns data URI as string with given file inlined.
+ If it finds DIST_SUBSTR string in file name, replaces it with distribution.
+ If filename contains ':', it is considered URL and not translated.
Args:
- src_match: regex match object with 'filename' and 'quote' named capturing
- groups
+ filename: filename string from ether src or srcset attributes.
base_path: path that to look for files in
distribution: string that should replace DIST_SUBSTR
inlined_files: The name of the opened file is appended to this list.
@@ -92,14 +105,9 @@ def SrcInlineAsDataURL(
Returns:
string
"""
- filename = src_match.group('filename')
- if filename_expansion_function:
- filename = filename_expansion_function(filename)
- quote = src_match.group('quote')
-
if filename.find(':') != -1:
# filename is probably a URL, which we don't want to bother inlining
- return src_match.group(0)
+ return filename
filename = filename.replace(DIST_SUBSTR , distribution)
filepath = os.path.normpath(os.path.join(base_path, filename))
@@ -113,11 +121,122 @@ def SrcInlineAsDataURL(
raise Exception('%s is of an an unknown type and '
'cannot be stored in a data url.' % filename)
inline_data = base64.standard_b64encode(util.ReadFile(filepath, util.BINARY))
+ return 'data:%s;base64,%s' % (mimetype, inline_data)
+
+
+def SrcInlineAsDataURL(
+ src_match, base_path, distribution, inlined_files, names_only=False,
+ filename_expansion_function=None):
+ """regex replace function.
+
+ Takes a regex match for src="filename", attempts to read the file
+ at 'filename' and returns the src attribute with the file inlined
+ as a data URI. If it finds DIST_SUBSTR string in file name, replaces
+ it with distribution.
+
+ Args:
+ src_match: regex match object with 'filename' named capturing group
+ base_path: path that to look for files in
+ distribution: string that should replace DIST_SUBSTR
+ inlined_files: The name of the opened file is appended to this list.
+ names_only: If true, the function will not read the file but just return "".
+ It will still add the filename to |inlined_files|.
+
+ Returns:
+ string
+ """
+ filename = src_match.group('filename')
+ if filename_expansion_function:
+ filename = filename_expansion_function(filename)
+
+ data_url = ConvertFileToDataURL(filename, base_path, distribution,
+ inlined_files, names_only)
+
+ if not data_url:
+ return data_url
prefix = src_match.string[src_match.start():src_match.start('filename')]
suffix = src_match.string[src_match.end('filename'):src_match.end()]
- return '%sdata:%s;base64,%s%s' % (prefix, mimetype, inline_data, suffix)
+ return prefix + data_url + suffix
+
+def SrcsetInlineAsDataURL(
+ srcset_match, base_path, distribution, inlined_files, names_only=False,
+ filename_expansion_function=None):
+ """regex replace function to inline files in srcset="..." attributes
+
+ Takes a regex match for srcset="filename 1x, filename 2x, ...", attempts to
+ read the files referenced by filenames and returns the srcset attribute with
+ the files inlined as a data URI. If it finds DIST_SUBSTR string in file name,
+ replaces it with distribution.
+
+ Args:
+ srcset_match: regex match object with 'srcset' named capturing group
+ base_path: path that to look for files in
+ distribution: string that should replace DIST_SUBSTR
+ inlined_files: The name of the opened file is appended to this list.
+ names_only: If true, the function will not read the file but just return "".
+ It will still add the filename to |inlined_files|.
+
+ Returns:
+ string
+ """
+ srcset = srcset_match.group('srcset')
+ if not srcset:
+ return srcset_match.group(0)
+
+ # HTML 5.2 defines srcset as a list of "image candidate strings".
+ # Each of them consists of URL and descriptor.
+ # _SRCSET_ENTRY_RE splits srcset into a list of URLs, descriptors and
+ # commas.
+ parts = _SRCSET_ENTRY_RE.split(srcset)
+
+ if not parts:
+ return srcset_match.group(0)
+
+ # List of image candidate strings that will form new srcset="..."
+ new_candidates = []
+
+ # When iterating over split srcset we fill this parts of a single image
+ # candidate string: [url, descriptor]
+ candidate = [];
+
+ for part in parts:
+ if not part:
+ continue
+
+ if part == ',':
+ # There must be no URL without a descriptor.
+ assert not candidate, "Bad srcset format in '%s'" % srcset_match.group(0)
+ continue
+
+ if candidate:
+ # descriptor found
+ if candidate[0]:
+ # This is not "names_only" mode.
+ candidate.append(part)
+ new_candidates.append(" ".join(candidate))
+
+ candidate = []
+ continue
+
+ if filename_expansion_function:
+ filename = filename_expansion_function(part)
+ else:
+ filename = part
+
+ data_url = ConvertFileToDataURL(filename, base_path, distribution,
+ inlined_files, names_only)
+
+ candidate.append(data_url)
+
+ # There must be no URL without a descriptor
+ assert not candidate, "Bad srcset ending in '%s' " % srcset_match.group(0)
+
+ prefix = srcset_match.string[srcset_match.start():
+ srcset_match.start('srcset')]
+ suffix = srcset_match.string[srcset_match.end('srcset'):srcset_match.end()]
+ return prefix + ','.join(new_candidates) + suffix
class InlinedData:
"""Helper class holding the results from DoInline().
@@ -168,6 +287,16 @@ def DoInline(
src_match, filepath, distribution, inlined_files, names_only=names_only,
filename_expansion_function=filename_expansion_function)
+ def SrcsetReplace(srcset_match, filepath=input_filepath,
+ inlined_files=inlined_files):
+ """Helper function to provide SrcsetInlineAsDataURL with the base file
+ path.
+ """
+ return SrcsetInlineAsDataURL(
+ srcset_match, filepath, distribution, inlined_files,
+ names_only=names_only,
+ filename_expansion_function=filename_expansion_function)
+
def GetFilepath(src_match, base_path = input_filepath):
matches = src_match.groupdict().iteritems()
filename = [v for k, v in matches if k.startswith('file') and v][0]
@@ -369,6 +498,7 @@ def DoInline(
if rewrite_function:
flat_text = rewrite_function(input_filepath, flat_text, distribution)
flat_text = _SRC_RE.sub(SrcReplace, flat_text)
+ flat_text = _SRCSET_RE.sub(SrcsetReplace, flat_text)
# TODO(arv): Only do this inside <style> tags.
flat_text = InlineCSSImages(flat_text)
diff --git a/chromium/tools/grit/grit/format/html_inline_unittest.py b/chromium/tools/grit/grit/format/html_inline_unittest.py
index d049b1ef963..062296c9187 100755
--- a/chromium/tools/grit/grit/format/html_inline_unittest.py
+++ b/chromium/tools/grit/grit/format/html_inline_unittest.py
@@ -506,6 +506,60 @@ class HtmlInlineUnittest(unittest.TestCase):
self.failUnlessEqual(expected_inlined,
util.FixLineEnd(result.inlined_data, '\n'))
+ def testImgSrcset(self):
+ '''Tests that img srcset="" attributes are converted.'''
+
+ # Note that there is no space before "img10.png"
+ files = {
+ 'index.html': '''
+ <html>
+ <img src="img1.png" srcset="img2.png 1x, img3.png 2x">
+ <img src="img4.png" srcset=" img5.png 1x , img6.png 2x ">
+ <img src="chrome://theme/img11.png" srcset="img7.png 1x, '''\
+ '''chrome://theme/img13.png 2x">
+ <img srcset="img8.png 300w, img9.png 11E-2w,img10.png -1e2w">
+ </html>
+ ''',
+ 'img1.png': '''a1''',
+ 'img2.png': '''a2''',
+ 'img3.png': '''a3''',
+ 'img4.png': '''a4''',
+ 'img5.png': '''a5''',
+ 'img6.png': '''a6''',
+ 'img7.png': '''a7''',
+ 'img8.png': '''a8''',
+ 'img9.png': '''a9''',
+ 'img10.png': '''a10''',
+ }
+
+ expected_inlined = '''
+ <html>
+ <img src="data:image/png;base64,YTE=" srcset="data:image/png;base64,'''\
+ '''YTI= 1x,data:image/png;base64,YTM= 2x">
+ <img src="data:image/png;base64,YTQ=" srcset="data:image/png;base64,'''\
+ '''YTU= 1x,data:image/png;base64,YTY= 2x">
+ <img src="chrome://theme/img11.png" srcset="data:image/png;base64,'''\
+ '''YTc= 1x,chrome://theme/img13.png 2x">
+ <img srcset="data:image/png;base64,YTg= 300w,data:image/png;base64,'''\
+ '''YTk= 11E-2w,data:image/png;base64,YTEw -1e2w">
+ </html>
+ '''
+
+ source_resources = set()
+ tmp_dir = util.TempDir(files)
+ for filename in files:
+ source_resources.add(tmp_dir.GetPath(filename))
+
+ # Test normal inlining.
+ result = html_inline.DoInline(
+ tmp_dir.GetPath('index.html'),
+ None)
+ resources = result.inlined_files
+ resources.add(tmp_dir.GetPath('index.html'))
+ self.failUnlessEqual(resources, source_resources)
+ self.failUnlessEqual(expected_inlined,
+ util.FixLineEnd(result.inlined_data, '\n'))
+
if __name__ == '__main__':
unittest.main()
diff --git a/chromium/tools/grit/grit/format/rc_header.py b/chromium/tools/grit/grit/format/rc_header.py
index 74e7127b584..7b021223d21 100755
--- a/chromium/tools/grit/grit/format/rc_header.py
+++ b/chromium/tools/grit/grit/format/rc_header.py
@@ -79,21 +79,44 @@ def FormatDefines(root, output_all_resource_defines=True,
_cached_ids = {}
+_predetermined_tids = {}
+
+
+def SetPredeterminedIdsFile(predetermined_ids_file):
+ global _predetermined_tids
+ if predetermined_ids_file:
+ _predetermined_tids = _ReadIdsFromFile(predetermined_ids_file)
+ else:
+ _predetermined_tids = {}
+
+
+def _ReadIdsFromFile(path):
+ with open(path, "r") as f:
+ content = f.readlines()
+ tids = {} # Maps textual id to numeric id
+ for line in content:
+ tid, id = line.split()
+ tids[tid] = int(id)
+ return tids
+
+
def GetIds(root):
'''Return a dictionary mapping textual ids to numeric ids for the given tree.
Args:
root: A GritNode.
'''
+ global _cached_ids
+ global _predetermined_tids
# TODO(benrg): Since other formatters use this, it might make sense to move it
# and _ComputeIds to GritNode and store the cached ids as an attribute. On the
# other hand, GritNode has too much random stuff already.
if root not in _cached_ids:
- _cached_ids[root] = _ComputeIds(root)
+ _cached_ids[root] = _ComputeIds(root, _predetermined_tids)
return _cached_ids[root]
-def _ComputeIds(root):
+def _ComputeIds(root, predetermined_tids):
from grit.node import empty, include, message, misc, structure
ids = {} # Maps numeric id to textual id
@@ -101,6 +124,8 @@ def _ComputeIds(root):
id_reasons = {} # Maps numeric id to text id and a human-readable explanation
group = None
last_id = None
+ predetermined_ids = {value: key
+ for key, value in predetermined_tids.iteritems()}
for item in root:
if isinstance(item, empty.GroupingNode):
@@ -129,9 +154,13 @@ def _ComputeIds(root):
if tid in tids:
continue
+ if predetermined_tids and tid in predetermined_tids:
+ id = predetermined_tids[tid]
+ reason = "from predetermined_tids map"
+
# Some identifier nodes can provide their own id,
# and we use that id in the generated header in that case.
- if hasattr(item, 'GetId') and item.GetId():
+ elif hasattr(item, 'GetId') and item.GetId():
id = long(item.GetId())
reason = 'returned by GetId() method'
@@ -197,6 +226,10 @@ def _ComputeIds(root):
print ('WARNING: Numeric resource IDs should be greater than 100 to\n'
'avoid conflicts with system-defined resource IDs.')
+ if tid not in predetermined_tids and id in predetermined_ids:
+ raise exception.IdRangeOverlap('ID %d overlaps between %s and %s'
+ % (id, tid, predetermined_ids[tid]))
+
ids[id] = tid
tids[tid] = id
id_reasons[id] = reason
diff --git a/chromium/tools/grit/grit/format/rc_header_unittest.py b/chromium/tools/grit/grit/format/rc_header_unittest.py
index 5d780e3e44a..22c5f38f9e8 100755
--- a/chromium/tools/grit/grit/format/rc_header_unittest.py
+++ b/chromium/tools/grit/grit/format/rc_header_unittest.py
@@ -10,6 +10,7 @@
import os
import sys
+import tempfile
if __name__ == '__main__':
sys.path.append(os.path.join(os.path.dirname(__file__), '../..'))
@@ -27,6 +28,13 @@ class RcHeaderFormatterUnittest(unittest.TestCase):
output = rc_header.FormatDefines(grd, grd.ShouldOutputAllResourceDefines())
return ''.join(output).replace(' ', '')
+ def _MakeTempPredeterminedIdsFile(self, content):
+ tmp_dir = tempfile.gettempdir()
+ predetermined_ids_file = tmp_dir + "/predetermined_ids.txt"
+ with open(predetermined_ids_file, 'w') as f:
+ f.write(content)
+ return predetermined_ids_file
+
def testFormatter(self):
grd = grd_reader.Parse(StringIO.StringIO('''<?xml version="1.0" encoding="UTF-8"?>
<grit latest_public_release="2" source_lang_id="en" current_release="3" base_dir=".">
@@ -189,5 +197,53 @@ class RcHeaderFormatterUnittest(unittest.TestCase):
'#define IDS_BONGO _Pragma("IDS_BONGO") 10001\n'),
''.join(output))
+ def testPredeterminedIds(self):
+ predetermined_ids_file = self._MakeTempPredeterminedIdsFile(
+ 'IDS_BONGO 101\nID_LOGO 102\n')
+ grd = grd_reader.Parse(StringIO.StringIO('''<?xml version="1.0" encoding="UTF-8"?>
+ <grit latest_public_release="2" source_lang_id="en" current_release="3" base_dir=".">
+ <release seq="3">
+ <includes first_id="300" comment="bingo">
+ <include type="gif" name="ID_LOGO" file="images/logo.gif" />
+ </includes>
+ <messages first_id="10000">
+ <message name="IDS_GREETING" desc="Printed to greet the currently logged in user">
+ Hello <ph name="USERNAME">%s<ex>Joi</ex></ph>, how are you doing today?
+ </message>
+ <message name="IDS_BONGO">
+ Bongo!
+ </message>
+ </messages>
+ </release>
+ </grit>'''), '.', predetermined_ids_file=predetermined_ids_file)
+ output = rc_header.FormatDefines(grd, grd.ShouldOutputAllResourceDefines(),
+ grd.GetRcHeaderFormat())
+ self.assertEqual(('#define ID_LOGO 102\n'
+ '#define IDS_GREETING 10000\n'
+ '#define IDS_BONGO 101\n'), ''.join(output))
+
+ def testPredeterminedIdsOverlap(self):
+ predetermined_ids_file = self._MakeTempPredeterminedIdsFile(
+ 'ID_LOGO 10000\n')
+ grd = grd_reader.Parse(StringIO.StringIO('''<?xml version="1.0" encoding="UTF-8"?>
+ <grit latest_public_release="2" source_lang_id="en" current_release="3" base_dir=".">
+ <release seq="3">
+ <includes first_id="300" comment="bingo">
+ <include type="gif" name="ID_LOGO" file="images/logo.gif" />
+ </includes>
+ <messages first_id="10000">
+ <message name="IDS_GREETING" desc="Printed to greet the currently logged in user">
+ Hello <ph name="USERNAME">%s<ex>Joi</ex></ph>, how are you doing today?
+ </message>
+ <message name="IDS_BONGO">
+ Bongo!
+ </message>
+ </messages>
+ </release>
+ </grit>'''), '.', predetermined_ids_file=predetermined_ids_file)
+ output = rc_header.FormatDefines(grd, grd.ShouldOutputAllResourceDefines(),
+ grd.GetRcHeaderFormat())
+ self.assertRaises(exception.IdRangeOverlap, self.FormatAll, grd)
+
if __name__ == '__main__':
unittest.main()
diff --git a/chromium/tools/grit/grit/format/resource_map.py b/chromium/tools/grit/grit/format/resource_map.py
index e5fdd16ef20..aca302b79f0 100755
--- a/chromium/tools/grit/grit/format/resource_map.py
+++ b/chromium/tools/grit/grit/format/resource_map.py
@@ -56,7 +56,7 @@ def _FormatHeader(root, lang='en', output_dir='.'):
#ifndef GRIT_RESOURCE_MAP_STRUCT_
#define GRIT_RESOURCE_MAP_STRUCT_
struct GritResourceMap {
- const char* const name;
+ const char* name;
int value;
};
#endif // GRIT_RESOURCE_MAP_STRUCT_
diff --git a/chromium/tools/grit/grit/format/resource_map_unittest.py b/chromium/tools/grit/grit/format/resource_map_unittest.py
index ecc997a1806..31d481e9a32 100755
--- a/chromium/tools/grit/grit/format/resource_map_unittest.py
+++ b/chromium/tools/grit/grit/format/resource_map_unittest.py
@@ -58,7 +58,7 @@ class FormatResourceMapUnittest(unittest.TestCase):
#ifndef GRIT_RESOURCE_MAP_STRUCT_
#define GRIT_RESOURCE_MAP_STRUCT_
struct GritResourceMap {
- const char* const name;
+ const char* name;
int value;
};
#endif // GRIT_RESOURCE_MAP_STRUCT_
@@ -150,7 +150,7 @@ const size_t kTheRcHeaderSize = arraysize(kTheRcHeader);''', output)
#ifndef GRIT_RESOURCE_MAP_STRUCT_
#define GRIT_RESOURCE_MAP_STRUCT_
struct GritResourceMap {
- const char* const name;
+ const char* name;
int value;
};
#endif // GRIT_RESOURCE_MAP_STRUCT_
@@ -235,7 +235,7 @@ const size_t kTheRcHeaderSize = arraysize(kTheRcHeader);''', output)
#ifndef GRIT_RESOURCE_MAP_STRUCT_
#define GRIT_RESOURCE_MAP_STRUCT_
struct GritResourceMap {
- const char* const name;
+ const char* name;
int value;
};
#endif // GRIT_RESOURCE_MAP_STRUCT_
@@ -313,7 +313,7 @@ const size_t kTheRcHeaderSize = arraysize(kTheRcHeader);''', output)
#ifndef GRIT_RESOURCE_MAP_STRUCT_
#define GRIT_RESOURCE_MAP_STRUCT_
struct GritResourceMap {
- const char* const name;
+ const char* name;
int value;
};
#endif // GRIT_RESOURCE_MAP_STRUCT_
diff --git a/chromium/tools/grit/grit/grd_reader.py b/chromium/tools/grit/grit/grd_reader.py
index 87eec5e622c..ba8ad49feed 100755
--- a/chromium/tools/grit/grit/grd_reader.py
+++ b/chromium/tools/grit/grit/grd_reader.py
@@ -13,6 +13,7 @@ import xml.sax.handler
from grit import exception
from grit import util
+from grit.format import rc_header
from grit.node import base
from grit.node import mapping
from grit.node import misc
@@ -139,7 +140,8 @@ class GrdPartContentHandler(xml.sax.handler.ContentHandler):
def Parse(filename_or_stream, dir=None, stop_after=None, first_ids_file=None,
- debug=False, defines=None, tags_to_ignore=None, target_platform=None):
+ debug=False, defines=None, tags_to_ignore=None, target_platform=None,
+ predetermined_ids_file=None):
'''Parses a GRD file into a tree of nodes (from grit.node).
If filename_or_stream is a stream, 'dir' should point to the directory
@@ -168,6 +170,9 @@ def Parse(filename_or_stream, dir=None, stop_after=None, first_ids_file=None,
defines: dictionary of defines, like {'chromeos': '1'}
target_platform: None or the value that would be returned by sys.platform
on your target platform.
+ predetermined_ids_file: File path to a file containing a pre-determined
+ mapping from resource names to resource ids which will be used to assign
+ resource ids to those resources.
Return:
Subclass of grit.node.base.Node
@@ -179,6 +184,7 @@ def Parse(filename_or_stream, dir=None, stop_after=None, first_ids_file=None,
if dir is None and isinstance(filename_or_stream, types.StringType):
dir = util.dirname(filename_or_stream)
+ rc_header.SetPredeterminedIdsFile(predetermined_ids_file)
handler = GrdContentHandler(stop_after=stop_after, debug=debug, dir=dir,
defines=defines, tags_to_ignore=tags_to_ignore,
target_platform=target_platform)
diff --git a/chromium/tools/grit/grit/tool/build.py b/chromium/tools/grit/grit/tool/build.py
index a5703b20bb6..cab2b37c67e 100755
--- a/chromium/tools/grit/grit/tool/build.py
+++ b/chromium/tools/grit/grit/tool/build.py
@@ -85,6 +85,17 @@ Options:
-o OUTPUTDIR Specify what directory output paths are relative to.
Defaults to the current directory.
+ -p FILE Specify a file containing a pre-determined mapping from
+ resource names to resource ids which will be used to assign
+ resource ids to those resources. Resources not found in this
+ file will be assigned ids normally. The motivation is to run
+ your app's startup and have it dump the resources it loads,
+ and then pass these via this flag. This will pack startup
+ resources together, thus reducing paging while all other
+ resources are unperturbed. The file should have the format:
+ RESOURCE_ONE_NAME 123
+ RESOURCE_TWO_NAME 124
+
-D NAME[=VAL] Specify a C-preprocessor-like define NAME with optional
value VAL (defaults to 1) which will be used to control
conditional inclusion of resources.
@@ -146,6 +157,7 @@ are exported to translation interchange files (e.g. XMB files), etc.
def Run(self, opts, args):
self.output_directory = '.'
first_ids_file = None
+ predetermined_ids_file = None
whitelist_filenames = []
assert_output_files = []
target_platform = None
@@ -157,7 +169,7 @@ are exported to translation interchange files (e.g. XMB files), etc.
depend_on_stamp = False
js_minifier = None
replace_ellipsis = True
- (own_opts, args) = getopt.getopt(args, 'a:o:D:E:f:w:t:h:',
+ (own_opts, args) = getopt.getopt(args, 'a:p:o:D:E:f:w:t:h:',
('depdir=','depfile=','assert-file-list=',
'output-all-resource-defines',
'no-output-all-resource-defines',
@@ -192,6 +204,8 @@ are exported to translation interchange files (e.g. XMB files), etc.
output_all_resource_defines = False
elif key == '--no-replace-ellipsis':
replace_ellipsis = False
+ elif key == '-p':
+ predetermined_ids_file = val
elif key == '-t':
target_platform = val
elif key == '-h':
@@ -233,6 +247,7 @@ are exported to translation interchange files (e.g. XMB files), etc.
self.res = grd_reader.Parse(opts.input,
debug=opts.extra_verbose,
first_ids_file=first_ids_file,
+ predetermined_ids_file=predetermined_ids_file,
defines=self.defines,
target_platform=target_platform)
diff --git a/chromium/tools/grit/grit/tool/xmb.py b/chromium/tools/grit/grit/tool/xmb.py
index 0e7950ccde1..c25dbf143d1 100755
--- a/chromium/tools/grit/grit/tool/xmb.py
+++ b/chromium/tools/grit/grit/tool/xmb.py
@@ -28,11 +28,6 @@ _XML_QUOTE_ESCAPES = {
u"'": u'&apos;',
u'"': u'&quot;',
}
-# See http://www.w3.org/TR/xml/#charsets
-_XML_BAD_CHAR_REGEX = lazy_re.compile(u'[^\u0009\u000A\u000D'
- u'\u0020-\uD7FF\uE000-\uFFFD'
- u'\U00010000-\U0010FFFF]')
-
def _XmlEscape(s):
"""Returns text escaped for XML in a way compatible with Google's
@@ -41,12 +36,7 @@ def _XmlEscape(s):
"""
if not type(s) == unicode:
s = unicode(s)
- result = saxutils.escape(s, _XML_QUOTE_ESCAPES)
- illegal_chars = _XML_BAD_CHAR_REGEX.search(result)
- if illegal_chars:
- raise Exception('String contains characters disallowed in XML: %s' %
- repr(result))
- return result.encode('utf-8')
+ return saxutils.escape(s, _XML_QUOTE_ESCAPES).encode('utf-8')
def _WriteAttribute(file, name, value):
diff --git a/chromium/tools/grit/grit/tool/xmb_unittest.py b/chromium/tools/grit/grit/tool/xmb_unittest.py
index df8e84b6200..200ecd9a594 100755
--- a/chromium/tools/grit/grit/tool/xmb_unittest.py
+++ b/chromium/tools/grit/grit/tool/xmb_unittest.py
@@ -12,6 +12,7 @@ if __name__ == '__main__':
import unittest
import StringIO
+import xml.sax
from grit import grd_reader
from grit import util
@@ -103,6 +104,25 @@ class XmbUnittest(unittest.TestCase):
output = self.xmb_file.getvalue()
self.failUnless(output.count('OK ? </msg>'))
+ def testDisallowedChars(self):
+ # Validate that the invalid unicode is not accepted. Since it's not valid,
+ # we can't specify it in a string literal, so write as a byte sequence.
+ bad_xml = StringIO.StringIO()
+ bad_xml.write('''<?xml version="1.0" encoding="UTF-8"?>
+ <grit latest_public_release="2" source_lang_id="en-US"
+ current_release="3" base_dir=".">
+ <release seq="3">
+ <messages>
+ <message name="ID_FOO">''')
+ # UTF-8 corresponding to to \U00110000
+ # http://apps.timwhitlock.info/unicode/inspect/hex/110000
+ bad_xml.write(b'\xF4\x90\x80\x80')
+ bad_xml.write('''</message>
+ </messages>
+ </release>
+ </grit>''')
+ bad_xml.seek(0)
+ self.assertRaises(xml.sax.SAXParseException, grd_reader.Parse, bad_xml, '.')
if __name__ == '__main__':
unittest.main()
diff --git a/chromium/tools/grit/grit_rule.gni b/chromium/tools/grit/grit_rule.gni
index 00dd9258ec2..d437ee102fd 100644
--- a/chromium/tools/grit/grit_rule.gni
+++ b/chromium/tools/grit/grit_rule.gni
@@ -249,6 +249,17 @@ _js_minifier = "//third_party/closure_compiler/js_minify.py"
grit_resource_id_file = "//tools/gritsettings/resource_ids"
grit_info_script = "//tools/grit/grit_info.py"
+# TODO(asvitkine): Add predetermined ids files for other platforms.
+grit_predetermined_resource_ids_file = ""
+if (is_mac) {
+ grit_predetermined_resource_ids_file =
+ "//tools/gritsettings/startup_resources_mac.txt"
+}
+if (is_win) {
+ grit_predetermined_resource_ids_file =
+ "//tools/gritsettings/startup_resources_win.txt"
+}
+
template("grit") {
assert(defined(invoker.source),
"\"source\" must be defined for the grit template $target_name")
@@ -301,6 +312,12 @@ template("grit") {
rebase_path(resource_ids, root_build_dir),
]
}
+ if (grit_predetermined_resource_ids_file != "") {
+ grit_flags += [
+ "-p",
+ rebase_path(grit_predetermined_resource_ids_file, root_build_dir),
+ ]
+ }
if (defined(invoker.source_is_generated)) {
source_is_generated = invoker.source_is_generated
diff --git a/chromium/tools/gritsettings/README.md b/chromium/tools/gritsettings/README.md
new file mode 100644
index 00000000000..344fef0913f
--- /dev/null
+++ b/chromium/tools/gritsettings/README.md
@@ -0,0 +1,36 @@
+### tools/gritsettings README
+
+This directory contains several files that apply global to the Chrome resource
+generation system (which uses GRIT - see tools/grit).
+
+**resource_ids**: This file is used to assign starting resource ids for
+resources and strings used by Chromium. This is done to ensure that resource ids
+are unique across all the grd files. If you are adding a new grd file, please
+add a new entry to this file.
+
+**translation_expectations.pyl**: Specifies which grd files should be translated
+and into which languages they should be translated. Used by the internal
+translation process.
+
+**startup_resources_[platform].txt**: These files provide a pre-determined
+resource id ordering that will be used by GRIT when assigning resources ids. The
+goal is to have the resource loaded during Chrome startup be ordered first in
+the .pak files, so that fewer page faults are suffered during Chrome start up.
+To update or generate one of these files, follow these instructions:
+
+ 1. Build a Chrome official release build and launch it with command line:
+ `--print-resource-ids` and save the output to a file (e.g. res.txt).
+
+ 2. Generate the startup_resources_[platform].txt via the following command
+ (you can redirect its output to the new file location):
+
+ `
+ tools/grit/grit/format/gen_predetermined_ids.py res_ids.txt out/gn
+ `
+
+ In the above command, res_ids.txt is the file produced in step 1 and out/gn
+ is you Chrome build directory where you compiled Chrome. The output of the
+ command can be added as a new startup_resource_[platform]
+
+ 3. If this is a new file, modify `tools/grit/grit_rule.gni` to set its path
+ via `grit_predetermined_resource_ids_file` for the given platform.
diff --git a/chromium/tools/gritsettings/resource_ids b/chromium/tools/gritsettings/resource_ids
index d454de70b3f..4e3fd2d2b98 100644
--- a/chromium/tools/gritsettings/resource_ids
+++ b/chromium/tools/gritsettings/resource_ids
@@ -85,58 +85,64 @@
# START chrome/browser section.
"chrome/browser/browser_resources.grd": {
"includes": [11000],
- "structures": [11450],
+ "structures": [11480],
},
"chrome/browser/resources/component_extension_resources.grd": {
- "includes": [11550],
- "structures": [11800],
+ "includes": [11580],
+ "structures": [11830],
},
"chrome/browser/resources/invalidations_resources.grd": {
- "includes": [11850],
+ "includes": [11880],
},
"chrome/browser/resources/md_policy/policy_resources.grd": {
- "structures": [11860],
+ "structures": [11890],
},
"chrome/browser/resources/net_internals_resources.grd": {
- "includes": [11900],
+ "includes": [11930],
},
"chrome/browser/resources/options_resources.grd": {
- "includes": [11910],
- "structures": [11920],
+ "includes": [11940],
+ "structures": [11950],
},
"chrome/browser/resources/options_test_resources.grd": {
- "structures": [11950],
+ "structures": [12000],
},
"chrome/browser/resources/password_manager_internals_resources.grd": {
- "includes": [11960],
+ "includes": [12010],
},
"chrome/browser/resources/quota_internals_resources.grd": {
- "includes": [11970],
+ "includes": [12020],
+ },
+ "chrome/browser/resources/settings/settings_resources_vulcanized.grd": {
+ "includes": [12040],
},
"chrome/browser/resources/settings/settings_resources.grd": {
- "structures": [12000],
+ "structures": [12050],
},
"chrome/browser/resources/sync_file_system_internals_resources.grd": {
- "includes": [12500],
+ "includes": [12550],
},
"chrome/browser/resources/task_scheduler_internals/resources.grd": {
- "includes": [12530],
+ "includes": [12580],
},
"chrome/browser/resources/translate_internals_resources.grd": {
- "includes": [12540],
+ "includes": [12590],
+ },
+ "chrome/browser/resources/webapks_ui_resources.grd": {
+ "includes": [12600],
},
# END chrome/browser section.
# START chrome/ miscellaneous section.
"chrome/common/common_resources.grd": {
- "includes": [12700],
+ "includes": [12720],
},
"chrome/renderer/resources/renderer_resources.grd": {
- "includes": [12710],
- "structures": [12790],
+ "includes": [12730],
+ "structures": [12810],
},
"chrome/test/data/webui_test_resources.grd": {
- "includes": [12800],
+ "includes": [12820],
},
# END chrome/ miscellaneous section.
diff --git a/chromium/tools/gritsettings/startup_resources_mac.txt b/chromium/tools/gritsettings/startup_resources_mac.txt
new file mode 100644
index 00000000000..f9daa12f370
--- /dev/null
+++ b/chromium/tools/gritsettings/startup_resources_mac.txt
@@ -0,0 +1,266 @@
+IDS_PROFILES_OPTIONS_GROUP_NAME 101
+IDS_APP_MENU_PRODUCT_NAME 102
+IDS_PRODUCT_NAME 103
+IDS_ABOUT_MAC 104
+IDS_PREFERENCES 105
+IDS_CLEAR_BROWSING_DATA 106
+IDS_IMPORT_SETTINGS_MENU_MAC 107
+IDS_SERVICES_MAC 108
+IDS_HIDE_APP_MAC 109
+IDS_HIDE_OTHERS_MAC 110
+IDS_SHOW_ALL_MAC 111
+IDS_CONFIRM_TO_QUIT_OPTION 112
+IDS_EXIT_MAC 113
+IDS_FILE_MENU_MAC 114
+IDS_NEW_TAB_MAC 115
+IDS_NEW_WINDOW_MAC 116
+IDS_NEW_INCOGNITO_WINDOW_MAC 117
+IDS_REOPEN_CLOSED_TABS_MAC 118
+IDS_OPEN_FILE_MAC 119
+IDS_OPEN_LOCATION_MAC 120
+IDS_CLOSE_WINDOW_MAC 121
+IDS_CLOSE_TAB_MAC 122
+IDS_SAVE_PAGE_MAC 123
+IDS_EMAIL_PAGE_LOCATION_MAC 124
+IDS_PRINT 125
+IDS_PRINT_USING_SYSTEM_DIALOG_MAC 126
+IDS_EDIT_MENU_MAC 127
+IDS_EDIT_UNDO_MAC 128
+IDS_EDIT_REDO_MAC 129
+IDS_CUT_MAC 130
+IDS_COPY_MAC 131
+IDS_PASTE_MAC 132
+IDS_PASTE_MATCH_STYLE_MAC 133
+IDS_EDIT_DELETE_MAC 134
+IDS_EDIT_SELECT_ALL_MAC 135
+IDS_EDIT_FIND_SUBMENU_MAC 136
+IDS_EDIT_SEARCH_WEB_MAC 137
+IDS_EDIT_FIND_MAC 138
+IDS_EDIT_FIND_NEXT_MAC 139
+IDS_EDIT_FIND_PREVIOUS_MAC 140
+IDS_EDIT_USE_SELECTION_MAC 141
+IDS_EDIT_JUMP_TO_SELECTION_MAC 142
+IDS_EDIT_SPELLING_GRAMMAR_MAC 143
+IDS_EDIT_SHOW_SPELLING_GRAMMAR_MAC 144
+IDS_EDIT_CHECK_DOCUMENT_MAC 145
+IDS_EDIT_CHECK_SPELLING_TYPING_MAC 146
+IDS_EDIT_CHECK_GRAMMAR_MAC 147
+IDS_SPEECH_MAC 148
+IDS_SPEECH_START_SPEAKING_MAC 149
+IDS_SPEECH_STOP_SPEAKING_MAC 150
+IDS_VIEW_MENU_MAC 151
+IDS_BOOKMARK_BAR_ALWAYS_SHOW_MAC 152
+IDS_TOGGLE_FULLSCREEN_TOOLBAR_MAC 153
+IDS_STOP_MENU_MAC 154
+IDS_RELOAD_MENU_MAC 155
+IDS_RELOAD_BYPASSING_CACHE_MENU_MAC 156
+IDS_ENTER_FULLSCREEN_MAC 157
+IDS_TEXT_DEFAULT_MAC 158
+IDS_TEXT_BIGGER_MAC 159
+IDS_TEXT_SMALLER_MAC 160
+IDS_MEDIA_ROUTER_MENU_ITEM_TITLE 161
+IDS_DEVELOPER_MENU_MAC 162
+IDS_VIEW_SOURCE_MAC 163
+IDS_DEV_TOOLS_MAC 164
+IDS_DEV_TOOLS_CONSOLE_MAC 165
+IDS_HISTORY_MENU_MAC 166
+IDS_HISTORY_HOME_MAC 167
+IDS_HISTORY_BACK_MAC 168
+IDS_HISTORY_FORWARD_MAC 169
+IDS_HISTORY_CLOSED_MAC 170
+IDS_HISTORY_VISITED_MAC 171
+IDS_HISTORY_SHOWFULLHISTORY_LINK 172
+IDS_BOOKMARKS_MENU 173
+IDS_BOOKMARK_MANAGER 174
+IDS_BOOKMARK_THIS_PAGE 175
+IDS_BOOKMARK_ALL_TABS_MAC 176
+IDS_WINDOW_MENU_MAC 177
+IDS_MINIMIZE_WINDOW_MAC 178
+IDS_ZOOM_WINDOW_MAC 179
+IDS_NEXT_TAB_MAC 180
+IDS_PREV_TAB_MAC 181
+IDS_SHOW_AS_TAB 182
+IDS_SHOW_DOWNLOADS_MAC 183
+IDS_SHOW_EXTENSIONS_MAC 184
+IDS_TASK_MANAGER_MAC 185
+IDS_ALL_WINDOWS_FRONT_MAC 186
+IDS_HELP_MENU_MAC 187
+IDS_FEEDBACK_MAC 188
+IDS_HELP_MAC 189
+IDS_POLICY_DEPRECATED 190
+IDR_MOJO_CONTENT_BROWSER_MANIFEST 191
+IDR_CHROME_CONTENT_BROWSER_MANIFEST_OVERLAY 192
+IDR_MOJO_CONTENT_GPU_MANIFEST 193
+IDR_CHROME_CONTENT_GPU_MANIFEST_OVERLAY 194
+IDR_MOJO_CONTENT_PACKAGED_SERVICES_MANIFEST 195
+IDR_CHROME_CONTENT_PACKAGED_SERVICES_MANIFEST_OVERLAY 196
+IDR_MOJO_CONTENT_PLUGIN_MANIFEST 197
+IDR_CHROME_CONTENT_PLUGIN_MANIFEST_OVERLAY 198
+IDR_MOJO_CONTENT_RENDERER_MANIFEST 199
+IDR_CHROME_CONTENT_RENDERER_MANIFEST_OVERLAY 200
+IDR_MOJO_CONTENT_UTILITY_MANIFEST 201
+IDR_CHROME_CONTENT_UTILITY_MANIFEST_OVERLAY 202
+IDR_MOJO_CATALOG_MANIFEST 203
+IDR_NACL_LOADER_MANIFEST 204
+IDR_DOWNLOAD_FILE_TYPES_PB 205
+IDR_PLUGIN_DB_JSON 206
+IDS_DEFAULT_PROFILE_NAME 207
+IDS_LEGACY_DEFAULT_PROFILE_NAME 208
+IDS_ACCEPT_LANGUAGES 209
+IDS_DEFAULT_ENCODING 210
+IDS_STANDARD_FONT_FAMILY 211
+IDS_FIXED_FONT_FAMILY 212
+IDS_SERIF_FONT_FAMILY 213
+IDS_SANS_SERIF_FONT_FAMILY 214
+IDS_CURSIVE_FONT_FAMILY 215
+IDS_FANTASY_FONT_FAMILY 216
+IDS_PICTOGRAPH_FONT_FAMILY 217
+IDS_STANDARD_FONT_FAMILY_JAPANESE 218
+IDS_FIXED_FONT_FAMILY_JAPANESE 219
+IDS_SERIF_FONT_FAMILY_JAPANESE 220
+IDS_SANS_SERIF_FONT_FAMILY_JAPANESE 221
+IDS_STANDARD_FONT_FAMILY_KOREAN 222
+IDS_SERIF_FONT_FAMILY_KOREAN 223
+IDS_SANS_SERIF_FONT_FAMILY_KOREAN 224
+IDS_STANDARD_FONT_FAMILY_SIMPLIFIED_HAN 225
+IDS_SERIF_FONT_FAMILY_SIMPLIFIED_HAN 226
+IDS_SANS_SERIF_FONT_FAMILY_SIMPLIFIED_HAN 227
+IDS_STANDARD_FONT_FAMILY_TRADITIONAL_HAN 228
+IDS_SERIF_FONT_FAMILY_TRADITIONAL_HAN 229
+IDS_SANS_SERIF_FONT_FAMILY_TRADITIONAL_HAN 230
+IDS_CURSIVE_FONT_FAMILY_SIMPLIFIED_HAN 231
+IDS_CURSIVE_FONT_FAMILY_TRADITIONAL_HAN 232
+IDS_DEFAULT_FONT_SIZE 233
+IDS_DEFAULT_FIXED_FONT_SIZE 234
+IDS_MINIMUM_FONT_SIZE 235
+IDS_MINIMUM_LOGICAL_FONT_SIZE 236
+IDS_SPELLCHECK_DICTIONARY 237
+IDR_PREF_HASH_SEED_BIN 238
+IDS_BOOKMARK_BAR_FOLDER_NAME 239
+IDS_BOOKMARK_BAR_OTHER_FOLDER_NAME 240
+IDS_BOOKMARK_BAR_MOBILE_FOLDER_NAME 241
+IDS_PROFILES_DEFAULT_NAME 242
+IDR_BOOKMARKS_MANIFEST 243
+IDR_CLOUDPRINT_MANIFEST 244
+IDS_WEBSTORE_NAME_STORE 245
+IDS_WEBSTORE_APP_DESCRIPTION 246
+IDR_WEBSTORE_MANIFEST 247
+IDR_HANGOUT_SERVICES_MANIFEST 248
+IDR_FEEDBACK_MANIFEST 249
+IDR_NETWORK_SPEECH_SYNTHESIS_MANIFEST 250
+IDR_CRYPTOTOKEN_MANIFEST 251
+IDR_PDF_MANIFEST 252
+IDS_CHROME_WELCOME_URL 253
+IDS_NEW_TAB_CHROME_WELCOME_PAGE_TITLE 254
+IDS_WEBSTORE_URL 255
+IDS_EXTENSION_WEB_STORE_TITLE 256
+IDR_EXTENSIONS_FAVICON 257
+IDS_EXTENSION_USB_DEVICE_PRODUCT_NAME_AND_VENDOR 258
+IDS_BOOKMARK_BAR_MANAGED_FOLDER_DEFAULT_NAME 259
+IDS_BOOKMARK_BAR_SUPERVISED_FOLDER_DEFAULT_NAME 260
+IDS_TOOLTIP_NEW_TAB 261
+IDS_PROFILES_NEW_AVATAR_BUTTON_ACCESSIBLE_NAME 262
+IDR_DEFAULT_FAVICON 263
+IDS_ACCNAME_NEWTAB 264
+IDS_APPMENU_TOOLTIP 265
+IDS_TOOLTIP_BACK 266
+IDS_TOOLTIP_FORWARD 267
+IDS_TOOLTIP_RELOAD 268
+IDS_TOOLTIP_HOME 269
+IDS_RELOAD_MENU_NORMAL_RELOAD_ITEM 270
+IDS_RELOAD_MENU_HARD_RELOAD_ITEM 271
+IDS_RELOAD_MENU_EMPTY_AND_HARD_RELOAD_ITEM 272
+IDS_TOOLTIP_STAR 273
+IDS_REGISTER_PROTOCOL_HANDLER_TOOLTIP 274
+IDS_VIEW_INCOMPATIBILITIES 275
+IDR_INPUT_ALERT_MENU 276
+IDS_NEW_TAB 277
+IDS_NEW_WINDOW 278
+IDS_NEW_INCOGNITO_WINDOW 279
+IDS_HISTORY_SHOW_HISTORY 280
+IDS_RECENTLY_CLOSED 281
+IDS_RECENT_TABS_NO_DEVICE_TABS 282
+IDS_HISTORY_MENU 283
+IDS_SHOW_DOWNLOADS 284
+IDS_BOOKMARK_OPEN_PAGES 285
+IDS_SHOW_BOOKMARK_BAR 286
+IDS_IMPORT_SETTINGS_MENU_LABEL 287
+IDS_ZOOM_MENU 288
+IDS_ZOOM_MINUS2 289
+IDS_ZOOM_PLUS2 290
+IDS_FIND 291
+IDS_SAVE_PAGE 292
+IDS_SHOW_EXTENSIONS 293
+IDS_TASK_MANAGER 294
+IDS_DEV_TOOLS 295
+IDS_MORE_TOOLS_MENU 296
+IDS_EDIT 297
+IDS_CUT 298
+IDS_COPY 299
+IDS_PASTE 300
+IDS_SETTINGS 301
+IDS_ABOUT 302
+IDS_HELP_PAGE 303
+IDS_FEEDBACK 304
+IDS_HELP_MENU 305
+IDS_ACCNAME_BACK 306
+IDS_ACCNAME_TOOLTIP_BACK 307
+IDS_ACCNAME_FORWARD 308
+IDS_ACCNAME_TOOLTIP_FORWARD 309
+IDS_ACCNAME_RELOAD 310
+IDS_ACCNAME_HOME 311
+IDS_ACCNAME_LOCATION 312
+IDS_ACCNAME_APP 313
+IDR_BOOKMARK_BAR_FOLDER 314
+IDR_BOOKMARK_BAR_FOLDER_WHITE 315
+IDS_BOOKMARK_BAR_IMPORT_LINK 316
+IDS_BOOKMARKS_NO_ITEMS 317
+IDS_TOOLTIP_LOCATION_ICON 318
+IDS_NEW_TAB_TITLE 319
+IDS_DEFAULT_TAB_TITLE 320
+IDS_ACCNAME_CLOSE 321
+IDS_TOOLTIP_CLOSE_TAB 322
+IDS_ACCNAME_CLOSE_TAB 323
+IDR_THROBBER_WAITING 324
+IDR_THROBBER_WAITING_INCOGNITO 325
+IDR_THROBBER 326
+IDR_THROBBER_INCOGNITO 327
+IDR_CRASH_SAD_FAVICON 328
+IDS_TOOLTIP_STOP 329
+IDS_UTILITY_PROCESS_PROXY_RESOLVER_NAME 330
+IDS_NEW_MAC 331
+IDS_QUIT_WITH_APPS_QUIT_LABEL 332
+IDS_QUIT_WITH_APPS_SUPPRESSION_LABEL 333
+IDS_QUIT_WITH_APPS_TITLE 334
+IDS_QUIT_WITH_APPS_EXPLANATION 335
+IDR_PRODUCT_LOGO_128 336
+IDS_QUIT_WITH_APPS_NOTIFICATION_DISPLAY_SOURCE 337
+IDR_PROFILE_AVATAR_PLACEHOLDER_LARGE 338
+IDS_PROFILES_LOCAL_PROFILE_STATE 339
+IDS_PROFILES_MANAGE_BUTTON_LABEL 340
+IDS_PROFILES_CREATE_NEW_PROFILE_OPTION 341
+IDR_PROFILE_AVATAR_26 342
+IDS_BOOKMARK_BAR_APPS_SHORTCUT_NAME 343
+IDR_BOOKMARK_BAR_APPS_SHORTCUT 344
+IDS_BOOKMARK_BAR_APPS_SHORTCUT_TOOLTIP 345
+IDR_BOOKMARK_BAR_FOLDER_MANAGED 346
+IDR_BOOKMARK_BAR_FOLDER_SUPERVISED 347
+IDS_WEB_FONT_FAMILY 348
+IDS_WEB_FONT_SIZE 349
+IDR_HISTORY_FAVICON 350
+IDS_UTILITY_PROCESS_MANIFEST_PARSER_NAME 351
+IDR_MOST_VISITED_SINGLE_HTML 352
+IDR_MOST_VISITED_SINGLE_CSS 353
+IDR_MOST_VISITED_SINGLE_JS 354
+IDR_NEWTAB_CHROME_WELCOME_PAGE_THUMBNAIL 355
+IDR_PRODUCT_LOGO_16 356
+IDR_NEWTAB_WEBSTORE_THUMBNAIL 357
+IDR_WEBSTORE_ICON_16 358
+IDR_CLOSE_3_MASK 359
+IDS_UTILITY_PROCESS_EXTENSION_UNPACKER_NAME 360
+IDS_AUTOCOMPLETE_SEARCH_DESCRIPTION 361
+IDS_AUTOCOMPLETE_MATCH_DESCRIPTION_SEPARATOR 362
+IDR_OVERLAY_DROP_SHADOW 363
+IDS_ANNOUNCEMENT_COMPLETION_AVAILABLE_MAC 364
+IDS_EXTENSION_PROMPT_WARNING_CLIPBOARD_READWRITE 365
+IDS_EXTENSION_PROMPT_WARNING_NOTIFICATIONS 366
diff --git a/chromium/tools/gritsettings/startup_resources_win.txt b/chromium/tools/gritsettings/startup_resources_win.txt
new file mode 100644
index 00000000000..3c56a679f87
--- /dev/null
+++ b/chromium/tools/gritsettings/startup_resources_win.txt
@@ -0,0 +1,223 @@
+IDR_MOJO_CONTENT_BROWSER_MANIFEST 101
+IDR_CHROME_CONTENT_BROWSER_MANIFEST_OVERLAY 102
+IDR_MOJO_CONTENT_GPU_MANIFEST 103
+IDR_CHROME_CONTENT_GPU_MANIFEST_OVERLAY 104
+IDR_MOJO_CONTENT_PACKAGED_SERVICES_MANIFEST 105
+IDR_CHROME_CONTENT_PACKAGED_SERVICES_MANIFEST_OVERLAY 106
+IDR_MOJO_CONTENT_PLUGIN_MANIFEST 107
+IDR_CHROME_CONTENT_PLUGIN_MANIFEST_OVERLAY 108
+IDR_MOJO_CONTENT_RENDERER_MANIFEST 109
+IDR_CHROME_CONTENT_RENDERER_MANIFEST_OVERLAY 110
+IDR_MOJO_CONTENT_UTILITY_MANIFEST 111
+IDR_CHROME_CONTENT_UTILITY_MANIFEST_OVERLAY 112
+IDR_MOJO_CATALOG_MANIFEST 113
+IDR_NACL_LOADER_MANIFEST 114
+IDR_NACL_BROKER_MANIFEST 115
+IDR_DOWNLOAD_FILE_TYPES_PB 116
+IDR_PLUGIN_DB_JSON 117
+IDS_DEFAULT_PROFILE_NAME 118
+IDS_LEGACY_DEFAULT_PROFILE_NAME 119
+IDS_ACCEPT_LANGUAGES 120
+IDS_DEFAULT_ENCODING 121
+IDS_STANDARD_FONT_FAMILY 122
+IDS_FIXED_FONT_FAMILY 123
+IDS_FIXED_FONT_FAMILY_ALT_WIN 124
+IDS_SERIF_FONT_FAMILY 125
+IDS_SANS_SERIF_FONT_FAMILY 126
+IDS_CURSIVE_FONT_FAMILY 127
+IDS_FANTASY_FONT_FAMILY 128
+IDS_PICTOGRAPH_FONT_FAMILY 129
+IDS_STANDARD_FONT_FAMILY_JAPANESE 130
+IDS_FIXED_FONT_FAMILY_JAPANESE 131
+IDS_SERIF_FONT_FAMILY_JAPANESE 132
+IDS_SANS_SERIF_FONT_FAMILY_JAPANESE 133
+IDS_STANDARD_FONT_FAMILY_KOREAN 134
+IDS_SERIF_FONT_FAMILY_KOREAN 135
+IDS_SANS_SERIF_FONT_FAMILY_KOREAN 136
+IDS_STANDARD_FONT_FAMILY_SIMPLIFIED_HAN 137
+IDS_SERIF_FONT_FAMILY_SIMPLIFIED_HAN 138
+IDS_SANS_SERIF_FONT_FAMILY_SIMPLIFIED_HAN 139
+IDS_STANDARD_FONT_FAMILY_TRADITIONAL_HAN 140
+IDS_SERIF_FONT_FAMILY_TRADITIONAL_HAN 141
+IDS_SANS_SERIF_FONT_FAMILY_TRADITIONAL_HAN 142
+IDS_CURSIVE_FONT_FAMILY_SIMPLIFIED_HAN 143
+IDS_CURSIVE_FONT_FAMILY_TRADITIONAL_HAN 144
+IDS_FIXED_FONT_FAMILY_ARABIC 145
+IDS_SANS_SERIF_FONT_FAMILY_ARABIC 146
+IDS_STANDARD_FONT_FAMILY_CYRILLIC 147
+IDS_FIXED_FONT_FAMILY_CYRILLIC 148
+IDS_SERIF_FONT_FAMILY_CYRILLIC 149
+IDS_SANS_SERIF_FONT_FAMILY_CYRILLIC 150
+IDS_STANDARD_FONT_FAMILY_GREEK 151
+IDS_FIXED_FONT_FAMILY_GREEK 152
+IDS_SERIF_FONT_FAMILY_GREEK 153
+IDS_SANS_SERIF_FONT_FAMILY_GREEK 154
+IDS_FIXED_FONT_FAMILY_KOREAN 155
+IDS_CURSIVE_FONT_FAMILY_KOREAN 156
+IDS_FIXED_FONT_FAMILY_SIMPLIFIED_HAN 157
+IDS_FIXED_FONT_FAMILY_TRADITIONAL_HAN 158
+IDS_DEFAULT_FONT_SIZE 159
+IDS_DEFAULT_FIXED_FONT_SIZE 160
+IDS_MINIMUM_FONT_SIZE 161
+IDS_MINIMUM_LOGICAL_FONT_SIZE 162
+IDS_SPELLCHECK_DICTIONARY 163
+IDR_PREF_HASH_SEED_BIN 164
+IDS_POLICY_VALUE_FORMAT_ERROR 165
+IDS_POLICY_LIST_ENTRY_ERROR 166
+IDS_BOOKMARK_BAR_FOLDER_NAME 167
+IDS_BOOKMARK_BAR_OTHER_FOLDER_NAME 168
+IDS_BOOKMARK_BAR_MOBILE_FOLDER_NAME 169
+IDS_PRODUCT_NAME 170
+IDS_PROFILES_DEFAULT_NAME 171
+IDR_BOOKMARKS_MANIFEST 172
+IDR_CLOUDPRINT_MANIFEST 173
+IDS_WEBSTORE_APP_DESCRIPTION 174
+IDS_WEBSTORE_NAME_STORE 175
+IDR_WEBSTORE_MANIFEST 176
+IDR_HANGOUT_SERVICES_MANIFEST 177
+IDR_FEEDBACK_MANIFEST 178
+IDR_NETWORK_SPEECH_SYNTHESIS_MANIFEST 179
+IDR_CRYPTOTOKEN_MANIFEST 180
+IDR_PDF_MANIFEST 181
+IDS_CHROME_WELCOME_URL 182
+IDS_NEW_TAB_CHROME_WELCOME_PAGE_TITLE 183
+IDS_WEBSTORE_URL 184
+IDS_EXTENSION_WEB_STORE_TITLE 185
+IDR_EXTENSIONS_FAVICON 186
+IDS_BOOKMARK_BAR_MANAGED_FOLDER_DEFAULT_NAME 187
+IDS_BOOKMARK_BAR_SUPERVISED_FOLDER_DEFAULT_NAME 188
+IDS_CRASH_RECOVERY_TITLE 189
+IDS_CRASH_RECOVERY_CONTENT 190
+IDR_CONTENT_RIGHT_SIDE 191
+IDR_CONTENT_BOTTOM_CENTER 192
+IDR_CONTENT_LEFT_SIDE 193
+IDS_UI_FONT_FAMILY 194
+IDS_UI_FONT_SIZE_SCALER 195
+IDS_MINIMUM_UI_FONT_SIZE 196
+IDS_APP_COPY 197
+IDS_APP_SELECT_ALL 198
+IDR_TEXTBUTTON_HOVER_TOP_LEFT 199
+IDR_TEXTBUTTON_HOVER_TOP 200
+IDR_TEXTBUTTON_HOVER_TOP_RIGHT 201
+IDR_TEXTBUTTON_HOVER_LEFT 202
+IDR_TEXTBUTTON_HOVER_CENTER 203
+IDR_TEXTBUTTON_HOVER_RIGHT 204
+IDR_TEXTBUTTON_HOVER_BOTTOM_LEFT 205
+IDR_TEXTBUTTON_HOVER_BOTTOM 206
+IDR_TEXTBUTTON_HOVER_BOTTOM_RIGHT 207
+IDR_TEXTBUTTON_PRESSED_TOP_LEFT 208
+IDR_TEXTBUTTON_PRESSED_TOP 209
+IDR_TEXTBUTTON_PRESSED_TOP_RIGHT 210
+IDR_TEXTBUTTON_PRESSED_LEFT 211
+IDR_TEXTBUTTON_PRESSED_CENTER 212
+IDR_TEXTBUTTON_PRESSED_RIGHT 213
+IDR_TEXTBUTTON_PRESSED_BOTTOM_LEFT 214
+IDR_TEXTBUTTON_PRESSED_BOTTOM 215
+IDR_TEXTBUTTON_PRESSED_BOTTOM_RIGHT 216
+IDR_AVATAR_GLASS_BUTTON_NORMAL_TOP_LEFT 217
+IDR_AVATAR_GLASS_BUTTON_NORMAL_TOP 218
+IDR_AVATAR_GLASS_BUTTON_NORMAL_TOP_RIGHT 219
+IDR_AVATAR_GLASS_BUTTON_NORMAL_LEFT 220
+IDR_AVATAR_GLASS_BUTTON_NORMAL_CENTER 221
+IDR_AVATAR_GLASS_BUTTON_NORMAL_RIGHT 222
+IDR_AVATAR_GLASS_BUTTON_NORMAL_BOTTOM_LEFT 223
+IDR_AVATAR_GLASS_BUTTON_NORMAL_BOTTOM 224
+IDR_AVATAR_GLASS_BUTTON_NORMAL_BOTTOM_RIGHT 225
+IDR_AVATAR_GLASS_BUTTON_HOVER_TOP_LEFT 226
+IDR_AVATAR_GLASS_BUTTON_HOVER_TOP 227
+IDR_AVATAR_GLASS_BUTTON_HOVER_TOP_RIGHT 228
+IDR_AVATAR_GLASS_BUTTON_HOVER_LEFT 229
+IDR_AVATAR_GLASS_BUTTON_HOVER_CENTER 230
+IDR_AVATAR_GLASS_BUTTON_HOVER_RIGHT 231
+IDR_AVATAR_GLASS_BUTTON_HOVER_BOTTOM_LEFT 232
+IDR_AVATAR_GLASS_BUTTON_HOVER_BOTTOM 233
+IDR_AVATAR_GLASS_BUTTON_HOVER_BOTTOM_RIGHT 234
+IDR_AVATAR_GLASS_BUTTON_PRESSED_TOP_LEFT 235
+IDR_AVATAR_GLASS_BUTTON_PRESSED_TOP 236
+IDR_AVATAR_GLASS_BUTTON_PRESSED_TOP_RIGHT 237
+IDR_AVATAR_GLASS_BUTTON_PRESSED_LEFT 238
+IDR_AVATAR_GLASS_BUTTON_PRESSED_CENTER 239
+IDR_AVATAR_GLASS_BUTTON_PRESSED_RIGHT 240
+IDR_AVATAR_GLASS_BUTTON_PRESSED_BOTTOM_LEFT 241
+IDR_AVATAR_GLASS_BUTTON_PRESSED_BOTTOM 242
+IDR_AVATAR_GLASS_BUTTON_PRESSED_BOTTOM_RIGHT 243
+IDR_AVATAR_GLASS_BUTTON_AVATAR 244
+IDS_TOOLTIP_NEW_TAB 245
+IDS_ACCNAME_NEWTAB 246
+IDR_TAB_DROP_DOWN 247
+IDS_TOOLTIP_BACK 248
+IDS_ACCNAME_BACK 249
+IDS_TOOLTIP_FORWARD 250
+IDS_ACCNAME_FORWARD 251
+IDS_RELOAD_MENU_NORMAL_RELOAD_ITEM 252
+IDS_RELOAD_MENU_HARD_RELOAD_ITEM 253
+IDS_RELOAD_MENU_EMPTY_AND_HARD_RELOAD_ITEM 254
+IDS_ACCNAME_RELOAD 255
+IDS_TOOLTIP_HOME 256
+IDS_ACCNAME_HOME 257
+IDR_DEVELOPER_MODE_HIGHLIGHT_TOP_LEFT 258
+IDR_DEVELOPER_MODE_HIGHLIGHT_TOP 259
+IDR_DEVELOPER_MODE_HIGHLIGHT_TOP_RIGHT 260
+IDR_DEVELOPER_MODE_HIGHLIGHT_LEFT 261
+IDR_DEVELOPER_MODE_HIGHLIGHT_CENTER 262
+IDR_DEVELOPER_MODE_HIGHLIGHT_RIGHT 263
+IDR_DEVELOPER_MODE_HIGHLIGHT_BOTTOM_LEFT 264
+IDR_DEVELOPER_MODE_HIGHLIGHT_BOTTOM 265
+IDR_DEVELOPER_MODE_HIGHLIGHT_BOTTOM_RIGHT 266
+IDR_MENU_DROPARROW 267
+IDS_ACCNAME_APP 268
+IDS_APPMENU_TOOLTIP 269
+IDS_APP_TAB_KEY 270
+IDS_REGISTER_PROTOCOL_HANDLER_TOOLTIP 271
+IDS_TOOLTIP_SAVE_CREDIT_CARD 272
+IDS_TOOLTIP_TRANSLATE 273
+IDS_TOOLTIP_STAR 274
+IDS_DEFAULT_TAB_TITLE 275
+IDS_BROWSER_WINDOW_TITLE_FORMAT 276
+IDR_DEFAULT_FAVICON 277
+IDS_NEW_TAB_TITLE 278
+IDS_ACCNAME_CLOSE 279
+IDS_ACCNAME_BOOKMARKS_CHEVRON 280
+IDS_BOOKMARK_BAR_APPS_SHORTCUT_NAME 281
+IDS_BOOKMARK_BAR_APPS_SHORTCUT_TOOLTIP 282
+IDR_BOOKMARK_BAR_APPS_SHORTCUT 283
+IDS_BOOKMARKS_NO_ITEMS 284
+IDS_BOOKMARK_BAR_IMPORT_LINK 285
+IDR_BOOKMARK_BAR_FOLDER 286
+IDR_BOOKMARK_BAR_FOLDER_MANAGED 287
+IDR_BOOKMARK_BAR_FOLDER_SUPERVISED 288
+IDS_WEB_FONT_SIZE 289
+IDS_WEB_FONT_FAMILY 290
+IDS_DEFAULT_BROWSER_INFOBAR_SHORT_TEXT 291
+IDS_DEFAULT_BROWSER_INFOBAR_OK_BUTTON_LABEL 292
+IDR_CONTENT_BOTTOM_RIGHT_CORNER 293
+IDR_CONTENT_BOTTOM_LEFT_CORNER 294
+IDS_UTILITY_PROCESS_PROXY_RESOLVER_NAME 295
+IDS_UTILITY_PROCESS_MANIFEST_PARSER_NAME 296
+IDR_MOST_VISITED_SINGLE_HTML 297
+IDR_MOST_VISITED_SINGLE_CSS 298
+IDR_MOST_VISITED_SINGLE_JS 299
+IDR_NEWTAB_CHROME_WELCOME_PAGE_THUMBNAIL 300
+IDR_PRODUCT_LOGO_16 301
+IDR_NEWTAB_WEBSTORE_THUMBNAIL 302
+IDR_WEBSTORE_ICON_16 303
+IDR_CLOSE_3_MASK 304
+IDS_AUTOCOMPLETE_SEARCH_DESCRIPTION 305
+IDS_AUTOCOMPLETE_MATCH_DESCRIPTION_SEPARATOR 306
+IDS_UTILITY_PROCESS_EXTENSION_UNPACKER_NAME 307
+IDS_NEW_TAB_MOST_VISITED 308
+IDS_RECENTLY_CLOSED 309
+IDS_NEW_WINDOW 310
+IDS_NEW_INCOGNITO_WINDOW 311
+IDS_EXTENSION_USB_DEVICE_PRODUCT_NAME_AND_VENDOR 312
+IDS_APP_SHORTCUTS_SUBDIR_NAME 313
+IDR_LOCAL_NTP_HTML 314
+IDR_LOCAL_NTP_CSS 315
+IDS_NEW_TAB_THUMBNAIL_REMOVED_NOTIFICATION 316
+IDS_NEW_TAB_REMOVE_THUMBNAIL_TOOLTIP 317
+IDS_NEW_TAB_UNDO_THUMBNAIL_REMOVE 318
+IDS_NEW_TAB_RESTORE_THUMBNAILS_SHORT_LINK 319
+IDS_NEW_TAB_ATTRIBUTION_INTRO 320
+IDS_SEARCH_BOX_EMPTY_HINT 321
+IDR_LOCAL_NTP_JS 322
+IDR_ADDITIONAL_MODULE_IDS 323
diff --git a/chromium/tools/idl_parser/OWNERS b/chromium/tools/idl_parser/OWNERS
index afc0d893965..ff5f0e4e413 100644
--- a/chromium/tools/idl_parser/OWNERS
+++ b/chromium/tools/idl_parser/OWNERS
@@ -2,5 +2,5 @@ bashi@chromium.org
haraken@chromium.org
yukishiino@chromium.org
-# TEAM: blink-bindings-reviews@chromium.org
+# TEAM: blink-reviews-bindings@chromium.org
# COMPONENT: Blink>Bindings
diff --git a/chromium/tools/idl_parser/idl_lexer.py b/chromium/tools/idl_parser/idl_lexer.py
index 1f186d722d9..a86fe59e4c0 100755
--- a/chromium/tools/idl_parser/idl_lexer.py
+++ b/chromium/tools/idl_parser/idl_lexer.py
@@ -9,7 +9,7 @@ The lexer uses the PLY library to build a tokenizer which understands both
WebIDL and Pepper tokens.
WebIDL, and WebIDL regular expressions can be found at:
- http://www.w3.org/TR/2012/CR-WebIDL-20120419/
+ http://heycam.github.io/webidl/
PLY can be found at:
http://www.dabeaz.com/ply/
"""
@@ -17,20 +17,10 @@ PLY can be found at:
import os.path
import sys
-#
-# Try to load the ply module, if not, then assume it is in the third_party
-# directory.
-#
-try:
- # Disable lint check which fails to find the ply module.
- # pylint: disable=F0401
- from ply import lex
-except ImportError:
- module_path, module_name = os.path.split(__file__)
- third_party = os.path.join(module_path, '..', '..', 'third_party')
- sys.path.append(third_party)
- # pylint: disable=F0401
- from ply import lex
+SRC_DIR = os.path.join(os.path.dirname(__file__), os.pardir, os.pardir)
+sys.path.insert(0, os.path.join(SRC_DIR, 'third_party'))
+from ply import lex
+
#
# IDL Lexer
@@ -101,6 +91,7 @@ class IDLLexer(object):
'Promise' : 'PROMISE',
'readonly' : 'READONLY',
'RegExp' : 'REGEXP',
+ 'record' : 'RECORD',
'required' : 'REQUIRED',
'sequence' : 'SEQUENCE',
'serializer' : 'SERIALIZER',
@@ -113,6 +104,7 @@ class IDLLexer(object):
'true' : 'TRUE',
'unsigned' : 'UNSIGNED',
'unrestricted' : 'UNRESTRICTED',
+ 'USVString' : 'USVSTRING',
'void' : 'VOID'
}
diff --git a/chromium/tools/idl_parser/idl_parser.py b/chromium/tools/idl_parser/idl_parser.py
index b1e74c7a5a8..9443ce1c683 100755
--- a/chromium/tools/idl_parser/idl_parser.py
+++ b/chromium/tools/idl_parser/idl_parser.py
@@ -36,22 +36,11 @@ import time
from idl_lexer import IDLLexer
from idl_node import IDLAttribute, IDLNode
-#
-# Try to load the ply module, if not, then assume it is in the third_party
-# directory.
-#
-try:
- # Disable lint check which fails to find the ply module.
- # pylint: disable=F0401
- from ply import lex
- from ply import yacc
-except ImportError:
- module_path, module_name = os.path.split(__file__)
- third_party = os.path.join(module_path, os.par, os.par, 'third_party')
- sys.path.append(third_party)
- # pylint: disable=F0401
- from ply import lex
- from ply import yacc
+SRC_DIR = os.path.join(os.path.dirname(__file__), os.pardir, os.pardir)
+sys.path.insert(0, os.path.join(SRC_DIR, 'third_party'))
+from ply import lex
+from ply import yacc
+
#
# ERROR_REMAP
@@ -903,7 +892,8 @@ class IDLParser(object):
| PromiseType Null
| identifier TypeSuffix
| SEQUENCE '<' Type '>' Null
- | FROZENARRAY '<' Type '>' Null"""
+ | FROZENARRAY '<' Type '>' Null
+ | RecordType Null"""
if len(p) == 3:
if type(p[1]) == str:
typeref = self.BuildNamed('Typeref', p, 1)
@@ -928,15 +918,14 @@ class IDLParser(object):
p[0] = p[1]
- # [81] Added BYTESTRING, DOMSTRING, OBJECT, DATE, REGEXP
+ # [81] Added StringType, OBJECT, DATE, REGEXP
def p_PrimitiveType(self, p):
"""PrimitiveType : UnsignedIntegerType
| UnrestrictedFloatType
+ | StringType
| BOOLEAN
| BYTE
| OCTET
- | BYTESTRING
- | DOMSTRING
| OBJECT
| DATE
| REGEXP"""
@@ -1087,6 +1076,23 @@ class IDLParser(object):
value = self.BuildNamed('Call', p, 3, args)
p[0] = self.BuildNamed('ExtAttribute', p, 1, value)
+ # [99]
+ def p_StringType(self, p):
+ """StringType : BYTESTRING
+ | DOMSTRING
+ | USVSTRING"""
+ p[0] = self.BuildNamed('StringType', p, 1)
+
+ # [100]
+ def p_RecordType(self, p):
+ """RecordType : RECORD '<' StringType ',' Type '>'"""
+ p[0] = self.BuildProduction('Record', p, 2, ListFromConcat(p[3], p[5]))
+
+ # [100.1] Error recovery for RecordType.
+ def p_RecordTypeError(self, p):
+ """RecordType : RECORD '<' error ',' Type '>'"""
+ p[0] = self.BuildError(p, 'RecordType')
+
#
# Parser Errors
#
@@ -1149,9 +1155,9 @@ class IDLParser(object):
# Production is the set of items sent to a grammar rule resulting in a new
# item being returned.
#
+# cls - The type of item being producted
# p - Is the Yacc production object containing the stack of items
# index - Index into the production of the name for the item being produced.
-# cls - The type of item being producted
# childlist - The children of the new item
def BuildProduction(self, cls, p, index, childlist=None):
try:
diff --git a/chromium/tools/idl_parser/idl_ppapi_lexer.py b/chromium/tools/idl_parser/idl_ppapi_lexer.py
index ac6f42cc2a4..a13c4e4ac95 100755
--- a/chromium/tools/idl_parser/idl_ppapi_lexer.py
+++ b/chromium/tools/idl_parser/idl_ppapi_lexer.py
@@ -9,7 +9,7 @@ The lexer uses the PLY library to build a tokenizer which understands both
WebIDL and Pepper tokens.
WebIDL, and WebIDL regular expressions can be found at:
- http://www.w3.org/TR/2012/CR-WebIDL-20120419/
+ http://heycam.github.io/webidl/
PLY can be found at:
http://www.dabeaz.com/ply/
"""
@@ -60,7 +60,7 @@ class IDLPPAPILexer(IDLLexer):
# Remove JS types
self._DelKeywords(['boolean', 'byte', 'ByteString', 'Date', 'DOMString',
'double', 'float', 'long', 'object', 'octet', 'Promise',
- 'RegExp', 'short', 'unsigned'])
+ 'record', 'RegExp', 'short', 'unsigned', 'USVString'])
# If run by itself, attempt to build the lexer
diff --git a/chromium/tools/idl_parser/idl_ppapi_parser.py b/chromium/tools/idl_parser/idl_ppapi_parser.py
index 8914c841997..2556ffb47a6 100755
--- a/chromium/tools/idl_parser/idl_ppapi_parser.py
+++ b/chromium/tools/idl_parser/idl_ppapi_parser.py
@@ -248,6 +248,18 @@ class IDLPPAPIParser(IDLParser):
""" """
pass
+ def p_StringType(self, p):
+ """ """
+ pass
+
+ def p_RecordType(self, p):
+ """ """
+ pass
+
+ def p_RecordTypeError(self, p):
+ """ """
+ pass
+
# We only support:
# [ identifier ]
# [ identifier ( ArgumentList )]
diff --git a/chromium/tools/idl_parser/test_lexer/keywords.in b/chromium/tools/idl_parser/test_lexer/keywords.in
index 05b832d6c4e..abca990e415 100644
--- a/chromium/tools/idl_parser/test_lexer/keywords.in
+++ b/chromium/tools/idl_parser/test_lexer/keywords.in
@@ -34,6 +34,7 @@ OR or
PARTIAL partial
PROMISE Promise
READONLY readonly
+RECORD record
REGEXP RegExp
REQUIRED required
SEQUENCE sequence
@@ -47,4 +48,5 @@ TYPEDEF typedef
TRUE true
UNSIGNED unsigned
UNRESTRICTED unrestricted
+USVSTRING USVString
VOID void
diff --git a/chromium/tools/idl_parser/test_parser/dictionary_web.idl b/chromium/tools/idl_parser/test_parser/dictionary_web.idl
index 2fd602e1491..83512460341 100644
--- a/chromium/tools/idl_parser/test_parser/dictionary_web.idl
+++ b/chromium/tools/idl_parser/test_parser/dictionary_web.idl
@@ -53,7 +53,7 @@ partial dictionary MyDictInherit : Foo {};
*Dictionary(MyDictBig)
* Key(setString)
* Type()
- * PrimitiveType(DOMString)
+ * StringType(DOMString)
* Default(Foo)
* Key(setLong)
* Type()
@@ -88,7 +88,7 @@ dictionary {
*Dictionary(MyDictionaryInvalidOptional)
* Key(mandatory)
* Type()
- * PrimitiveType(DOMString)
+ * StringType(DOMString)
* Error(Unexpected keyword "optional" after ">".)
*/
dictionary MyDictionaryInvalidOptional {
@@ -105,7 +105,7 @@ dictionary ForParent NoColon {
*Dictionary(MyDictNull)
* Key(setString)
* Type()
- * PrimitiveType(DOMString)
+ * StringType(DOMString)
* Default(NULL)
*/
dictionary MyDictNull {
diff --git a/chromium/tools/idl_parser/test_parser/exception_web.idl b/chromium/tools/idl_parser/test_parser/exception_web.idl
index 3801a4aa7dc..2e28107dfb2 100644
--- a/chromium/tools/idl_parser/test_parser/exception_web.idl
+++ b/chromium/tools/idl_parser/test_parser/exception_web.idl
@@ -48,7 +48,7 @@ partial exception MyExcPartial { };
*Exception(MyExcBig)
* ExceptionField(MyString)
* Type()
- * PrimitiveType(DOMString)
+ * StringType(DOMString)
* Error(Unexpected "=" after identifier "ErrorSetLong".)
* ExceptionField(MyLong)
* Type()
@@ -75,7 +75,7 @@ exception ForParent NoColon {
/* TREE
*Exception(MyExcConst)
* Const(setString)
- * PrimitiveType(DOMString)
+ * StringType(DOMString)
* Value(NULL)
*/
exception MyExcConst {
diff --git a/chromium/tools/idl_parser/test_parser/interface_web.idl b/chromium/tools/idl_parser/test_parser/interface_web.idl
index 007ac7ffed8..c8ec6d26435 100644
--- a/chromium/tools/idl_parser/test_parser/interface_web.idl
+++ b/chromium/tools/idl_parser/test_parser/interface_web.idl
@@ -55,7 +55,7 @@ partial interface MyIFaceInherit : Foo {};
* Arguments()
* Argument(arg)
* Type()
- * PrimitiveType(DOMString)
+ * StringType(DOMString)
* Error(Missing argument.)
* Type()
* PrimitiveType(void)
@@ -76,7 +76,7 @@ interface MyIFaceMissingAttribute {
* Operation(foo)
* Arguments()
* Type()
- * PrimitiveType(DOMString)
+ * StringType(DOMString)
* Error(Unexpected "<" after ";".)
*/
interface MyIFaceContainsUnresolvedConflictDiff {
@@ -89,9 +89,23 @@ interface MyIFaceContainsUnresolvedConflictDiff {
};
/* TREE
+ *Interface(MyIFaceWrongRecordKeyType)
+ * Operation(foo)
+ * Arguments()
+ * Argument(arg)
+ * Type()
+ * Error(Unexpected identifier "int" after "<".)
+ * Type()
+ * PrimitiveType(void)
+ */
+interface MyIFaceWrongRecordKeyType {
+ void foo(record<int, ByteString> arg);
+};
+
+/* TREE
*Interface(MyIFaceBig)
* Const(setString)
- * PrimitiveType(DOMString)
+ * StringType(DOMString)
* Value(NULL)
*/
interface MyIFaceBig {
@@ -106,7 +120,7 @@ interface MyIFaceBig {
* Type()
* Sequence()
* Type()
- * PrimitiveType(DOMString)
+ * StringType(DOMString)
* Default()
* Type()
* PrimitiveType(void)
@@ -116,9 +130,40 @@ interface MyIfaceEmptySequenceDefalutValue {
};
/* TREE
+ *Interface(MyIfaceWithRecords)
+ * Operation(foo)
+ * Arguments()
+ * Argument(arg)
+ * Type()
+ * Record()
+ * StringType(DOMString)
+ * Type()
+ * PrimitiveType(long)
+ * Type()
+ * PrimitiveType(void)
+ * Operation(bar)
+ * Arguments()
+ * Argument(arg1)
+ * Type()
+ * Typeref(int)
+ * Argument(arg2)
+ * Type()
+ * Record()
+ * StringType(ByteString)
+ * Type()
+ * PrimitiveType(float)
+ * Type()
+ * PrimitiveType(double)
+ */
+interface MyIfaceWithRecords {
+ void foo(record<DOMString, long> arg);
+ double bar(int arg1, record<ByteString, float> arg2);
+};
+
+/* TREE
*Interface(MyIFaceBig2)
* Const(nullValue)
- * PrimitiveType(DOMString)
+ * StringType(DOMString)
* Value(NULL)
* Const(longValue)
* PrimitiveType(long)
@@ -128,13 +173,13 @@ interface MyIfaceEmptySequenceDefalutValue {
* Value(123)
* Attribute(myString)
* Type()
- * PrimitiveType(DOMString)
+ * StringType(DOMString)
* Attribute(readOnlyString)
* Type()
- * PrimitiveType(DOMString)
+ * StringType(DOMString)
* Attribute(staticString)
* Type()
- * PrimitiveType(DOMString)
+ * StringType(DOMString)
* Operation(myFunction)
* Arguments()
* Argument(myLong)
@@ -168,14 +213,14 @@ interface MyIFaceBig2 {
* Arguments()
* Argument(property)
* Type()
- * PrimitiveType(DOMString)
+ * StringType(DOMString)
* Type()
* PrimitiveType(void)
* Operation(_unnamed_)
* Arguments()
* Argument(property)
* Type()
- * PrimitiveType(DOMString)
+ * StringType(DOMString)
* Type()
* PrimitiveType(double)
* Operation(GetFiveSix)
@@ -201,16 +246,16 @@ interface MyIFaceSpecials {
* Operation(_unnamed_)
* Arguments()
* Type()
- * PrimitiveType(DOMString)
+ * StringType(DOMString)
* Stringifier()
* Operation(namedStringifier)
* Arguments()
* Type()
- * PrimitiveType(DOMString)
+ * StringType(DOMString)
* Stringifier()
* Attribute(stringValue)
* Type()
- * PrimitiveType(DOMString)
+ * StringType(DOMString)
*/
interface MyIFaceStringifiers {
stringifier;
@@ -281,7 +326,7 @@ interface MyIfacePromise {
* Type()
* PrimitiveType(double)
* Type()
- * PrimitiveType(DOMString)
+ * StringType(DOMString)
* LegacyIterable()
* Type()
* PrimitiveType(boolean)
@@ -298,7 +343,7 @@ interface MyIfaceIterable {
* Type()
* PrimitiveType(long)
* Type()
- * PrimitiveType(DOMString)
+ * StringType(DOMString)
* Maplike()
* Type()
* PrimitiveType(double)
@@ -376,7 +421,7 @@ interface MyIfaceSerializer {
* Type()
* FrozenArray()
* Type()
- * PrimitiveType(DOMString)
+ * StringType(DOMString)
*/
interface MyIfaceFrozenArray {
readonly attribute FrozenArray<DOMString> foo;
@@ -388,7 +433,7 @@ interface MyIfaceFrozenArray {
* Type()
* UnionType()
* Type()
- * PrimitiveType(DOMString)
+ * StringType(DOMString)
* Type()
* PrimitiveType(long)
*/
diff --git a/chromium/tools/idl_parser/test_parser/typedef_web.idl b/chromium/tools/idl_parser/test_parser/typedef_web.idl
index ba95db7ccfe..6e651c1e2ae 100644
--- a/chromium/tools/idl_parser/test_parser/typedef_web.idl
+++ b/chromium/tools/idl_parser/test_parser/typedef_web.idl
@@ -121,7 +121,7 @@ typedef unsigned long long MyULongLong;
/* TREE
*Typedef(MyString)
* Type()
- * PrimitiveType(DOMString)
+ * StringType(DOMString)
*/
typedef DOMString MyString;
@@ -188,3 +188,19 @@ typedef byte MyByte;
*/
typedef octet MyOctet;
+/* TREE
+ *Typedef(MyRecord)
+ * Type()
+ * Record()
+ * StringType(ByteString)
+ * Type()
+ * Typeref(int)
+ */
+typedef record<ByteString, int> MyRecord;
+
+/* TREE
+ *Typedef(MyInvalidRecord)
+ * Type()
+ * Error(Unexpected keyword "double" after "<".)
+ */
+typedef record<double, ByteString> MyInvalidRecord;
diff --git a/chromium/tools/ipc_fuzzer/OWNERS b/chromium/tools/ipc_fuzzer/OWNERS
index 2cca4f2232c..66e501b8b49 100644
--- a/chromium/tools/ipc_fuzzer/OWNERS
+++ b/chromium/tools/ipc_fuzzer/OWNERS
@@ -1,3 +1,5 @@
inferno@chromium.org
mbarbella@chromium.org
tsepez@chromium.org
+
+# COMPONENT: Tools
diff --git a/chromium/tools/ipc_fuzzer/fuzzer/fuzzer.cc b/chromium/tools/ipc_fuzzer/fuzzer/fuzzer.cc
index 08e99d30d42..8efc54741f6 100644
--- a/chromium/tools/ipc_fuzzer/fuzzer/fuzzer.cc
+++ b/chromium/tools/ipc_fuzzer/fuzzer/fuzzer.cc
@@ -509,21 +509,21 @@ struct FuzzTraits<base::ListValue> {
bool tmp;
p->GetBoolean(index, &tmp);
fuzzer->FuzzBool(&tmp);
- p->Set(index, new base::FundamentalValue(tmp));
+ p->Set(index, new base::Value(tmp));
break;
}
case base::Value::Type::INTEGER: {
int tmp;
p->GetInteger(index, &tmp);
fuzzer->FuzzInt(&tmp);
- p->Set(index, new base::FundamentalValue(tmp));
+ p->Set(index, new base::Value(tmp));
break;
}
case base::Value::Type::DOUBLE: {
double tmp;
p->GetDouble(index, &tmp);
fuzzer->FuzzDouble(&tmp);
- p->Set(index, new base::FundamentalValue(tmp));
+ p->Set(index, new base::Value(tmp));
break;
}
case base::Value::Type::STRING: {
@@ -581,19 +581,19 @@ struct FuzzTraits<base::DictionaryValue> {
case base::Value::Type::BOOLEAN: {
bool tmp;
fuzzer->FuzzBool(&tmp);
- p->SetWithoutPathExpansion(property, new base::FundamentalValue(tmp));
+ p->SetWithoutPathExpansion(property, new base::Value(tmp));
break;
}
case base::Value::Type::INTEGER: {
int tmp;
fuzzer->FuzzInt(&tmp);
- p->SetWithoutPathExpansion(property, new base::FundamentalValue(tmp));
+ p->SetWithoutPathExpansion(property, new base::Value(tmp));
break;
}
case base::Value::Type::DOUBLE: {
double tmp;
fuzzer->FuzzDouble(&tmp);
- p->SetWithoutPathExpansion(property, new base::FundamentalValue(tmp));
+ p->SetWithoutPathExpansion(property, new base::Value(tmp));
break;
}
case base::Value::Type::STRING: {
@@ -1505,23 +1505,6 @@ struct FuzzTraits<ppapi::SocketOptionData> {
};
template <>
-struct FuzzTraits<printing::PdfRenderSettings> {
- static bool Fuzz(printing::PdfRenderSettings* p, Fuzzer* fuzzer) {
- gfx::Rect area = p->area;
- int dpi = p->dpi;
- bool autorotate = p->autorotate;
- if (!FuzzParam(&area, fuzzer))
- return false;
- if (!FuzzParam(&dpi, fuzzer))
- return false;
- if (!FuzzParam(&autorotate, fuzzer))
- return false;
- *p = printing::PdfRenderSettings(area, dpi, autorotate);
- return true;
- }
-};
-
-template <>
struct FuzzTraits<SkBitmap> {
static bool Fuzz(SkBitmap* p, Fuzzer* fuzzer) {
// TODO(mbarbella): This should actually do something.
diff --git a/chromium/tools/ipc_fuzzer/message_lib/BUILD.gn b/chromium/tools/ipc_fuzzer/message_lib/BUILD.gn
index ffc254073b5..6ee64801046 100644
--- a/chromium/tools/ipc_fuzzer/message_lib/BUILD.gn
+++ b/chromium/tools/ipc_fuzzer/message_lib/BUILD.gn
@@ -22,8 +22,8 @@ static_library("ipc_message_lib") {
"//skia",
"//third_party/WebKit/public:blink",
"//third_party/WebKit/public:blink_headers",
- "//third_party/libjingle",
"//third_party/mt19937ar",
+ "//third_party/webrtc_overrides",
"//ui/accessibility:ax_gen",
]
sources = [
diff --git a/chromium/tools/ipc_fuzzer/message_replay/replay_process.cc b/chromium/tools/ipc_fuzzer/message_replay/replay_process.cc
index d69f9c1bdf4..f8129959b5d 100644
--- a/chromium/tools/ipc_fuzzer/message_replay/replay_process.cc
+++ b/chromium/tools/ipc_fuzzer/message_replay/replay_process.cc
@@ -11,7 +11,6 @@
#include "base/command_line.h"
#include "base/files/file_path.h"
#include "base/logging.h"
-#include "base/posix/global_descriptors.h"
#include "base/run_loop.h"
#include "build/build_config.h"
#include "chrome/common/chrome_switches.h"
@@ -24,6 +23,7 @@
#include "mojo/edk/embedder/scoped_ipc_support.h"
#if defined(OS_POSIX)
+#include "base/posix/global_descriptors.h"
#include "content/public/common/content_descriptors.h"
#endif
diff --git a/chromium/tools/json_schema_compiler/cc_generator.py b/chromium/tools/json_schema_compiler/cc_generator.py
index 3fd3d2a2b24..b653a9c83bc 100644
--- a/chromium/tools/json_schema_compiler/cc_generator.py
+++ b/chromium/tools/json_schema_compiler/cc_generator.py
@@ -650,7 +650,7 @@ class _Generator(object):
if underlying_type.property_type == PropertyType.STRING:
return 'base::MakeUnique<base::StringValue>(%s)' % var
else:
- return 'base::MakeUnique<base::FundamentalValue>(%s)' % var
+ return 'base::MakeUnique<base::Value>(%s)' % var
else:
raise NotImplementedError('Conversion of %s to base::Value not '
'implemented' % repr(type_.type_))
diff --git a/chromium/tools/json_schema_compiler/cpp_bundle_generator.py b/chromium/tools/json_schema_compiler/cpp_bundle_generator.py
index 89501e2cf14..88e0f36eb94 100644
--- a/chromium/tools/json_schema_compiler/cpp_bundle_generator.py
+++ b/chromium/tools/json_schema_compiler/cpp_bundle_generator.py
@@ -147,15 +147,19 @@ class CppBundleGenerator(object):
raise ValueError("Unsupported platform ifdef: %s" % platform.name)
return ' || '.join(ifdefs)
- def _GenerateRegisterFunctions(self, namespace_name, function):
+ def _GenerateRegistrationEntry(self, namespace_name, function):
c = code.Code()
function_ifdefs = self._GetPlatformIfdefs(function)
if function_ifdefs is not None:
c.Append("#if %s" % function_ifdefs, indent_level=0)
- function_name = JsFunctionNameToClassName(namespace_name, function.name)
- c.Append("registry->RegisterFunction<%sFunction>();" % (
- function_name))
+ function_name = '%sFunction' % JsFunctionNameToClassName(
+ namespace_name, function.name)
+ c.Sblock('{')
+ c.Append('&NewExtensionFunction<%s>,' % function_name)
+ c.Append('%s::function_name(),' % function_name)
+ c.Append('%s::histogram_value(),' % function_name)
+ c.Eblock('},')
if function_ifdefs is not None:
c.Append("#endif // %s" % function_ifdefs, indent_level=0)
@@ -166,6 +170,7 @@ class CppBundleGenerator(object):
c.Append('// static')
c.Sblock('void %s::RegisterAll(ExtensionFunctionRegistry* registry) {' %
self._GenerateBundleClass('GeneratedFunctionRegistry'))
+ c.Sblock('constexpr ExtensionFunctionRegistry::FactoryEntry kEntries[] = {')
for namespace in self._model.namespaces.values():
namespace_ifdefs = self._GetPlatformIfdefs(namespace)
if namespace_ifdefs is not None:
@@ -174,7 +179,7 @@ class CppBundleGenerator(object):
for function in namespace.functions.values():
if function.nocompile:
continue
- c.Concat(self._GenerateRegisterFunctions(namespace.name, function))
+ c.Concat(self._GenerateRegistrationEntry(namespace.name, function))
for type_ in namespace.types.values():
for function in type_.functions.values():
@@ -182,11 +187,15 @@ class CppBundleGenerator(object):
continue
namespace_types_name = JsFunctionNameToClassName(
namespace.name, type_.name)
- c.Concat(self._GenerateRegisterFunctions(namespace_types_name,
+ c.Concat(self._GenerateRegistrationEntry(namespace_types_name,
function))
if namespace_ifdefs is not None:
c.Append("#endif // %s" % namespace_ifdefs, indent_level=0)
+ c.Eblock("};")
+ c.Sblock("for (const auto& entry : kEntries) {")
+ c.Append(" registry->Register(entry);")
+ c.Eblock("}")
c.Eblock("}")
return c
diff --git a/chromium/tools/json_schema_compiler/feature_compiler.py b/chromium/tools/json_schema_compiler/feature_compiler.py
index 0b12f5eb389..0a36a193690 100644
--- a/chromium/tools/json_schema_compiler/feature_compiler.py
+++ b/chromium/tools/json_schema_compiler/feature_compiler.py
@@ -25,11 +25,11 @@ HEADER_FILE_TEMPLATE = """
#ifndef %(header_guard)s
#define %(header_guard)s
-#include "extensions/common/features/base_feature_provider.h"
+#include "extensions/common/features/feature_provider.h"
namespace extensions {
-class %(provider_class)s : public BaseFeatureProvider {
+class %(provider_class)s : public FeatureProvider {
public:
%(provider_class)s();
~%(provider_class)s() override;
diff --git a/chromium/tools/json_schema_compiler/idl_schema.py b/chromium/tools/json_schema_compiler/idl_schema.py
index 58efe28598f..fcad49bef4e 100755
--- a/chromium/tools/json_schema_compiler/idl_schema.py
+++ b/chromium/tools/json_schema_compiler/idl_schema.py
@@ -50,13 +50,20 @@ def ProcessComment(comment):
Returns: A tuple that looks like:
(
- "The processed comment, minus all |parameter| mentions.",
+ "The processed comment, minus all |parameter| mentions and jsexterns.",
+ "Any block wrapped in <jsexterns></jsexterns>.",
{
'parameter_name_1': "The comment that followed |parameter_name_1|:",
...
}
)
'''
+ jsexterns = None
+ match = re.search('<jsexterns>(.*)</jsexterns>', comment, re.DOTALL)
+ if match:
+ jsexterns = match.group(1).strip()
+ comment = comment[:match.start()] + comment[match.end():]
+
def add_paragraphs(content):
paragraphs = content.split('\n\n')
if len(paragraphs) < 2:
@@ -85,7 +92,7 @@ def ProcessComment(comment):
add_paragraphs(comment[param_comment_start:param_comment_end].strip())
.replace('\n', ''))
- return (parent_comment, params)
+ return (parent_comment, jsexterns, params)
class Callspec(object):
@@ -195,8 +202,10 @@ class Member(object):
parameter_comments = OrderedDict()
for node in self.node.GetChildren():
if node.cls == 'Comment':
- (parent_comment, parameter_comments) = ProcessComment(node.GetName())
+ (parent_comment, jsexterns, parameter_comments) = ProcessComment(
+ node.GetName())
properties['description'] = parent_comment
+ properties['jsexterns'] = jsexterns
elif node.cls == 'Callspec':
name, parameters, return_type = (Callspec(node, parameter_comments)
.process(callbacks))
diff --git a/chromium/tools/json_schema_compiler/idl_schema_test.py b/chromium/tools/json_schema_compiler/idl_schema_test.py
index 3e0d3cc482a..f9a4d8702a7 100755
--- a/chromium/tools/json_schema_compiler/idl_schema_test.py
+++ b/chromium/tools/json_schema_compiler/idl_schema_test.py
@@ -70,7 +70,8 @@ class IdlSchemaTest(unittest.TestCase):
def testLegalValues(self):
self.assertEquals({
'x': {'name': 'x', 'type': 'integer', 'enum': [1,2],
- 'description': 'This comment tests "double-quotes".'},
+ 'description': 'This comment tests "double-quotes".',
+ 'jsexterns': None},
'y': {'name': 'y', 'type': 'string'},
'z': {'name': 'z', 'type': 'string'},
'a': {'name': 'a', 'type': 'string'},
@@ -402,21 +403,25 @@ class IdlSchemaTest(unittest.TestCase):
self.assertEquals(OrderedDict([
('first', OrderedDict([
('description', 'Integer property.'),
+ ('jsexterns', None),
('type', 'integer'),
('value', 42),
])),
('second', OrderedDict([
('description', 'Double property.'),
+ ('jsexterns', None),
('type', 'number'),
('value', 42.0),
])),
('third', OrderedDict([
('description', 'String property.'),
+ ('jsexterns', None),
('type', 'string'),
('value', 'hello world'),
])),
('fourth', OrderedDict([
('description', 'Unvalued property.'),
+ ('jsexterns', None),
('type', 'integer'),
])),
]), schema.get('properties'))
diff --git a/chromium/tools/json_schema_compiler/js_externs_generator.py b/chromium/tools/json_schema_compiler/js_externs_generator.py
index 065e4d3aaba..a5ef075ac26 100644
--- a/chromium/tools/json_schema_compiler/js_externs_generator.py
+++ b/chromium/tools/json_schema_compiler/js_externs_generator.py
@@ -30,6 +30,7 @@ class JsExternsGenerator(object):
class _Generator(object):
def __init__(self, namespace):
self._namespace = namespace
+ self._class_name = None
self._js_util = JsUtil()
def Generate(self):
@@ -79,7 +80,7 @@ class _Generator(object):
.Append(self._js_util.GetSeeLink(self._namespace.name, 'type',
js_type.simple_name))
.Eblock(' */'))
- c.Append('chrome.%s.%s = {' % (self._namespace.name, js_type.name))
+ c.Append('%s.%s = {' % (self._GetNamespace(), js_type.name))
def get_property_name(e):
# Enum properties are normified to be in ALL_CAPS_STYLE.
@@ -105,7 +106,7 @@ class _Generator(object):
return any(prop.type_.property_type is PropertyType.FUNCTION
for prop in js_type.properties.values())
- def _AppendTypeJsDoc(self, c, js_type):
+ def _AppendTypeJsDoc(self, c, js_type, optional=False):
"""Appends the documentation for a type as a Code.
"""
c.Sblock(line='/**', line_prefix=' * ')
@@ -114,9 +115,16 @@ class _Generator(object):
for line in js_type.description.splitlines():
c.Append(line)
+ if js_type.jsexterns:
+ for line in js_type.jsexterns.splitlines():
+ c.Append(line)
+
is_constructor = self._IsTypeConstructor(js_type)
- if is_constructor:
- c.Comment('@constructor', comment_prefix = ' * ', wrap_indent=4)
+ if js_type.property_type is not PropertyType.OBJECT:
+ self._js_util.AppendTypeJsDoc(c, self._namespace.name, js_type, optional)
+ elif is_constructor:
+ c.Comment('@constructor', comment_prefix = '', wrap_indent=4)
+ c.Comment('@private', comment_prefix = '', wrap_indent=4)
else:
self._AppendTypedef(c, js_type.properties)
@@ -124,11 +132,22 @@ class _Generator(object):
js_type.simple_name))
c.Eblock(' */')
- var = 'chrome.%s.%s' % (js_type.namespace.name, js_type.simple_name)
+ var = '%s.%s' % (self._GetNamespace(), js_type.simple_name)
if is_constructor: var += ' = function() {}'
var += ';'
c.Append(var)
+ if is_constructor:
+ c.Append()
+ self._class_name = js_type.name
+ for prop in js_type.properties.values():
+ if prop.type_.property_type is PropertyType.FUNCTION:
+ self._AppendFunction(c, prop.type_.function)
+ else:
+ self._AppendTypeJsDoc(c, prop.type_, prop.optional)
+ c.Append()
+ self._class_name = None
+
def _AppendTypedef(self, c, properties):
"""Given an OrderedDict of properties, Appends code containing a @typedef.
"""
@@ -150,8 +169,8 @@ class _Generator(object):
"""
self._js_util.AppendFunctionJsDoc(c, self._namespace.name, function)
params = self._GetFunctionParams(function)
- c.Append('chrome.%s.%s = function(%s) {};' % (self._namespace.name,
- function.name, params))
+ c.Append('%s.%s = function(%s) {};' % (self._GetNamespace(),
+ function.name, params))
c.Append()
def _AppendEvent(self, c, event):
@@ -168,7 +187,7 @@ class _Generator(object):
c.Append(self._js_util.GetSeeLink(self._namespace.name, 'event',
event.name))
c.Eblock(' */')
- c.Append('chrome.%s.%s;' % (self._namespace.name, event.name))
+ c.Append('%s.%s;' % (self._GetNamespace(), event.name))
c.Append()
def _AppendNamespaceObject(self, c):
@@ -193,3 +212,17 @@ class _Generator(object):
if function.callback:
params.append(function.callback)
return ', '.join(param.name for param in params)
+
+ def _GetNamespace(self):
+ """Returns the namespace to be prepended to a top-level typedef.
+
+ For example, it might return "chrome.namespace".
+
+ Also optionally includes the class name if this is in the context
+ of outputting the members of a class.
+
+ For example, "chrome.namespace.ClassName.prototype"
+ """
+ if self._class_name:
+ return 'chrome.%s.%s.prototype' % (self._namespace.name, self._class_name)
+ return 'chrome.%s' % self._namespace.name
diff --git a/chromium/tools/json_schema_compiler/js_externs_generator_test.py b/chromium/tools/json_schema_compiler/js_externs_generator_test.py
index 917ce6379fb..88c2a09f04d 100755
--- a/chromium/tools/json_schema_compiler/js_externs_generator_test.py
+++ b/chromium/tools/json_schema_compiler/js_externs_generator_test.py
@@ -47,6 +47,18 @@ namespace fakeApi {
ArrayBuffer arrayBuff;
};
+ dictionary Qux {
+ long notOptionalLong;
+ long? optionalLong;
+
+ // A map from string to number.
+ // <jsexterns>@type {Object<string, number>}</jsexterns>
+ object dict;
+
+ static void go();
+ static void stop();
+ };
+
callback VoidCallback = void();
callback BazGreekCallback = void(Baz baz, Greek greek);
@@ -132,6 +144,43 @@ chrome.fakeApi.Bar;
chrome.fakeApi.Baz;
/**
+ * @constructor
+ * @private
+ * @see https://developer.chrome.com/extensions/fakeApi#type-Qux
+ */
+chrome.fakeApi.Qux = function() {};
+
+/**
+ * @type {number}
+ * @see https://developer.chrome.com/extensions/fakeApi#type-notOptionalLong
+ */
+chrome.fakeApi.Qux.prototype.notOptionalLong;
+
+/**
+ * @type {(number|undefined)}
+ * @see https://developer.chrome.com/extensions/fakeApi#type-optionalLong
+ */
+chrome.fakeApi.Qux.prototype.optionalLong;
+
+/**
+ * A map from string to number.
+ * @type {Object<string, number>}
+ * @see https://developer.chrome.com/extensions/fakeApi#type-dict
+ */
+chrome.fakeApi.Qux.prototype.dict;
+
+/**
+ * @see https://developer.chrome.com/extensions/fakeApi#method-go
+ */
+chrome.fakeApi.Qux.prototype.go = function() {};
+
+/**
+ * @see https://developer.chrome.com/extensions/fakeApi#method-stop
+ */
+chrome.fakeApi.Qux.prototype.stop = function() {};
+
+
+/**
* Does something exciting! And what's more, this is a multiline function
* comment! It goes onto multiple lines!
* @param {!chrome.fakeApi.Baz} baz The baz to use.
diff --git a/chromium/tools/json_schema_compiler/js_util.py b/chromium/tools/json_schema_compiler/js_util.py
index 2549aefa2fa..e563b8889af 100644
--- a/chromium/tools/json_schema_compiler/js_util.py
+++ b/chromium/tools/json_schema_compiler/js_util.py
@@ -101,6 +101,18 @@ class JsUtil(object):
c.Eblock(' */')
+ def AppendTypeJsDoc(self, c, namespace_name, js_type, optional):
+ """Appends the documentation for a type as a Code.
+ """
+ c.Append('@type {')
+ if optional:
+ c.Append('(', new_line=False)
+ c.Concat(self._TypeToJsType(namespace_name, js_type), new_line=False)
+ c.Append('|undefined)', new_line=False)
+ else:
+ c.Concat(self._TypeToJsType(namespace_name, js_type), new_line=False)
+ c.Append('}', new_line=False)
+
def _FunctionToJsFunction(self, namespace_name, function):
"""Converts a model.Function to a JS type (i.e., function([params])...)"""
c = Code()
diff --git a/chromium/tools/json_schema_compiler/model.py b/chromium/tools/json_schema_compiler/model.py
index e0147b5b488..6dc87a964ec 100644
--- a/chromium/tools/json_schema_compiler/model.py
+++ b/chromium/tools/json_schema_compiler/model.py
@@ -183,6 +183,7 @@ class Type(object):
self.simple_name = _StripNamespace(self.name, namespace)
self.unix_name = UnixName(self.name)
self.description = json.get('description', None)
+ self.jsexterns = json.get('jsexterns', None)
self.origin = origin
self.parent = parent
self.instance_of = json.get('isInstanceOf', None)
@@ -317,8 +318,9 @@ class Function(object):
self.filters = [GeneratePropertyFromParam(filter_instance)
for filter_instance in json.get('filters', [])]
callback_param = None
- for param in json.get('parameters', []):
- if param.get('type') == 'function':
+ params = json.get('parameters', [])
+ for i, param in enumerate(params):
+ if param.get('type') == 'function' and i == len(params) - 1:
if callback_param:
# No ParseException because the webstore has this.
# Instead, pretend all intermediate callbacks are properties.
diff --git a/chromium/tools/licenses.py b/chromium/tools/licenses.py
index 45e783e4592..b5b68ff9a6d 100755
--- a/chromium/tools/licenses.py
+++ b/chromium/tools/licenses.py
@@ -18,8 +18,15 @@ Commands:
import argparse
import cgi
import os
+import subprocess
import sys
+# TODO(agrieve): Move build_utils.WriteDepFile into a non-android directory.
+_REPOSITORY_ROOT = os.path.dirname(os.path.dirname(__file__))
+sys.path.append(os.path.join(_REPOSITORY_ROOT, 'build/android/gyp/util'))
+import build_utils
+
+
# Paths from the root of the tree to directories to skip.
PRUNE_PATHS = set([
# Placeholder directory only, not third-party code.
@@ -47,7 +54,6 @@ PRUNE_PATHS = set([
os.path.join('third_party','gnu_binutils'),
os.path.join('third_party','gold'),
os.path.join('third_party','gperf'),
- os.path.join('third_party','kasko'),
os.path.join('third_party','lighttpd'),
os.path.join('third_party','llvm'),
os.path.join('third_party','llvm-build'),
@@ -271,7 +277,6 @@ KNOWN_NON_IOS_LIBRARIES = set([
os.path.join('third_party', 'bspatch'),
os.path.join('third_party', 'cacheinvalidation'),
os.path.join('third_party', 'cld'),
- os.path.join('third_party', 'codesighs'),
os.path.join('third_party', 'flot'),
os.path.join('third_party', 'gtk+'),
os.path.join('third_party', 'iaccessible2'),
@@ -457,6 +462,20 @@ def FindThirdPartyDirsWithFiles(root):
return FilterDirsWithFiles(third_party_dirs, root)
+def FindThirdPartyDeps(gn_out_dir, gn_target):
+ if not gn_out_dir:
+ raise RuntimeError("--gn-out-dir is required if --gn-target is used.")
+
+ gn_deps = subprocess.check_output(["gn", "desc", gn_out_dir, gn_target,
+ "deps", "--as=buildfile", "--all"])
+ third_party_deps = set()
+ for build_dep in gn_deps.split():
+ if ("third_party" in build_dep and
+ os.path.basename(build_dep) == "BUILD.gn"):
+ third_party_deps.add(os.path.dirname(build_dep))
+ return third_party_deps
+
+
def ScanThirdPartyDirs(root=None):
"""Scan a list of directories and report on any problems we find."""
if root is None:
@@ -478,7 +497,8 @@ def ScanThirdPartyDirs(root=None):
def GenerateCredits(
- file_template_file, entry_template_file, output_file, target_os):
+ file_template_file, entry_template_file, output_file, target_os,
+ gn_out_dir, gn_target, depfile=None):
"""Generate about:credits."""
def EvaluateTemplate(template, env, escape=True):
@@ -490,29 +510,41 @@ def GenerateCredits(
template = template.replace('{{%s}}' % key, val)
return template
- root = os.path.join(os.path.dirname(__file__), '..')
- third_party_dirs = FindThirdPartyDirs(PRUNE_PATHS, root)
+ if gn_target:
+ third_party_dirs = FindThirdPartyDeps(gn_out_dir, gn_target)
+
+ # Sanity-check to raise a build error if invalid gn_... settings are
+ # somehow passed to this script.
+ if not third_party_dirs:
+ raise RuntimeError("No deps found.")
+ else:
+ third_party_dirs = FindThirdPartyDirs(PRUNE_PATHS, _REPOSITORY_ROOT)
if not file_template_file:
- file_template_file = os.path.join(root, 'components', 'about_ui',
- 'resources', 'about_credits.tmpl')
+ file_template_file = os.path.join(_REPOSITORY_ROOT, 'components',
+ 'about_ui', 'resources',
+ 'about_credits.tmpl')
if not entry_template_file:
- entry_template_file = os.path.join(root, 'components', 'about_ui',
- 'resources',
+ entry_template_file = os.path.join(_REPOSITORY_ROOT, 'components',
+ 'about_ui', 'resources',
'about_credits_entry.tmpl')
entry_template = open(entry_template_file).read()
entries = []
for path in third_party_dirs:
try:
- metadata = ParseDir(path, root)
+ metadata = ParseDir(path, _REPOSITORY_ROOT)
except LicenseError:
# TODO(phajdan.jr): Convert to fatal error (http://crbug.com/39240).
continue
if metadata['License File'] == NOT_SHIPPED:
continue
- if target_os == 'ios':
- # Skip over files that are known not to be used on iOS.
+ if target_os == 'ios' and not gn_target:
+ # Skip over files that are known not to be used on iOS. But
+ # skipping is unnecessary if GN was used to query the actual
+ # dependencies.
+ # TODO(lambroslambrou): Remove this step once the iOS build is
+ # updated to provide --gn-target to this script.
if path in KNOWN_NON_IOS_LIBRARIES:
continue
env = {
@@ -523,6 +555,7 @@ def GenerateCredits(
entry = {
'name': metadata['Name'],
'content': EvaluateTemplate(entry_template, env),
+ 'license_file': metadata['License File'],
}
entries.append(entry)
@@ -540,6 +573,19 @@ def GenerateCredits(
else:
print template_contents
+ if depfile:
+ assert output_file
+ # Add in build.ninja so that the target will be considered dirty whenever
+ # gn gen is run. Otherwise, it will fail to notice new files being added.
+ # This is still no perfect, as it will fail if no build files are changed,
+ # but a new README.chromium / LICENSE is added. This shouldn't happen in
+ # practice however.
+ license_file_list = (entry['license_file'] for entry in entries)
+ license_file_list = (os.path.relpath(p) for p in license_file_list)
+ license_file_list = sorted(set(license_file_list))
+ build_utils.WriteDepfile(depfile, output_file,
+ license_file_list + ['build.ninja'])
+
return True
@@ -551,8 +597,13 @@ def main():
help='Template HTML to use for each license.')
parser.add_argument('--target-os',
help='OS that this build is targeting.')
+ parser.add_argument('--gn-out-dir',
+ help='GN output directory for scanning dependencies.')
+ parser.add_argument('--gn-target',
+ help='GN target to scan for dependencies.')
parser.add_argument('command', choices=['help', 'scan', 'credits'])
parser.add_argument('output_file', nargs='?')
+ build_utils.AddDepfileOption(parser)
args = parser.parse_args()
if args.command == 'scan':
@@ -560,7 +611,8 @@ def main():
return 1
elif args.command == 'credits':
if not GenerateCredits(args.file_template, args.entry_template,
- args.output_file, args.target_os):
+ args.output_file, args.target_os,
+ args.gn_out_dir, args.gn_target, args.depfile):
return 1
else:
print __doc__
diff --git a/chromium/tools/luci-go/linux64/isolate.sha1 b/chromium/tools/luci-go/linux64/isolate.sha1
index d0b8f6c04b5..b8593a765bd 100644
--- a/chromium/tools/luci-go/linux64/isolate.sha1
+++ b/chromium/tools/luci-go/linux64/isolate.sha1
@@ -1 +1 @@
-0bc14f6977e9c033abe1c2fe612128accdac4b1b
+3c0fbcab83730c86bbd5a09e760388dcb7053bc4
diff --git a/chromium/tools/luci-go/mac64/isolate.sha1 b/chromium/tools/luci-go/mac64/isolate.sha1
index 885db74daf0..bf7e1c1dd57 100644
--- a/chromium/tools/luci-go/mac64/isolate.sha1
+++ b/chromium/tools/luci-go/mac64/isolate.sha1
@@ -1 +1 @@
-a1b6aa3e545ee371ff1e488ac4a83fb5dce3c349
+d37a2f34eff58e1fb04038bd52381001479d4aa1
diff --git a/chromium/tools/luci-go/win64/isolate.exe.sha1 b/chromium/tools/luci-go/win64/isolate.exe.sha1
index 8ff73f6b54f..c575f97042c 100644
--- a/chromium/tools/luci-go/win64/isolate.exe.sha1
+++ b/chromium/tools/luci-go/win64/isolate.exe.sha1
@@ -1 +1 @@
-4e4bcb96b7bd4828e625d523e9b9984e1fc499d5
+d4b894493b1ee5c04ec5bc88e6ea286426540770
diff --git a/chromium/tools/mb/docs/design_spec.md b/chromium/tools/mb/docs/design_spec.md
index 33fda806e8a..fb202da74e9 100644
--- a/chromium/tools/mb/docs/design_spec.md
+++ b/chromium/tools/mb/docs/design_spec.md
@@ -411,9 +411,9 @@ config file change, however.
### Non-goals
* MB is not intended to replace direct invocation of GN or GYP for
- complicated build scenarios (aka ChromeOS), where multiple flags need
+ complicated build scenarios (a.k.a. Chrome OS), where multiple flags need
to be set to user-defined paths for specific toolchains (e.g., where
- ChromeOS needs to specify specific board types and compilers).
+ Chrome OS needs to specify specific board types and compilers).
* MB is not intended at this time to be something developers use frequently,
or to add a lot of features to. We hope to be able to get rid of it once
diff --git a/chromium/tools/mb/mb.py b/chromium/tools/mb/mb.py
index edbb434d27b..917f3c62be3 100755
--- a/chromium/tools/mb/mb.py
+++ b/chromium/tools/mb/mb.py
@@ -1086,23 +1086,14 @@ class MetaBuildWrapper(object):
output_path=None)
if android and test_type != "script":
- logdog_command = [
- '--logdog-bin-cmd', './../../bin/logdog_butler',
- '--project', 'chromium',
- '--service-account-json',
- '/creds/service_accounts/service-account-luci-logdog-publisher.json',
- '--prefix', 'android/swarming/logcats/${SWARMING_TASK_ID}',
- '--source', '${ISOLATED_OUTDIR}/logcats',
- '--name', 'unified_logcats',
- ]
- test_cmdline = [
+ # TODO(crbug.com/693203): Reenable logcat logdog uploading when outage
+ # has been resolved.
+ cmdline = [
self.PathJoin('bin', 'run_%s' % target),
'--logcat-output-file', '${ISOLATED_OUTDIR}/logcats',
'--target-devices-file', '${SWARMING_BOT_FILE}',
'-v'
]
- cmdline = (['./../../build/android/test_wrapper/logdog_wrapper.py']
- + logdog_command + test_cmdline)
elif use_xvfb and test_type == 'windowed_test_launcher':
extra_files = [
'../../testing/test_env.py',
diff --git a/chromium/tools/mb/mb_config.pyl b/chromium/tools/mb/mb_config.pyl
index 6767f94ab49..ad91f61bb9c 100644
--- a/chromium/tools/mb/mb_config.pyl
+++ b/chromium/tools/mb/mb_config.pyl
@@ -55,6 +55,7 @@
},
'chromium.android.fyi': {
+ 'Memory Infra Tester': 'android_release_thumb_bot',
'NDK Next MIPS Builder':
'android_ndk_next_release_bot_minimal_symbols_mipsel',
'NDK Next arm Builder':
@@ -96,7 +97,7 @@
'chromium.fyi': {
'Afl Upload Linux ASan': 'release_afl_asan',
- 'Android Builder (dbg)': 'android_debug_static_bot_vr_shell',
+ 'Android Builder (dbg)': 'android_debug_static_bot',
'Android Builder Goma Canary (dbg)': 'android_debug_bot',
'Android deterministic': 'android_without_codecs_release_bot_minimal_symbols',
'Android deterministic (dbg)': 'android_debug_bot',
@@ -106,7 +107,9 @@
'CFI Linux': 'cfi_release_static',
'CFI Linux Full': 'cfi_full_release_static',
'Chromium Linux Goma Canary': 'release_bot',
+ 'Chromium Linux Goma Canary': 'release_bot',
'Chromium Linux Goma Canary (clobber)': 'release_bot',
+ 'Chromium Linux Goma Canary LocalOutputCache': 'release_bot',
'Chromium Linux Precise Goma LinkTest': 'release_bot',
'Chromium Linux32 Goma Canary (clobber)': 'release_bot_x86',
'Chromium Mac 10.10 MacViews': 'mac_views_browser_release_bot',
@@ -173,7 +176,7 @@
'EarlGreyiOS': 'ios',
'GomaCanaryiOS': 'ios',
'ios-simulator': 'ios',
- 'Headless Linux (dbg)': '//build/args/bots/chromium.fyi/headless_linux_dbg.gn',
+ 'Headless Linux (dbg)': 'headless_linux_debug_bot',
'MD Top Chrome ChromeOS material-hybrid': 'chromeos_with_codecs_debug_bot',
'MD Top Chrome ChromeOS non-material': 'chromeos_with_codecs_debug_bot',
'MD Top Chrome Win material': 'debug_bot',
@@ -186,6 +189,7 @@
'Libfuzzer Upload Linux UBSan': 'release_libfuzzer_ubsan',
'Libfuzzer Upload Mac ASan': 'release_libfuzzer_mac_asan',
'Linux ARM': 'release_bot_arm',
+ 'Linux Clang Analyzer': 'linux_chromium_analysis',
'Linux deterministic': 'release_bot',
'Linux deterministic (dbg)': 'debug_bot',
'Linux remote_run Builder': 'release_bot',
@@ -248,6 +252,7 @@
'GPU Win x64 Builder (dbg)': 'gpu_tests_deqp_gles_debug_trybot',
'Linux ChromiumOS Builder': 'gpu_fyi_tests_chromeos_release_trybot',
'Linux ChromiumOS Ozone Builder': 'gpu_fyi_tests_chromeos_ozone_release_trybot',
+ 'Linux GPU TSAN Release': 'gpu_fyi_tests_release_trybot_tsan',
'Mac GPU ASAN Release': 'gpu_fyi_tests_release_trybot_asan',
},
@@ -268,6 +273,7 @@
'Linux Builder Trusty (dbg)': 'debug_bot',
'Linux Builder Trusty (dbg)(32)': 'debug_bot_x86',
'Linux Builder Trusty': 'release_bot',
+ 'Deterministic Linux': 'release_bot',
},
'chromium.lkgr': {
@@ -300,6 +306,7 @@
'ios-device-xcode-clang': 'ios',
'ios-simulator': 'ios',
'ios-simulator-cronet': 'ios',
+ 'ios-simulator-eg': 'ios',
'ios-simulator-xcode-clang': 'ios',
},
@@ -336,6 +343,14 @@
'Android Builder FYI': 'official_goma_minimal_symbols_android',
'Win Builder FYI': 'official_goma',
'Win Clang Builder': 'official_goma_minimal_symbols_clang',
+ 'Battor Agent Linux': 'debug_bot',
+ },
+
+ 'chromium.swarm': {
+ 'Android Swarm': 'android_without_codecs_release_bot_minimal_symbols',
+ 'Linux Swarm': 'release_bot',
+ 'Mac Swarm': 'release_bot_mac_strip',
+ 'Windows Swarm': 'release_bot_x86',
},
'client.nacl.sdk': {
@@ -348,6 +363,11 @@
'windows-sdk-multirel': 'release_bot_x86',
},
+ 'client.v8.chromium': {
+ 'Linux - Future': 'v8_future_release_bot',
+ 'Linux - Future (dbg)': 'v8_future_debug_bot',
+ },
+
'client.v8.fyi': {
'Android Builder': 'official_goma_minimal_symbols_android',
'Chromium ASAN - debug': 'asan_lsan_edge_debug_bot',
@@ -417,6 +437,7 @@
},
'official.desktop': {
+ 'linux64': 'official',
'mac64': 'official',
'precise64': 'official',
@@ -441,9 +462,9 @@
'mac beta': 'official',
'mac stable': 'official',
'mac trunk': 'official',
- 'precise64 beta': 'official',
- 'precise64 stable': 'official',
- 'precise64 trunk': 'official',
+ 'precise64 beta': 'official_six_concurrent_links',
+ 'precise64 stable': 'official_six_concurrent_links',
+ 'precise64 trunk': 'official_six_concurrent_links',
'win beta': 'official_six_concurrent_links',
'win stable': 'official_six_concurrent_links',
'win trunk': 'official_six_concurrent_links',
@@ -534,6 +555,7 @@
'linux_chromium_chromeos_msan_rel_ng': 'chromeos_msan_release_bot',
'linux_chromium_chromeos_ozone_rel_ng': 'chromeos_with_codecs_ozone_release_trybot',
'linux_chromium_chromeos_rel_ng': 'chromeos_with_codecs_release_trybot',
+ 'linux_chromium_clobber_deterministic': 'release_trybot',
'linux_chromium_clobber_rel_ng': 'release_trybot',
'linux_chromium_compile_dbg_32_ng': 'debug_trybot_x86',
'linux_chromium_compile_dbg_ng': 'debug_trybot',
@@ -541,8 +563,7 @@
'linux_chromium_dbg_32_ng': 'debug_trybot_x86',
'linux_chromium_dbg_ng': 'debug_trybot',
'linux_chromium_gn_upload': 'gn_linux_upload',
- 'linux_chromium_headless_dbg': '//build/args/bots/tryserver.chromium.linux/linux_chromium_headless_dbg.gn',
- 'linux_chromium_headless_rel': '//build/args/bots/tryserver.chromium.linux/linux_chromium_headless_rel.gn',
+ 'linux_chromium_headless_rel': 'headless_linux_release_trybot',
'linux_chromium_ozone_compile_only_ng': 'ozone_linux_release_trybot',
# This is intentionally a release_bot and not a release_trybot;
@@ -592,6 +613,7 @@
'ios-device': 'ios',
'ios-device-xcode-clang': 'ios',
'ios-simulator': 'ios',
+ 'ios-simulator-eg': 'ios',
'ios-simulator-cronet': 'ios',
'ios-simulator-xcode-clang': 'ios',
'mac_chromium_10.10_rel_ng': 'gpu_tests_release_trybot',
@@ -809,6 +831,10 @@
'android', 'release_bot', 'minimal_symbols', 'arm64',
],
+ 'android_release_thumb_bot': [
+ 'android', 'release_bot', 'arm_thumb',
+ ],
+
'android_release_trybot': [
'android', 'release_trybot',
],
@@ -825,10 +851,6 @@
'android', 'debug_static_bot', 'x86',
],
- 'android_debug_static_bot_vr_shell': [
- 'android', 'debug_static_bot', 'vr_shell',
- ],
-
'android_debug_static_minimal_symbols_mipsel': [
'android', 'debug', 'static', 'minimal_symbols', 'mipsel',
],
@@ -1026,11 +1048,11 @@
],
'chromeos_with_codecs_ozone_release_bot': [
- 'chromeos_with_codecs', 'ozone', 'release_bot',
+ 'chromeos_with_codecs', 'ozone', 'default_ozone_platform_x11', 'release_bot',
],
'chromeos_with_codecs_ozone_release_trybot': [
- 'chromeos_with_codecs', 'ozone', 'release_trybot',
+ 'chromeos_with_codecs', 'ozone', 'default_ozone_platform_x11', 'release_trybot',
],
'chromeos_with_codecs_release_bot': [
@@ -1190,7 +1212,7 @@
],
'gpu_fyi_tests_chromeos_ozone_release_trybot': [
- 'gpu_fyi_tests', 'release_trybot', 'chromeos', 'ozone',
+ 'gpu_fyi_tests', 'release_trybot', 'ozone', 'ozone_linux', 'system_gbm_libdrm',
],
'gpu_fyi_tests_chromeos_release_trybot': [
@@ -1209,6 +1231,10 @@
'gpu_fyi_tests', 'release_trybot', 'asan', 'full_symbols', 'disable_nacl',
],
+ 'gpu_fyi_tests_release_trybot_tsan': [
+ 'gpu_fyi_tests', 'release_trybot', 'tsan', 'full_symbols', 'disable_nacl',
+ ],
+
'gpu_fyi_tests_win_clang_debug_bot': [
'gpu_tests', 'internal_gles2_conform_tests', 'clang', 'debug_bot', 'minimal_symbols',
],
@@ -1257,6 +1283,14 @@
'gn_linux_upload', 'official', 'goma',
],
+ 'headless_linux_debug_bot': [
+ 'debug_bot', 'headless',
+ ],
+
+ 'headless_linux_release_trybot': [
+ 'release_trybot', 'headless',
+ ],
+
# The 'ios' config is just used for auditing. iOS bots
# actually use the ios recipes, not the chromium recipe, and look
# up their GN arguments via files checked in under //ios/build/bots.
@@ -1264,6 +1298,10 @@
# build files.
'ios': [ 'error'],
+ 'linux_chromium_analysis': [
+ 'analysis'
+ ],
+
'mac_views_browser_release_bot': [
'mac_views_browser', 'release_bot',
],
@@ -1480,6 +1518,14 @@
'ubsan_vptr', 'ubsan_no_recover_hack', 'release_trybot',
],
+ 'v8_future_debug_bot': [
+ 'v8_future', 'debug_bot',
+ ],
+
+ 'v8_future_release_bot': [
+ 'v8_future', 'release_bot',
+ ],
+
'win_clang_debug_bot': [
'clang', 'debug_bot', 'minimal_symbols',
],
@@ -1494,6 +1540,8 @@
'mixins': {
'afl': { 'gn_args': 'use_afl=true' },
+ 'analysis': { 'gn_args': 'use_clang_static_analyzer=true'},
+
# We build Android with codecs on most bots to ensure maximum test
# coverage, but use 'android_without_codecs' on bots responsible for
# building publicly advertised non-Official Android builds --
@@ -1527,6 +1575,10 @@
'gn_args': 'arm_use_neon=false',
},
+ 'arm_thumb': {
+ 'gn_args': 'arm_use_thumb=true',
+ },
+
'asan': {
'gn_args': 'is_asan=true',
},
@@ -1638,6 +1690,10 @@
'mixins': ['debug_bot', 'minimal_symbols'],
},
+ 'default_ozone_platform_x11': {
+ 'gn_args': 'ozone_platform="x11"',
+ },
+
'disable_nacl': {
'gn_args': 'enable_nacl=false',
},
@@ -1700,6 +1756,10 @@
'gn_args': 'use_system_xcode=false',
},
+ 'headless': {
+ 'args_file': '//build/args/headless.gn',
+ },
+
'hybrid': {
'gn_args': 'v8_target_cpu="arm" target_cpu="x86"',
'mixins': ['disable_nacl'],
@@ -1732,7 +1792,7 @@
},
'minimal_symbols': {
- 'gn_args': 'symbol_level=1',
+ 'gn_args': 'symbol_level=1 strip_absolute_paths_from_debug_symbols=true',
},
'mipsel': {
@@ -1781,7 +1841,8 @@
'ozone_linux': {
'gn_args': ('ozone_auto_platforms=false ozone_platform_wayland=true '
'ozone_platform_x11=true ozone_platform_gbm=true '
- 'enable_package_mash_services=true use_ash=false'),
+ 'enable_package_mash_services=true use_ash=false '
+ 'use_jessie_sysroot=true use_xkbcommon=true'),
},
'pdf_xfa': {
@@ -1824,6 +1885,10 @@
'gn_args': 'is_component_build=false',
},
+ 'system_gbm_libdrm': {
+ 'gn_args': 'use_system_libdrm=true use_system_minigbm=true',
+ },
+
'syzyasan': {
'gn_args': 'is_syzyasan=true',
},
@@ -1858,12 +1923,12 @@
'gn_args': 'use_lld=true',
},
- 'v8_heap': {
- 'gn_args': 'v8_enable_verify_heap=true',
+ 'v8_future': {
+ 'gn_args': 'v8_enable_future=true',
},
- 'vr_shell': {
- 'gn_args': 'enable_vr_shell=true',
+ 'v8_heap': {
+ 'gn_args': 'v8_enable_verify_heap=true',
},
'win_analyze': {
diff --git a/chromium/tools/nocompile_driver.py b/chromium/tools/nocompile_driver.py
index 1c48c4ba545..598e4130f6f 100755
--- a/chromium/tools/nocompile_driver.py
+++ b/chromium/tools/nocompile_driver.py
@@ -15,7 +15,6 @@ For more info, see:
import StringIO
import ast
-import locale
import os
import re
import select
@@ -66,15 +65,13 @@ RESULT_FILE_HEADER = """
"""
-# The GUnit test function to output on a successful test completion.
-SUCCESS_GUNIT_TEMPLATE = """
-TEST(%s, %s) {
- LOG(INFO) << "Took %f secs. Started at %f, ended at %f";
-}
+# The log message on a test completion.
+LOG_TEMPLATE = """
+TEST(%s, %s) took %f secs. Started at %f, ended at %f.
"""
-# The GUnit test function to output for a disabled test.
-DISABLED_GUNIT_TEMPLATE = """
+# The GUnit test function to output for a successful or disabled test.
+GUNIT_TEMPLATE = """
TEST(%s, %s) { }
"""
@@ -241,24 +238,25 @@ def StartTest(sourcefile_path, cflags, config):
'expectations': expectations}
-def PassTest(resultfile, test):
+def PassTest(resultfile, resultlog, test):
"""Logs the result of a test started by StartTest(), or a disabled test
configuration.
Args:
resultfile: File object for .cc file that results are written to.
+ resultlog: File object for the log file.
test: An instance of the dictionary returned by StartTest(), a
configuration from ExtractTestConfigs().
"""
+ resultfile.write(GUNIT_TEMPLATE % (
+ test['suite_name'], test['name']))
+
# The 'started_at' key is only added if a test has been started.
if 'started_at' in test:
- resultfile.write(SUCCESS_GUNIT_TEMPLATE % (
+ resultlog.write(LOG_TEMPLATE % (
test['suite_name'], test['name'],
test['finished_at'] - test['started_at'],
test['started_at'], test['finished_at']))
- else:
- resultfile.write(DISABLED_GUNIT_TEMPLATE % (
- test['suite_name'], test['name']))
def FailTest(resultfile, test, error, stdout=None, stderr=None):
@@ -285,31 +283,32 @@ def FailTest(resultfile, test, error, stdout=None, stderr=None):
resultfile.write('\n')
-def WriteStats(resultfile, suite_name, timings):
- """Logs the peformance timings for each stage of the script into a fake test.
+def WriteStats(resultlog, suite_name, timings):
+ """Logs the peformance timings for each stage of the script.
Args:
- resultfile: File object for .cc file that results are written to.
+ resultlog: File object for the log file.
suite_name: The name of the GUnit suite this test belongs to.
timings: Dictionary with timestamps for each stage of the script run.
"""
- stats_template = ("Started %f, Ended %f, Total %fs, Extract %fs, "
- "Compile %fs, Process %fs")
+ stats_template = """
+TEST(%s): Started %f, Ended %f, Total %fs, Extract %fs, Compile %fs, Process %fs
+"""
total_secs = timings['results_processed'] - timings['started']
extract_secs = timings['extract_done'] - timings['started']
compile_secs = timings['compile_done'] - timings['extract_done']
process_secs = timings['results_processed'] - timings['compile_done']
- resultfile.write('TEST(%s, Stats) { LOG(INFO) << "%s"; }\n' % (
- suite_name, stats_template % (
- timings['started'], timings['results_processed'], total_secs,
- extract_secs, compile_secs, process_secs)))
+ resultlog.write(stats_template % (
+ suite_name, timings['started'], timings['results_processed'], total_secs,
+ extract_secs, compile_secs, process_secs))
-def ProcessTestResult(resultfile, test):
+def ProcessTestResult(resultfile, resultlog, test):
"""Interprets and logs the result of a test started by StartTest()
Args:
resultfile: File object for .cc file that results are written to.
+ resultlog: File object for the log file.
test: The dictionary from StartTest() to process.
"""
# Snap a copy of stdout and stderr into the test dictionary immediately
@@ -333,14 +332,14 @@ def ProcessTestResult(resultfile, test):
# Check the output has the right expectations. If there are no
# expectations, then we just consider the output "matched" by default.
if len(test['expectations']) == 0:
- PassTest(resultfile, test)
+ PassTest(resultfile, resultlog, test)
return
# Otherwise test against all expectations.
for regexp in test['expectations']:
if (regexp.search(stdout) is not None or
regexp.search(stderr) is not None):
- PassTest(resultfile, test)
+ PassTest(resultfile, resultlog, test)
return
expectation_str = ', '.join(
["r'%s'" % regexp.pattern for regexp in test['expectations']])
@@ -350,7 +349,7 @@ def ProcessTestResult(resultfile, test):
return
-def CompleteAtLeastOneTest(resultfile, executing_tests):
+def CompleteAtLeastOneTest(executing_tests):
"""Blocks until at least one task is removed from executing_tests.
This function removes completed tests from executing_tests, logging failures
@@ -375,7 +374,7 @@ def CompleteAtLeastOneTest(resultfile, executing_tests):
read_set = []
for test in executing_tests.values():
read_set.extend([test['proc'].stderr, test['proc'].stdout])
- result = select.select(read_set, [], read_set, NCTEST_TERMINATE_TIMEOUT_SEC)
+ select.select(read_set, [], read_set, NCTEST_TERMINATE_TIMEOUT_SEC)
# Now attempt to process results.
now = time.time()
@@ -425,6 +424,7 @@ def main():
timings['extract_done'] = time.time()
resultfile = StringIO.StringIO()
+ resultlog = StringIO.StringIO()
resultfile.write(RESULT_FILE_HEADER % sourcefile_path)
# Run the no-compile tests, but ensure we do not run more than |parallelism|
@@ -447,10 +447,10 @@ def main():
# acts as a semaphore. We cannot use threads + a real semaphore because
# subprocess forks, which can cause all sorts of hilarity with threads.
if len(executing_tests) >= parallelism:
- finished_tests.extend(CompleteAtLeastOneTest(resultfile, executing_tests))
+ finished_tests.extend(CompleteAtLeastOneTest(executing_tests))
if config['name'].startswith('DISABLED_'):
- PassTest(resultfile, config)
+ PassTest(resultfile, resultlog, config)
else:
test = StartTest(sourcefile_path, cflags, config)
assert test['name'] not in executing_tests
@@ -459,9 +459,10 @@ def main():
# If there are no more test to start, we still need to drain the running
# ones.
while len(executing_tests) > 0:
- finished_tests.extend(CompleteAtLeastOneTest(resultfile, executing_tests))
+ finished_tests.extend(CompleteAtLeastOneTest(executing_tests))
timings['compile_done'] = time.time()
+ finished_tests = sorted(finished_tests, key=lambda test: test['name'])
for test in finished_tests:
if test['name'] == 'NCTEST_SANITY':
_, stderr = test['proc'].communicate()
@@ -469,11 +470,13 @@ def main():
if return_code != 0:
sys.stderr.write(stderr)
continue
- ProcessTestResult(resultfile, test)
+ ProcessTestResult(resultfile, resultlog, test)
timings['results_processed'] = time.time()
- WriteStats(resultfile, suite_name, timings)
+ WriteStats(resultlog, suite_name, timings)
+ with open(resultfile_path + '.log', 'w') as fd:
+ fd.write(resultlog.getvalue())
if return_code == 0:
with open(resultfile_path, 'w') as fd:
fd.write(resultfile.getvalue())
diff --git a/chromium/tools/origin_trials/generate_token.py b/chromium/tools/origin_trials/generate_token.py
index a79f169234b..ea529847b2e 100755
--- a/chromium/tools/origin_trials/generate_token.py
+++ b/chromium/tools/origin_trials/generate_token.py
@@ -179,6 +179,8 @@ def main():
print " Is Subdomain: %s" % args.is_subdomain
print " Feature: %s" % args.trial_name
print " Expiry: %d (%s UTC)" % (expiry, datetime.utcfromtimestamp(expiry))
+ print " Signature: %s" % ", ".join('0x%02x' % ord(x) for x in signature)
+ print " Signature (Base64): %s" % base64.b64encode(signature)
print
# Output the properly-formatted token.
diff --git a/chromium/tools/origin_trials/third_party/ed25519/OWNERS b/chromium/tools/origin_trials/third_party/ed25519/OWNERS
index d9278608565..ae089aae454 100644
--- a/chromium/tools/origin_trials/third_party/ed25519/OWNERS
+++ b/chromium/tools/origin_trials/third_party/ed25519/OWNERS
@@ -1,3 +1,6 @@
dhnishi@chromium.org
iclelland@chromium.org
mek@chromium.org
+
+# TEAM: experimentation-dev@chromium.org
+# COMPONENT: Internals>OriginTrials
diff --git a/chromium/tools/protoc_wrapper/protoc_wrapper.py b/chromium/tools/protoc_wrapper/protoc_wrapper.py
index 8eac88409c6..6a25de0ed5a 100755
--- a/chromium/tools/protoc_wrapper/protoc_wrapper.py
+++ b/chromium/tools/protoc_wrapper/protoc_wrapper.py
@@ -89,7 +89,9 @@ def main(argv):
help="Standard C++ generator options.")
parser.add_argument("--include",
help="Name of include to insert into generated headers.")
-
+ parser.add_argument("--import-dir", action="append", default=[],
+ help="Extra import directory for protos, can be repeated."
+ )
parser.add_argument("protos", nargs="+",
help="Input protobuf definition file(s).")
@@ -121,7 +123,11 @@ def main(argv):
]
protoc_cmd += ["--proto_path", proto_dir]
+ for path in options.import_dir:
+ protoc_cmd += ["--proto_path", path]
+
protoc_cmd += [os.path.join(proto_dir, name) for name in protos]
+
ret = subprocess.call(protoc_cmd)
if ret != 0:
raise RuntimeError("Protoc has returned non-zero status: "
diff --git a/chromium/tools/resource_prefetch_predictor/generate_test_data.py b/chromium/tools/resource_prefetch_predictor/generate_test_data.py
index 39f8c35da5a..89986f1b68d 100755
--- a/chromium/tools/resource_prefetch_predictor/generate_test_data.py
+++ b/chromium/tools/resource_prefetch_predictor/generate_test_data.py
@@ -33,6 +33,10 @@ import prefetch_predictor_common
_PAGE_LOAD_TIMEOUT = 40
+_LEARNING_FLAGS = [
+ '--force-fieldtrials=trial/group',
+ '--force-fieldtrial-params=trial.group:mode/learning',
+ '--enable-features=SpeculativeResourcePrefetching<trial']
def _CreateArgumentParser():
@@ -98,8 +102,7 @@ def main():
logging.error('Could not find device: %s.', args.device)
sys.exit(1)
- chrome_controller = prefetch_predictor_common.Setup(
- device, ['--speculative-resource-prefetching=learning'])
+ chrome_controller = prefetch_predictor_common.Setup(device, _LEARNING_FLAGS)
_GenerateDatabase(chrome_controller, args.urls_filename,
args.output_filename, int(args.url_repeat))
_GenerateWprArchive(device, args.test_url, args.wpr_archive)
diff --git a/chromium/tools/resource_prefetch_predictor/prefetch_benchmark.py b/chromium/tools/resource_prefetch_predictor/prefetch_benchmark.py
index b92d4bf76da..e31bd1d1ad2 100755
--- a/chromium/tools/resource_prefetch_predictor/prefetch_benchmark.py
+++ b/chromium/tools/resource_prefetch_predictor/prefetch_benchmark.py
@@ -34,10 +34,6 @@ from devil.android.sdk import intent
import prefetch_predictor_common
-_EXTERNAL_PREFETCH_FLAG = (
- '--speculative-resource-prefetching=enabled-external-only')
-
-
def _CreateArgumentParser():
"""Creates and returns the argument parser."""
parser = argparse.ArgumentParser(
@@ -63,7 +59,7 @@ def _CreateArgumentParser():
def _Setup(device, database_filename):
"""Sets up a device and returns an instance of RemoteChromeController."""
- chrome_controller = prefetch_predictor_common.Setup(device, [''])
+ chrome_controller = prefetch_predictor_common.Setup(device)
chrome_package = OPTIONS.ChromePackage()
device.ForceStop(chrome_package.package)
chrome_controller.ResetBrowserState()
@@ -72,9 +68,8 @@ def _Setup(device, database_filename):
# Make sure that the speculative prefetch predictor is enabled to ensure
# that the disk database is re-created.
- command_line_path = '/data/local/tmp/chrome-command-line'
with device_setup.FlagReplacer(
- device, command_line_path, ['--disable-fre', _EXTERNAL_PREFETCH_FLAG]):
+ device, chrome_package.cmdline_file, ['--disable-fre']):
# Launch Chrome for the first time to recreate the local state.
launch_intent = intent.Intent(
action='android.intent.action.MAIN',
@@ -104,8 +99,11 @@ def _RunOnce(device, database_filename, url, prefetch_delay_ms,
# Startup tracing to ease debugging.
chrome_args = (customtabs_benchmark.CHROME_ARGS
+ ['--trace-startup', '--trace-startup-duration=20'])
- if not disable_prefetch:
- chrome_args.append(_EXTERNAL_PREFETCH_FLAG)
+ # Speculative Prefetch is enabled through an experiment.
+ chrome_args.extend([
+ '--force-fieldtrials=trial/group',
+ '--force-fieldtrial-params=trial.group:mode/external-prefetching',
+ '--enable-features=SpeculativeResourcePrefetching<trial'])
chrome_controller = controller.RemoteChromeController(device)
device.ForceStop(OPTIONS.ChromePackage().package)
diff --git a/chromium/tools/roll_webgl_conformance.py b/chromium/tools/roll_webgl_conformance.py
index 25afe071337..7f0bd95e1ab 100755
--- a/chromium/tools/roll_webgl_conformance.py
+++ b/chromium/tools/roll_webgl_conformance.py
@@ -52,6 +52,8 @@ TRYJOB_STATUS_SLEEP_SECONDS = 30
# Use a shell for subcommands on Windows to get a PATH search.
IS_WIN = sys.platform.startswith('win')
WEBGL_PATH = os.path.join('third_party', 'webgl', 'src')
+WEBGL_REVISION_TEXT_FILE = os.path.join(
+ 'content', 'test', 'gpu', 'gpu_tests', 'webgl_conformance_revision.txt')
CommitInfo = collections.namedtuple('CommitInfo', ['git_commit',
'git_repo_url'])
@@ -102,11 +104,12 @@ def _GenerateCLDescriptionCommand(webgl_current, webgl_new, bugs):
bug_str += str(bug) + ','
return bug_str.rstrip(',')
- if webgl_current.git_commit != webgl_new.git_commit:
- change_str = GetChangeString(webgl_current.git_commit,
- webgl_new.git_commit)
- changelog_url = GetChangeLogURL(webgl_current.git_repo_url,
- change_str)
+ change_str = GetChangeString(webgl_current.git_commit,
+ webgl_new.git_commit)
+ changelog_url = GetChangeLogURL(webgl_current.git_repo_url,
+ change_str)
+ if webgl_current.git_commit == webgl_new.git_commit:
+ print 'WARNING: WebGL repository is unchanged; proceeding with no-op roll'
def GetExtraTrybotString():
s = ''
@@ -273,6 +276,7 @@ class AutoRoller(object):
self._RunCommand(['git', 'config', 'core.autocrlf', 'true'])
self._UpdateDep(deps_filename, WEBGL_PATH, webgl_latest)
+ self._UpdateWebGLRevTextFile(WEBGL_REVISION_TEXT_FILE, webgl_latest)
if self._IsTreeClean():
logging.debug('Tree is clean - no changes detected.')
@@ -322,6 +326,19 @@ class AutoRoller(object):
commit_info.git_commit, '')
os.chdir(cwd)
+ def _UpdateWebGLRevTextFile(self, txt_filename, commit_info):
+ # Rolling the WebGL conformance tests must cause at least all of
+ # the WebGL tests to run. There are already exclusions in
+ # trybot_analyze_config.json which force all tests to run if
+ # changes under src/content/test/gpu are made. (This rule
+ # typically only takes effect on the GPU bots.) To make sure this
+ # happens all the time, update an autogenerated text file in this
+ # directory.
+ with open(txt_filename, 'w') as fh:
+ print >> fh, '# AUTOGENERATED FILE - DO NOT EDIT'
+ print >> fh, '# SEE roll_webgl_conformance.py'
+ print >> fh, 'Current webgl revision %s' % commit_info.git_commit
+
def _DeleteRollBranch(self):
self._RunCommand(['git', 'checkout', 'master'])
self._RunCommand(['git', 'branch', '-D', ROLL_BRANCH_NAME])
diff --git a/chromium/tools/security/OWNERS b/chromium/tools/security/OWNERS
index 0102bf5c5d5..ffed83d78e4 100644
--- a/chromium/tools/security/OWNERS
+++ b/chromium/tools/security/OWNERS
@@ -1,2 +1,4 @@
jschuh@chromium.org
tsepez@chromium.org
+
+# COMPONENT: Security
diff --git a/chromium/tools/traffic_annotation/DEPS b/chromium/tools/traffic_annotation/DEPS
new file mode 100644
index 00000000000..efc7233addb
--- /dev/null
+++ b/chromium/tools/traffic_annotation/DEPS
@@ -0,0 +1,4 @@
+include_rules = [
+ "+net/traffic_annotation",
+]
+
diff --git a/chromium/tools/traffic_annotation/OWNERS b/chromium/tools/traffic_annotation/OWNERS
new file mode 100644
index 00000000000..3e10541b42c
--- /dev/null
+++ b/chromium/tools/traffic_annotation/OWNERS
@@ -0,0 +1,2 @@
+battre@chromium.org
+msramek@chromium.org \ No newline at end of file
diff --git a/chromium/tools/traffic_annotation/sample_traffic_annotation.cc b/chromium/tools/traffic_annotation/sample_traffic_annotation.cc
new file mode 100644
index 00000000000..45a191c2d65
--- /dev/null
+++ b/chromium/tools/traffic_annotation/sample_traffic_annotation.cc
@@ -0,0 +1,72 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "net/traffic_annotation/network_traffic_annotation.h"
+
+// This file includes a sample and a template for text-coded traffic_annotation.
+// For more description on each field, please refer to:
+// tools/traffic_annotation/traffic_annotation.proto
+// and
+// out/Debug/gen/components/policy/proto/cloud_policy.proto
+// For more information on policies, please refer to:
+// http://dev.chromium.org/administrators/policy-list-3
+
+void network_traffic_annotation_sample() {
+ net::NetworkTrafficAnnotationTag traffic_annotation =
+ net::DefineNetworkTrafficAnnotation("spellcheck_lookup", R"(
+ semantics {
+ sender: "Online Spellcheck"
+ description:
+ "Google Chrome can provide smarter spell-checking by sending "
+ "text you type into the browser to Google's servers, allowing "
+ "you to use the same spell-checking technology used by Google "
+ "products, such as Docs. If the feature is enabled, Chrome will "
+ "send the entire contents of text fields as you type in them to "
+ "Google along with the browser’s default language. Google "
+ "returns a list of suggested spellings, which will be displayed "
+ "in the context menu."
+ trigger: "User types text into a text field or asks to correct a "
+ "misspelled word."
+ data: "Text a user has typed into a text field. No user identifier "
+ "is sent along with the text."
+ destination: GOOGLE_OWNED_SERVICE
+ }
+ policy {
+ cookies_allowed: false
+ setting:
+ "You can enable or disable this feature via 'Use a web service to "
+ "help resolve spelling errors.' in Chrome's settings under "
+ "Advanced. The feature is disabled by default."
+ policy {
+ SpellCheckServiceEnabled {
+ policy_options {mode: MANDATORY}
+ value: false
+ }
+ }
+ })");
+}
+
+void network_traffic_annotation_template() {
+ net::NetworkTrafficAnnotationTag traffic_annotation =
+ net::DefineNetworkTrafficAnnotation("...", R"(
+ semantics {
+ sender: "..."
+ description: "..."
+ trigger: "..."
+ data: "..."
+ destination: WEBSITE/GOOGLE_OWNED_SERVICE/OTHER
+ }
+ policy {
+ cookies_allowed: false/true
+ cookies_store: "..."
+ setting: "..."
+ policy {
+ [POLICY_NAME] {
+ policy_options {mode: MANDATORY/RECOMMENDED/UNSET}
+ value: ...
+ }
+ }
+ policy_exception_justification = "..."
+ })");
+} \ No newline at end of file
diff --git a/chromium/tools/traffic_annotation/traffic_annotation.proto b/chromium/tools/traffic_annotation/traffic_annotation.proto
new file mode 100644
index 00000000000..8f8d43c32f5
--- /dev/null
+++ b/chromium/tools/traffic_annotation/traffic_annotation.proto
@@ -0,0 +1,185 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+syntax = "proto3";
+package traffic_annotation;
+
+// cloud_policy_full_runtime.proto is a version of the following proto without
+// lite runtime optimization:
+// out/Debug/gen/components/policy/proto/cloud_policy.proto
+import "cloud_policy_full_runtime.proto";
+
+// Describes a specific kind of network traffic based on a fine-grained
+// semantic classification of all network traffic generated by Chrome.
+// Used for auditing purposes.
+message NetworkTrafficAnnotation {
+ // This is a globally unique identifier that must stay unchanged while the
+ // network request carries the same semantic meaning. If the network request
+ // gets a new meaning, this ID needs to be changed.
+ // The purpose of this ID is to give humans a chance to reference
+ // NetworkTrafficAnnotations externally even when those change a little bit
+ // (e.g. adding a new piece of data that is sent along with a network
+ // request).
+ // IDs of one component should have a shared prefix so that sorting all
+ // NetworkTrafficAnnotations by unique_id groups those that belong to the same
+ // component together.
+ // For example:
+ // "spellchecker_lookup"
+ string unique_id = 1;
+
+ // Encapsulates information about the code location that generates this kind
+ // of network traffic.
+ message TrafficSource {
+ // File name where the network request is triggered.
+ // This is typically filled by the extractor and does not need to be
+ // specified in the source code. For manual whitelisting this needs to be
+ // specified.
+ string file = 1;
+
+ // Function name where the network request is instantiated.
+ // This is typically filled by the extractor and does not need to be
+ // specified in the source code. For manual whitelisting this needs to be
+ // specified.
+ string function = 2;
+
+ // __LINE__ in file, where the AuditPolicy object is instantiated.
+ // This is typically filled by the extractor and does not need to be
+ // specified in the source code.
+ // For whitelisted network requests in third_party/ that cannot be properly
+ // annotated in the source code, this attribute is empty.
+ int32 line = 3;
+
+ // For whitelisted network requests in third_party/ that cannot be properly
+ // annotated in the source code, this distinguishes between the first,
+ // second, ... annotated call.
+ // For annotations in the source code, this is not used because the line
+ // attribute uniquely identifies the network request.
+ int32 call_number = 4;
+ }
+
+ TrafficSource source = 2;
+
+ // Meta information about the network request.
+ message TrafficSemantics {
+ // Justification for an empty AuditPolicy policy.
+ // Typically this can be either a TODO or a hint that the annotation is
+ // made upstream in the code. For example, if net::URLFetcher::Create() has
+ // an annotation, the net::TCPClientSocket() that is used by the URLFetcher
+ // does not need to be annotated as well.
+ string empty_policy_justification = 1;
+
+ // What component triggers the request. The components should be human
+ // readable and don’t need to reflect the components/ directory. Avoid
+ // abbreviations.
+ // Examples: spellchecker, component updater, website
+ string sender = 2;
+
+ // Plaintext description of the network request in language that is
+ // understandable by admins (ideally also users). Please avoid acronyms.
+ // Please describe the feature and the feature's value proposition as well.
+ // Examples:
+ // - Google Chrome can provide smarter spell-checking by sending text you
+ // type into the browser to Google's servers, allowing you to use the same
+ // spell-checking technology used by Google products, such as Docs.
+ // If the feature is enabled, Chrome will send the entire contents of text
+ // fields as you type in them to Google along with the browser’s default
+ // language. Google returns a list of suggested spellings, which will be
+ // displayed in the context menu.
+ // - A network request that comes from web content (a page the user visits)
+ string description = 3;
+
+ // What triggered the network request. Use a textual description. This
+ // should be a human readable string.
+ // For things that are clearly part of the website (resource load, form
+ // submission, fetch by a service worker,...), you *may* just put “website”
+ // here.
+ string trigger = 4;
+
+ // What nature of data is being sent. This should be a human readable
+ // string. Any user data and/or PII should be pointed out.
+ // Examples: “log files from /var/...”, “statistics about foobar”, “the
+ // signature of a form of a website”, “installed extensions and their
+ // version”, “a word on a website the user tapped on”
+ string data = 5;
+
+ enum Destination {
+ // A website the user visits or interacts with. The distinction from a
+ // google owned service can be difficult when the user navigates to
+ // google.com or searches with the omnibar. Therefore follow the following
+ // guideline: If the source code has hardcoded that the request goes to
+ // Google (e.g. for ZeroSuggest), use GOOGLE_OWNED_SERVICE. If the request
+ // can go to other domains and is perceived as a part of a website rather
+ // than a native browser feature, use WEBSITE. In other cases use OTHER.
+ WEBSITE = 0;
+ // A Google owned service, like SafeBrowsing, spellchecking, ...
+ GOOGLE_OWNED_SERVICE = 1;
+ // Other endpoints, e.g. a service hosting a PAC script. In case of doubt,
+ // use this category. We will audit it in the future to see whether we
+ // need more categories.
+ OTHER = 1000;
+ }
+ Destination destination = 6;
+
+ // Human readable description in case the destination points to OTHER.
+ string destination_other = 7;
+ }
+
+ TrafficSemantics semantics = 3;
+
+ message TrafficPolicy {
+ // Whether cookies/channel IDs/... can be sent or saved (use true if at
+ // least one is correct).
+ bool cookies_allowed = 1;
+
+ // If a request sends or stores cookies/channel IDs/... (i.e. if
+ // cookies_allowed is true), we want to know which cookie store is being
+ // used. The answer to this question can typically be derived from the
+ // URLRequestContext that is being used.
+ // The three most common cases will be:
+ // - If cookies_allowed is false, leave this field unset.
+ // - If the profile's default URLRequestContext is being used (e.g. from
+ // Profile::GetRequestContext()), this means that the user's normal
+ // cookies are sent. In this case, put "user" here.
+ // - If the system URLRequestContext is being used (for example via
+ // io_thread()->system_url_request_context_getter()), put "system" here.
+ // Otherwise, please explain (e.g. SafeBrowsing uses a separate cookie
+ // store).
+ string cookies_store = 2;
+
+ // Human readable description of how to enable/disable a feature that
+ // triggers this network request by a user. Use “NA”, if no such setting
+ // exists (e.g. “Disable ‘Use a web service to help resolve spelling
+ // errors.’ in Chrome’s settings under Advanced”).
+ string setting = 3;
+
+ // Example policy configuration that disables this network request.
+ // This would be a text serialized protobuf of any enterprise policy.
+ // see out/Debug/gen/components/policy/proto/cloud_policy.proto
+ repeated enterprise_management.CloudPolicySettings policy = 4;
+
+ // Justification for not having a policy that disables this feature.
+ string policy_exception_justification = 5;
+ }
+
+ TrafficPolicy policy = 4;
+};
+
+// NetworkTrafficAnnotations that were extracted from the source code.
+message ExtractedNetworkTrafficAnnotation {
+ repeated NetworkTrafficAnnotation network_traffic_annotation = 1;
+};
+
+// NetworkTrafficAnnotations that had to go into a whitelist file because the
+// source code could not be annotated (e.g. because it is in a third-party
+// library).
+message WhitelistedNetworkTrafficAnnotations {
+ repeated NetworkTrafficAnnotation network_traffic_annotation = 1;
+};
+
+// All NetworkTrafficAnnotations from a Chromium configuration.
+message NetworkTrafficAnnotations {
+ ExtractedNetworkTrafficAnnotation extracted_network_traffic_annotations = 1;
+ WhitelistedNetworkTrafficAnnotations whitelisted_network_traffic_annotations =
+ 2;
+};
diff --git a/chromium/tools/valgrind/memcheck/suppressions.txt b/chromium/tools/valgrind/memcheck/suppressions.txt
index 0e1d0db0494..6fc159861a1 100644
--- a/chromium/tools/valgrind/memcheck/suppressions.txt
+++ b/chromium/tools/valgrind/memcheck/suppressions.txt
@@ -1046,19 +1046,19 @@
{
bug_84770_a
Memcheck:Unaddressable
- fun:_ZN5blink21FrameLoaderClientImpl12allowPluginsEb
+ fun:_ZN5blink21LocalFrameClientImpl12allowPluginsEb
fun:_ZN5blink14SubframeLoader12allowPluginsENS_28ReasonForCallingAllowPluginsE
}
{
bug_84770_b
Memcheck:Unaddressable
- fun:_ZN5blink21FrameLoaderClientImpl15allowJavaScriptEb
+ fun:_ZN5blink21LocalFrameClientImpl15allowJavaScriptEb
fun:_ZN5blink16ScriptController17canExecuteScriptsENS_33ReasonForCallingCanExecuteScriptsE
}
{
bug_84770_c
Memcheck:Unaddressable
- fun:_ZN5blink21FrameLoaderClientImpl20allowScriptExtensionERKN3WTF6StringEi
+ fun:_ZN5blink21LocalFrameClientImpl20allowScriptExtensionERKN3WTF6StringEi
fun:_ZN5blink16V8DOMWindowShell16createNewContextEN2v86HandleINS1_6ObjectEEEi
}
{
@@ -2262,7 +2262,7 @@
fun:_ZN7content14RenderViewImpl20didClearWindowObjectEPN5blink13WebLocalFrameE
fun:_ZN7content15RenderFrameImpl20didClearWindowObjectEPN5blink13WebLocalFrameE
fun:_ZThn16_N7content15RenderFrameImpl20didClearWindowObjectEPN5blink13WebLocalFrameE
- fun:_ZN5blink21FrameLoaderClientImpl39dispatchDidClearWindowObjectInMainWorldEv
+ fun:_ZN5blink21LocalFrameClientImpl39dispatchDidClearWindowObjectInMainWorldEv
fun:_ZN5blink11FrameLoader39dispatchDidClearWindowObjectInMainWorldEv
fun:_ZN5blink16ScriptController11windowShellERNS_15DOMWrapperWorldE
fun:_ZN5blink11toV8ContextEPNS_10LocalFrameERNS_15DOMWrapperWorldE
diff --git a/chromium/tools/vim/OWNERS b/chromium/tools/vim/OWNERS
index 4480539fab6..80cc25371f1 100644
--- a/chromium/tools/vim/OWNERS
+++ b/chromium/tools/vim/OWNERS
@@ -1,3 +1,5 @@
asanka@chromium.org
eroman@chromium.org
scottmg@chromium.org
+
+# COMPONENT: Tools
diff --git a/chromium/tools/vim/ninja_output.py b/chromium/tools/vim/ninja_output.py
index af30520d0b0..f959dc2e18a 100644
--- a/chromium/tools/vim/ninja_output.py
+++ b/chromium/tools/vim/ninja_output.py
@@ -5,10 +5,13 @@
import sys
import os
-import exceptions
import itertools
import re
+try:
+ from exceptions import RuntimeError
+except ImportError:
+ pass
def GetNinjaOutputDirectory(chrome_root):
"""Returns <chrome_root>/<output_dir>/(Release|Debug|<other>).
@@ -51,15 +54,21 @@ def GetNinjaOutputDirectory(chrome_root):
def approx_directory_mtime(path):
# This is a heuristic; don't recurse into subdirectories.
paths = [path] + [os.path.join(path, f) for f in os.listdir(path)]
- return max(os.path.getmtime(p) for p in paths)
+ return max(filter(None, [safe_mtime(p) for p in paths]))
+
+ def safe_mtime(path):
+ try:
+ return os.path.getmtime(path)
+ except OSError:
+ return None
try:
return max(generate_paths(), key=approx_directory_mtime)
except ValueError:
- raise exceptions.RuntimeError(
+ raise RuntimeError(
'Unable to find a valid ninja output directory.')
if __name__ == '__main__':
if len(sys.argv) != 2:
- raise exceptions.RuntimeError('Expected a single path argument.')
- print GetNinjaOutputDirectory(sys.argv[1])
+ raise RuntimeError('Expected a single path argument.')
+ print(GetNinjaOutputDirectory(sys.argv[1]))
diff --git a/chromium/tools/win/DebugVisualizers/chrome.natvis b/chromium/tools/win/DebugVisualizers/chrome.natvis
index 7a7c45e1ac1..fc9f9a9c2bc 100644
--- a/chromium/tools/win/DebugVisualizers/chrome.natvis
+++ b/chromium/tools/win/DebugVisualizers/chrome.natvis
@@ -228,7 +228,7 @@
<Item Name="Type">type_</Item>
</Expand>
</Type>
- <Type Name="base::FundamentalValue">
+ <Type Name="base::Value">
<DisplayString>Fundamental</DisplayString>
<Expand>
<ExpandedItem>(base::Value*)this,nd</ExpandedItem>
diff --git a/chromium/tools/win/ShowThreadNames/ReadMe.txt b/chromium/tools/win/ShowThreadNames/ReadMe.txt
new file mode 100644
index 00000000000..63aa0cf8243
--- /dev/null
+++ b/chromium/tools/win/ShowThreadNames/ReadMe.txt
@@ -0,0 +1,61 @@
+[Motivation]
+This tool is designed to test the usage of the SetThreadDescription WinAPI in
+Chrome. In Chrome, the SetThreadDescription API has been enabled to set thread
+names. However, since there is no tool support to retrieve thread names set by
+GetThreadDescription, we will still rely on SetNameInternal function in
+platform_thread_win.cc to set thread names. Despite this, we need a tool to
+demo the SetThreadDescription API works, even without the debugger to be
+present.
+
+The problem setting can be referred to
+https://bugs.chromium.org/p/chromium/issues/detail?id=684203
+
+This tool incorporates the GetThreadDescription API trying to get names of all
+threads in a process specified by its ID. If the thread names have been set by
+SetThreadDescription API call like in Chrome, all thread ID/name pairs are
+returned.
+
+[Requirement]
+Since SetThreadDescription/GetThreadDescription APIs are brought in Windows 10,
+version 1607, this tool can only be effective if running in this version or
+later ones.
+
+[How to use it]
+Please download the three files (.cc, .sln, .vcxproj) and compile the code in
+Visual Studio. Run "ShowThreadNames.exe" either from the build directory or
+from Visual Studio. No parameters are needed. This tool allows interaction
+with users. Once launched, it will show "Please enter the process Id, or
+"quit" to end the program :" on the terminal. Simply type in the ID of any
+Chrome process you are interested in, and you will get output like below:
+
+thread_ID thread_name
+12116
+10292
+6532
+6928
+2488
+11304
+2256 AudioThread
+9308 BrokerEvent
+5668 BrowserWatchdog
+4352 Chrome_CacheThread
+12268 Chrome_DBThread
+8616 Chrome_FileThread
+1072 Chrome_FileUserBlockingThread
+8280 Chrome_HistoryThread
+7472 Chrome_IOThread
+6336 Chrome_ProcessLauncherThread
+12212 CompositorTileWorker1/12212
+3628 CrBrowserMain
+6472 DnsConfigService
+1980 IndexedDB
+10560 TaskSchedulerBackgroundBlockingWorker0
+11464 TaskSchedulerBackgroundWorker0
+3156 TaskSchedulerForegroundBlockingWorker5
+7660 TaskSchedulerForegroundWorker0
+8216 TaskSchedulerServiceThread
+11088 VideoCaptureThread
+
+The threads have been sorted by their names. Note that some threads have
+no names in this example. If checking them using Visual Studio debugger, it
+is found that they are ntdll.dll!WorkerThreads.
diff --git a/chromium/tools/win/ShowThreadNames/ShowThreadNames.cc b/chromium/tools/win/ShowThreadNames/ShowThreadNames.cc
new file mode 100644
index 00000000000..b083f2feef8
--- /dev/null
+++ b/chromium/tools/win/ShowThreadNames/ShowThreadNames.cc
@@ -0,0 +1,142 @@
+// Copyright (c) 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <windows.h>
+
+#include <stdio.h>
+#include <tchar.h>
+#include <tlhelp32.h>
+
+#include <algorithm>
+#include <iostream>
+#include <iterator>
+#include <map>
+#include <sstream>
+#include <string>
+
+// List all thread names in a process specified.
+BOOL ListProcessThreadNames(DWORD owner_pid);
+// Print the error message.
+void printError(TCHAR* msg);
+
+// The GetThreadDescription API is available since Windows 10, version 1607.
+// The reason why this API is bound in this way rather than just using the
+// Windows SDK, is that this API isn't yet available in the SDK that Chrome
+// builds with.
+// Binding SetThreadDescription API in Chrome can only be done by
+// GetProcAddress, rather than the import library.
+typedef HRESULT(WINAPI* GETTHREADDESCRIPTION)(HANDLE hThread,
+ PWSTR* threadDescription);
+
+int main(void) {
+ DWORD process_Id;
+ std::string user_input;
+ while (true) {
+ std::cout
+ << "\nPlease enter the process Id, or \"quit\" to end the program : ";
+ std::getline(std::cin, user_input);
+ // Convert the user input to lower case.
+ std::transform(user_input.begin(), user_input.end(), user_input.begin(),
+ ::tolower);
+ if (user_input == "quit")
+ break;
+ std::cout << std::endl;
+ std::stringstream ss(user_input);
+ if (ss >> process_Id) {
+ ListProcessThreadNames(process_Id);
+ } else {
+ std::cout << "Input is invalid" << std::endl;
+ }
+ std::cout << std::endl;
+ }
+ return 0;
+}
+
+BOOL ListProcessThreadNames(DWORD owner_pid) {
+ auto get_thread_description_func =
+ reinterpret_cast<GETTHREADDESCRIPTION>(::GetProcAddress(
+ ::GetModuleHandle(L"Kernel32.dll"), "GetThreadDescription"));
+
+ if (!get_thread_description_func) {
+ printError(TEXT("GetThreadDescription"));
+ return (FALSE);
+ }
+
+ HANDLE thread_snapshot = INVALID_HANDLE_VALUE;
+ // Take a snapshot of all running threads.
+ thread_snapshot = CreateToolhelp32Snapshot(TH32CS_SNAPTHREAD, 0);
+ if (thread_snapshot == INVALID_HANDLE_VALUE) {
+ printError(TEXT("CreateToolhelp32Snapshot"));
+ return (FALSE);
+ }
+
+ THREADENTRY32 te32;
+ te32.dwSize = sizeof(THREADENTRY32);
+
+ // Retrieve information about the first thread, and exit if unsuccessful.
+ if (!Thread32First(thread_snapshot, &te32)) {
+ printError(TEXT("Thread32First"));
+ CloseHandle(thread_snapshot);
+ return (FALSE);
+ }
+
+ // Walk the thread list of the system, and display ID and name about each
+ // thread associated with the process specified.
+ std::cout << "thread_ID thread_name" << std::endl;
+ std::multimap<std::wstring, DWORD> name_id_map;
+ do {
+ if (te32.th32OwnerProcessID == owner_pid) {
+ HANDLE thread_handle =
+ OpenThread(THREAD_QUERY_INFORMATION, FALSE, te32.th32ThreadID);
+ if (thread_handle) {
+ PWSTR data;
+ HRESULT hr = get_thread_description_func(thread_handle, &data);
+ if (SUCCEEDED(hr)) {
+ std::wstring thread_name(data);
+ LocalFree(data);
+ name_id_map.insert(std::make_pair(thread_name, te32.th32ThreadID));
+ } else {
+ printError(TEXT("GetThreadDescription"));
+ }
+ CloseHandle(thread_handle);
+ } else {
+ printError(TEXT("OpenThread"));
+ }
+ }
+ } while (Thread32Next(thread_snapshot, &te32));
+
+ // Clean up the snapshot object.
+ CloseHandle(thread_snapshot);
+
+ // Show all thread ID/name pairs.
+ for (auto name_id_pair : name_id_map) {
+ std::cout << name_id_pair.second << "\t";
+ std::wcout << name_id_pair.first << std::endl;
+ }
+
+ return (TRUE);
+}
+
+void printError(TCHAR* msg) {
+ DWORD eNum;
+ TCHAR sysMsg[256];
+ TCHAR* p;
+
+ eNum = GetLastError();
+ FormatMessage(FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_IGNORE_INSERTS,
+ NULL, eNum, MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT), sysMsg,
+ 256, NULL);
+
+ // Trim the end of the line and terminate it with a null.
+ p = sysMsg;
+ while ((*p > 31) || (*p == 9))
+ ++p;
+ do {
+ *p-- = 0;
+ } while ((p >= sysMsg) && ((*p == '.') || (*p < 33)));
+
+ // Display the message.
+ _tprintf(TEXT("\n WARNING: %s failed with error %d (%s)"), msg, eNum,
+ sysMsg);
+}
diff --git a/chromium/tools/win/ShowThreadNames/ShowThreadNames.sln b/chromium/tools/win/ShowThreadNames/ShowThreadNames.sln
new file mode 100644
index 00000000000..5c7717543f6
--- /dev/null
+++ b/chromium/tools/win/ShowThreadNames/ShowThreadNames.sln
@@ -0,0 +1,28 @@
+
+Microsoft Visual Studio Solution File, Format Version 12.00
+# Visual Studio 14
+VisualStudioVersion = 14.0.25420.1
+MinimumVisualStudioVersion = 10.0.40219.1
+Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "ShowThreadNames", "ShowThreadNames.vcxproj", "{49B5C5D6-4C69-4C38-B559-DF27E2A48302}"
+EndProject
+Global
+ GlobalSection(SolutionConfigurationPlatforms) = preSolution
+ Debug|x64 = Debug|x64
+ Debug|x86 = Debug|x86
+ Release|x64 = Release|x64
+ Release|x86 = Release|x86
+ EndGlobalSection
+ GlobalSection(ProjectConfigurationPlatforms) = postSolution
+ {49B5C5D6-4C69-4C38-B559-DF27E2A48302}.Debug|x64.ActiveCfg = Debug|x64
+ {49B5C5D6-4C69-4C38-B559-DF27E2A48302}.Debug|x64.Build.0 = Debug|x64
+ {49B5C5D6-4C69-4C38-B559-DF27E2A48302}.Debug|x86.ActiveCfg = Debug|Win32
+ {49B5C5D6-4C69-4C38-B559-DF27E2A48302}.Debug|x86.Build.0 = Debug|Win32
+ {49B5C5D6-4C69-4C38-B559-DF27E2A48302}.Release|x64.ActiveCfg = Release|x64
+ {49B5C5D6-4C69-4C38-B559-DF27E2A48302}.Release|x64.Build.0 = Release|x64
+ {49B5C5D6-4C69-4C38-B559-DF27E2A48302}.Release|x86.ActiveCfg = Release|Win32
+ {49B5C5D6-4C69-4C38-B559-DF27E2A48302}.Release|x86.Build.0 = Release|Win32
+ EndGlobalSection
+ GlobalSection(SolutionProperties) = preSolution
+ HideSolutionNode = FALSE
+ EndGlobalSection
+EndGlobal
diff --git a/chromium/tools/win/ShowThreadNames/ShowThreadNames.vcxproj b/chromium/tools/win/ShowThreadNames/ShowThreadNames.vcxproj
new file mode 100644
index 00000000000..230cf9d6161
--- /dev/null
+++ b/chromium/tools/win/ShowThreadNames/ShowThreadNames.vcxproj
@@ -0,0 +1,118 @@
+<?xml version="1.0" encoding="utf-8"?>
+<Project DefaultTargets="Build" ToolsVersion="14.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+ <ItemGroup Label="ProjectConfigurations">
+ <ProjectConfiguration Include="Debug|Win32">
+ <Configuration>Debug</Configuration>
+ <Platform>Win32</Platform>
+ </ProjectConfiguration>
+ <ProjectConfiguration Include="Release|Win32">
+ <Configuration>Release</Configuration>
+ <Platform>Win32</Platform>
+ </ProjectConfiguration>
+ <ProjectConfiguration Include="Debug|x64">
+ <Configuration>Debug</Configuration>
+ <Platform>x64</Platform>
+ </ProjectConfiguration>
+ <ProjectConfiguration Include="Release|x64">
+ <Configuration>Release</Configuration>
+ <Platform>x64</Platform>
+ </ProjectConfiguration>
+ </ItemGroup>
+ <PropertyGroup Label="Globals">
+ <ProjectGuid>{49B5C5D6-4C69-4C38-B559-DF27E2A48302}</ProjectGuid>
+ <RootNamespace>ShowThreadNames</RootNamespace>
+ <WindowsTargetPlatformVersion>8.1</WindowsTargetPlatformVersion>
+ </PropertyGroup>
+ <Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
+ <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="Configuration">
+ <ConfigurationType>Application</ConfigurationType>
+ <UseDebugLibraries>true</UseDebugLibraries>
+ <PlatformToolset>v140</PlatformToolset>
+ <CharacterSet>Unicode</CharacterSet>
+ </PropertyGroup>
+ <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="Configuration">
+ <ConfigurationType>Application</ConfigurationType>
+ <UseDebugLibraries>false</UseDebugLibraries>
+ <PlatformToolset>v140</PlatformToolset>
+ <WholeProgramOptimization>true</WholeProgramOptimization>
+ <CharacterSet>Unicode</CharacterSet>
+ </PropertyGroup>
+ <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="Configuration">
+ <ConfigurationType>Application</ConfigurationType>
+ <UseDebugLibraries>true</UseDebugLibraries>
+ <PlatformToolset>v140</PlatformToolset>
+ <CharacterSet>Unicode</CharacterSet>
+ </PropertyGroup>
+ <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="Configuration">
+ <ConfigurationType>Application</ConfigurationType>
+ <UseDebugLibraries>false</UseDebugLibraries>
+ <PlatformToolset>v140</PlatformToolset>
+ <WholeProgramOptimization>true</WholeProgramOptimization>
+ <CharacterSet>Unicode</CharacterSet>
+ </PropertyGroup>
+ <Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
+ <ImportGroup Label="ExtensionSettings">
+ </ImportGroup>
+ <ImportGroup Label="Shared">
+ </ImportGroup>
+ <ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
+ <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
+ </ImportGroup>
+ <ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
+ <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
+ </ImportGroup>
+ <ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
+ <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
+ </ImportGroup>
+ <ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
+ <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
+ </ImportGroup>
+ <PropertyGroup Label="UserMacros" />
+ <PropertyGroup />
+ <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
+ <ClCompile>
+ <WarningLevel>Level3</WarningLevel>
+ <Optimization>Disabled</Optimization>
+ <SDLCheck>true</SDLCheck>
+ </ClCompile>
+ </ItemDefinitionGroup>
+ <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
+ <ClCompile>
+ <WarningLevel>Level3</WarningLevel>
+ <Optimization>Disabled</Optimization>
+ <SDLCheck>true</SDLCheck>
+ </ClCompile>
+ </ItemDefinitionGroup>
+ <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
+ <ClCompile>
+ <WarningLevel>Level3</WarningLevel>
+ <Optimization>MaxSpeed</Optimization>
+ <FunctionLevelLinking>true</FunctionLevelLinking>
+ <IntrinsicFunctions>true</IntrinsicFunctions>
+ <SDLCheck>true</SDLCheck>
+ </ClCompile>
+ <Link>
+ <EnableCOMDATFolding>true</EnableCOMDATFolding>
+ <OptimizeReferences>true</OptimizeReferences>
+ </Link>
+ </ItemDefinitionGroup>
+ <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
+ <ClCompile>
+ <WarningLevel>Level3</WarningLevel>
+ <Optimization>MaxSpeed</Optimization>
+ <FunctionLevelLinking>true</FunctionLevelLinking>
+ <IntrinsicFunctions>true</IntrinsicFunctions>
+ <SDLCheck>true</SDLCheck>
+ </ClCompile>
+ <Link>
+ <EnableCOMDATFolding>true</EnableCOMDATFolding>
+ <OptimizeReferences>true</OptimizeReferences>
+ </Link>
+ </ItemDefinitionGroup>
+ <ItemGroup>
+ <ClCompile Include="ShowThreadNames.cc" />
+ </ItemGroup>
+ <Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
+ <ImportGroup Label="ExtensionTargets">
+ </ImportGroup>
+</Project> \ No newline at end of file
diff --git a/chromium/tools/win/chromeexts/BUILD.gn b/chromium/tools/win/chromeexts/BUILD.gn
index 1b344ea0286..39d188b2bb8 100644
--- a/chromium/tools/win/chromeexts/BUILD.gn
+++ b/chromium/tools/win/chromeexts/BUILD.gn
@@ -4,7 +4,15 @@
shared_library("chromeexts") {
sources = [
+ "chrome_exts_command.cc",
+ "chrome_exts_command.h",
"chromeexts.cc",
"chromeexts.def",
+ "commands/hwnd_command.cc",
+ "commands/hwnd_command.h",
+ ]
+
+ deps = [
+ "//base",
]
}
diff --git a/chromium/tools/win/chromeexts/chrome_exts_command.cc b/chromium/tools/win/chromeexts/chrome_exts_command.cc
new file mode 100644
index 00000000000..4c016002201
--- /dev/null
+++ b/chromium/tools/win/chromeexts/chrome_exts_command.cc
@@ -0,0 +1,56 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/win/chromeexts/chrome_exts_command.h"
+
+#include "base/logging.h"
+
+namespace tools {
+namespace win {
+namespace chromeexts {
+
+ChromeExtsCommand::~ChromeExtsCommand() = default;
+
+ChromeExtsCommand::ChromeExtsCommand() = default;
+
+HRESULT ChromeExtsCommand::Initialize(IDebugClient* debug_client,
+ const char* args) {
+ DCHECK(debug_client);
+ DCHECK(args);
+ args_ = args;
+ debug_client_ = debug_client;
+ HRESULT hr = debug_client_->QueryInterface(IID_PPV_ARGS(&debug_control_));
+ if (FAILED(hr)) {
+ return hr;
+ }
+ return S_OK;
+}
+
+HRESULT ChromeExtsCommand::Printf(const char* format, ...) {
+ va_list ap;
+ va_start(ap, format);
+ HRESULT hr = PrintV(format, ap);
+ va_end(ap);
+ return hr;
+}
+
+HRESULT ChromeExtsCommand::PrintV(const char* format, va_list ap) {
+ return debug_control_->OutputVaList(DEBUG_OUTPUT_NORMAL, format, ap);
+}
+
+HRESULT ChromeExtsCommand::PrintErrorf(const char* format, ...) {
+ va_list ap;
+ va_start(ap, format);
+ HRESULT hr = PrintErrorV(format, ap);
+ va_end(ap);
+ return hr;
+}
+
+HRESULT ChromeExtsCommand::PrintErrorV(const char* format, va_list ap) {
+ return debug_control_->OutputVaList(DEBUG_OUTPUT_ERROR, format, ap);
+}
+
+} // namespace chromeexts
+} // namespace win
+} // namespace tools
diff --git a/chromium/tools/win/chromeexts/chrome_exts_command.h b/chromium/tools/win/chromeexts/chrome_exts_command.h
new file mode 100644
index 00000000000..49450ef62ba
--- /dev/null
+++ b/chromium/tools/win/chromeexts/chrome_exts_command.h
@@ -0,0 +1,76 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_WIN_CHROMEEXTS_CHROME_EXTS_COMMAND_H_
+#define TOOLS_WIN_CHROMEEXTS_CHROME_EXTS_COMMAND_H_
+
+#include <dbgeng.h>
+#include <stdarg.h>
+#include <wrl/client.h>
+
+#include <memory>
+#include <string>
+
+#include "base/macros.h"
+#include "base/memory/ptr_util.h"
+
+namespace tools {
+namespace win {
+namespace chromeexts {
+
+namespace {
+using Microsoft::WRL::ComPtr;
+} // namespace
+
+// Superclass of all commands in the debugger extension.
+// To implement your own command, just follow these steps:
+// 1) Create a new class and subclass ChromeExtsCommand.
+// 2) Implement Execute().
+// 3) Add a function that calls Run<Your Subclass>() to chromeexts.cc.
+// 4) Add your new function to the exports list in chromeexts.def.
+// Done!
+class ChromeExtsCommand {
+ public:
+ template <typename T>
+ static HRESULT Run(IDebugClient* debug_client, const char* args) {
+ std::unique_ptr<ChromeExtsCommand> command = base::MakeUnique<T>();
+ HRESULT hr = command->Initialize(debug_client, args);
+ if (SUCCEEDED(hr)) {
+ hr = command->Execute();
+ }
+ return hr;
+ }
+
+ virtual ~ChromeExtsCommand();
+
+ protected:
+ ChromeExtsCommand();
+
+ virtual HRESULT Initialize(IDebugClient* debug_client, const char* args);
+
+ virtual HRESULT Execute() = 0;
+
+ HRESULT Printf(const char* format, ...);
+ HRESULT PrintV(const char* format, va_list ap);
+
+ HRESULT PrintErrorf(const char* format, ...);
+ HRESULT PrintErrorV(const char* format, va_list ap);
+
+ const std::string& args() const { return args_; }
+ IDebugClient* debug_client() { return debug_client_.Get(); }
+ IDebugControl* debug_control() { return debug_control_.Get(); }
+
+ private:
+ std::string args_;
+ ComPtr<IDebugClient> debug_client_;
+ ComPtr<IDebugControl> debug_control_;
+
+ DISALLOW_COPY_AND_ASSIGN(ChromeExtsCommand);
+};
+
+} // namespace chromeexts
+} // namespace win
+} // namespace tools
+
+#endif // TOOLS_WIN_CHROMEEXTS_CHROME_EXTS_COMMAND_H_
diff --git a/chromium/tools/win/chromeexts/chromeexts.cc b/chromium/tools/win/chromeexts/chromeexts.cc
index dad25b70416..2b050d98160 100644
--- a/chromium/tools/win/chromeexts/chromeexts.cc
+++ b/chromium/tools/win/chromeexts/chromeexts.cc
@@ -2,13 +2,14 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#include <windows.h>
#include <dbgeng.h>
#include <wrl/client.h>
+#include "tools/win/chromeexts/chrome_exts_command.h"
+#include "tools/win/chromeexts/commands/hwnd_command.h"
+
namespace {
using Microsoft::WRL::ComPtr;
-constexpr size_t kMaxWindowStringLength = 256;
} // namespace
HRESULT CALLBACK DebugExtensionInitialize(ULONG* version, ULONG* flags) {
@@ -33,56 +34,7 @@ HRESULT CALLBACK help(IDebugClient* client, PCSTR args) {
return S_OK;
}
-HRESULT CALLBACK hwnd(IDebugClient* client, PCSTR args) {
- ComPtr<IDebugControl> debug_control;
- HRESULT hr = client->QueryInterface(IID_PPV_ARGS(&debug_control));
- if (FAILED(hr)) {
- return hr;
- }
-
- // While sizeof(HWND) can change between 32-bit and 64-bit platforms, Windows
- // only cares about the lower 32-bits. We evaluate as 64-bit as a convenience
- // and truncate the displayed hwnds to 32-bit below.
- // See https://msdn.microsoft.com/en-us/library/aa384203.aspx
- DEBUG_VALUE value;
- hr = debug_control->Evaluate(args, DEBUG_VALUE_INT64, &value, nullptr);
- if (FAILED(hr)) {
- debug_control->Output(DEBUG_OUTPUT_ERROR, "Unable to evaluate %s\n", args);
- return hr;
- }
-
- HWND hwnd = reinterpret_cast<HWND>(value.I64);
- if (!IsWindow(hwnd)) {
- debug_control->Output(DEBUG_OUTPUT_NORMAL, "Not a window: %s\n", args);
- return E_FAIL;
- }
-
- wchar_t title[kMaxWindowStringLength];
- GetWindowText(hwnd, title, ARRAYSIZE(title));
- debug_control->Output(DEBUG_OUTPUT_NORMAL, "Title: %ws\n", title);
- wchar_t window_class[kMaxWindowStringLength];
- GetClassName(hwnd, window_class, ARRAYSIZE(window_class));
- debug_control->Output(DEBUG_OUTPUT_NORMAL, "Class: %ws\n", window_class);
- debug_control->Output(DEBUG_OUTPUT_NORMAL, "Hierarchy: \n");
- debug_control->Output(DEBUG_OUTPUT_NORMAL, " Owner: %08x Parent: %08x\n",
- GetWindow(hwnd, GW_OWNER), GetParent(hwnd));
- debug_control->Output(DEBUG_OUTPUT_NORMAL, " Prev: %08x Next: %08x\n",
- GetNextWindow(hwnd, GW_HWNDPREV),
- GetNextWindow(hwnd, GW_HWNDNEXT));
- debug_control->Output(DEBUG_OUTPUT_NORMAL, "Styles: %08x (Ex: %08x)\n",
- GetWindowLong(hwnd, GWL_STYLE),
- GetWindowLong(hwnd, GWL_EXSTYLE));
- RECT window_rect;
- if (GetWindowRect(hwnd, &window_rect)) {
- debug_control->Output(DEBUG_OUTPUT_NORMAL, "Bounds: (%d, %d) %dx%d\n",
- window_rect.left, window_rect.top,
- window_rect.right - window_rect.left,
- window_rect.bottom - window_rect.top);
- } else {
- DWORD last_error = GetLastError();
- debug_control->Output(DEBUG_OUTPUT_NORMAL,
- "Bounds: Unavailable (Last Error = %d)\n",
- last_error);
- }
- return S_OK;
+HRESULT CALLBACK RunHwndCommand(IDebugClient* client, PCSTR args) {
+ return tools::win::chromeexts::ChromeExtsCommand::Run<
+ tools::win::chromeexts::HwndCommand>(client, args);
}
diff --git a/chromium/tools/win/chromeexts/chromeexts.def b/chromium/tools/win/chromeexts/chromeexts.def
index 642d119e1aa..621f4fc7c60 100644
--- a/chromium/tools/win/chromeexts/chromeexts.def
+++ b/chromium/tools/win/chromeexts/chromeexts.def
@@ -8,5 +8,4 @@ EXPORTS
DebugExtensionInitialize
DebugExtensionUninitialize
help
- hwnd
-
+ hwnd = RunHwndCommand
diff --git a/chromium/tools/win/chromeexts/commands/hwnd_command.cc b/chromium/tools/win/chromeexts/commands/hwnd_command.cc
new file mode 100644
index 00000000000..7fb914171a6
--- /dev/null
+++ b/chromium/tools/win/chromeexts/commands/hwnd_command.cc
@@ -0,0 +1,68 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/win/chromeexts/commands/hwnd_command.h"
+
+#include <dbgeng.h>
+#include <windows.h>
+
+namespace tools {
+namespace win {
+namespace chromeexts {
+
+namespace {
+constexpr size_t kMaxWindowStringLength = 256;
+} // namespace
+
+HwndCommand::HwndCommand() = default;
+
+HwndCommand::~HwndCommand() = default;
+
+HRESULT HwndCommand::Execute() {
+ // While sizeof(HWND) can change between 32-bit and 64-bit platforms, Windows
+ // only cares about the lower 32-bits. We evaluate as 64-bit as a convenience
+ // and truncate the displayed hwnds to 32-bit below.
+ // See https://msdn.microsoft.com/en-us/library/aa384203.aspx
+ DEBUG_VALUE value;
+ HRESULT hr = debug_control()->Evaluate(args().c_str(), DEBUG_VALUE_INT64,
+ &value, nullptr);
+ if (FAILED(hr)) {
+ PrintErrorf("Unable to evaluate %s\n", args().c_str());
+ return hr;
+ }
+
+ HWND hwnd = reinterpret_cast<HWND>(value.I64);
+ if (!IsWindow(hwnd)) {
+ PrintErrorf("Not a window: %s\n", args().c_str());
+ return E_FAIL;
+ }
+
+ wchar_t title[kMaxWindowStringLength];
+ GetWindowText(hwnd, title, ARRAYSIZE(title));
+ Printf("Title: %ws\n", title);
+ wchar_t window_class[kMaxWindowStringLength];
+ GetClassName(hwnd, window_class, ARRAYSIZE(window_class));
+ Printf("Class: %ws\n", window_class);
+ Printf("Hierarchy: \n");
+ Printf(" Owner: %08x Parent: %08x\n", GetWindow(hwnd, GW_OWNER),
+ GetParent(hwnd));
+ Printf(" Prev: %08x Next: %08x\n", GetNextWindow(hwnd, GW_HWNDPREV),
+ GetNextWindow(hwnd, GW_HWNDNEXT));
+ Printf("Styles: %08x (Ex: %08x)\n", GetWindowLong(hwnd, GWL_STYLE),
+ GetWindowLong(hwnd, GWL_EXSTYLE));
+ RECT window_rect;
+ if (GetWindowRect(hwnd, &window_rect)) {
+ Printf("Bounds: (%d, %d) %dx%d\n", window_rect.left, window_rect.top,
+ window_rect.right - window_rect.left,
+ window_rect.bottom - window_rect.top);
+ } else {
+ DWORD last_error = GetLastError();
+ PrintErrorf("Bounds: Unavailable (Last Error = %d)\n", last_error);
+ }
+ return S_OK;
+}
+
+} // namespace chromeexts
+} // namespace win
+} // namespace tools
diff --git a/chromium/tools/win/chromeexts/commands/hwnd_command.h b/chromium/tools/win/chromeexts/commands/hwnd_command.h
new file mode 100644
index 00000000000..b25fc3fef26
--- /dev/null
+++ b/chromium/tools/win/chromeexts/commands/hwnd_command.h
@@ -0,0 +1,30 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_WIN_CHROME_EXTS_COMMANDS_HWND_COMMAND_H_
+#define TOOLS_WIN_CHROME_EXTS_COMMANDS_HWND_COMMAND_H_
+
+#include "tools/win/chromeexts/chrome_exts_command.h"
+
+namespace tools {
+namespace win {
+namespace chromeexts {
+
+class HwndCommand : public ChromeExtsCommand {
+ public:
+ HwndCommand();
+ ~HwndCommand() override;
+
+ protected:
+ HRESULT Execute() override;
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(HwndCommand);
+};
+
+} // namespace chromeexts
+} // namespace win
+} // namespace tools
+
+#endif // TOOLS_WIN_CHROME_EXTS_COMMANDS_HWND_COMMAND_H_
diff --git a/chromium/tools/win/static_initializers/build.bat b/chromium/tools/win/static_initializers/build.bat
new file mode 100755
index 00000000000..46d493cff49
--- /dev/null
+++ b/chromium/tools/win/static_initializers/build.bat
@@ -0,0 +1,3 @@
+@setlocal
+call "%VS140COMNTOOLS%..\..\VC\vcvarsall.bat"
+cl static_initializers.cc /EHsc /I "c:\Program Files (x86)\Microsoft Visual Studio 14.0\DIA SDK\include" /link Ole32.lib OleAut32.lib
diff --git a/chromium/tools/win/static_initializers/static_initializers.cc b/chromium/tools/win/static_initializers/static_initializers.cc
index 440bb9de833..fadda9a0c9a 100644
--- a/chromium/tools/win/static_initializers/static_initializers.cc
+++ b/chromium/tools/win/static_initializers/static_initializers.cc
@@ -98,7 +98,8 @@ static void PrintIfDynamicInitializer(const std::wstring& module,
BSTR bstr_name;
if (SUCCEEDED(symbol->get_name(&bstr_name))) {
- if (wcsstr(bstr_name, L"`dynamic initializer for '")) {
+ if (wcsstr(bstr_name, L"`dynamic initializer for '") ||
+ wcsstr(bstr_name, L"`dynamic atexit destructor for '")) {
wprintf(L"%s: %s\n", module.c_str(), bstr_name);
SysFreeString(bstr_name);
}