author | Allan Sandfeld Jensen <allan.jensen@qt.io> | 2017-11-20 10:33:36 +0100
committer | Allan Sandfeld Jensen <allan.jensen@qt.io> | 2017-11-22 11:45:12 +0000
commit | be59a35641616a4cf23c4a13fa0632624b021c1b (patch)
tree | 9da183258bdf9cc413f7562079d25ace6955467f /chromium/PRESUBMIT.py
parent | d702e4b6a64574e97fc7df8fe3238cde70242080 (diff)
download | qtwebengine-chromium-be59a35641616a4cf23c4a13fa0632624b021c1b.tar.gz
BASELINE: Update Chromium to 62.0.3202.101
Change-Id: I2d5eca8117600df6d331f6166ab24d943d9814ac
Reviewed-by: Alexandru Croitor <alexandru.croitor@qt.io>
Diffstat (limited to 'chromium/PRESUBMIT.py')
-rw-r--r-- | chromium/PRESUBMIT.py | 399
1 file changed, 331 insertions, 68 deletions
diff --git a/chromium/PRESUBMIT.py b/chromium/PRESUBMIT.py
index cf8be130b73..fbcbd549d90 100644
--- a/chromium/PRESUBMIT.py
+++ b/chromium/PRESUBMIT.py
@@ -16,15 +16,13 @@ _EXCLUDED_PATHS = (
     r"^native_client_sdk[\\\/]src[\\\/]tools[\\\/].*.mk",
     r"^net[\\\/]tools[\\\/]spdyshark[\\\/].*",
     r"^skia[\\\/].*",
-    r"^third_party[\\\/]WebKit[\\\/].*",
+    r"^third_party[\\\/](WebKit|blink)[\\\/].*",
     r"^v8[\\\/].*",
     r".*MakeFile$",
     r".+_autogen\.h$",
     r".+[\\\/]pnacl_shim\.c$",
     r"^gpu[\\\/]config[\\\/].*_list_json\.cc$",
     r"^chrome[\\\/]browser[\\\/]resources[\\\/]pdf[\\\/]index.js",
-    r".*vulcanized.html$",
-    r".*crisper.js$",
     r"tools[\\\/]md_browser[\\\/].*\.css$",
     # Test pages for WebRTC telemetry tests.
     r"tools[\\\/]perf[\\\/]page_sets[\\\/]webrtc_cases.*",
@@ -232,6 +230,8 @@ _BANNED_CPP_FUNCTIONS = (
      r"^net[\\\/]test[\\\/]embedded_test_server[\\\/]" +
          r"embedded_test_server\.cc$",
      r"^net[\\\/]test[\\\/]spawned_test_server[\\\/]local_test_server\.cc$",
+     r"^net[\\\/]test[\\\/]spawned_test_server[\\\/]" +
+         r"remote_test_server_config\.cc$",
      r"^net[\\\/]test[\\\/]test_data_directory\.cc$",
      r"^net[\\\/]url_request[\\\/]test_url_fetcher_factory\.cc$",
      r"^remoting[\\\/]protocol[\\\/]webrtc_transport\.cc$",
@@ -334,7 +334,7 @@ _BANNED_CPP_FUNCTIONS = (
       (),
     ),
     (
-      'BrowserThread::GetBlockingPool',
+      r'/(WebThread|BrowserThread)::GetBlockingPool',
       (
         'Use base/task_scheduler/post_task.h instead of the blocking pool. See',
         'mapping between both APIs in content/public/browser/browser_thread.h.',
@@ -344,7 +344,7 @@ _BANNED_CPP_FUNCTIONS = (
       (),
     ),
     (
-      'BrowserThread::(FILE|FILE_USER_BLOCKING|DB|PROCESS_LAUNCHER|CACHE)',
+      r'/(WebThread|BrowserThread)::(FILE|FILE_USER_BLOCKING|DB|CACHE)',
       (
         'The non-UI/IO BrowserThreads are deprecated, please migrate this',
         'code to TaskScheduler. See https://goo.gl/mDSxKl for details.',
@@ -409,6 +409,41 @@ _BANNED_CPP_FUNCTIONS = (
       (
         r'^third_party/leveldatabase/.*\.(cc|h)$',
       ),
+    ),
+    (
+      'MessageLoop::QuitWhenIdleClosure',
+      (
+        'MessageLoop::QuitWhenIdleClosure is deprecated. Please migrate to',
+        'Runloop.',
+      ),
+      True,
+      (),
+    ),
+    (
+      'RunLoop::QuitCurrent',
+      (
+        'Please migrate away from RunLoop::QuitCurrent*() methods. Use member',
+        'methods of a specific RunLoop instance instead.',
+      ),
+      True,
+      (),
+    ),
+    (
+      'base::ScopedMockTimeMessageLoopTaskRunner',
+      (
+        'ScopedMockTimeMessageLoopTaskRunner is deprecated.',
+      ),
+      True,
+      (),
+    ),
+    (
+      r'std::regex',
+      (
+        'Using std::regex adds unnecessary binary size to Chrome. Please use',
+        're2::RE2 instead (crbug/755321)',
+      ),
+      True,
+      (),
     )
 )
@@ -418,6 +453,14 @@ _IPC_ENUM_TRAITS_DEPRECATED = (
     'See http://www.chromium.org/Home/chromium-security/education/security-tips-for-ipc')

+# These paths contain test data and other known invalid JSON files.
+_KNOWN_INVALID_JSON_FILE_PATTERNS = [
+  r'test[\\\/]data[\\\/]',
+  r'^components[\\\/]policy[\\\/]resources[\\\/]policy_templates\.json$',
+  r'^third_party[\\\/]protobuf[\\\/]',
+]
+
+
 _VALID_OS_MACROS = (
     # Please keep sorted.
     'OS_AIX',
@@ -1095,7 +1138,7 @@ def _CheckAddedDepsHaveTargetApprovals(input_api, output_api):
   virtual_depended_on_files = set()
   file_filter = lambda f: not input_api.re.match(
-      r"^third_party[\\\/]WebKit[\\\/].*", f.LocalPath())
+      r"^third_party[\\\/](WebKit|blink)[\\\/].*", f.LocalPath())
   for f in input_api.AffectedFiles(include_deletes=False,
                                    file_filter=file_filter):
     filename = input_api.os_path.basename(f.LocalPath())
@@ -1323,19 +1366,20 @@ def _CheckUserActionUpdate(input_api, output_api):
   return []

+def _ImportJSONCommentEater(input_api):
+  import sys
+  sys.path = sys.path + [input_api.os_path.join(
+      input_api.PresubmitLocalPath(),
+      'tools', 'json_comment_eater')]
+  import json_comment_eater
+  return json_comment_eater
+
+
 def _GetJSONParseError(input_api, filename, eat_comments=True):
   try:
     contents = input_api.ReadFile(filename)
     if eat_comments:
-      import sys
-      original_sys_path = sys.path
-      try:
-        sys.path = sys.path + [input_api.os_path.join(
-            input_api.PresubmitLocalPath(),
-            'tools', 'json_comment_eater')]
-        import json_comment_eater
-      finally:
-        sys.path = original_sys_path
+      json_comment_eater = _ImportJSONCommentEater(input_api)
       contents = json_comment_eater.Nom(contents)

     input_api.json.loads(contents)
@@ -1368,11 +1412,6 @@ def _CheckParseErrors(input_api, output_api):
     '.idl': _GetIDLParseError,
     '.json': _GetJSONParseError,
   }
-  # These paths contain test data and other known invalid JSON files.
-  excluded_patterns = [
-    r'test[\\\/]data[\\\/]',
-    r'^components[\\\/]policy[\\\/]resources[\\\/]policy_templates\.json$',
-  ]
   # Most JSON files are preprocessed and support comments, but these do not.
   json_no_comments_patterns = [
     r'^testing[\\\/]',
@@ -1387,23 +1426,17 @@ def _CheckParseErrors(input_api, output_api):
     filename = affected_file.LocalPath()
     return actions.get(input_api.os_path.splitext(filename)[1])

-  def MatchesFile(patterns, path):
-    for pattern in patterns:
-      if input_api.re.search(pattern, path):
-        return True
-    return False
-
   def FilterFile(affected_file):
     action = get_action(affected_file)
     if not action:
       return False
     path = affected_file.LocalPath()
-    if MatchesFile(excluded_patterns, path):
+    if _MatchesFile(input_api, _KNOWN_INVALID_JSON_FILE_PATTERNS, path):
       return False
     if (action == _GetIDLParseError and
-        not MatchesFile(idl_included_patterns, path)):
+        not _MatchesFile(input_api, idl_included_patterns, path)):
       return False
     return True
@@ -1413,7 +1446,8 @@ def _CheckParseErrors(input_api, output_api):
     action = get_action(affected_file)
     kwargs = {}
     if (action == _GetJSONParseError and
-        MatchesFile(json_no_comments_patterns, affected_file.LocalPath())):
+        _MatchesFile(input_api, json_no_comments_patterns,
+                     affected_file.LocalPath())):
       kwargs['eat_comments'] = False
     parse_error = action(input_api,
                          affected_file.AbsoluteLocalPath(),
@@ -1441,6 +1475,13 @@ def _CheckJavaStyle(input_api, output_api):
       black_list=_EXCLUDED_PATHS + input_api.DEFAULT_BLACK_LIST)

+def _MatchesFile(input_api, patterns, path):
+  for pattern in patterns:
+    if input_api.re.search(pattern, path):
+      return True
+  return False
+
+
 def _CheckIpcOwners(input_api, output_api):
   """Checks that affected files involving IPC have an IPC OWNERS rule.
@@ -1468,6 +1509,7 @@ def _CheckIpcOwners(input_api, output_api):
   # matching the above patterns, which trigger false positives.
   exclude_paths = [
     'third_party/crashpad/*',
+    'third_party/win_build_output/*',
   ]

   # Dictionary mapping an OWNERS file path to Patterns.
@@ -1496,12 +1538,39 @@ def _CheckIpcOwners(input_api, output_api):
   # }
   to_check = {}

+  def AddPatternToCheck(input_file, pattern):
+    owners_file = input_api.os_path.join(
+        input_api.os_path.dirname(input_file.LocalPath()), 'OWNERS')
+    if owners_file not in to_check:
+      to_check[owners_file] = {}
+    if pattern not in to_check[owners_file]:
+      to_check[owners_file][pattern] = {
+          'files': [],
+          'rules': [
+              'per-file %s=set noparent' % pattern,
+              'per-file %s=file://ipc/SECURITY_OWNERS' % pattern,
+          ]
+      }
+    to_check[owners_file][pattern]['files'].append(f)
+
   # Iterate through the affected files to see what we actually need to check
   # for. We should only nag patch authors about per-file rules if a file in that
   # directory would match that pattern. If a directory only contains *.mojom
   # files and no *_messages*.h files, we should only nag about rules for
   # *.mojom files.
-  for f in input_api.change.AffectedFiles(include_deletes=False):
+  for f in input_api.AffectedFiles(include_deletes=False):
+    # Manifest files don't have a strong naming convention. Instead, scan
+    # affected files for .json files and see if they look like a manifest.
+    if (f.LocalPath().endswith('.json') and
+        not _MatchesFile(input_api, _KNOWN_INVALID_JSON_FILE_PATTERNS,
+                         f.LocalPath())):
+      json_comment_eater = _ImportJSONCommentEater(input_api)
+      mostly_json_lines = '\n'.join(f.NewContents())
+      # Comments aren't allowed in strict JSON, so filter them out.
+      json_lines = json_comment_eater.Nom(mostly_json_lines)
+      json_content = input_api.json.loads(json_lines)
+      if 'interface_provider_specs' in json_content:
+        AddPatternToCheck(f, input_api.os_path.basename(f.LocalPath()))
     for pattern in file_patterns:
       if input_api.fnmatch.fnmatch(
           input_api.os_path.basename(f.LocalPath()), pattern):
@@ -1512,19 +1581,7 @@ def _CheckIpcOwners(input_api, output_api):
             break
         if skip:
           continue
-        owners_file = input_api.os_path.join(
-            input_api.os_path.dirname(f.LocalPath()), 'OWNERS')
-        if owners_file not in to_check:
-          to_check[owners_file] = {}
-        if pattern not in to_check[owners_file]:
-          to_check[owners_file][pattern] = {
-              'files': [],
-              'rules': [
-                  'per-file %s=set noparent' % pattern,
-                  'per-file %s=file://ipc/SECURITY_OWNERS' % pattern,
-              ]
-          }
-        to_check[owners_file][pattern]['files'].append(f)
+        AddPatternToCheck(f, pattern)
         break

   # Now go through the OWNERS files we collected, filtering out rules that are
@@ -1579,6 +1636,8 @@ def _CheckUselessForwardDeclarations(input_api, output_api):
       input_api.re.MULTILINE)
   for f in input_api.AffectedFiles(include_deletes=False):
     if (f.LocalPath().startswith('third_party') and
+        not f.LocalPath().startswith('third_party/blink') and
+        not f.LocalPath().startswith('third_party\\blink') and
         not f.LocalPath().startswith('third_party/WebKit') and
         not f.LocalPath().startswith('third_party\\WebKit')):
       continue
@@ -1757,6 +1816,58 @@ def _CheckAndroidCrLogUsage(input_api, output_api):
   return results

+def _CheckAndroidTestJUnitFrameworkImport(input_api, output_api):
+  """Checks that junit.framework.* is no longer used."""
+  deprecated_junit_framework_pattern = input_api.re.compile(
+      r'^import junit\.framework\..*;',
+      input_api.re.MULTILINE)
+  sources = lambda x: input_api.FilterSourceFile(
+      x, white_list=(r'.*\.java$',), black_list=None)
+  errors = []
+  for f in input_api.AffectedFiles(sources):
+    for line_num, line in f.ChangedContents():
+      if deprecated_junit_framework_pattern.search(line):
+        errors.append("%s:%d" % (f.LocalPath(), line_num))
+
+  results = []
+  if errors:
+    results.append(output_api.PresubmitError(
+      'APIs from junit.framework.* are deprecated, please use JUnit4 framework'
+      '(org.junit.*) from //third_party/junit. Contact yolandyan@chromium.org'
+      ' if you have any question.', errors))
+  return results
+
+
+def _CheckAndroidTestJUnitInheritance(input_api, output_api):
+  """Checks that if new Java test classes have inheritance.
+  Either the new test class is JUnit3 test or it is a JUnit4 test class
+  with a base class, either case is undesirable.
+  """
+  class_declaration_pattern = input_api.re.compile(r'^public class \w*Test ')
+
+  sources = lambda x: input_api.FilterSourceFile(
+      x, white_list=(r'.*Test\.java$',), black_list=None)
+  errors = []
+  for f in input_api.AffectedFiles(sources):
+    if not f.OldContents():
+      class_declaration_start_flag = False
+      for line_num, line in f.ChangedContents():
+        if class_declaration_pattern.search(line):
+          class_declaration_start_flag = True
+        if class_declaration_start_flag and ' extends ' in line:
+          errors.append('%s:%d' % (f.LocalPath(), line_num))
+        if '{' in line:
+          class_declaration_start_flag = False
+
+  results = []
+  if errors:
+    results.append(output_api.PresubmitPromptWarning(
+      'The newly created files include Test classes that inherits from base'
+      ' class. Please do not use inheritance in JUnit4 tests or add new'
+      ' JUnit3 tests. Contact yolandyan@chromium.org if you have any'
+      ' questions.', errors))
+  return results
+
+
 def _CheckAndroidTestAnnotationUsage(input_api, output_api):
   """Checks that android.test.suitebuilder.annotation.* is no longer used."""
   deprecated_annotation_import_pattern = input_api.re.compile(
@@ -2009,27 +2120,66 @@ def _CheckNoDeprecatedJs(input_api, output_api):
               (fpath.LocalPath(), lnum, deprecated, replacement)))
   return results

+def _CheckForRiskyJsArrowFunction(line_number, line):
+  if ' => ' in line:
+    return "line %d, is using an => (arrow) function\n %s\n" % (
+        line_number, line)
+  return ''
+
+def _CheckForRiskyJsConstLet(input_api, line_number, line):
+  if input_api.re.match('^\s*(const|let)\s', line):
+    return "line %d, is using const/let keyword\n %s\n" % (
+        line_number, line)
+  return ''

 def _CheckForRiskyJsFeatures(input_api, output_api):
   maybe_ios_js = (r"^(ios|components|ui\/webui\/resources)\/.+\.js$", )
-  file_filter = lambda f: input_api.FilterSourceFile(f, white_list=maybe_ios_js)
-
-  arrow_lines = []
+  # 'ui/webui/resources/cr_components are not allowed on ios'
+  not_ios_filter = (r".*ui\/webui\/resources\/cr_components.*", )
+  file_filter = lambda f: input_api.FilterSourceFile(f, white_list=maybe_ios_js,
+                                                     black_list=not_ios_filter)
+  results = []
   for f in input_api.AffectedFiles(file_filter=file_filter):
+    arrow_error_lines = []
+    const_let_error_lines = []
     for lnum, line in f.ChangedContents():
-      if ' => ' in line:
-        arrow_lines.append((f.LocalPath(), lnum))
-
-  if not arrow_lines:
-    return []
-
-  return [output_api.PresubmitPromptWarning("""
-Use of => operator detected in:
+      arrow_error_lines += filter(None, [
+          _CheckForRiskyJsArrowFunction(lnum, line),
+      ])
+
+      const_let_error_lines += filter(None, [
+          _CheckForRiskyJsConstLet(input_api, lnum, line),
+      ])
+
+    if arrow_error_lines:
+      arrow_error_lines = map(
+          lambda e: "%s:%s" % (f.LocalPath(), e), arrow_error_lines)
+      results.append(
+          output_api.PresubmitPromptWarning('\n'.join(arrow_error_lines + [
+"""
+Use of => (arrow) operator detected in:
 %s
 Please ensure your code does not run on iOS9 (=> (arrow) does not work there).
 https://chromium.googlesource.com/chromium/src/+/master/docs/es6_chromium.md#Arrow-Functions
-""" % "\n".join("  %s:%d\n" % line for line in arrow_lines))]
+""" % f.LocalPath()
+      ])))
+
+    if const_let_error_lines:
+      const_let_error_lines = map(
+          lambda e: "%s:%s" % (f.LocalPath(), e), const_let_error_lines)
+      results.append(
+          output_api.PresubmitPromptWarning('\n'.join(const_let_error_lines + [
+"""
+Use of const/let keywords detected in:
+%s
+Please ensure your code does not run on iOS9 because const/let is not fully
+supported.
+https://chromium.googlesource.com/chromium/src/+/master/docs/es6_chromium.md#let-Block_Scoped-Variables
+https://chromium.googlesource.com/chromium/src/+/master/docs/es6_chromium.md#const-Block_Scoped-Constants
+""" % f.LocalPath()
+      ])))
+  return results

 def _CheckForRelativeIncludes(input_api, output_api):
   # Need to set the sys.path so PRESUBMIT_test.py runs properly
@@ -2079,12 +2229,137 @@ def _CheckForRelativeIncludes(input_api, output_api):
   return results
+
+
+def _CheckWatchlistDefinitionsEntrySyntax(key, value, ast):
+  if not isinstance(key, ast.Str):
+    return 'Key at line %d must be a string literal' % key.lineno
+  if not isinstance(value, ast.Dict):
+    return 'Value at line %d must be a dict' % value.lineno
+  if len(value.keys) != 1:
+    return 'Dict at line %d must have single entry' % value.lineno
+  if not isinstance(value.keys[0], ast.Str) or value.keys[0].s != 'filepath':
+    return (
+        'Entry at line %d must have a string literal \'filepath\' as key' %
+        value.lineno)
+  return None
+
+
+def _CheckWatchlistsEntrySyntax(key, value, ast):
+  if not isinstance(key, ast.Str):
+    return 'Key at line %d must be a string literal' % key.lineno
+  if not isinstance(value, ast.List):
+    return 'Value at line %d must be a list' % value.lineno
+  return None
+
+
+def _CheckWATCHLISTSEntries(wd_dict, w_dict, ast):
+  mismatch_template = (
+      'Mismatch between WATCHLIST_DEFINITIONS entry (%s) and WATCHLISTS '
+      'entry (%s)')
+
+  i = 0
+  last_key = ''
+  while True:
+    if i >= len(wd_dict.keys):
+      if i >= len(w_dict.keys):
+        return None
+      return mismatch_template % ('missing', 'line %d' % w_dict.keys[i].lineno)
+    elif i >= len(w_dict.keys):
+      return (
+          mismatch_template % ('line %d' % wd_dict.keys[i].lineno, 'missing'))
+
+    wd_key = wd_dict.keys[i]
+    w_key = w_dict.keys[i]
+
+    result = _CheckWatchlistDefinitionsEntrySyntax(
+        wd_key, wd_dict.values[i], ast)
+    if result is not None:
+      return 'Bad entry in WATCHLIST_DEFINITIONS dict: %s' % result
+
+    result = _CheckWatchlistsEntrySyntax(w_key, w_dict.values[i], ast)
+    if result is not None:
+      return 'Bad entry in WATCHLISTS dict: %s' % result
+
+    if wd_key.s != w_key.s:
+      return mismatch_template % (
+          '%s at line %d' % (wd_key.s, wd_key.lineno),
+          '%s at line %d' % (w_key.s, w_key.lineno))
+
+    if wd_key.s < last_key:
+      return (
+          'WATCHLISTS dict is not sorted lexicographically at line %d and %d' %
+          (wd_key.lineno, w_key.lineno))
+    last_key = wd_key.s
+
+    i = i + 1
+
+
+def _CheckWATCHLISTSSyntax(expression, ast):
+  if not isinstance(expression, ast.Expression):
+    return 'WATCHLISTS file must contain a valid expression'
+  dictionary = expression.body
+  if not isinstance(dictionary, ast.Dict) or len(dictionary.keys) != 2:
+    return 'WATCHLISTS file must have single dict with exactly two entries'
+
+  first_key = dictionary.keys[0]
+  first_value = dictionary.values[0]
+  second_key = dictionary.keys[1]
+  second_value = dictionary.values[1]
+
+  if (not isinstance(first_key, ast.Str) or
+      first_key.s != 'WATCHLIST_DEFINITIONS' or
+      not isinstance(first_value, ast.Dict)):
+    return (
+        'The first entry of the dict in WATCHLISTS file must be '
+        'WATCHLIST_DEFINITIONS dict')
+
+  if (not isinstance(second_key, ast.Str) or
+      second_key.s != 'WATCHLISTS' or
+      not isinstance(second_value, ast.Dict)):
+    return (
+        'The second entry of the dict in WATCHLISTS file must be '
+        'WATCHLISTS dict')
+
+  return _CheckWATCHLISTSEntries(first_value, second_value, ast)
+
+
+def _CheckWATCHLISTS(input_api, output_api):
+  for f in input_api.AffectedFiles(include_deletes=False):
+    if f.LocalPath() == 'WATCHLISTS':
+      contents = input_api.ReadFile(f, 'r')
+
+      try:
+        # First, make sure that it can be evaluated.
+        input_api.ast.literal_eval(contents)
+        # Get an AST tree for it and scan the tree for detailed style checking.
+        expression = input_api.ast.parse(
+            contents, filename='WATCHLISTS', mode='eval')
+      except ValueError as e:
+        return [output_api.PresubmitError(
+            'Cannot parse WATCHLISTS file', long_text=repr(e))]
+      except SyntaxError as e:
+        return [output_api.PresubmitError(
+            'Cannot parse WATCHLISTS file', long_text=repr(e))]
+      except TypeError as e:
+        return [output_api.PresubmitError(
+            'Cannot parse WATCHLISTS file', long_text=repr(e))]
+
+      result = _CheckWATCHLISTSSyntax(expression, input_api.ast)
+      if result is not None:
+        return [output_api.PresubmitError(result)]
+      break
+
+  return []
+
+
 def _AndroidSpecificOnUploadChecks(input_api, output_api):
   """Groups checks that target android code."""
   results = []
   results.extend(_CheckAndroidCrLogUsage(input_api, output_api))
   results.extend(_CheckAndroidNewMdpiAssetLocation(input_api, output_api))
   results.extend(_CheckAndroidToastUsage(input_api, output_api))
+  results.extend(_CheckAndroidTestJUnitInheritance(input_api, output_api))
+  results.extend(_CheckAndroidTestJUnitFrameworkImport(input_api, output_api))
  results.extend(_CheckAndroidTestAnnotationUsage(input_api, output_api))
   return results
@@ -2140,6 +2415,7 @@ def _CommonChecks(input_api, output_api):
   results.extend(_CheckUselessForwardDeclarations(input_api, output_api))
   results.extend(_CheckForRiskyJsFeatures(input_api, output_api))
   results.extend(_CheckForRelativeIncludes(input_api, output_api))
+  results.extend(_CheckWATCHLISTS(input_api, output_api))

   if any('PRESUBMIT.py' == f.LocalPath() for f in input_api.AffectedFiles()):
     results.extend(input_api.canned_checks.RunUnitTestsInDirectory(
@@ -2417,19 +2693,6 @@ def GetTryServerMasterForBot(bot):
   return master

-def GetDefaultTryConfigs(bots):
-  """Returns a list of ('bot', set(['tests']), filtered by [bots].
-  """
-
-  builders_and_tests = dict((bot, set(['defaulttests'])) for bot in bots)
-
-  # Build up the mapping from tryserver master to bot/test.
-  out = dict()
-  for bot, tests in builders_and_tests.iteritems():
-    out.setdefault(GetTryServerMasterForBot(bot), {})[bot] = tests
-  return out
-
-
 def CheckChangeOnCommit(input_api, output_api):
   results = []
   results.extend(_CommonChecks(input_api, output_api))
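The most involved addition in this diff is the manifest scan in _CheckIpcOwners: affected .json files that do not match _KNOWN_INVALID_JSON_FILE_PATTERNS have their comments stripped with json_comment_eater and are parsed; any file declaring interface_provider_specs is treated as a service manifest and gets the per-file "set noparent" / ipc/SECURITY_OWNERS rules. The following is a minimal, self-contained sketch of that flow outside the presubmit framework: strip_json_comments is a hypothetical stand-in for json_comment_eater.Nom, plain re/json replace the input_api wrappers, and the example path and manifest in the usage block are made up.

import json
import re

# Hypothetical stand-in for tools/json_comment_eater's Nom(): drop '//'
# comment lines so the preprocessed JSON used in Chromium parses as strict
# JSON. The real tool also handles '//' sequences inside string literals.
def strip_json_comments(text):
  return re.sub(r'^\s*//.*$', '', text, flags=re.MULTILINE)

# Mirrors _KNOWN_INVALID_JSON_FILE_PATTERNS from the diff: paths holding test
# data and other files that are known not to be valid JSON.
KNOWN_INVALID_JSON_FILE_PATTERNS = [
    r'test[\\\/]data[\\\/]',
    r'^components[\\\/]policy[\\\/]resources[\\\/]policy_templates\.json$',
    r'^third_party[\\\/]protobuf[\\\/]',
]

def matches_file(patterns, path):
  # Same idea as the _MatchesFile helper: any pattern hit counts.
  return any(re.search(pattern, path) for pattern in patterns)

def looks_like_service_manifest(path, contents):
  """True if a .json file appears to be a service manifest, i.e. it should
  get the per-file security-owner rules."""
  if not path.endswith('.json'):
    return False
  if matches_file(KNOWN_INVALID_JSON_FILE_PATTERNS, path):
    return False
  manifest = json.loads(strip_json_comments(contents))
  return 'interface_provider_specs' in manifest

if __name__ == '__main__':
  example = """
  // Comments are allowed in preprocessed manifests.
  {
    "name": "example_service",
    "interface_provider_specs": {}
  }
  """
  print(looks_like_service_manifest('services/example/manifest.json', example))

In the real check, a positive result feeds AddPatternToCheck, which records the per-file OWNERS rules keyed by the manifest's basename.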
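The new _CheckWATCHLISTS check parses the WATCHLISTS file with ast in 'eval' mode and then inspects the resulting ast.Dict nodes. Below is a condensed, standalone sketch of the structural part of that check, using the standard ast module directly instead of input_api.ast; the function names here are illustrative, the ast.Constant test targets Python 3.8+ (the original, Python 2-era code checks ast.Str), and the per-entry and sort-order checks from _CheckWATCHLISTSEntries are omitted.

import ast

def _is_str(node):
  # String literals parse as ast.Constant on Python 3.8+; the original
  # presubmit code checks ast.Str, which newer interpreters have removed.
  return isinstance(node, ast.Constant) and isinstance(node.value, str)

def check_watchlists_syntax(contents):
  """Returns an error string, or None if the top-level structure is valid:
  a single dict whose two keys are WATCHLIST_DEFINITIONS and WATCHLISTS."""
  try:
    ast.literal_eval(contents)  # First, make sure it evaluates at all.
    expression = ast.parse(contents, filename='WATCHLISTS', mode='eval')
  except (ValueError, SyntaxError, TypeError) as e:
    return 'Cannot parse WATCHLISTS file: %r' % e

  dictionary = expression.body
  if not isinstance(dictionary, ast.Dict) or len(dictionary.keys) != 2:
    return 'WATCHLISTS file must have single dict with exactly two entries'

  first_key, second_key = dictionary.keys
  if not (_is_str(first_key) and first_key.value == 'WATCHLIST_DEFINITIONS'):
    return 'The first entry must be the WATCHLIST_DEFINITIONS dict'
  if not (_is_str(second_key) and second_key.value == 'WATCHLISTS'):
    return 'The second entry must be the WATCHLISTS dict'
  return None

if __name__ == '__main__':
  good = """{
      'WATCHLIST_DEFINITIONS': {
        'presubmit': {'filepath': 'PRESUBMIT.py'},
      },
      'WATCHLISTS': {
        'presubmit': ['someone@example.com'],
      },
  }"""
  print(check_watchlists_syntax(good))  # None: structure looks sane.

Checking the AST rather than the evaluated dict is what lets the real check report line numbers (key.lineno) for mismatched or unsorted entries.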