Update web-platform-tests to revision dc5cbf088edcdb266541d4e5a76149a2c6e716a0

Ms2ger 2016-09-09 09:40:35 +02:00
parent 1d40075f03
commit 079092dfea
2381 changed files with 90360 additions and 17722 deletions


@@ -1,6 +1,8 @@
from __future__ import print_function, unicode_literals
+import abc
import argparse
+import ast
import fnmatch
import json
import os
@@ -43,29 +45,26 @@ def check_path_length(repo_root, path):
return [("PATH LENGTH", "/%s longer than maximum path length (%d > 150)" % (path, len(path) + 1), None)]
return []
def set_type(error_type, errors):
return [(error_type,) + error for error in errors]
-def parse_whitelist_file(filename):
+def parse_whitelist(f):
    """
-    Parse the whitelist file at `filename`, and return the parsed structure.
+    Parse the whitelist file given by `f`, and return the parsed structure.
    """

    data = defaultdict(lambda:defaultdict(set))

-    with open(filename) as f:
-        for line in f:
-            line = line.strip()
-            if not line or line.startswith("#"):
-                continue
-            parts = [item.strip() for item in line.split(":")]
-            if len(parts) == 2:
-                parts.append(None)
-            else:
-                parts[-1] = int(parts[-1])
+    for line in f:
+        line = line.strip()
+        if not line or line.startswith("#"):
+            continue
+        parts = [item.strip() for item in line.split(":")]
+        if len(parts) == 2:
+            parts.append(None)
+        else:
+            parts[-1] = int(parts[-1])

-            error_type, file_match, line_number = parts
-            data[file_match][error_type].add(line_number)
+        error_type, file_match, line_number = parts
+        data[file_match][error_type].add(line_number)

    return data
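Taking an open file object instead of a filename makes parse_whitelist easy to exercise on its own. A minimal sketch, assuming the function is in scope and using made-up whitelist entries, of the nested structure it returns:

    import io

    whitelist = parse_whitelist(io.StringIO(
        u"# comments and blank lines are skipped\n"
        u"CONSOLE: streams/example.html: 12\n"     # error type, file match, line number
        u"OPEN-NO-MODE: tools/example.py\n"))      # no line number is stored as None

    assert whitelist["streams/example.html"]["CONSOLE"] == {12}
    assert whitelist["tools/example.py"]["OPEN-NO-MODE"] == {None}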
@@ -133,7 +132,7 @@ class Webidl2Regexp(Regexp):
class ConsoleRegexp(Regexp):
    pattern = b"console\.[a-zA-Z]+\s*\("
    error = "CONSOLE"
-    file_extensions = [".html", ".htm", ".js", ".xht", ".html", ".svg"]
+    file_extensions = [".html", ".htm", ".js", ".xht", ".xhtml", ".svg"]
    description = "Console logging API used"
class PrintRegexp(Regexp):
@@ -164,7 +163,7 @@ def check_regexp_line(repo_root, path, f):
    return errors


def check_parsed(repo_root, path, f):
-    source_file = SourceFile(repo_root, path, "/")
+    source_file = SourceFile(repo_root, path, "/", contents=f.read())

    errors = []
@@ -199,6 +198,11 @@ def check_parsed(repo_root, path, f):
errors.append(("MULTIPLE-TESTHARNESSREPORT",
"More than one <script src='/resources/testharnessreport.js'>", path, None))
testharnesscss_nodes = source_file.root.findall(".//{http://www.w3.org/1999/xhtml}link[@href='/resources/testharness.css']")
if testharnesscss_nodes:
errors.append(("PRESENT-TESTHARNESSCSS",
"Explicit link to testharness.css present", path, None))
for element in source_file.variant_nodes:
if "content" not in element.attrib:
errors.append(("VARIANT-MISSING",
@@ -238,6 +242,79 @@ def check_parsed(repo_root, path, f):
    return errors


+class ASTCheck(object):
+    __metaclass__ = abc.ABCMeta
+    error = None
+    description = None
+
+    @abc.abstractmethod
+    def check(self, root):
+        pass


+class OpenModeCheck(ASTCheck):
+    error = "OPEN-NO-MODE"
+    description = "File opened without providing an explicit mode (note: binary files must be read with 'b' in the mode flags)"
+
+    def check(self, root):
+        errors = []
+        for node in ast.walk(root):
+            if isinstance(node, ast.Call):
+                if hasattr(node.func, "id") and node.func.id in ("open", "file"):
+                    if (len(node.args) < 2 and
+                        all(item.arg != "mode" for item in node.keywords)):
+                        errors.append(node.lineno)
+        return errors
+ast_checkers = [item() for item in [OpenModeCheck]]


+def check_python_ast(repo_root, path, f):
+    if not path.endswith(".py"):
+        return []
+
+    try:
+        root = ast.parse(f.read())
+    except SyntaxError as e:
+        return [("PARSE-FAILED", "Unable to parse file", path, e.lineno)]
+
+    errors = []
+    for checker in ast_checkers:
+        for lineno in checker.check(root):
+            errors.append((checker.error, checker.description, path, lineno))
+    return errors
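A sketch of how the new AST lint behaves on made-up file contents, assuming check_python_ast is in scope (repo_root matches the signature of the other file lints but is not used here):

    import io

    # open() with no mode argument and no mode= keyword is reported...
    bad = io.BytesIO(b"f = open('results.json')\n")
    print(check_python_ast(".", "example.py", bad))
    # -> [('OPEN-NO-MODE', 'File opened without providing an explicit mode ...', 'example.py', 1)]

    # ...while an explicit mode, positional or keyword, passes
    good = io.BytesIO(b"f = open('results.json', 'rb')\n")
    print(check_python_ast(".", "example.py", good))    # -> []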
+def check_path(repo_root, path):
+    """
+    Runs lints that check the file path.
+
+    :param repo_root: the repository root
+    :param path: the path of the file within the repository
+    :returns: a list of errors found in ``path``
+    """
+    errors = []
+    for path_fn in path_lints:
+        errors.extend(path_fn(repo_root, path))
+    return errors


+def check_file_contents(repo_root, path, f):
+    """
+    Runs lints that check the file contents.
+
+    :param repo_root: the repository root
+    :param path: the path of the file within the repository
+    :param f: a file-like object with the file contents
+    :returns: a list of errors found in ``f``
+    """
+    errors = []
+    for file_fn in file_lints:
+        errors.extend(file_fn(repo_root, path, f))
+        f.seek(0)
+    return errors
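check_path and check_file_contents give callers an entry point that does not go through lint() below. A minimal sketch, assuming these functions are in scope and using a hypothetical test file relative to the repository root:

    repo_root = "."                            # assuming we run from the repository root
    path = "example/test-file.html"            # hypothetical file under lint
    errors = check_path(repo_root, path)
    with open(os.path.join(repo_root, path)) as f:
        errors.extend(check_file_contents(repo_root, path, f))
    # an empty list means the file is clean; otherwise each entry is an
    # error tuple that starts with the error type (e.g. "CONSOLE")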
def output_errors_text(errors):
    for error_type, description, path, line_number in errors:
        pos_string = path
@@ -279,35 +356,47 @@ def lint(repo_root, paths, output_json):
    error_count = defaultdict(int)
    last = None

-    whitelist = parse_whitelist_file(os.path.join(repo_root, "lint.whitelist"))
+    with open(os.path.join(repo_root, "lint.whitelist")) as f:
+        whitelist = parse_whitelist(f)

    if output_json:
        output_errors = output_errors_json
    else:
        output_errors = output_errors_text
-    def run_lint(path, fn, last, *args):
-        errors = filter_whitelist_errors(whitelist, path, fn(repo_root, path, *args))
-        if errors:
-            last = (errors[-1][0], path)
+    def process_errors(path, errors):
+        """
+        Filters and prints the errors, and updates the ``error_count`` object.
+
+        :param path: the path of the file that contains the errors
+        :param errors: a list of error tuples (error type, message, path, line number)
+        :returns: ``None`` if there were no errors, or
+                  a tuple of the error type and the path otherwise
+        """
+        errors = filter_whitelist_errors(whitelist, path, errors)
+
+        if not errors:
+            return None

        output_errors(errors)
        for error_type, error, path, line in errors:
            error_count[error_type] += 1
-        return last
+        return (errors[-1][0], path)
    for path in paths:
        abs_path = os.path.join(repo_root, path)
        if not os.path.exists(abs_path):
            continue

-        for path_fn in path_lints:
-            last = run_lint(path, path_fn, last)
+        errors = check_path(repo_root, path)
+        last = process_errors(path, errors) or last

        if not os.path.isdir(abs_path):
            with open(abs_path) as f:
-                for file_fn in file_lints:
-                    last = run_lint(path, file_fn, last, f)
-                    f.seek(0)
+                errors = check_file_contents(repo_root, path, f)
+                last = process_errors(path, errors) or last
    if not output_json:
        output_error_count(error_count)
@@ -316,7 +405,7 @@ def lint(repo_root, paths, output_json):
    return sum(error_count.itervalues())


path_lints = [check_path_length]
-file_lints = [check_regexp_line, check_parsed]
+file_lints = [check_regexp_line, check_parsed, check_python_ast]

if __name__ == "__main__":
    error_count = main()
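Since lint() returns the total number of errors, a caller can use it directly as a pass/fail signal (main() itself is not shown in these hunks). A small sketch, assuming a checkout whose root contains lint.whitelist and two hypothetical paths to check:

    import sys

    error_total = lint(repo_root=".", paths=["streams/example.html", "tools/example.py"], output_json=False)
    sys.exit(1 if error_total else 0)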