Mirror of https://github.com/servo/servo.git, synced 2025-08-09 23:45:35 +01:00.
Auto merge of #25239 - marmeladema:issue-23607/test-tidy-no-wpt, r=jdm
Make `mach test-tidy --no-wpt` compatible with Python 3.

See also #23607. After this pull request, all Python files (except WPT) are checked for Python 3 syntax compatibility.

---

- [x] `./mach build -d` does not report any errors
- [x] `./mach test-tidy` does not report any errors
- [ ] There are tests for these changes OR
- [ ] These changes do not require tests because ___
Commit b3b72cb9e3. 12 changed files with 118 additions and 107 deletions.
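For context on what the new check enforces: a file counts as Python 3 compatible, in the tidy sense, when it at least parses under a Python 3 interpreter. Below is a minimal, hypothetical sketch of that idea using only the standard library; it is illustrative and not the actual `test-tidy` implementation.

```python
# Hypothetical sketch: verify that a file parses as valid Python 3 syntax.
import sys

def is_python3_compatible(path):
    with open(path, "rb") as f:
        source = f.read()
    try:
        compile(source, path, "exec")  # parse only, never executed
        return True, None
    except SyntaxError as exc:
        return False, "{}:{}: {}".format(path, exc.lineno, exc.msg)

if __name__ == "__main__":
    ok, error = is_python3_compatible(sys.argv[1])
    if not ok:
        print(error)
        sys.exit(1)
```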
@@ -3943,7 +3943,7 @@ class CGMemberJITInfo(CGThing):
 depth=self.descriptor.interface.inheritanceDepth(),
 opType=opType,
 aliasSet=aliasSet,
-returnType=reduce(CGMemberJITInfo.getSingleReturnType, returnTypes,
+returnType=functools.reduce(CGMemberJITInfo.getSingleReturnType, returnTypes,
                   ""),
 isInfallible=toStringBool(infallible),
 isMovable=toStringBool(movable),
@@ -4131,7 +4131,7 @@ class CGMemberJITInfo(CGThing):
 if u.hasNullableType:
     # Might be null or not
     return "JSVAL_TYPE_UNKNOWN"
-return reduce(CGMemberJITInfo.getSingleReturnType,
+return functools.reduce(CGMemberJITInfo.getSingleReturnType,
               u.flatMemberTypes, "")
 if t.isDictionary():
     return "JSVAL_TYPE_OBJECT"
@@ -4202,7 +4202,7 @@ class CGMemberJITInfo(CGThing):
 if t.isUnion():
     u = t.unroll()
     type = "JSJitInfo::Null as i32" if u.hasNullableType else ""
-    return reduce(CGMemberJITInfo.getSingleArgType,
+    return functools.reduce(CGMemberJITInfo.getSingleArgType,
                   u.flatMemberTypes, type)
 if t.isDictionary():
     return "JSJitInfo_ArgType::Object as i32"
@@ -5858,7 +5858,7 @@ class CGInterfaceTrait(CGThing):
 def contains_unsafe_arg(arguments):
     if not arguments or len(arguments) == 0:
         return False
-    return reduce((lambda x, y: x or y[1] == '*mut JSContext'), arguments, False)
+    return functools.reduce((lambda x, y: x or y[1] == '*mut JSContext'), arguments, False)

 methods = []
 for name, arguments, rettype in members():
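The four hunks above replace the Python 2 builtin `reduce` with `functools.reduce`, which is the only spelling Python 3 provides and which also exists on Python 2.6+. A standalone sketch of the pattern with toy data; the combiner below only stands in for `CGMemberJITInfo.getSingleReturnType`:

```python
import functools

# On Python 3, reduce() is no longer a builtin; functools.reduce() works on 2 and 3.
return_types = ["JSVAL_TYPE_INT32", "JSVAL_TYPE_DOUBLE"]

def pick_single_type(existing, current):
    # Toy combiner: keep the type while it is consistent, otherwise give up.
    return current if existing in ("", current) else "JSVAL_TYPE_UNKNOWN"

print(functools.reduce(pick_single_type, return_types, ""))  # JSVAL_TYPE_UNKNOWN
```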
@@ -2,6 +2,7 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at https://mozilla.org/MPL/2.0/.

+import functools
 import os

 from WebIDL import IDLExternalInterface, IDLSequenceType, IDLWrapperType, WebIDLError
@@ -15,7 +16,7 @@ class Configuration:
 def __init__(self, filename, parseData):
     # Read the configuration file.
     glbl = {}
-    execfile(filename, glbl)
+    exec(compile(open(filename).read(), filename, 'exec'), glbl)
     config = glbl['DOMInterfaces']

     # Build descriptors for all the interfaces we have in the parse data.
@@ -62,7 +63,8 @@ class Configuration:
     c.isCallback() and not c.isInterface()]

 # Keep the descriptor list sorted for determinism.
-self.descriptors.sort(lambda x, y: cmp(x.name, y.name))
+cmp = lambda x, y: (x > y) - (x < y)
+self.descriptors.sort(key=functools.cmp_to_key(lambda x, y: cmp(x.name, y.name)))

 def getInterface(self, ifname):
     return self.interfaces[ifname]
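Two more Python 3 removals are handled in `Configuration`: `execfile()` is gone, and so are the `cmp` builtin and the `cmp=` argument to `sort`. A sketch of both replacements; the `example.conf` file and its contents are made up purely for illustration:

```python
import functools

# execfile(path, globals) is gone in Python 3; read, compile and exec instead.
with open("example.conf", "w") as f:
    f.write("DOMInterfaces = {'Window': {}, 'Blob': {}, 'Node': {}}\n")

glbl = {}
exec(compile(open("example.conf").read(), "example.conf", "exec"), glbl)
config = glbl["DOMInterfaces"]

# cmp() and list.sort(cmp=...) are also gone; define cmp locally and adapt the
# comparison function with functools.cmp_to_key(), as the patch does.
cmp = lambda x, y: (x > y) - (x < y)
names = sorted(config, key=functools.cmp_to_key(lambda x, y: cmp(x, y)))
print(names)  # ['Blob', 'Node', 'Window']
```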
@@ -69,7 +69,7 @@ def transform_report_for_test(report):
 while remaining:
     (name, value) = remaining.pop()
     transformed[name] = '%s %s' % (value['amount'], value['unit'])
-    remaining += map(lambda (k, v): (name + '/' + k, v), list(value['children'].items()))
+    remaining += map(lambda k_v: (name + '/' + k_v[0], k_v[1]), list(value['children'].items()))
 return transformed

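This hunk is about PEP 3113: Python 3 removed tuple parameter unpacking, so `lambda (k, v): ...` is a syntax error there; the replacement takes a single argument and indexes it. A small sketch with made-up report data:

```python
children = {"heap": {"amount": 10, "unit": "MiB"},
            "stack": {"amount": 2, "unit": "MiB"}}
name = "explicit"

# Python 2 only (SyntaxError on Python 3):
#   map(lambda (k, v): (name + '/' + k, v), children.items())
# Python 2 and 3:
pairs = map(lambda k_v: (name + '/' + k_v[0], k_v[1]), list(children.items()))
print(sorted(pairs))
```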
@@ -205,6 +205,7 @@ def linux_tidy_unit():
 .with_treeherder("Linux x64", "Tidy+Unit")
 .with_script("""
     ./mach test-tidy --no-progress --all
+    python3 ./mach test-tidy --no-progress --all --no-wpt
     python3 ./mach build --dev
     python3 ./mach test-unit
     python3 ./mach package --dev
@@ -627,7 +627,7 @@ install them, let us know by filing a bug!")
 def build_env(self, hosts_file_path=None, target=None, is_build=False, test_unit=False, uwp=False, features=None):
     """Return an extended environment dictionary."""
     env = os.environ.copy()
-    if sys.platform == "win32" and type(env['PATH']) == unicode:
+    if sys.platform == "win32" and type(env['PATH']) == six.text_type:
         # On win32, the virtualenv's activate_this.py script sometimes ends up
         # turning os.environ['PATH'] into a unicode string. This doesn't work
         # for passing env vars in to a process, so we force it back to ascii.
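`unicode` exists only on Python 2; `six.text_type` resolves to `unicode` there and to `str` on Python 3, so the same check works under both interpreters. A minimal sketch, assuming the `six` package is installed:

```python
import os
import six

# six.text_type is `unicode` on Python 2 and `str` on Python 3.
env = os.environ.copy()
if isinstance(env.get("PATH", ""), six.text_type):
    print("PATH is a text string on this interpreter")
```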
@@ -776,7 +776,7 @@ class MachCommands(CommandBase):
 def setup_clangfmt(env):
     cmd = "clang-format.exe" if sys.platform == "win32" else "clang-format"
     try:
-        version = check_output([cmd, "--version"], env=env).rstrip()
+        version = check_output([cmd, "--version"], env=env, universal_newlines=True).rstrip()
         print(version)
         if not version.startswith("clang-format version {}.".format(CLANGFMT_VERSION)):
             print("clang-format: wrong version (v{} required). Skipping CPP formatting.".format(CLANGFMT_VERSION))
@@ -785,7 +785,7 @@ def setup_clangfmt(env):
         print("clang-format not installed. Skipping CPP formatting.")
         return False, None, None
     gitcmd = ['git', 'ls-files']
-    gitfiles = check_output(gitcmd + CLANGFMT_CPP_DIRS).splitlines()
+    gitfiles = check_output(gitcmd + CLANGFMT_CPP_DIRS, universal_newlines=True).splitlines()
     filtered = [line for line in gitfiles if line.endswith(".h") or line.endswith(".cpp")]
     return True, cmd, filtered

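Both hunks above add `universal_newlines=True`: on Python 3, `check_output` returns `bytes` by default, and this flag makes it return decoded text, which matches the `str` handling in the surrounding code (Python 2 behaviour is unchanged). A quick sketch, assuming `git` is on `PATH`:

```python
import subprocess

# Without universal_newlines=True, Python 3 returns bytes here.
raw = subprocess.check_output(["git", "--version"])
text = subprocess.check_output(["git", "--version"], universal_newlines=True)
print(type(raw).__name__, type(text).__name__)  # bytes str (on Python 3)
print(text.rstrip())                            # e.g. "git version 2.x.y"
```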
@@ -36,7 +36,7 @@ def wpt_path(*args):
 CONFIG_FILE_PATH = os.path.join(".", "servo-tidy.toml")
 WPT_MANIFEST_PATH = wpt_path("include.ini")
 # regex source https://stackoverflow.com/questions/6883049/
-URL_REGEX = re.compile('https?://(?:[-\w.]|(?:%[\da-fA-F]{2}))+')
+URL_REGEX = re.compile(b'https?://(?:[-\w.]|(?:%[\da-fA-F]{2}))+')

 # Import wptmanifest only when we do have wpt in tree, i.e. we're not
 # inside a Firefox checkout.
@@ -64,7 +64,7 @@ config = {
     "check_ext": {}
 }

-COMMENTS = ["// ", "# ", " *", "/* "]
+COMMENTS = [b"// ", b"# ", b" *", b"/* "]

 # File patterns to include in the non-WPT tidy check.
 FILE_PATTERNS_TO_CHECK = ["*.rs", "*.rc", "*.cpp", "*.c",
@@ -78,40 +78,40 @@ FILE_PATTERNS_TO_IGNORE = ["*.#*", "*.pyc", "fake-ld.sh", "*.ogv", "*.webm"]
 SPEC_BASE_PATH = "components/script/dom/"

 WEBIDL_STANDARDS = [
-    "//www.khronos.org/registry/webgl/extensions",
-    "//www.khronos.org/registry/webgl/specs",
-    "//developer.mozilla.org/en-US/docs/Web/API",
-    "//dev.w3.org/2006/webapi",
-    "//dev.w3.org/csswg",
-    "//dev.w3.org/fxtf",
-    "//dvcs.w3.org/hg",
-    "//dom.spec.whatwg.org",
-    "//drafts.csswg.org",
-    "//drafts.css-houdini.org",
-    "//drafts.fxtf.org",
-    "//encoding.spec.whatwg.org",
-    "//fetch.spec.whatwg.org",
-    "//html.spec.whatwg.org",
-    "//url.spec.whatwg.org",
-    "//xhr.spec.whatwg.org",
-    "//w3c.github.io",
-    "//heycam.github.io/webidl",
-    "//webbluetoothcg.github.io/web-bluetooth/",
-    "//svgwg.org/svg2-draft",
-    "//wicg.github.io",
-    "//webaudio.github.io",
-    "//immersive-web.github.io/",
-    "//github.com/immersive-web/webxr-test-api/",
-    "//gpuweb.github.io",
+    b"//www.khronos.org/registry/webgl/extensions",
+    b"//www.khronos.org/registry/webgl/specs",
+    b"//developer.mozilla.org/en-US/docs/Web/API",
+    b"//dev.w3.org/2006/webapi",
+    b"//dev.w3.org/csswg",
+    b"//dev.w3.org/fxtf",
+    b"//dvcs.w3.org/hg",
+    b"//dom.spec.whatwg.org",
+    b"//drafts.csswg.org",
+    b"//drafts.css-houdini.org",
+    b"//drafts.fxtf.org",
+    b"//encoding.spec.whatwg.org",
+    b"//fetch.spec.whatwg.org",
+    b"//html.spec.whatwg.org",
+    b"//url.spec.whatwg.org",
+    b"//xhr.spec.whatwg.org",
+    b"//w3c.github.io",
+    b"//heycam.github.io/webidl",
+    b"//webbluetoothcg.github.io/web-bluetooth/",
+    b"//svgwg.org/svg2-draft",
+    b"//wicg.github.io",
+    b"//webaudio.github.io",
+    b"//immersive-web.github.io/",
+    b"//github.com/immersive-web/webxr-test-api/",
+    b"//gpuweb.github.io",
     # Not a URL
-    "// This interface is entirely internal to Servo, and should not be" +
-    " accessible to\n// web pages."
+    b"// This interface is entirely internal to Servo, and should not be" +
+    b" accessible to\n// web pages."
 ]


 def is_iter_empty(iterator):
     try:
-        obj = iterator.next()
+        obj = next(iterator)
         return True, itertools.chain((obj,), iterator)
     except StopIteration:
         return False, iterator
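Because tidy now reads source files in binary mode (see the `open(filename, "rb")` hunk further down), every prefix, suffix and pattern it compares against becomes a byte literal, and `iterator.next()` becomes the builtin `next(iterator)`, which works on both interpreters. A sketch of the two patterns, reusing the patched `is_iter_empty` helper:

```python
import itertools

# Comparing bytes against bytes: prefixes, suffixes and membership tests all
# need b"" literals once lines are read in binary mode.
line = b"// https://drafts.csswg.org/css-flexbox/\n"
print(line.startswith(b"// "), b"\t" in line)  # True False

# iterator.next() is Python 2 only; next(iterator) works on both.
def is_iter_empty(iterator):
    try:
        obj = next(iterator)
        return True, itertools.chain((obj,), iterator)
    except StopIteration:
        return False, iterator

non_empty, it = is_iter_empty(iter([1, 2, 3]))
print(non_empty, list(it))  # True [1, 2, 3]
```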
@@ -163,9 +163,9 @@ class FileList(object):

 def _git_changed_files(self):
     args = ["git", "log", "-n1", "--merges", "--format=%H"]
-    last_merge = subprocess.check_output(args).strip()
+    last_merge = subprocess.check_output(args, universal_newlines=True).strip()
     args = ["git", "diff", "--name-only", last_merge, self.directory]
-    file_list = normilize_paths(subprocess.check_output(args).splitlines())
+    file_list = normilize_paths(subprocess.check_output(args, universal_newlines=True).splitlines())

     for f in file_list:
         if not any(os.path.join('.', os.path.dirname(f)).startswith(path) for path in self.excluded):
@@ -179,7 +179,7 @@ class FileList(object):
         yield os.path.join(root, rel_path)

 def __iter__(self):
-    return self
+    return self.generator

 def next(self):
     return next(self.generator)
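`FileList.next()` follows the Python 2 iterator protocol; Python 3 looks for `__next__()` instead. Returning the underlying generator from `__iter__` sidesteps the difference, since generators implement the correct protocol on both versions. A self-contained sketch of the same idea; the class contents here are illustrative, not the real `FileList`:

```python
class FileList(object):
    # Illustrative stand-in for tidy's FileList wrapper.
    def __init__(self, names):
        self.generator = (n for n in names if n.endswith(".rs"))

    def __iter__(self):
        # Hand out the generator itself: it has both next() (Python 2) and
        # __next__() (Python 3), so `for f in file_list` works everywhere.
        return self.generator

    def next(self):
        # Kept for callers that invoke .next() explicitly.
        return next(self.generator)

files = FileList(["lib.rs", "README.md", "main.rs"])
print(list(files))  # ['lib.rs', 'main.rs']
```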
@@ -200,7 +200,7 @@ def filter_files(start_dir, only_changed_files, progress):
 # always yield Cargo.lock so that the correctness of transitive dependacies is checked
 yield "./Cargo.lock"

-for file_name in file_iter:
+for file_name in iter(file_iter):
     base_name = os.path.basename(file_name)
     if not any(fnmatch.fnmatch(base_name, pattern) for pattern in FILE_PATTERNS_TO_CHECK):
         continue
@@ -212,7 +212,7 @@ def filter_files(start_dir, only_changed_files, progress):
 def uncomment(line):
     for c in COMMENTS:
         if line.startswith(c):
-            if line.endswith("*/"):
+            if line.endswith(b"*/"):
                 return line[len(c):(len(line) - 3)].strip()
             return line[len(c):].strip()

@@ -227,15 +227,15 @@ def check_license(file_name, lines):
         config["skip-check-licenses"]:
     raise StopIteration

-if lines[0].startswith("#!") and lines[1].strip():
+if lines[0].startswith(b"#!") and lines[1].strip():
     yield (1, "missing blank line after shebang")

 blank_lines = 0
-max_blank_lines = 2 if lines[0].startswith("#!") else 1
+max_blank_lines = 2 if lines[0].startswith(b"#!") else 1
 license_block = []

 for l in lines:
-    l = l.rstrip('\n')
+    l = l.rstrip(b'\n')
     if not l.strip():
         blank_lines += 1
         if blank_lines >= max_blank_lines:
@@ -245,7 +245,7 @@ def check_license(file_name, lines):
     if line is not None:
         license_block.append(line)

-header = " ".join(license_block)
+header = (b" ".join(license_block)).decode("utf-8")
 valid_license = OLD_MPL in header or MPL in header or is_apache_licensed(header)
 acknowledged_bad_license = "xfail-license" in header
 if not (valid_license or acknowledged_bad_license):
@@ -254,9 +254,9 @@ def check_license(file_name, lines):

 def check_modeline(file_name, lines):
     for idx, line in enumerate(lines[:5]):
-        if re.search('^.*[ \t](vi:|vim:|ex:)[ \t]', line):
+        if re.search(b'^.*[ \t](vi:|vim:|ex:)[ \t]', line):
             yield (idx + 1, "vi modeline present")
-        elif re.search('-\*-.*-\*-', line, re.IGNORECASE):
+        elif re.search(b'-\*-.*-\*-', line, re.IGNORECASE):
             yield (idx + 1, "emacs file variables present")


@@ -267,7 +267,7 @@ def check_length(file_name, idx, line):

 # Prefer shorter lines when shell scripting.
 max_length = 80 if file_name.endswith(".sh") else 120
-if len(line.rstrip('\n')) > max_length and not is_unsplittable(file_name, line):
+if len(line.rstrip(b'\n')) > max_length and not is_unsplittable(file_name, line):
     yield (idx + 1, "Line is longer than %d characters" % max_length)

@@ -279,38 +279,38 @@ def is_unsplittable(file_name, line):
 return (
     contains_url(line) or
     file_name.endswith(".rs") and
-    line.startswith("use ") and
-    "{" not in line
+    line.startswith(b"use ") and
+    b"{" not in line
 )


 def check_whatwg_specific_url(idx, line):
-    match = re.search(r"https://html\.spec\.whatwg\.org/multipage/[\w-]+\.html#([\w\:-]+)", line)
+    match = re.search(br"https://html\.spec\.whatwg\.org/multipage/[\w-]+\.html#([\w\:-]+)", line)
     if match is not None:
         preferred_link = "https://html.spec.whatwg.org/multipage/#{}".format(match.group(1))
         yield (idx + 1, "link to WHATWG may break in the future, use this format instead: {}".format(preferred_link))


 def check_whatwg_single_page_url(idx, line):
-    match = re.search(r"https://html\.spec\.whatwg\.org/#([\w\:-]+)", line)
+    match = re.search(br"https://html\.spec\.whatwg\.org/#([\w\:-]+)", line)
     if match is not None:
         preferred_link = "https://html.spec.whatwg.org/multipage/#{}".format(match.group(1))
         yield (idx + 1, "links to WHATWG single-page url, change to multi page: {}".format(preferred_link))


 def check_whitespace(idx, line):
-    if line[-1] == "\n":
+    if line.endswith(b"\n"):
         line = line[:-1]
     else:
         yield (idx + 1, "no newline at EOF")

-    if line.endswith(" "):
+    if line.endswith(b" "):
         yield (idx + 1, "trailing whitespace")

-    if "\t" in line:
+    if b"\t" in line:
         yield (idx + 1, "tab on line")

-    if "\r" in line:
+    if b"\r" in line:
         yield (idx + 1, "CR on line")

@@ -339,7 +339,7 @@ def check_flake8(file_name, contents):
 output = ""
 try:
     args = ["flake8", "--ignore=" + ",".join(ignore), file_name]
-    subprocess.check_output(args)
+    subprocess.check_output(args, universal_newlines=True)
 except subprocess.CalledProcessError as e:
     output = e.output
 for error in output.splitlines():
@@ -360,7 +360,7 @@ def check_lock(file_name, contents):
 # Package names to be neglected (as named by cargo)
 exceptions = config["ignore"]["packages"]

-content = toml.loads(contents)
+content = toml.loads(contents.decode("utf-8"))

 packages_by_name = {}
 for package in content.get("package", []):
@@ -431,7 +431,7 @@ def check_toml(file_name, lines):
 if not file_name.endswith("Cargo.toml"):
     raise StopIteration
 ok_licensed = False
-for idx, line in enumerate(lines):
+for idx, line in enumerate(map(lambda line: line.decode("utf-8"), lines)):
     if idx == 0 and "[workspace]" in line:
         raise StopIteration
     line_without_comment, _, _ = line.partition("#")
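Since the raw lines are now `bytes`, checkers that do text processing decode each line on the fly with `map(lambda line: line.decode("utf-8"), lines)`, which stays lazy on both interpreters. A small sketch of the pattern with made-up `Cargo.toml` lines:

```python
lines = [b'[package]\n',
         b'name = "servo"  # comment\n',
         b'license = "MPL-2.0"\n']

# Decode lazily; enumerate() works over the map object on Python 2 and 3.
for idx, line in enumerate(map(lambda line: line.decode("utf-8"), lines)):
    line_without_comment, _, _ = line.partition("#")
    print(idx, line_without_comment.strip())
```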
@@ -447,7 +447,7 @@ def check_shell(file_name, lines):
 if not file_name.endswith(".sh"):
     raise StopIteration

-shebang = "#!/usr/bin/env bash"
+shebang = b"#!/usr/bin/env bash"
 required_options = {"set -o errexit", "set -o nounset", "set -o pipefail"}

 did_shebang_check = False
@@ -459,10 +459,10 @@ def check_shell(file_name, lines):
 if lines[0].rstrip() != shebang:
     yield (1, 'script does not have shebang "{}"'.format(shebang))

-for idx in range(1, len(lines)):
-    stripped = lines[idx].rstrip()
+for idx, line in enumerate(map(lambda line: line.decode("utf-8"), lines[1:])):
+    stripped = line.rstrip()
     # Comments or blank lines are ignored. (Trailing whitespace is caught with a separate linter.)
-    if lines[idx].startswith("#") or stripped == "":
+    if line.startswith("#") or stripped == "":
         continue

     if not did_shebang_check:
@@ -548,7 +548,7 @@ def check_rust(file_name, lines):
 decl_expected = "\n\t\033[93mexpected: {}\033[0m"
 decl_found = "\n\t\033[91mfound: {}\033[0m"

-for idx, original_line in enumerate(lines):
+for idx, original_line in enumerate(map(lambda line: line.decode("utf-8"), lines)):
     # simplify the analysis
     line = original_line.strip()
     indent = len(original_line) - len(line)
@@ -661,7 +661,7 @@ def check_rust(file_name, lines):
 match = re.search(r"#!\[feature\((.*)\)\]", line)

 if match:
-    features = map(lambda w: w.strip(), match.group(1).split(','))
+    features = list(map(lambda w: w.strip(), match.group(1).split(',')))
     sorted_features = sorted(features)
     if sorted_features != features and check_alphabetical_order:
         yield(idx + 1, decl_message.format("feature attribute")
@@ -683,7 +683,7 @@ def check_rust(file_name, lines):
 # strip /(pub )?mod/ from the left and ";" from the right
 mod = line[4:-1] if line.startswith("mod ") else line[8:-1]

-if (idx - 1) < 0 or "#[macro_use]" not in lines[idx - 1]:
+if (idx - 1) < 0 or "#[macro_use]" not in lines[idx - 1].decode("utf-8"):
     match = line.find(" {")
     if indent not in prev_mod:
         prev_mod[indent] = ""
@@ -703,7 +703,7 @@ def check_rust(file_name, lines):
 # match the derivable traits filtering out macro expansions
 match = re.search(r"#\[derive\(([a-zA-Z, ]*)", line)
 if match:
-    derives = map(lambda w: w.strip(), match.group(1).split(','))
+    derives = list(map(lambda w: w.strip(), match.group(1).split(',')))
     # sort, compare and report
     sorted_derives = sorted(derives)
     if sorted_derives != derives and check_alphabetical_order:
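The `list(map(...))` wrappers matter because `map()` returns a one-shot iterator on Python 3: `sorted()` would consume it and the subsequent `sorted_features != features` comparison would always report a mismatch. A sketch:

```python
raw = "Debug, Clone, Copy"

# Without list(), Python 3 would leave `features` as an exhausted map object
# after sorted() runs, and the comparison below would always fire.
features = list(map(lambda w: w.strip(), raw.split(',')))
sorted_features = sorted(features)
if sorted_features != features:
    print("feature attribute is not alphabetically sorted")
```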
@@ -870,7 +870,7 @@ def check_spec(file_name, lines):
 in_impl = False
 pattern = "impl {}Methods for {} {{".format(file_name, file_name)

-for idx, line in enumerate(lines):
+for idx, line in enumerate(map(lambda line: line.decode("utf-8"), lines)):
     if "// check-tidy: no specs after this line" in line:
         break
     if not patt.match(line):
@@ -878,7 +878,7 @@ def check_spec(file_name, lines):
     in_impl = True
 if ("fn " in line or macro_patt.match(line)) and brace_count == 1:
     for up_idx in range(1, idx + 1):
-        up_line = lines[idx - up_idx]
+        up_line = lines[idx - up_idx].decode("utf-8")
         if link_patt.match(up_line):
             # Comment with spec link exists
             break
@@ -1023,7 +1023,7 @@ def collect_errors_for_files(files_to_check, checking_functions, line_checking_f
 for filename in files_to_check:
     if not os.path.exists(filename):
         continue
-    with open(filename, "r") as f:
+    with open(filename, "rb") as f:
         contents = f.read()
         if not contents.strip():
             yield filename, 0, "file is empty"
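Opening the files in binary mode is what makes the byte-literal changes above consistent: on Python 3, `open(path, "r")` decodes with a locale-dependent codec and can fail on non-UTF-8 content, while `"rb"` hands every checker the same `bytes` on both interpreters. A sketch that writes a throwaway file so it is runnable:

```python
with open("example.rs", "wb") as f:
    f.write(b"// comment\nfn main() {}\n")

with open("example.rs", "rb") as f:   # bytes on Python 2 and 3 alike
    contents = f.read()

lines = contents.splitlines(True)
print(type(contents).__name__)        # bytes
print(lines[0].startswith(b"// "))    # True
```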
@@ -4,6 +4,8 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at https://mozilla.org/MPL/2.0/.

+from __future__ import print_function
+
 import sys
 import os
 from os import path
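The energy-harness scripts still used `print` as a statement, which is a syntax error under Python 3. Importing `print_function` from `__future__` makes the function form the only accepted one on Python 2 as well, so the files parse the same way everywhere. A tiny sketch:

```python
from __future__ import print_function

GUARD_TIME = 10

# print 'sleep ' + str(GUARD_TIME)   # Python 2 statement form: SyntaxError on 3
print('sleep ' + str(GUARD_TIME))    # works on Python 2 and 3
```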
@@ -12,6 +14,7 @@ import datetime
 import argparse
 import platform
 import subprocess
+import six

 TOP_DIR = path.join("..", "..")
 GUARD_TIME = 10
@@ -133,7 +136,7 @@ def execute(base_dir, build_target, renderer, page, profile, trial, layout_threa
 log_dir = path.join(base_dir, "logs_l" + str(layout_thread_count),
                     "trial_" + str(trial))
 if os.path.exists(log_dir):
-    print "Log directory already exists: " + log_dir
+    print("Log directory already exists: " + log_dir)
     sys.exit(1)
 os.makedirs(log_dir)

@@ -142,16 +145,16 @@ def execute(base_dir, build_target, renderer, page, profile, trial, layout_threa

 # Execute
 start_energy_reader()
-print 'sleep ' + str(GUARD_TIME)
+print('sleep ' + str(GUARD_TIME))
 time.sleep(GUARD_TIME)
 time_start = time.time()
 energy_start = read_energy()
-print cmd
+print(cmd)
 os.system(cmd)
 energy_end = read_energy()
 time_end = time.time()
 stop_energy_reader()
-print 'sleep ' + str(GUARD_TIME)
+print('sleep ' + str(GUARD_TIME))
 time.sleep(GUARD_TIME)

 uj = energy_end - energy_start
@@ -172,11 +175,11 @@ def execute(base_dir, build_target, renderer, page, profile, trial, layout_threa
 f.write("\nPower (W): " + str(watts))


-def characterize(build_target, base_dir, (min_layout_threads, max_layout_threads), renderer, page, profile, trials):
+def characterize(build_target, base_dir, layout_threads_limits, renderer, page, profile, trials):
     """Run all configurations and capture results.
     """
-    for layout_thread_count in xrange(min_layout_threads, max_layout_threads + 1):
-        for trial in xrange(1, trials + 1):
+    for layout_thread_count in six.moves.xrange(layout_threads_limits[0], layout_threads_limits[1] + 1):
+        for trial in six.moves.xrange(1, trials + 1):
             execute(base_dir, build_target, renderer, page, profile, trial, layout_thread_count)

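Two further removals appear in `characterize`: tuple unpacking in a `def` signature (PEP 3113 again) and the `xrange` builtin. The bounds now arrive as a single sequence argument, and `six.moves.xrange` maps to `xrange` on Python 2 and `range` on Python 3. A sketch with an illustrative body, assuming `six` is installed:

```python
import six

# Python 2 only:  def characterize(build_target, base_dir, (min_lt, max_lt), ...)
def characterize(layout_threads_limits, trials):
    for layout_thread_count in six.moves.xrange(layout_threads_limits[0],
                                                layout_threads_limits[1] + 1):
        for trial in six.moves.xrange(1, trials + 1):
            print("run: {} layout threads, trial {}".format(layout_thread_count, trial))

characterize((1, 2), trials=2)
```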
@@ -250,7 +253,7 @@ def main():
 trials = args.trials

 if os.path.exists(output_dir):
-    print "Output directory already exists: " + output_dir
+    print("Output directory already exists: " + output_dir)
     sys.exit(1)
 os.makedirs(output_dir)

@@ -4,6 +4,8 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at https://mozilla.org/MPL/2.0/.

+from __future__ import print_function
+
 import sys
 import os
 from os import path
@@ -43,15 +45,15 @@ def execute(base_dir, renderer, page, profile, trial, layout_thread_count):
 log_dir = path.join(base_dir, "logs_l" + str(layout_thread_count),
                     "trial_" + str(trial))
 if os.path.exists(log_dir):
-    print "Log directory already exists: " + log_dir
+    print("Log directory already exists: " + log_dir)
     sys.exit(1)
 os.makedirs(log_dir)

 # Execute
 cmd = get_command(layout_thread_count, renderer, page, profile)
-print cmd
+print(cmd)
 os.system(cmd)
-print 'sleep ' + str(GUARD_TIME)
+print('sleep ' + str(GUARD_TIME))
 time.sleep(GUARD_TIME)

 # Write a file that describes this execution
@@ -109,7 +111,7 @@ def main():
 profile = args.profile

 if os.path.exists(output_dir):
-    print "Output directory already exists: " + output_dir
+    print("Output directory already exists: " + output_dir)
     sys.exit(1)
 os.makedirs(output_dir)

@@ -4,11 +4,14 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at https://mozilla.org/MPL/2.0/.

+from __future__ import print_function
+
 import argparse
 import matplotlib.pyplot as plt
 import numpy as np
 import os
 from os import path
+import six
 import sys
 import warnings

@@ -196,7 +199,7 @@ def plot_trial_time_series(config, trial, trial_data, max_end_time, max_power, o

 i = 10
 for (p, ts, te, es, ee) in trial_data:
-    xranges = [(ts[j] / 1000000.0, (te[j] - ts[j]) / 1000000.0) for j in xrange(len(ts))]
+    xranges = [(ts[j] / 1000000.0, (te[j] - ts[j]) / 1000000.0) for j in six.moves.xrange(len(ts))]
     ax1.broken_barh(xranges, (i - 0.5 * width, width))
     i += 10
     # place a vbar at the final time for this trial
@@ -385,20 +388,20 @@ def main():
 android = args.android

 if not os.path.exists(directory):
-    print "Input directory does not exist: " + directory
+    print("Input directory does not exist: " + directory)
     sys.exit(1)

 if os.path.exists(output_dir):
-    print "Output directory already exists: " + output_dir
+    print("Output directory already exists: " + output_dir)
     sys.exit(1)

 res = process_logs(directory)

 if not android:
     best = find_best_executions(directory)
-    print 'Best time:', best[0]
-    print 'Best energy:', best[1]
-    print 'Best power:', best[2]
+    print('Best time:', best[0])
+    print('Best energy:', best[1])
+    print('Best power:', best[2])

 os.makedirs(output_dir)
 plot_all_raw_totals(res, output_dir)