tests/wpt: use iterkeys, itervalues and iteritems from six package

This commit is contained in:
marmeladema 2019-10-20 23:30:00 +01:00
parent f51ae46570
commit 7fdd0b94cf
3 changed files with 13 additions and 10 deletions

View file

@@ -8,6 +8,7 @@ import os
import sys
import subprocess
import platform
+from six import itervalues, iteritems
DEFAULT_MOVE_UP_CODE = u"\x1b[A"
DEFAULT_CLEAR_EOL_CODE = u"\x1b[K"
@@ -105,7 +106,7 @@ class GroupingFormatter(base.BaseFormatter):
return new_display + "No tests running.\n"
def suite_start(self, data):
-self.number_of_tests = sum(len(tests) for tests in data["tests"].itervalues())
+self.number_of_tests = sum(len(tests) for tests in itervalues(data["tests"]))
self.start_time = data["time"]
if self.number_of_tests == 0:
@@ -182,7 +183,7 @@ class GroupingFormatter(base.BaseFormatter):
else:
failures_by_stack[failure['stack']].append(failure)
-for (stack, failures) in failures_by_stack.iteritems():
+for (stack, failures) in iteritems(failures_by_stack):
output += make_subtests_failure(test_name, failures, stack)
return output

View file

@@ -8,6 +8,7 @@ import os
import sys
import tempfile
from collections import defaultdict
+from six import iterkeys, iteritems
from mozlog.structured import commandline
from wptrunner.wptcommandline import get_test_paths, set_from_config
@@ -58,7 +59,7 @@ def update(logger, wpt_dir, check_clean=True, rebuild=False):
def _update(logger, test_paths, rebuild):
-for url_base, paths in test_paths.iteritems():
+for url_base, paths in iteritems(test_paths):
manifest_path = os.path.join(paths["metadata_path"], "MANIFEST.json")
cache_subdir = os.path.relpath(os.path.dirname(manifest_path),
                               os.path.dirname(__file__))
@@ -75,7 +76,7 @@ def _update(logger, test_paths, rebuild):
def _check_clean(logger, test_paths):
manifests_by_path = {}
rv = 0
-for url_base, paths in test_paths.iteritems():
+for url_base, paths in iteritems(test_paths):
tests_path = paths["tests_path"]
manifest_path = os.path.join(paths["metadata_path"], "MANIFEST.json")
@@ -102,7 +103,7 @@ def _check_clean(logger, test_paths):
manifests_by_path[manifest_path] = (old_manifest, new_manifest)
-for manifest_path, (old_manifest, new_manifest) in manifests_by_path.iteritems():
+for manifest_path, (old_manifest, new_manifest) in iteritems(manifests_by_path):
if not diff_manifests(logger, manifest_path, old_manifest, new_manifest):
logger.error("Manifest %s is outdated, use |./mach update-manifest| to fix." % manifest_path)
rv = 1
@@ -136,8 +137,8 @@ def diff_manifests(logger, manifest_path, old_manifest, new_manifest):
test_id = tuple(test_id)
items[path].add((test_type, test_id))
-old_paths = set(old_items.iterkeys())
-new_paths = set(new_items.iterkeys())
+old_paths = set(iterkeys(old_items))
+new_paths = set(iterkeys(new_items))
added_paths = new_paths - old_paths
deleted_paths = old_paths - new_paths
@@ -166,9 +167,9 @@ def diff_manifests(logger, manifest_path, old_manifest, new_manifest):
# Manifest currently has some list vs tuple inconsistencies that break
# a simple equality comparison.
new_paths = {(key, value[0], value[1])
-             for (key, value) in new_manifest.to_json()["paths"].iteritems()}
+             for (key, value) in iteritems(new_manifest.to_json()["paths"])}
old_paths = {(key, value[0], value[1])
-             for (key, value) in old_manifest.to_json()["paths"].iteritems()}
+             for (key, value) in iteritems(old_manifest.to_json()["paths"])}
if old_paths != new_paths:
logger.warning("Manifest %s contains correct tests but file hashes changed." % manifest_path)  # noqa
clean = False

View file

@@ -6,6 +6,7 @@ import subprocess
import sys
import six.moves.urllib as urllib
from six.moves import input
+from six import iteritems
from wptrunner.update.sync import UpdateCheckout
from wptrunner.update.tree import get_unique_name
@@ -287,7 +288,7 @@ class UpdateLastSyncData(Step):
data = {"local": state.local_tree.rev,
        "upstream": state.sync_tree.rev}
with open(state.sync_data_path, "w") as f:
-for key, value in data.iteritems():
+for key, value in iteritems(data):
f.write("%s: %s\n" % (key, value))
# This gets added to the patch later on