Auto merge of #24512 - marmeladema:issue-23607/compat, r=SimonSapin

More changes for Python3 compatibility

Following #24435 for #23607 here are even more changes to be compatible with Python3.
I managed to get `./mach build` to work properly with Python3, but `test-tidy` does not work yet because of a lot of problems in `web-platform-tests`, which I will have to deal with at some point.

Because flake8 appears to be incompatible with the distro package in some way, I had to change the way we call flake8 (which was deprecated anyway). With this change, we should be able to update flake8 to a recent version, but subprocess can be surprising on the Windows platform, so I'd like someone to try out those changes.

---
<!-- Thank you for contributing to Servo! Please replace each `[ ]` by `[X]` when the step is complete, and replace `___` with appropriate data: -->
- [x] `./mach build -d` does not report any errors
- [x] `./mach test-tidy` does not report any errors

<!-- Also, please make sure that "Allow edits from maintainers" checkbox is checked, so that we can help you if you get stuck somewhere along the way.-->

<!-- Pull requests that do not address these steps are welcome, but they will require additional verification as part of the review process. -->
This commit is contained in:
bors-servo 2019-10-22 13:48:50 -04:00 committed by GitHub
commit 3e7a0bfc42
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
11 changed files with 79 additions and 72 deletions

View file

@ -8,6 +8,7 @@ import os
import sys
import subprocess
import platform
from six import itervalues, iteritems
DEFAULT_MOVE_UP_CODE = u"\x1b[A"
DEFAULT_CLEAR_EOL_CODE = u"\x1b[K"
@ -105,7 +106,7 @@ class GroupingFormatter(base.BaseFormatter):
return new_display + "No tests running.\n"
def suite_start(self, data):
self.number_of_tests = sum(len(tests) for tests in data["tests"].itervalues())
self.number_of_tests = sum(len(tests) for tests in itervalues(data["tests"]))
self.start_time = data["time"]
if self.number_of_tests == 0:
@ -182,7 +183,7 @@ class GroupingFormatter(base.BaseFormatter):
else:
failures_by_stack[failure['stack']].append(failure)
for (stack, failures) in failures_by_stack.iteritems():
for (stack, failures) in iteritems(failures_by_stack):
output += make_subtests_failure(test_name, failures, stack)
return output

View file

@ -8,6 +8,7 @@ import os
import sys
import tempfile
from collections import defaultdict
from six import iterkeys, iteritems
from mozlog.structured import commandline
from wptrunner.wptcommandline import get_test_paths, set_from_config
@ -58,7 +59,7 @@ def update(logger, wpt_dir, check_clean=True, rebuild=False):
def _update(logger, test_paths, rebuild):
for url_base, paths in test_paths.iteritems():
for url_base, paths in iteritems(test_paths):
manifest_path = os.path.join(paths["metadata_path"], "MANIFEST.json")
cache_subdir = os.path.relpath(os.path.dirname(manifest_path),
os.path.dirname(__file__))
@ -75,7 +76,7 @@ def _update(logger, test_paths, rebuild):
def _check_clean(logger, test_paths):
manifests_by_path = {}
rv = 0
for url_base, paths in test_paths.iteritems():
for url_base, paths in iteritems(test_paths):
tests_path = paths["tests_path"]
manifest_path = os.path.join(paths["metadata_path"], "MANIFEST.json")
@ -102,7 +103,7 @@ def _check_clean(logger, test_paths):
manifests_by_path[manifest_path] = (old_manifest, new_manifest)
for manifest_path, (old_manifest, new_manifest) in manifests_by_path.iteritems():
for manifest_path, (old_manifest, new_manifest) in iteritems(manifests_by_path):
if not diff_manifests(logger, manifest_path, old_manifest, new_manifest):
logger.error("Manifest %s is outdated, use |./mach update-manifest| to fix." % manifest_path)
rv = 1
@ -136,8 +137,8 @@ def diff_manifests(logger, manifest_path, old_manifest, new_manifest):
test_id = tuple(test_id)
items[path].add((test_type, test_id))
old_paths = set(old_items.iterkeys())
new_paths = set(new_items.iterkeys())
old_paths = set(iterkeys(old_items))
new_paths = set(iterkeys(new_items))
added_paths = new_paths - old_paths
deleted_paths = old_paths - new_paths
@ -166,9 +167,9 @@ def diff_manifests(logger, manifest_path, old_manifest, new_manifest):
# Manifest currently has some list vs tuple inconsistencies that break
# a simple equality comparison.
new_paths = {(key, value[0], value[1])
for (key, value) in new_manifest.to_json()["paths"].iteritems()}
for (key, value) in iteritems(new_manifest.to_json()["paths"])}
old_paths = {(key, value[0], value[1])
for (key, value) in old_manifest.to_json()["paths"].iteritems()}
for (key, value) in iteritems(old_manifest.to_json()["paths"])}
if old_paths != new_paths:
logger.warning("Manifest %s contains correct tests but file hashes changed." % manifest_path) # noqa
clean = False

View file

@ -8,7 +8,7 @@ import gzip
import json
import os
import requests
import urlparse
import six.moves.urllib as urllib
treeherder_base = "https://treeherder.mozilla.org/"
@ -53,7 +53,7 @@ def download(url, prefix, dest, force_suffix=True):
def get_blobber_url(branch, job):
job_id = job["id"]
resp = requests.get(urlparse.urljoin(treeherder_base,
resp = requests.get(urllib.parse.urljoin(treeherder_base,
"/api/project/%s/artifact/?job_id=%i&name=Job%%20Info" % (branch,
job_id)))
job_data = resp.json()
@ -71,13 +71,13 @@ def get_blobber_url(branch, job):
def get_structured_logs(branch, commit, dest=None):
resp = requests.get(urlparse.urljoin(treeherder_base, "/api/project/%s/resultset/?revision=%s" % (branch, commit)))
resp = requests.get(urllib.parse.urljoin(treeherder_base, "/api/project/%s/resultset/?revision=%s" % (branch, commit)))
revision_data = resp.json()
result_set = revision_data["results"][0]["id"]
resp = requests.get(urlparse.urljoin(treeherder_base, "/api/project/%s/jobs/?result_set_id=%s&count=2000&exclusion_profile=false" % (branch, result_set)))
resp = requests.get(urllib.parse.urljoin(treeherder_base, "/api/project/%s/jobs/?result_set_id=%s&count=2000&exclusion_profile=false" % (branch, result_set)))
job_data = resp.json()

View file

@ -2,8 +2,10 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
from __future__ import print_function
import json
from urlparse import urljoin
from six.moves.urllib.parse import urljoin
requests = None
class GitHubError(Exception):
@ -46,7 +48,7 @@ class GitHub(object):
if 200 <= resp.status_code < 300:
return resp.json()
else:
print resp.status_code, resp.json()
print(resp.status_code, resp.json())
raise GitHubError(resp.status_code, resp.json())
def repo(self, owner, name):

View file

@ -1,15 +1,18 @@
from __future__ import print_function
import os
import re
import subprocess
import sys
import urlparse
import six.moves.urllib as urllib
from six.moves import input
from six import iteritems
from wptrunner.update.sync import UpdateCheckout
from wptrunner.update.tree import get_unique_name
from wptrunner.update.base import Step, StepRunner, exit_clean, exit_unclean
from .tree import Commit, GitTree, Patch
import github
from .github import GitHub
@ -155,9 +158,9 @@ class SelectCommits(Step):
while True:
commits = state.source_commits[:]
for i, commit in enumerate(commits):
print "%i:\t%s" % (i, commit.message.summary)
print("%i:\t%s" % (i, commit.message.summary))
remove = raw_input("Provide a space-separated list of any commits numbers to remove from the list to upstream:\n").strip()
remove = input("Provide a space-separated list of any commits numbers to remove from the list to upstream:\n").strip()
remove_idx = set()
invalid = False
for item in remove.split(" "):
@ -178,10 +181,10 @@ class SelectCommits(Step):
keep_commits = [(i,cmt) for i,cmt in enumerate(commits) if i not in remove_idx]
#TODO: consider printed removed commits
print "Selected the following commits to keep:"
print("Selected the following commits to keep:")
for i, commit in keep_commits:
print "%i:\t%s" % (i, commit.message.summary)
confirm = raw_input("Keep the above commits? y/n\n").strip().lower()
print("%i:\t%s" % (i, commit.message.summary))
confirm = input("Keep the above commits? y/n\n").strip().lower()
if confirm == "y":
state.source_commits = [item[1] for item in keep_commits]
@ -210,7 +213,7 @@ class MovePatches(Step):
try:
state.sync_tree.import_patch(stripped_patch, 1 + strip_count)
except:
print patch.diff
print(patch.diff)
raise
state.commits_loaded = i
@ -262,7 +265,7 @@ class MergeUpstream(Step):
if "merge_index" not in state:
state.merge_index = 0
org, name = urlparse.urlsplit(state.sync["remote_url"]).path[1:].split("/")
org, name = urllib.parse.urlsplit(state.sync["remote_url"]).path[1:].split("/")
if name.endswith(".git"):
name = name[:-4]
state.gh_repo = gh.repo(org, name)
@ -285,7 +288,7 @@ class UpdateLastSyncData(Step):
data = {"local": state.local_tree.rev,
"upstream": state.sync_tree.rev}
with open(state.sync_data_path, "w") as f:
for key, value in data.iteritems():
for key, value in iteritems(data):
f.write("%s: %s\n" % (key, value))
# This gets added to the patch later on