Mirror of https://github.com/servo/servo.git, synced 2025-06-28 19:13:41 +01:00
Auto merge of #24435 - marmeladema:issue-23607/compat, r=jdm
Issue 23607: first pass of changes for compatibility with Python 3

As much as I want to migrate entirely to Python 3 (see #23607), it will take some time: the changes in web-platform-tests are significant and rely on upstream fixes being merged and synced downstream. In the meantime, let's improve compatibility with Python 3 so that the later migration is less painful. The build system is definitely not ready for Python 3 yet, but this is a step in the right direction.

---

- [x] `./mach build -d` does not report any errors
- [x] `./mach test-tidy` does not report any errors
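Most of the hunks below follow the same pattern: the Python 2-only `urllib`/`urllib2` imports are replaced with `six.moves.urllib`, which exposes the Python 3 module layout (`request`, `parse`, `error`) on both interpreters. A minimal standalone sketch of that pattern (illustrative only; the function and URL handling below are not code from the Servo tree):

```python
import six.moves.urllib as urllib  # same alias the diff uses


def fetch_quoted(base_url, name):
    # urllib.parse.quote maps to urllib.quote on Python 2
    url = "{}/{}".format(base_url, urllib.parse.quote(name, safe=''))
    try:
        # urllib.request.urlopen maps to urllib2.urlopen on Python 2
        return urllib.request.urlopen(url).read()
    except urllib.error.URLError as e:
        # urllib.error.URLError maps to urllib2.URLError on Python 2
        print("Failed to fetch {}: {}".format(url, e.reason))
        return None
```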
This commit is contained in commit 6d488f1be2.

13 changed files with 70 additions and 56 deletions
```diff
@@ -11,7 +11,7 @@ import os
 import platform
 import shutil
 import subprocess
-import urllib
+import six.moves.urllib as urllib
 from subprocess import PIPE
 from zipfile import BadZipfile
 
@@ -293,7 +293,7 @@ def windows_msvc(context, force=False):
 
     def prepare_file(zip_path, full_spec):
        if not os.path.isfile(zip_path):
-            zip_url = "{}{}.zip".format(deps_url, urllib.quote(full_spec))
+            zip_url = "{}{}.zip".format(deps_url, urllib.parse.quote(full_spec))
            download_file(full_spec, zip_url, zip_path)
 
        print("Extracting {}...".format(full_spec), end='')
```
```diff
@@ -18,7 +18,7 @@ import re
 import subprocess
 import sys
 import traceback
-import urllib2
+import six.moves.urllib as urllib
 import glob
 
 from mach.decorators import (
@@ -220,7 +220,7 @@ class MachCommands(CommandBase):
 
         try:
             content_base64 = download_bytes("Chromium HSTS preload list", chromium_hsts_url)
-        except urllib2.URLError:
+        except urllib.error.URLError:
             print("Unable to download chromium HSTS preload list; are you connected to the internet?")
             sys.exit(1)
 
@@ -244,7 +244,7 @@ class MachCommands(CommandBase):
 
             with open(path.join(preload_path, preload_filename), 'w') as fd:
                 json.dump(entries, fd, indent=4)
-        except ValueError, e:
+        except ValueError as e:
             print("Unable to parse chromium HSTS preload list, has the format changed?")
             sys.exit(1)
 
@@ -258,7 +258,7 @@ class MachCommands(CommandBase):
 
         try:
             content = download_bytes("Public suffix list", list_url)
-        except urllib2.URLError:
+        except urllib.error.URLError:
             print("Unable to download the public suffix list; are you connected to the internet?")
             sys.exit(1)
 
```
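The `except ValueError, e:` to `except ValueError as e:` change above matters because the comma form is a syntax error on Python 3, while `as` works on Python 2.6+ and Python 3. A minimal illustration (hypothetical snippet, not taken from the diff):

```python
import json


def parse_entries(raw):
    try:
        return json.loads(raw)
    # "except ValueError, e:" only parses on Python 2; "as" works on 2.6+ and 3.
    except ValueError as e:
        print("Unable to parse input: {}".format(e))
        return None
```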
```diff
@@ -16,7 +16,7 @@ import platform
 import shutil
 import subprocess
 import sys
-import urllib
+import six.moves.urllib as urllib
 import zipfile
 import stat
 
@@ -498,7 +498,7 @@ class MachCommands(CommandBase):
            print("Downloading GStreamer dependencies")
            gst_url = "https://servo-deps.s3.amazonaws.com/gstreamer/%s" % gst_lib_zip
            print(gst_url)
-            urllib.urlretrieve(gst_url, gst_lib_zip)
+            urllib.request.urlretrieve(gst_url, gst_lib_zip)
            zip_ref = zipfile.ZipFile(gst_lib_zip, "r")
            zip_ref.extractall(gst_dir)
            os.remove(gst_lib_zip)
```
```diff
@@ -7,6 +7,8 @@
 # option. This file may not be copied, modified, or distributed
 # except according to those terms.
 
+from __future__ import print_function
+
 from errno import ENOENT as NO_SUCH_FILE_OR_DIRECTORY
 from glob import glob
 import shutil
@@ -25,7 +27,7 @@ import tarfile
 import zipfile
 from xml.etree.ElementTree import XML
 from servo.util import download_file
-import urllib2
+import six.moves.urllib as urllib
 from bootstrap import check_gstreamer_lib
 
 from mach.decorators import CommandArgument
@@ -105,7 +107,7 @@ def archive_deterministically(dir_to_archive, dest_archive, prepend_path=None):
     # packaging (in case of exceptional situations like running out of disk space).
     # TODO do this in a temporary folder after #11983 is fixed
     temp_file = '{}.temp~'.format(dest_archive)
-    with os.fdopen(os.open(temp_file, os.O_WRONLY | os.O_CREAT, 0644), 'w') as out_file:
+    with os.fdopen(os.open(temp_file, os.O_WRONLY | os.O_CREAT, 0o644), 'w') as out_file:
         if dest_archive.endswith('.zip'):
             with zipfile.ZipFile(temp_file, 'w', zipfile.ZIP_DEFLATED) as zip_file:
                 for entry in file_list:
@@ -350,15 +352,15 @@ class CommandBase(object):
            version_line = subprocess.check_output(["rustup" + BIN_SUFFIX, "--version"])
        except OSError as e:
            if e.errno == NO_SUCH_FILE_OR_DIRECTORY:
-                print "It looks like rustup is not installed. See instructions at " \
-                      "https://github.com/servo/servo/#setting-up-your-environment"
-                print
+                print("It looks like rustup is not installed. See instructions at "
+                      "https://github.com/servo/servo/#setting-up-your-environment")
+                print()
                return 1
            raise
        version = tuple(map(int, re.match("rustup (\d+)\.(\d+)\.(\d+)", version_line).groups()))
        if version < (1, 11, 0):
-            print "rustup is at version %s.%s.%s, Servo requires 1.11.0 or more recent." % version
-            print "Try running 'rustup self update'."
+            print("rustup is at version %s.%s.%s, Servo requires 1.11.0 or more recent." % version)
+            print("Try running 'rustup self update'.")
            return 1
        toolchain = self.toolchain()
        if platform.system() == "Windows":
@@ -504,15 +506,15 @@ class CommandBase(object):
        nightly_date = nightly_date.strip()
        # Fetch the filename to download from the build list
        repository_index = NIGHTLY_REPOSITORY_URL + "?list-type=2&prefix=nightly"
-        req = urllib2.Request(
+        req = urllib.request.Request(
            "{}/{}/{}".format(repository_index, os_prefix, nightly_date))
        try:
-            response = urllib2.urlopen(req).read()
+            response = urllib.request.urlopen(req).read()
            tree = XML(response)
            namespaces = {'ns': tree.tag[1:tree.tag.index('}')]}
            file_to_download = tree.find('ns:Contents', namespaces).find(
                'ns:Key', namespaces).text
-        except urllib2.URLError as e:
+        except urllib.error.URLError as e:
            print("Could not fetch the available nightly versions from the repository : {}".format(
                e.reason))
            sys.exit(1)
```
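Another Python 3 blocker fixed above is the bare octal literal: `0644` is a `SyntaxError` on Python 3, while the `0o644` spelling is accepted by Python 2.6+ and Python 3. A small standalone illustration (the file name is made up):

```python
import os

# 0o644 == rw-r--r--; the 0o prefix parses on both Python 2.6+ and Python 3,
# while the old 0644 spelling is rejected by Python 3.
fd = os.open("example.tmp", os.O_WRONLY | os.O_CREAT, 0o644)
with os.fdopen(fd, "w") as out_file:
    out_file.write("hello\n")
```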
```diff
@@ -14,7 +14,7 @@ from time import time
 import signal
 import sys
 import tempfile
-import urllib2
+import six.moves.urllib as urllib
 import json
 import subprocess
 
@@ -201,7 +201,7 @@ class MachCommands(CommandBase):
             category='devenv')
    def rustup(self):
        url = get_static_rust_lang_org_dist() + "/channel-rust-nightly-date.txt"
-        nightly_date = urllib2.urlopen(url, **get_urlopen_kwargs()).read()
+        nightly_date = urllib.request.urlopen(url, **get_urlopen_kwargs()).read()
        toolchain = "nightly-" + nightly_date
        filename = path.join(self.context.topdir, "rust-toolchain")
        with open(filename, "w") as f:
```
```diff
@@ -7,6 +7,8 @@
 # option. This file may not be copied, modified, or distributed
 # except according to those terms.
 
+from __future__ import print_function
+
 import os
 import sys
 
@@ -20,7 +22,7 @@ class Lint(LintRunner):
    def _get_wpt_files(self, suite):
        working_dir = os.path.join(WPT_PATH, suite, '')
        file_iter = self.get_files(working_dir, exclude_dirs=[])
-        print '\nRunning the WPT lint on %s...' % working_dir
+        print('\nRunning the WPT lint on %s...' % working_dir)
        for f in file_iter:
            if filter_file(f):
                yield f[len(working_dir):]
```
```diff
@@ -7,6 +7,8 @@
 # option. This file may not be copied, modified, or distributed
 # except according to those terms.
 
+from __future__ import print_function
+
 import fileinput
 import re
 import random
@@ -28,7 +30,7 @@ def init_variables(if_blocks):
 def deleteStatements(file_name, line_numbers):
     for line in fileinput.input(file_name, inplace=True):
         if fileinput.lineno() not in line_numbers:
-            print line.rstrip()
+            print(line.rstrip())
 
 
 class Strategy:
@@ -48,7 +50,7 @@ class Strategy:
        for line in fileinput.input(file_name, inplace=True):
            if fileinput.lineno() == mutation_line_number:
                line = re.sub(self._replace_strategy['regex'], self._replace_strategy['replaceString'], line)
            print line.rstrip()
-            print line.rstrip()
+            print(line.rstrip())
        return mutation_line_number
 
 
```
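The two files above add `from __future__ import print_function`, which makes `print` a function on Python 2 as well, so the converted `print(...)` calls behave the same on both interpreters. A minimal illustration (standalone example, not from the diff):

```python
from __future__ import print_function  # must appear before other code in the module

import sys

# With the future import, print is a function on Python 2 too, so keyword
# arguments like end='' and file= work identically on 2 and 3.
print("Extracting archive...", end='')
print(" done", file=sys.stderr)
```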
```diff
@@ -19,7 +19,7 @@ import shutil
 import subprocess
 import sys
 import tempfile
-import urllib
+import six.moves.urllib as urllib
 
 from mach.decorators import (
     CommandArgument,
@@ -594,7 +594,7 @@ class PackageCommands(CommandBase):
                "/secrets/v1/secret/project/servo/" +
                name
            )
-            return json.load(urllib.urlopen(url))["secret"]
+            return json.load(urllib.request.urlopen(url))["secret"]
 
        def get_s3_secret():
            aws_access_key = None
```
```diff
@@ -18,10 +18,11 @@ import copy
 from collections import OrderedDict
 import time
 import json
-import urllib2
+import six.moves.urllib as urllib
 import base64
 import shutil
 import subprocess
+from six import iteritems
 
 from mach.registrar import Registrar
 from mach.decorators import (
@@ -59,7 +60,7 @@ TEST_SUITES = OrderedDict([
               "include_arg": "test_name"}),
 ])
 
-TEST_SUITES_BY_PREFIX = {path: k for k, v in TEST_SUITES.iteritems() if "paths" in v for path in v["paths"]}
+TEST_SUITES_BY_PREFIX = {path: k for k, v in iteritems(TEST_SUITES) if "paths" in v for path in v["paths"]}
 
 
 def create_parser_wpt():
@@ -158,7 +159,7 @@ class MachCommands(CommandBase):
            return 1
 
        test_start = time.time()
-        for suite, tests in selected_suites.iteritems():
+        for suite, tests in iteritems(selected_suites):
            props = suites[suite]
            kwargs = props.get("kwargs", {})
            if tests:
@@ -174,7 +175,7 @@ class MachCommands(CommandBase):
    def suite_for_path(self, path_arg):
        if os.path.exists(path.abspath(path_arg)):
            abs_path = path.abspath(path_arg)
-            for prefix, suite in TEST_SUITES_BY_PREFIX.iteritems():
+            for prefix, suite in iteritems(TEST_SUITES_BY_PREFIX):
                if abs_path.startswith(prefix):
                    return suite
        return None
@@ -510,9 +511,9 @@ class MachCommands(CommandBase):
            elif tracker_api.endswith('/'):
                tracker_api = tracker_api[0:-1]
 
-            query = urllib2.quote(failure['test'], safe='')
-            request = urllib2.Request("%s/query.py?name=%s" % (tracker_api, query))
-            search = urllib2.urlopen(request)
+            query = urllib.parse.quote(failure['test'], safe='')
+            request = urllib.request.Request("%s/query.py?name=%s" % (tracker_api, query))
+            search = urllib.request.urlopen(request)
            data = json.load(search)
            if len(data) == 0:
                actual_failures += [failure]
@@ -521,11 +522,11 @@ class MachCommands(CommandBase):
        else:
            qstr = "repo:servo/servo+label:I-intermittent+type:issue+state:open+%s" % failure['test']
            # we want `/` to get quoted, but not `+` (github's API doesn't like that), so we set `safe` to `+`
-            query = urllib2.quote(qstr, safe='+')
-            request = urllib2.Request("https://api.github.com/search/issues?q=%s" % query)
+            query = urllib.parse.quote(qstr, safe='+')
+            request = urllib.request.Request("https://api.github.com/search/issues?q=%s" % query)
            if encoded_auth:
                request.add_header("Authorization", "Basic %s" % encoded_auth)
-            search = urllib2.urlopen(request)
+            search = urllib.request.urlopen(request)
            data = json.load(search)
            if data['total_count'] == 0:
                actual_failures += [failure]
```
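`dict.iteritems()` is gone in Python 3, so the hunks above switch to `six.iteritems(d)`, which calls `iteritems()` on Python 2 and `items()` on Python 3. A standalone sketch of the same idiom (the suite table here is made up):

```python
from collections import OrderedDict

from six import iteritems

TEST_SUITES = OrderedDict([
    ("tidy", {"paths": ["python/tidy"]}),
    ("wpt", {"paths": ["tests/wpt"]}),
])

# iteritems(d) dispatches to d.iteritems() on Python 2 and d.items() on Python 3,
# so the same comprehension runs unchanged on both.
SUITES_BY_PREFIX = {p: name for name, v in iteritems(TEST_SUITES)
                    if "paths" in v for p in v["paths"]}
```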
```diff
@@ -16,11 +16,11 @@ import platform
 import shutil
 from socket import error as socket_error
 import stat
-import StringIO
+from io import BytesIO
 import sys
 import time
 import zipfile
-import urllib2
+import six.moves.urllib as urllib
 
 
 try:
@@ -101,10 +101,10 @@ def download(desc, src, writer, start_byte=0):
     dumb = (os.environ.get("TERM") == "dumb") or (not sys.stdout.isatty())
 
     try:
-        req = urllib2.Request(src)
+        req = urllib.request.Request(src)
         if start_byte:
-            req = urllib2.Request(src, headers={'Range': 'bytes={}-'.format(start_byte)})
-        resp = urllib2.urlopen(req, **get_urlopen_kwargs())
+            req = urllib.request.Request(src, headers={'Range': 'bytes={}-'.format(start_byte)})
+        resp = urllib.request.urlopen(req, **get_urlopen_kwargs())
 
         fsize = None
         if resp.info().getheader('Content-Length'):
@@ -136,16 +136,16 @@ def download(desc, src, writer, start_byte=0):
 
         if not dumb:
             print()
-    except urllib2.HTTPError, e:
+    except urllib.error.HTTPError as e:
        print("Download failed ({}): {} - {}".format(e.code, e.reason, src))
        if e.code == 403:
            print("No Rust compiler binary available for this platform. "
                  "Please see https://github.com/servo/servo/#prerequisites")
        sys.exit(1)
-    except urllib2.URLError, e:
+    except urllib.error.URLError as e:
        print("Error downloading {}: {}. The failing URL was: {}".format(desc, e.reason, src))
        sys.exit(1)
-    except socket_error, e:
+    except socket_error as e:
        print("Looks like there's a connectivity issue, check your Internet connection. {}".format(e))
        sys.exit(1)
    except KeyboardInterrupt:
@@ -154,7 +154,7 @@ def download(desc, src, writer, start_byte=0):
 
 
 def download_bytes(desc, src):
-    content_writer = StringIO.StringIO()
+    content_writer = BytesIO()
     download(desc, src, content_writer)
     return content_writer.getvalue()
 
```
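The `StringIO.StringIO` to `io.BytesIO` switch above reflects that downloaded payloads are bytes: the `StringIO` module only exists on Python 2, while `io.BytesIO` is available on both interpreters and accepts binary data. A minimal sketch of the same writer pattern (the helper below is hypothetical, not Servo's `download_bytes`):

```python
from io import BytesIO  # available on Python 2 and 3


def collect_chunks(chunks):
    # Accumulate binary chunks the same way a download writer would.
    writer = BytesIO()
    for chunk in chunks:
        writer.write(chunk)
    return writer.getvalue()


payload = collect_chunks([b"hello ", b"world"])
assert payload == b"hello world"
```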