Use urllib from the six module to be compatible with Python 3

marmeladema 2019-10-14 00:36:20 +01:00
parent f063ea64a5
commit f1d42fe787
8 changed files with 28 additions and 28 deletions
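
For context, six.moves.urllib exposes the Python 3 layout of the urllib package on both interpreters: urllib.parse, urllib.request, and urllib.error resolve to urllib/urllib2 on Python 2 and to the standard-library modules of the same names on Python 3. A minimal sketch of the pattern every hunk below applies (the URL is a placeholder):

import six.moves.urllib as urllib

quoted = urllib.parse.quote("a path with spaces")  # urllib.quote on Py2
try:
    body = urllib.request.urlopen("https://example.com").read()  # urllib2.urlopen on Py2
except urllib.error.URLError as e:                 # urllib2.URLError on Py2
    print("request failed: {}".format(e.reason))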

View file

@@ -11,7 +11,7 @@ import os
 import platform
 import shutil
 import subprocess
-import urllib
+import six.moves.urllib as urllib
 from subprocess import PIPE
 from zipfile import BadZipfile
@@ -293,7 +293,7 @@ def windows_msvc(context, force=False):
     def prepare_file(zip_path, full_spec):
         if not os.path.isfile(zip_path):
-            zip_url = "{}{}.zip".format(deps_url, urllib.quote(full_spec))
+            zip_url = "{}{}.zip".format(deps_url, urllib.parse.quote(full_spec))
             download_file(full_spec, zip_url, zip_path)
             print("Extracting {}...".format(full_spec), end='')

View file

@@ -18,7 +18,7 @@ import re
 import subprocess
 import sys
 import traceback
-import urllib2
+import six.moves.urllib as urllib
 import glob
 from mach.decorators import (
@@ -220,7 +220,7 @@ class MachCommands(CommandBase):
         try:
             content_base64 = download_bytes("Chromium HSTS preload list", chromium_hsts_url)
-        except urllib2.URLError:
+        except urllib.error.URLError:
             print("Unable to download chromium HSTS preload list; are you connected to the internet?")
             sys.exit(1)
@@ -258,7 +258,7 @@ class MachCommands(CommandBase):
         try:
             content = download_bytes("Public suffix list", list_url)
-        except urllib2.URLError:
+        except urllib.error.URLError:
             print("Unable to download the public suffix list; are you connected to the internet?")
             sys.exit(1)
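
Since HTTPError subclasses URLError in both urllib2 and urllib.error, the single except clause above also covers HTTP-level failures. A sketch, with a hypothetical helper name (the URL is whatever the caller passes):

import six.moves.urllib as urllib

def fetch_or_exit(url):
    # HTTPError is a subclass of URLError, so one handler covers DNS
    # failures, refused connections, and HTTP error statuses alike.
    try:
        return urllib.request.urlopen(url).read()
    except urllib.error.URLError:
        print("Unable to download {}; are you connected to the internet?".format(url))
        raise SystemExit(1)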

View file

@@ -16,7 +16,7 @@ import platform
 import shutil
 import subprocess
 import sys
-import urllib
+import six.moves.urllib as urllib
 import zipfile
 import stat
@@ -484,7 +484,7 @@ class MachCommands(CommandBase):
             print("Downloading GStreamer dependencies")
             gst_url = "https://servo-deps.s3.amazonaws.com/gstreamer/%s" % gst_lib_zip
             print(gst_url)
-            urllib.urlretrieve(gst_url, gst_lib_zip)
+            urllib.request.urlretrieve(gst_url, gst_lib_zip)
             zip_ref = zipfile.ZipFile(gst_lib_zip, "r")
             zip_ref.extractall(gst_dir)
             os.remove(gst_lib_zip)
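
urlretrieve is the one helper here that lived in urllib (not urllib2) on Python 2; six.moves.urllib.request.urlretrieve points at it there and at urllib.request.urlretrieve on Python 3. A sketch with a placeholder object key:

import six.moves.urllib as urllib

# Download a URL straight to a local file; returns (filename, headers).
gst_url = "https://servo-deps.s3.amazonaws.com/gstreamer/example.zip"  # placeholder key
urllib.request.urlretrieve(gst_url, "example.zip")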

View file

@@ -27,7 +27,7 @@ import tarfile
 import zipfile
 from xml.etree.ElementTree import XML
 from servo.util import download_file
-import urllib2
+import six.moves.urllib as urllib
 from bootstrap import check_gstreamer_lib
 from mach.decorators import CommandArgument
@@ -506,15 +506,15 @@ class CommandBase(object):
         nightly_date = nightly_date.strip()
         # Fetch the filename to download from the build list
         repository_index = NIGHTLY_REPOSITORY_URL + "?list-type=2&prefix=nightly"
-        req = urllib2.Request(
+        req = urllib.request.Request(
             "{}/{}/{}".format(repository_index, os_prefix, nightly_date))
         try:
-            response = urllib2.urlopen(req).read()
+            response = urllib.request.urlopen(req).read()
             tree = XML(response)
             namespaces = {'ns': tree.tag[1:tree.tag.index('}')]}
             file_to_download = tree.find('ns:Contents', namespaces).find(
                 'ns:Key', namespaces).text
-        except urllib2.URLError as e:
+        except urllib.error.URLError as e:
             print("Could not fetch the available nightly versions from the repository : {}".format(
                 e.reason))
             sys.exit(1)
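
The Request/urlopen pair migrates mechanically: urllib2.Request and urllib2.urlopen on Python 2 are urllib.request.Request and urllib.request.urlopen on Python 3, and six bridges both. A sketch (the URL is a placeholder):

import six.moves.urllib as urllib
from xml.etree.ElementTree import XML

req = urllib.request.Request("https://example.com/?list-type=2&prefix=nightly")
response = urllib.request.urlopen(req).read()  # bytes on Py3, str on Py2
tree = XML(response)  # ElementTree accepts bytes, so this parses on both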

View file

@@ -14,7 +14,7 @@ from time import time
 import signal
 import sys
 import tempfile
-import urllib2
+import six.moves.urllib as urllib
 import json
 import subprocess
@@ -201,7 +201,7 @@ class MachCommands(CommandBase):
                      category='devenv')
     def rustup(self):
         url = get_static_rust_lang_org_dist() + "/channel-rust-nightly-date.txt"
-        nightly_date = urllib2.urlopen(url, **get_urlopen_kwargs()).read()
+        nightly_date = urllib.request.urlopen(url, **get_urlopen_kwargs()).read()
         toolchain = "nightly-" + nightly_date
         filename = path.join(self.context.topdir, "rust-toolchain")
         with open(filename, "w") as f:
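
One caveat worth flagging for this hunk: on Python 3, urlopen(...).read() returns bytes, so the subsequent "nightly-" + nightly_date concatenation needs a decode once the scripts actually run under Python 3. A sketch of the fully portable form (the URL stands in for the channel file fetched above):

import six.moves.urllib as urllib

url = "https://static.rust-lang.org/dist/channel-rust-nightly-date.txt"
# decode() turns the Py3 bytes into str; on Py2 it yields unicode,
# which also concatenates fine with a str prefix.
nightly_date = urllib.request.urlopen(url).read().decode("utf-8").strip()
toolchain = "nightly-" + nightly_date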

View file

@@ -19,7 +19,7 @@ import shutil
 import subprocess
 import sys
 import tempfile
-import urllib
+import six.moves.urllib as urllib
 from mach.decorators import (
     CommandArgument,
@@ -594,7 +594,7 @@ class PackageCommands(CommandBase):
                 "/secrets/v1/secret/project/servo/" +
                 name
             )
-            return json.load(urllib.urlopen(url))["secret"]
+            return json.load(urllib.request.urlopen(url))["secret"]

         def get_s3_secret():
             aws_access_key = None
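
json.load reads directly from the response object; on Python 3 that stream yields bytes, which the json module accepts from 3.6 onward. A sketch (the URL is a placeholder for the taskcluster secrets endpoint above):

import json
import six.moves.urllib as urllib

url = "https://example.com/secrets/v1/secret/project/servo/example"
secret = json.load(urllib.request.urlopen(url))["secret"]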

View file

@@ -18,7 +18,7 @@ import copy
 from collections import OrderedDict
 import time
 import json
-import urllib2
+import six.moves.urllib as urllib
 import base64
 import shutil
 import subprocess
@@ -510,9 +510,9 @@ class MachCommands(CommandBase):
            elif tracker_api.endswith('/'):
                tracker_api = tracker_api[0:-1]

-           query = urllib2.quote(failure['test'], safe='')
-           request = urllib2.Request("%s/query.py?name=%s" % (tracker_api, query))
-           search = urllib2.urlopen(request)
+           query = urllib.parse.quote(failure['test'], safe='')
+           request = urllib.request.Request("%s/query.py?name=%s" % (tracker_api, query))
+           search = urllib.request.urlopen(request)
            data = json.load(search)
            if len(data) == 0:
                actual_failures += [failure]
@@ -521,11 +521,11 @@ class MachCommands(CommandBase):
        else:
            qstr = "repo:servo/servo+label:I-intermittent+type:issue+state:open+%s" % failure['test']
            # we want `/` to get quoted, but not `+` (github's API doesn't like that), so we set `safe` to `+`
-           query = urllib2.quote(qstr, safe='+')
-           request = urllib2.Request("https://api.github.com/search/issues?q=%s" % query)
+           query = urllib.parse.quote(qstr, safe='+')
+           request = urllib.request.Request("https://api.github.com/search/issues?q=%s" % query)
            if encoded_auth:
                request.add_header("Authorization", "Basic %s" % encoded_auth)
-           search = urllib2.urlopen(request)
+           search = urllib.request.urlopen(request)
            data = json.load(search)
            if data['total_count'] == 0:
                actual_failures += [failure]
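
The safe parameter controls exactly which reserved characters survive quoting; quote defaults to safe='/'. A quick sketch of the two calls above, with a made-up query string:

import six.moves.urllib as urllib

qstr = "repo:servo/servo+state:open+/css/test.html"  # made-up query
print(urllib.parse.quote(qstr, safe=''))   # '/' and '+' both percent-encoded
print(urllib.parse.quote(qstr, safe='+'))  # '+' preserved, '/' still encoded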

View file

@@ -20,7 +20,7 @@ import StringIO
 import sys
 import time
 import zipfile
-import urllib2
+import six.moves.urllib as urllib

 try:
@@ -101,10 +101,10 @@ def download(desc, src, writer, start_byte=0):
     dumb = (os.environ.get("TERM") == "dumb") or (not sys.stdout.isatty())

     try:
-        req = urllib2.Request(src)
+        req = urllib.request.Request(src)
         if start_byte:
-            req = urllib2.Request(src, headers={'Range': 'bytes={}-'.format(start_byte)})
-        resp = urllib2.urlopen(req, **get_urlopen_kwargs())
+            req = urllib.request.Request(src, headers={'Range': 'bytes={}-'.format(start_byte)})
+        resp = urllib.request.urlopen(req, **get_urlopen_kwargs())

         fsize = None
         if resp.info().getheader('Content-Length'):
@@ -136,13 +136,13 @@ def download(desc, src, writer, start_byte=0):
         if not dumb:
             print()
-    except urllib2.HTTPError, e:
+    except urllib.error.HTTPError, e:
         print("Download failed ({}): {} - {}".format(e.code, e.reason, src))
         if e.code == 403:
             print("No Rust compiler binary available for this platform. "
                   "Please see https://github.com/servo/servo/#prerequisites")
         sys.exit(1)
-    except urllib2.URLError, e:
+    except urllib.error.URLError, e:
         print("Error downloading {}: {}. The failing URL was: {}".format(desc, e.reason, src))
         sys.exit(1)
     except socket_error, e:
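
One note on this last hunk: the except urllib.error.HTTPError, e form keeps Python 2's comma syntax, which Python 3 rejects at parse time; a version that compiles on both interpreters would use as. A sketch (the URL is a placeholder):

import six.moves.urllib as urllib

try:
    resp = urllib.request.urlopen("https://example.com/missing")
except urllib.error.HTTPError as e:  # 'as' is valid syntax on Py2 and Py3
    print("Download failed ({}): {}".format(e.code, e.reason))
except urllib.error.URLError as e:   # HTTPError subclasses URLError, so it goes first
    print("Error downloading: {}".format(e.reason))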