tests/wpt/update: use urllib module from six package

This commit is contained in:
marmeladema 2019-10-20 23:08:49 +01:00
parent 179caed30f
commit d8190a7933
3 changed files with 7 additions and 7 deletions

View file

@@ -8,7 +8,7 @@ import gzip
import json
import os
import requests
-import urlparse
+import six.moves.urllib as urllib
treeherder_base = "https://treeherder.mozilla.org/"
@@ -53,7 +53,7 @@ def download(url, prefix, dest, force_suffix=True):
def get_blobber_url(branch, job):
job_id = job["id"]
-resp = requests.get(urlparse.urljoin(treeherder_base,
+resp = requests.get(urllib.parse.urljoin(treeherder_base,
"/api/project/%s/artifact/?job_id=%i&name=Job%%20Info" % (branch,
job_id)))
job_data = resp.json()
@@ -71,13 +71,13 @@ def get_blobber_url(branch, job):
def get_structured_logs(branch, commit, dest=None):
-resp = requests.get(urlparse.urljoin(treeherder_base, "/api/project/%s/resultset/?revision=%s" % (branch, commit)))
+resp = requests.get(urllib.parse.urljoin(treeherder_base, "/api/project/%s/resultset/?revision=%s" % (branch, commit)))
revision_data = resp.json()
result_set = revision_data["results"][0]["id"]
-resp = requests.get(urlparse.urljoin(treeherder_base, "/api/project/%s/jobs/?result_set_id=%s&count=2000&exclusion_profile=false" % (branch, result_set)))
+resp = requests.get(urllib.parse.urljoin(treeherder_base, "/api/project/%s/jobs/?result_set_id=%s&count=2000&exclusion_profile=false" % (branch, result_set)))
job_data = resp.json()

View file

@@ -5,7 +5,7 @@
from __future__ import print_function
import json
-from urlparse import urljoin
+from six.moves.urllib.parse import urljoin
requests = None
class GitHubError(Exception):

View file

@@ -4,7 +4,7 @@ import os
import re
import subprocess
import sys
-import urlparse
+import six.moves.urllib as urllib
from wptrunner.update.sync import UpdateCheckout
from wptrunner.update.tree import get_unique_name
@@ -264,7 +264,7 @@ class MergeUpstream(Step):
if "merge_index" not in state:
state.merge_index = 0
-org, name = urlparse.urlsplit(state.sync["remote_url"]).path[1:].split("/")
+org, name = urllib.parse.urlsplit(state.sync["remote_url"]).path[1:].split("/")
if name.endswith(".git"):
name = name[:-4]
state.gh_repo = gh.repo(org, name)