Auto merge of #24512 - marmeladema:issue-23607/compat, r=SimonSapin

More changes for Python3 compatibility

Following #24435 for #23607 here are even more changes to be compatible with Python3.
I managed to get `./mach build` to work properly with Python3, but `test-tidy` does not work yet because of a lot of problems in `web-platform-tests`, which I will have to deal with at some point.

Because flake8 appears to be incompatible with the distro package in some way, I had to change the way we call flake8 (which was deprecated anyway). With this change, we should be able to update flake8 to a recent version, but subprocess can be surprising on the Windows platform, so I'd like someone to try out those changes.

---
<!-- Thank you for contributing to Servo! Please replace each `[ ]` by `[X]` when the step is complete, and replace `___` with appropriate data: -->
- [x] `./mach build -d` does not report any errors
- [x] `./mach test-tidy` does not report any errors

<!-- Also, please make sure that "Allow edits from maintainers" checkbox is checked, so that we can help you if you get stuck somewhere along the way.-->

<!-- Pull requests that do not address these steps are welcome, but they will require additional verification as part of the review process. -->
This commit is contained in:
bors-servo 2019-10-22 13:48:50 -04:00 committed by GitHub
commit 3e7a0bfc42
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
11 changed files with 79 additions and 72 deletions

View file

@ -166,7 +166,7 @@ def _activate_virtualenv(topdir, is_firefox):
# We want to upgrade pip when virtualenv created for the first time # We want to upgrade pip when virtualenv created for the first time
need_pip_upgrade = True need_pip_upgrade = True
execfile(activate_path, dict(__file__=activate_path)) exec(compile(open(activate_path).read(), activate_path, 'exec'), dict(__file__=activate_path))
python = _get_exec_path(PYTHON_NAMES, is_valid_path=check_exec_path) python = _get_exec_path(PYTHON_NAMES, is_valid_path=check_exec_path)
if not python: if not python:

View file

@ -2,6 +2,7 @@
# since `--system-site-packages` is enabled # since `--system-site-packages` is enabled
blessings == 1.6 blessings == 1.6
distro == 1.4
mach == 0.6.0 mach == 0.6.0
mozdebug == 0.1 mozdebug == 0.1
mozinfo == 0.8 mozinfo == 0.8
@ -33,6 +34,6 @@ boto3 == 1.4.4
certifi certifi
# For Python3 compatibility # For Python3 compatibility
six six == 1.12
-e python/tidy -e python/tidy

View file

@ -8,10 +8,12 @@ from distutils.spawn import find_executable
from distutils.version import LooseVersion from distutils.version import LooseVersion
import json import json
import os import os
import platform import distro
import shutil import shutil
import subprocess import subprocess
import six
import six.moves.urllib as urllib import six.moves.urllib as urllib
from six.moves import input
from subprocess import PIPE from subprocess import PIPE
from zipfile import BadZipfile from zipfile import BadZipfile
@ -253,7 +255,7 @@ def salt(context, force=False):
print('Something went wrong while bootstrapping') print('Something went wrong while bootstrapping')
return retcode return retcode
proceed = raw_input( proceed = input(
'Proposed changes are above, proceed with bootstrap? [y/N]: ' 'Proposed changes are above, proceed with bootstrap? [y/N]: '
) )
if proceed.lower() not in ['y', 'yes']: if proceed.lower() not in ['y', 'yes']:
@ -342,9 +344,11 @@ LINUX_SPECIFIC_BOOTSTRAPPERS = {
def get_linux_distribution(): def get_linux_distribution():
distro, version, _ = platform.linux_distribution() distrib, version, _ = distro.linux_distribution()
distrib = six.ensure_str(distrib)
version = six.ensure_str(version)
if distro == 'LinuxMint': if distrib == 'LinuxMint':
if '.' in version: if '.' in version:
major, _ = version.split('.', 1) major, _ = version.split('.', 1)
else: else:
@ -357,10 +361,10 @@ def get_linux_distribution():
elif major == '17': elif major == '17':
base_version = '14.04' base_version = '14.04'
else: else:
raise Exception('unsupported version of %s: %s' % (distro, version)) raise Exception('unsupported version of %s: %s' % (distrib, version))
distro, version = 'Ubuntu', base_version distrib, version = 'Ubuntu', base_version
elif distro.lower() == 'elementary': elif distrib.lower() == 'elementary':
if version == '5.0': if version == '5.0':
base_version = '18.04' base_version = '18.04'
elif version[0:3] == '0.4': elif version[0:3] == '0.4':
@ -372,21 +376,21 @@ def get_linux_distribution():
elif version == '0.1': elif version == '0.1':
base_version = '10.10' base_version = '10.10'
else: else:
raise Exception('unsupported version of %s: %s' % (distro, version)) raise Exception('unsupported version of %s: %s' % (distrib, version))
distro, version = 'Ubuntu', base_version distrib, version = 'Ubuntu', base_version
elif distro.lower() == 'ubuntu': elif distrib.lower() == 'ubuntu':
if version > '19.04': if version > '19.04':
raise Exception('unsupported version of %s: %s' % (distro, version)) raise Exception('unsupported version of %s: %s' % (distrib, version))
# Fixme: we should allow checked/supported versions only # Fixme: we should allow checked/supported versions only
elif distro.lower() not in [ elif distrib.lower() not in [
'centos', 'centos',
'centos linux', 'centos linux',
'debian', 'debian',
'fedora', 'fedora',
]: ]:
raise Exception('mach bootstrap does not support %s, please file a bug' % distro) raise Exception('mach bootstrap does not support %s, please file a bug' % distrib)
return distro, version return distrib, version
def bootstrap(context, force=False, specific=None): def bootstrap(context, force=False, specific=None):
@ -396,9 +400,9 @@ def bootstrap(context, force=False, specific=None):
if "windows-msvc" in host_triple(): if "windows-msvc" in host_triple():
bootstrapper = windows_msvc bootstrapper = windows_msvc
elif "linux-gnu" in host_triple(): elif "linux-gnu" in host_triple():
distro, version = get_linux_distribution() distrib, version = get_linux_distribution()
context.distro = distro context.distro = distrib
context.distro_version = version context.distro_version = version
bootstrapper = LINUX_SPECIFIC_BOOTSTRAPPERS.get(specific, linux) bootstrapper = LINUX_SPECIFIC_BOOTSTRAPPERS.get(specific, linux)

View file

@ -18,17 +18,19 @@ import locale
import os import os
from os import path from os import path
import platform import platform
import distro
import re import re
import contextlib import contextlib
import subprocess import subprocess
from subprocess import PIPE from subprocess import PIPE
import six
import sys import sys
import tarfile import tarfile
import zipfile import zipfile
from xml.etree.ElementTree import XML from xml.etree.ElementTree import XML
from servo.util import download_file from servo.util import download_file
import six.moves.urllib as urllib import six.moves.urllib as urllib
from bootstrap import check_gstreamer_lib from .bootstrap import check_gstreamer_lib
from mach.decorators import CommandArgument from mach.decorators import CommandArgument
from mach.registrar import Registrar from mach.registrar import Registrar
@ -133,10 +135,10 @@ def normalize_env(env):
# want UTF-8, they shouldn't pass in a unicode instance. # want UTF-8, they shouldn't pass in a unicode instance.
normalized_env = {} normalized_env = {}
for k, v in env.items(): for k, v in env.items():
if isinstance(k, unicode): if isinstance(k, six.text_type):
k = k.encode('utf-8', 'strict') k = k.encode('utf-8', 'strict')
if isinstance(v, unicode): if isinstance(v, six.text_type):
v = v.encode('utf-8', 'strict') v = v.encode('utf-8', 'strict')
normalized_env[k] = v normalized_env[k] = v
@ -357,7 +359,7 @@ class CommandBase(object):
print() print()
return 1 return 1
raise raise
version = tuple(map(int, re.match("rustup (\d+)\.(\d+)\.(\d+)", version_line).groups())) version = tuple(map(int, re.match(b"rustup (\d+)\.(\d+)\.(\d+)", version_line).groups()))
if version < (1, 11, 0): if version < (1, 11, 0):
print("rustup is at version %s.%s.%s, Servo requires 1.11.0 or more recent." % version) print("rustup is at version %s.%s.%s, Servo requires 1.11.0 or more recent." % version)
print("Try running 'rustup self update'.") print("Try running 'rustup self update'.")
@ -679,8 +681,10 @@ install them, let us know by filing a bug!")
append_to_path_env(path.join(libpath, "pkgconfig"), env, "PKG_CONFIG_PATH") append_to_path_env(path.join(libpath, "pkgconfig"), env, "PKG_CONFIG_PATH")
if sys.platform == "linux2": if sys.platform == "linux2":
distro, version, _ = platform.linux_distribution() distrib, version, _ = distro.linux_distribution()
if distro == "Ubuntu" and (version == "16.04" or version == "14.04"): distrib = six.ensure_str(distrib)
version = six.ensure_str(version)
if distrib == "Ubuntu" and (version == "16.04" or version == "14.04"):
env["HARFBUZZ_SYS_NO_PKG_CONFIG"] = "true" env["HARFBUZZ_SYS_NO_PKG_CONFIG"] = "true"
if extra_path: if extra_path:
@ -763,7 +767,7 @@ install them, let us know by filing a bug!")
]).strip()) ]).strip())
git_info.append('') git_info.append('')
git_info.append(git_sha) git_info.append(six.ensure_str(git_sha))
if git_is_dirty: if git_is_dirty:
git_info.append('dirty') git_info.append('dirty')

View file

@ -350,7 +350,7 @@ class MachCommands(CommandBase):
run_file = path.abspath(path.join(test_file_dir, "runtests.py")) run_file = path.abspath(path.join(test_file_dir, "runtests.py"))
run_globals = {"__file__": run_file} run_globals = {"__file__": run_file}
execfile(run_file, run_globals) exec(compile(open(run_file).read(), run_file, 'exec'), run_globals)
verbose = not quiet verbose = not quiet
return run_globals["run_tests"](tests, verbose) return run_globals["run_tests"](tests, verbose)
@ -442,7 +442,7 @@ class MachCommands(CommandBase):
kwargs["binary_args"] = binary_args kwargs["binary_args"] = binary_args
run_globals = {"__file__": run_file} run_globals = {"__file__": run_file}
execfile(run_file, run_globals) exec(compile(open(run_file).read(), run_file, 'exec'), run_globals)
return run_globals["run_tests"](**kwargs) return run_globals["run_tests"](**kwargs)
@Command('update-manifest', @Command('update-manifest',
@ -473,7 +473,7 @@ class MachCommands(CommandBase):
return 1 return 1
run_globals = {"__file__": run_file} run_globals = {"__file__": run_file}
execfile(run_file, run_globals) exec(compile(open(run_file).read(), run_file, 'exec'), run_globals)
return run_globals["update_tests"](**kwargs) return run_globals["update_tests"](**kwargs)
@Command('filter-intermittents', @Command('filter-intermittents',
@ -913,7 +913,7 @@ testing/web-platform/mozilla/tests for Servo-only tests""" % reference_path)
"components", "net", "tests", "components", "net", "tests",
"cookie_http_state_utils.py")) "cookie_http_state_utils.py"))
run_globals = {"__file__": run_file} run_globals = {"__file__": run_file}
execfile(run_file, run_globals) exec(compile(open(run_file).read(), run_file, 'exec'), run_globals)
return run_globals["update_test_file"](cache_dir) return run_globals["update_test_file"](cache_dir)
@Command('update-webgl', @Command('update-webgl',
@ -934,7 +934,7 @@ testing/web-platform/mozilla/tests for Servo-only tests""" % reference_path)
shutil.rmtree(dest_folder) shutil.rmtree(dest_folder)
run_globals = {"__file__": run_file} run_globals = {"__file__": run_file}
execfile(run_file, run_globals) exec(compile(open(run_file).read(), run_file, 'exec'), run_globals)
return run_globals["update_conformance"](version, dest_folder, None, patches_dir) return run_globals["update_conformance"](version, dest_folder, None, patches_dir)
@Command('smoketest', @Command('smoketest',

View file

@ -9,14 +9,12 @@
from __future__ import print_function from __future__ import print_function
import contextlib
import fnmatch import fnmatch
import imp import imp
import itertools import itertools
import json import json
import os import os
import re import re
from io import StringIO
import subprocess import subprocess
import sys import sys
@ -24,7 +22,7 @@ import colorama
import toml import toml
import voluptuous import voluptuous
import yaml import yaml
from licenseck import OLD_MPL, MPL, APACHE, COPYRIGHT, licenses_toml, licenses_dep_toml from .licenseck import OLD_MPL, MPL, APACHE, COPYRIGHT, licenses_toml, licenses_dep_toml
from six import iteritems from six import iteritems
topdir = os.path.abspath(os.path.dirname(sys.argv[0])) topdir = os.path.abspath(os.path.dirname(sys.argv[0]))
wpt = os.path.join(topdir, "tests", "wpt") wpt = os.path.join(topdir, "tests", "wpt")
@ -327,28 +325,21 @@ def check_by_line(file_name, lines):
def check_flake8(file_name, contents): def check_flake8(file_name, contents):
from flake8.main import check_code
if not file_name.endswith(".py"): if not file_name.endswith(".py"):
raise StopIteration raise StopIteration
@contextlib.contextmanager
def stdout_redirect(where):
sys.stdout = where
try:
yield where
finally:
sys.stdout = sys.__stdout__
ignore = { ignore = {
"W291", # trailing whitespace; the standard tidy process will enforce no trailing whitespace "W291", # trailing whitespace; the standard tidy process will enforce no trailing whitespace
"E501", # 80 character line length; the standard tidy process will enforce line length "E501", # 80 character line length; the standard tidy process will enforce line length
} }
output = StringIO() output = ""
with stdout_redirect(output): try:
check_code(contents, ignore=ignore) args = ["flake8", "--ignore=" + ",".join(ignore), file_name]
for error in output.getvalue().splitlines(): subprocess.check_output(args)
except subprocess.CalledProcessError as e:
output = e.output
for error in output.splitlines():
_, line_num, _, message = error.split(":", 3) _, line_num, _, message = error.split(":", 3)
yield line_num, message.strip() yield line_num, message.strip()

View file

@ -8,6 +8,7 @@ import os
import sys import sys
import subprocess import subprocess
import platform import platform
from six import itervalues, iteritems
DEFAULT_MOVE_UP_CODE = u"\x1b[A" DEFAULT_MOVE_UP_CODE = u"\x1b[A"
DEFAULT_CLEAR_EOL_CODE = u"\x1b[K" DEFAULT_CLEAR_EOL_CODE = u"\x1b[K"
@ -105,7 +106,7 @@ class GroupingFormatter(base.BaseFormatter):
return new_display + "No tests running.\n" return new_display + "No tests running.\n"
def suite_start(self, data): def suite_start(self, data):
self.number_of_tests = sum(len(tests) for tests in data["tests"].itervalues()) self.number_of_tests = sum(len(tests) for tests in itervalues(data["tests"]))
self.start_time = data["time"] self.start_time = data["time"]
if self.number_of_tests == 0: if self.number_of_tests == 0:
@ -182,7 +183,7 @@ class GroupingFormatter(base.BaseFormatter):
else: else:
failures_by_stack[failure['stack']].append(failure) failures_by_stack[failure['stack']].append(failure)
for (stack, failures) in failures_by_stack.iteritems(): for (stack, failures) in iteritems(failures_by_stack):
output += make_subtests_failure(test_name, failures, stack) output += make_subtests_failure(test_name, failures, stack)
return output return output

View file

@ -8,6 +8,7 @@ import os
import sys import sys
import tempfile import tempfile
from collections import defaultdict from collections import defaultdict
from six import iterkeys, iteritems
from mozlog.structured import commandline from mozlog.structured import commandline
from wptrunner.wptcommandline import get_test_paths, set_from_config from wptrunner.wptcommandline import get_test_paths, set_from_config
@ -58,7 +59,7 @@ def update(logger, wpt_dir, check_clean=True, rebuild=False):
def _update(logger, test_paths, rebuild): def _update(logger, test_paths, rebuild):
for url_base, paths in test_paths.iteritems(): for url_base, paths in iteritems(test_paths):
manifest_path = os.path.join(paths["metadata_path"], "MANIFEST.json") manifest_path = os.path.join(paths["metadata_path"], "MANIFEST.json")
cache_subdir = os.path.relpath(os.path.dirname(manifest_path), cache_subdir = os.path.relpath(os.path.dirname(manifest_path),
os.path.dirname(__file__)) os.path.dirname(__file__))
@ -75,7 +76,7 @@ def _update(logger, test_paths, rebuild):
def _check_clean(logger, test_paths): def _check_clean(logger, test_paths):
manifests_by_path = {} manifests_by_path = {}
rv = 0 rv = 0
for url_base, paths in test_paths.iteritems(): for url_base, paths in iteritems(test_paths):
tests_path = paths["tests_path"] tests_path = paths["tests_path"]
manifest_path = os.path.join(paths["metadata_path"], "MANIFEST.json") manifest_path = os.path.join(paths["metadata_path"], "MANIFEST.json")
@ -102,7 +103,7 @@ def _check_clean(logger, test_paths):
manifests_by_path[manifest_path] = (old_manifest, new_manifest) manifests_by_path[manifest_path] = (old_manifest, new_manifest)
for manifest_path, (old_manifest, new_manifest) in manifests_by_path.iteritems(): for manifest_path, (old_manifest, new_manifest) in iteritems(manifests_by_path):
if not diff_manifests(logger, manifest_path, old_manifest, new_manifest): if not diff_manifests(logger, manifest_path, old_manifest, new_manifest):
logger.error("Manifest %s is outdated, use |./mach update-manifest| to fix." % manifest_path) logger.error("Manifest %s is outdated, use |./mach update-manifest| to fix." % manifest_path)
rv = 1 rv = 1
@ -136,8 +137,8 @@ def diff_manifests(logger, manifest_path, old_manifest, new_manifest):
test_id = tuple(test_id) test_id = tuple(test_id)
items[path].add((test_type, test_id)) items[path].add((test_type, test_id))
old_paths = set(old_items.iterkeys()) old_paths = set(iterkeys(old_items))
new_paths = set(new_items.iterkeys()) new_paths = set(iterkeys(new_items))
added_paths = new_paths - old_paths added_paths = new_paths - old_paths
deleted_paths = old_paths - new_paths deleted_paths = old_paths - new_paths
@ -166,9 +167,9 @@ def diff_manifests(logger, manifest_path, old_manifest, new_manifest):
# Manifest currently has some list vs tuple inconsistencies that break # Manifest currently has some list vs tuple inconsistencies that break
# a simple equality comparison. # a simple equality comparison.
new_paths = {(key, value[0], value[1]) new_paths = {(key, value[0], value[1])
for (key, value) in new_manifest.to_json()["paths"].iteritems()} for (key, value) in iteritems(new_manifest.to_json()["paths"])}
old_paths = {(key, value[0], value[1]) old_paths = {(key, value[0], value[1])
for (key, value) in old_manifest.to_json()["paths"].iteritems()} for (key, value) in iteritems(old_manifest.to_json()["paths"])}
if old_paths != new_paths: if old_paths != new_paths:
logger.warning("Manifest %s contains correct tests but file hashes changed." % manifest_path) # noqa logger.warning("Manifest %s contains correct tests but file hashes changed." % manifest_path) # noqa
clean = False clean = False

View file

@ -8,7 +8,7 @@ import gzip
import json import json
import os import os
import requests import requests
import urlparse import six.moves.urllib as urllib
treeherder_base = "https://treeherder.mozilla.org/" treeherder_base = "https://treeherder.mozilla.org/"
@ -53,7 +53,7 @@ def download(url, prefix, dest, force_suffix=True):
def get_blobber_url(branch, job): def get_blobber_url(branch, job):
job_id = job["id"] job_id = job["id"]
resp = requests.get(urlparse.urljoin(treeherder_base, resp = requests.get(urllib.parse.urljoin(treeherder_base,
"/api/project/%s/artifact/?job_id=%i&name=Job%%20Info" % (branch, "/api/project/%s/artifact/?job_id=%i&name=Job%%20Info" % (branch,
job_id))) job_id)))
job_data = resp.json() job_data = resp.json()
@ -71,13 +71,13 @@ def get_blobber_url(branch, job):
def get_structured_logs(branch, commit, dest=None): def get_structured_logs(branch, commit, dest=None):
resp = requests.get(urlparse.urljoin(treeherder_base, "/api/project/%s/resultset/?revision=%s" % (branch, commit))) resp = requests.get(urllib.parse.urljoin(treeherder_base, "/api/project/%s/resultset/?revision=%s" % (branch, commit)))
revision_data = resp.json() revision_data = resp.json()
result_set = revision_data["results"][0]["id"] result_set = revision_data["results"][0]["id"]
resp = requests.get(urlparse.urljoin(treeherder_base, "/api/project/%s/jobs/?result_set_id=%s&count=2000&exclusion_profile=false" % (branch, result_set))) resp = requests.get(urllib.parse.urljoin(treeherder_base, "/api/project/%s/jobs/?result_set_id=%s&count=2000&exclusion_profile=false" % (branch, result_set)))
job_data = resp.json() job_data = resp.json()

View file

@ -2,8 +2,10 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this # License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/. # file, You can obtain one at https://mozilla.org/MPL/2.0/.
from __future__ import print_function
import json import json
from urlparse import urljoin from six.moves.urllib.parse import urljoin
requests = None requests = None
class GitHubError(Exception): class GitHubError(Exception):
@ -46,7 +48,7 @@ class GitHub(object):
if 200 <= resp.status_code < 300: if 200 <= resp.status_code < 300:
return resp.json() return resp.json()
else: else:
print resp.status_code, resp.json() print(resp.status_code, resp.json())
raise GitHubError(resp.status_code, resp.json()) raise GitHubError(resp.status_code, resp.json())
def repo(self, owner, name): def repo(self, owner, name):

View file

@ -1,15 +1,18 @@
from __future__ import print_function
import os import os
import re import re
import subprocess import subprocess
import sys import sys
import urlparse import six.moves.urllib as urllib
from six.moves import input
from six import iteritems
from wptrunner.update.sync import UpdateCheckout from wptrunner.update.sync import UpdateCheckout
from wptrunner.update.tree import get_unique_name from wptrunner.update.tree import get_unique_name
from wptrunner.update.base import Step, StepRunner, exit_clean, exit_unclean from wptrunner.update.base import Step, StepRunner, exit_clean, exit_unclean
from .tree import Commit, GitTree, Patch from .tree import Commit, GitTree, Patch
import github
from .github import GitHub from .github import GitHub
@ -155,9 +158,9 @@ class SelectCommits(Step):
while True: while True:
commits = state.source_commits[:] commits = state.source_commits[:]
for i, commit in enumerate(commits): for i, commit in enumerate(commits):
print "%i:\t%s" % (i, commit.message.summary) print("%i:\t%s" % (i, commit.message.summary))
remove = raw_input("Provide a space-separated list of any commits numbers to remove from the list to upstream:\n").strip() remove = input("Provide a space-separated list of any commits numbers to remove from the list to upstream:\n").strip()
remove_idx = set() remove_idx = set()
invalid = False invalid = False
for item in remove.split(" "): for item in remove.split(" "):
@ -178,10 +181,10 @@ class SelectCommits(Step):
keep_commits = [(i,cmt) for i,cmt in enumerate(commits) if i not in remove_idx] keep_commits = [(i,cmt) for i,cmt in enumerate(commits) if i not in remove_idx]
#TODO: consider printed removed commits #TODO: consider printed removed commits
print "Selected the following commits to keep:" print("Selected the following commits to keep:")
for i, commit in keep_commits: for i, commit in keep_commits:
print "%i:\t%s" % (i, commit.message.summary) print("%i:\t%s" % (i, commit.message.summary))
confirm = raw_input("Keep the above commits? y/n\n").strip().lower() confirm = input("Keep the above commits? y/n\n").strip().lower()
if confirm == "y": if confirm == "y":
state.source_commits = [item[1] for item in keep_commits] state.source_commits = [item[1] for item in keep_commits]
@ -210,7 +213,7 @@ class MovePatches(Step):
try: try:
state.sync_tree.import_patch(stripped_patch, 1 + strip_count) state.sync_tree.import_patch(stripped_patch, 1 + strip_count)
except: except:
print patch.diff print(patch.diff)
raise raise
state.commits_loaded = i state.commits_loaded = i
@ -262,7 +265,7 @@ class MergeUpstream(Step):
if "merge_index" not in state: if "merge_index" not in state:
state.merge_index = 0 state.merge_index = 0
org, name = urlparse.urlsplit(state.sync["remote_url"]).path[1:].split("/") org, name = urllib.parse.urlsplit(state.sync["remote_url"]).path[1:].split("/")
if name.endswith(".git"): if name.endswith(".git"):
name = name[:-4] name = name[:-4]
state.gh_repo = gh.repo(org, name) state.gh_repo = gh.repo(org, name)
@ -285,7 +288,7 @@ class UpdateLastSyncData(Step):
data = {"local": state.local_tree.rev, data = {"local": state.local_tree.rev,
"upstream": state.sync_tree.rev} "upstream": state.sync_tree.rev}
with open(state.sync_data_path, "w") as f: with open(state.sync_data_path, "w") as f:
for key, value in data.iteritems(): for key, value in iteritems(data):
f.write("%s: %s\n" % (key, value)) f.write("%s: %s\n" % (key, value))
# This gets added to the patch later on # This gets added to the patch later on