Put a copy of the wpt upstreaming scripts in-tree.
This is copied from gecko, which is slightly unfortunate. However, it allows committing changes to wpt directly in-tree and having them upstreamed in the next update.
parent 168b81773e
commit 0a7429f147
7 changed files with 920 additions and 0 deletions
tests/wpt/update/__init__.py (new file, 38 lines)
@@ -0,0 +1,38 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

#!/usr/bin/env python
import os
import subprocess
import sys

from mozlog.structured import structuredlog

here = os.path.split(__file__)[0]

sys.path.insert(0, os.path.abspath(os.path.join(here, os.pardir, "harness")))
sys.path.insert(0, os.path.abspath(os.path.join(here, os.pardir, "tests", "tools", "scripts")))

from wptrunner.update import setup_logging, WPTUpdate
from wptrunner.update.base import exit_unclean

from . import updatecommandline
from .update import UpdateRunner

def run_update(logger, **kwargs):
    updater = WPTUpdate(logger, runner_cls=UpdateRunner, **kwargs)
    return updater.run()


if __name__ == "__main__":
    args = updatecommandline.parse_args()
    logger = setup_logging(args, {"mach": sys.stdout})
    assert structuredlog.get_default_logger() is not None

    rv = run_update(logger, **args)
    if rv is exit_unclean:
        sys.exit(1)
    else:
        sys.exit(0)
tests/wpt/update/fetchlogs.py (new file, 99 lines)
@@ -0,0 +1,99 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

import argparse
import cStringIO
import gzip
import json
import os
import requests
import urlparse

treeherder_base = "https://treeherder.mozilla.org/"

"""Simple script for downloading structured logs from treeherder.

For the moment this is specialised to work with web-platform-tests
logs; in due course it should move somewhere generic and get hooked
up to mach or similar"""

# Interpretation of the "job" list from
# https://github.com/mozilla/treeherder-service/blob/master/treeherder/webapp/api/utils.py#L18

def create_parser():
    parser = argparse.ArgumentParser()
    parser.add_argument("branch", action="store",
                        help="Branch on which jobs ran")
    parser.add_argument("commit",
                        action="store",
                        help="Commit hash for push")

    return parser

def download(url, prefix, dest, force_suffix=True):
    if dest is None:
        dest = "."

    if prefix and not force_suffix:
        name = os.path.join(dest, prefix + ".log")
    else:
        name = None
    counter = 0

    while not name or os.path.exists(name):
        counter += 1
        sep = "" if not prefix else "-"
        name = os.path.join(dest, prefix + sep + str(counter) + ".log")

    with open(name, "wb") as f:
        resp = requests.get(url, stream=True)
        for chunk in resp.iter_content(1024):
            f.write(chunk)

def get_blobber_url(branch, job):
    job_id = job["id"]
    resp = requests.get(urlparse.urljoin(treeherder_base,
                                         "/api/project/%s/artifact/?job_id=%i&name=Job%%20Info" % (branch,
                                                                                                   job_id)))
    job_data = resp.json()

    if job_data:
        assert len(job_data) == 1
        job_data = job_data[0]
        try:
            details = job_data["blob"]["job_details"]
            for item in details:
                if item["value"] == "wpt_raw.log":
                    return item["url"]
        except:
            return None


def get_structured_logs(branch, commit, dest=None):
    resp = requests.get(urlparse.urljoin(treeherder_base, "/api/project/%s/resultset/?revision=%s" % (branch, commit)))

    revision_data = resp.json()

    result_set = revision_data["results"][0]["id"]

    resp = requests.get(urlparse.urljoin(treeherder_base, "/api/project/%s/jobs/?result_set_id=%s&count=2000&exclusion_profile=false" % (branch, result_set)))

    job_data = resp.json()

    for result in job_data["results"]:
        job_type_name = result["job_type_name"]
        if job_type_name.startswith("W3C Web Platform"):
            url = get_blobber_url(branch, result)
            if url:
                prefix = result["platform"]  # platform
                download(url, prefix, None)

def main():
    parser = create_parser()
    args = parser.parse_args()

    get_structured_logs(args.branch, args.commit)

if __name__ == "__main__":
    main()
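The script above is self-contained; a hedged example of driving it directly from Python rather than the command line (the branch name and revision below are placeholders, and the requests module must be installed):

# Equivalent to running: python fetchlogs.py mozilla-inbound <40-char revision>
# Downloads every wpt_raw.log found for that push into the current directory,
# one file per platform.
from fetchlogs import get_structured_logs

get_structured_logs("mozilla-inbound", "0123456789abcdef0123456789abcdef01234567")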
tests/wpt/update/github.py (new file, 168 lines)
@@ -0,0 +1,168 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

import json
from urlparse import urljoin
requests = None

class GitHubError(Exception):
    def __init__(self, status, data):
        self.status = status
        self.data = data


class GitHub(object):
    url_base = "https://api.github.com"

    def __init__(self, token):
        # Defer the import of requests since it isn't installed by default
        global requests
        if requests is None:
            import requests

        self.headers = {"Accept": "application/vnd.github.v3+json"}
        self.auth = (token, "x-oauth-basic")

    def get(self, path):
        return self._request("GET", path)

    def post(self, path, data):
        return self._request("POST", path, data=data)

    def put(self, path, data):
        return self._request("PUT", path, data=data)

    def _request(self, method, path, data=None):
        url = urljoin(self.url_base, path)

        kwargs = {"headers": self.headers,
                  "auth": self.auth}
        if data is not None:
            kwargs["data"] = json.dumps(data)

        resp = requests.request(method, url, **kwargs)

        if 200 <= resp.status_code < 300:
            return resp.json()
        else:
            raise GitHubError(resp.status_code, resp.json())

    def repo(self, owner, name):
        """GitHubRepo for a particular repository.

        :param owner: String repository owner
        :param name: String repository name
        """
        return GitHubRepo.from_name(self, owner, name)


class GitHubRepo(object):
    def __init__(self, github, data):
        """Object representing a GitHub repository"""
        self.gh = github
        self.owner = data["owner"]
        self.name = data["name"]
        self.url = data["ssh_url"]
        self._data = data

    @classmethod
    def from_name(cls, github, owner, name):
        data = github.get("/repos/%s/%s" % (owner, name))
        return cls(github, data)

    @property
    def url_base(self):
        return "/repos/%s/" % (self._data["full_name"])

    def create_pr(self, title, head, base, body):
        """Create a Pull Request in the repository

        :param title: Pull Request title
        :param head: ref to the HEAD of the PR branch.
        :param base: ref to the base branch for the Pull Request
        :param body: Description of the PR
        """
        return PullRequest.create(self, title, head, base, body)

    def load_pr(self, number):
        """Load an existing Pull Request by number.

        :param number: Pull Request number
        """
        return PullRequest.from_number(self, number)

    def path(self, suffix):
        return urljoin(self.url_base, suffix)


class PullRequest(object):
    def __init__(self, repo, data):
        """Object representing a Pull Request"""

        self.repo = repo
        self._data = data
        self.number = data["number"]
        self.title = data["title"]
        self.base = data["base"]["ref"]
        self.head = data["head"]["ref"]
        self._issue = None

    @classmethod
    def from_number(cls, repo, number):
        data = repo.gh.get(repo.path("pulls/%i" % number))
        return cls(repo, data)

    @classmethod
    def create(cls, repo, title, head, base, body):
        data = repo.gh.post(repo.path("pulls"),
                            {"title": title,
                             "head": head,
                             "base": base,
                             "body": body})
        return cls(repo, data)

    def path(self, suffix):
        return urljoin(self.repo.path("pulls/%i/" % self.number), suffix)

    @property
    def issue(self):
        """Issue related to the Pull Request"""
        if self._issue is None:
            self._issue = Issue.from_number(self.repo, self.number)
        return self._issue

    def merge(self, commit_message=None):
        """Merge the Pull Request into its base branch.

        :param commit_message: Message to use for the merge commit. If None a default
                               message is used instead
        """
        if commit_message is None:
            commit_message = "Merge pull request #%i from %s" % (self.number, self.head)
        self.repo.gh.put(self.path("merge"),
                         {"commit_message": commit_message})


class Issue(object):
    def __init__(self, repo, data):
        """Object representing a GitHub Issue"""
        self.repo = repo
        self._data = data
        self.number = data["number"]

    @classmethod
    def from_number(cls, repo, number):
        data = repo.gh.get(repo.path("issues/%i" % number))
        return cls(repo, data)

    def path(self, suffix):
        return urljoin(self.repo.path("issues/%i/" % self.number), suffix)

    def add_comment(self, message):
        """Add a comment to the issue

        :param message: The text of the comment
        """
        self.repo.gh.post(self.path("comments"),
                          {"body": message})
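For context, a hedged sketch of how this wrapper is meant to be used (the token and repository names are placeholders; the call pattern mirrors what upstream.py does later in this commit):

# Minimal sketch, assuming a valid OAuth token and network access.
gh = GitHub("<oauth-token>")
repo = gh.repo("w3c", "web-platform-tests")      # GET /repos/w3c/web-platform-tests

pr = repo.create_pr("Example sync commit",       # POST .../pulls
                    head="sync_deadbeef",
                    base="master",
                    body="")
pr.issue.add_comment("Code reviewed upstream.")  # POST .../issues/<n>/comments
pr.merge()                                       # PUT  .../pulls/<n>/merge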
tests/wpt/update/tree.py (new file, 184 lines)
@@ -0,0 +1,184 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

import re
import tempfile

from wptrunner import update as wptupdate

from wptrunner.update.tree import Commit, CommitMessage, get_unique_name

class HgTree(wptupdate.tree.HgTree):
    def __init__(self, *args, **kwargs):
        self.commit_cls = kwargs.pop("commit_cls", Commit)
        wptupdate.tree.HgTree.__init__(self, *args, **kwargs)

    # TODO: The extra methods for upstreaming patches from a
    # hg checkout

class GitTree(wptupdate.tree.GitTree):
    def __init__(self, *args, **kwargs):
        """Extension of the basic GitTree with extra methods for
        transferring patches"""
        commit_cls = kwargs.pop("commit_cls", Commit)
        wptupdate.tree.GitTree.__init__(self, *args, **kwargs)
        self.commit_cls = commit_cls

    def create_branch(self, name, ref=None):
        """Create a named branch.

        :param name: String representing the branch name.
        :param ref: None to use current HEAD or rev that the branch should point to"""

        args = []
        if ref is not None:
            if hasattr(ref, "sha1"):
                ref = ref.sha1
            args.append(ref)
        self.git("branch", name, *args)

    def commits_by_message(self, message, path=None):
        """List of commits with messages containing a given string.

        :param message: The string that must be contained in the message.
        :param path: Path to a file or directory the commit touches
        """
        args = ["--pretty=format:%H", "--reverse", "-z", "--grep=%s" % message]
        if path is not None:
            args.append("--")
            args.append(path)
        data = self.git("log", *args)
        return [self.commit_cls(self, sha1) for sha1 in data.split("\0")]

    def log(self, base_commit=None, path=None):
        """List commits touching a certain path from a given base commit.

        :param base_commit: Commit object for the base commit from which to log
        :param path: Path that the commits must touch
        """
        args = ["--pretty=format:%H", "--reverse", "-z"]
        if base_commit is not None:
            args.append("%s.." % base_commit.sha1)
        if path is not None:
            args.append("--")
            args.append(path)
        data = self.git("log", *args)
        return [self.commit_cls(self, sha1) for sha1 in data.split("\0") if sha1]

    def import_patch(self, patch):
        """Import a patch file into the tree and commit it

        :param patch: a Patch object containing the patch to import
        """

        with tempfile.NamedTemporaryFile() as f:
            f.write(patch.diff)
            f.flush()
            f.seek(0)
            self.git("apply", "--index", f.name)
            self.git("commit", "-m", patch.message.text, "--author=%s" % patch.full_author)

    def rebase(self, ref, continue_rebase=False):
        """Rebase the current branch onto another commit.

        :param ref: A Commit object for the commit to rebase onto
        :param continue_rebase: Continue an in-progress rebase"""
        if continue_rebase:
            args = ["--continue"]
        else:
            if hasattr(ref, "sha1"):
                ref = ref.sha1
            args = [ref]
        self.git("rebase", *args)

    def push(self, remote, local_ref, remote_ref, force=False):
        """Push local changes to a remote.

        :param remote: URL of the remote to push to
        :param local_ref: Local branch to push
        :param remote_ref: Name of the remote branch to push to
        :param force: Do a force push
        """
        args = []
        if force:
            args.append("-f")
        args.extend([remote, "%s:%s" % (local_ref, remote_ref)])
        self.git("push", *args)

    def unique_branch_name(self, prefix):
        """Get an unused branch name in the local tree

        :param prefix: Prefix to use at the start of the branch name"""
        branches = [ref[len("refs/heads/"):] for sha1, ref in self.list_refs()
                    if ref.startswith("refs/heads/")]
        return get_unique_name(branches, prefix)

class Patch(object):
    def __init__(self, author, email, message, diff):
        self.author = author
        self.email = email
        if isinstance(message, CommitMessage):
            self.message = message
        else:
            self.message = GeckoCommitMessage(message)
        self.diff = diff

    def __repr__(self):
        return "<Patch (%s)>" % self.message.full_summary

    @property
    def full_author(self):
        return "%s <%s>" % (self.author, self.email)

    @property
    def empty(self):
        return bool(self.diff.strip())


class GeckoCommitMessage(CommitMessage):
    """Commit message following the Gecko conventions for identifying bug number
    and reviewer"""

    # c.f. http://hg.mozilla.org/hgcustom/version-control-tools/file/tip/hghooks/mozhghooks/commit-message.py
    # which has the regexps that are actually enforced by the VCS hooks. These are
    # slightly different because we need to parse out specific parts of the message rather
    # than just enforce a general pattern.

    _bug_re = re.compile("^Bug (\d+)[^\w]*(?:Part \d+[^\w]*)?(.*?)\s*(?:r=(\w*))?$",
                         re.IGNORECASE)

    _backout_re = re.compile("^(?:Back(?:ing|ed)\s+out)|Backout|(?:Revert|(?:ed|ing))",
                             re.IGNORECASE)
    _backout_sha1_re = re.compile("(?:\s|\:)([0-9a-f]{12})")

    def _parse_message(self):
        CommitMessage._parse_message(self)

        if self._backout_re.match(self.full_summary):
            self.backouts = self._backout_re.findall(self.full_summary)
        else:
            self.backouts = []

        m = self._bug_re.match(self.full_summary)
        if m is not None:
            self.bug, self.summary, self.reviewer = m.groups()
        else:
            self.bug, self.summary, self.reviewer = None, self.full_summary, None


class GeckoCommit(Commit):
    msg_cls = GeckoCommitMessage

    def export_patch(self, path=None):
        """Convert a commit in the tree to a Patch with the bug number and
        reviewer stripped from the message"""
        args = ["%s^..%s" % (self.sha1, self.sha1)]
        if path is not None:
            args.append("--")
            args.append(path)

        diff = self.git("diff", *args)

        return Patch(self.author, self.email, self.message, diff)
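A hedged sketch of how the two tree classes combine (the paths are placeholders; this is essentially what MovePatches in upstream.py does, except that the real flow also runs each patch through rewrite_patch() to strip the in-tree path prefix before importing it):

# Export each local commit touching the tests as a Patch, then apply and
# commit it in the upstream checkout. Paths here are hypothetical.
local_tree = GitTree(commit_cls=GeckoCommit)              # the in-tree checkout
sync_tree = GitTree(root="/path/to/web-platform-tests")   # the upstream checkout

for commit in local_tree.log(path="tests/wpt/web-platform-tests"):
    patch = commit.export_patch("tests/wpt/web-platform-tests")
    sync_tree.import_patch(patch)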
tests/wpt/update/update.py (new file, 42 lines)
@@ -0,0 +1,42 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

import os

from wptrunner.update.base import Step, StepRunner
from wptrunner.update.update import LoadConfig, SyncFromUpstream, UpdateMetadata
from wptrunner.update.tree import NoVCSTree

from .tree import GitTree, HgTree, GeckoCommit
from .upstream import SyncToUpstream

class LoadTrees(Step):
    """Load gecko tree and sync tree containing web-platform-tests"""

    provides = ["local_tree", "sync_tree"]

    def create(self, state):
        if os.path.exists(state.sync["path"]):
            sync_tree = GitTree(root=state.sync["path"])
        else:
            sync_tree = None

        if GitTree.is_type():
            local_tree = GitTree(commit_cls=GeckoCommit)
        elif HgTree.is_type():
            local_tree = HgTree(commit_cls=GeckoCommit)
        else:
            local_tree = NoVCSTree()

        state.update({"local_tree": local_tree,
                      "sync_tree": sync_tree})


class UpdateRunner(StepRunner):
    """Overall runner for updating web-platform-tests in Gecko."""
    steps = [LoadConfig,
             LoadTrees,
             SyncToUpstream,
             SyncFromUpstream,
             UpdateMetadata]
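UpdateRunner is not invoked directly; __init__.py above hands it to wptrunner's WPTUpdate as runner_cls. A hedged sketch of that wiring (logger and kwargs stand for the objects produced by setup_logging() and updatecommandline.parse_args()):

# Sketch only: the steps above run in order against a shared state object;
# SyncToUpstream bails out early unless --upstream was requested.
from wptrunner.update import WPTUpdate

updater = WPTUpdate(logger, runner_cls=UpdateRunner, **kwargs)
rv = updater.run()  # exit_unclean signals failure back to the caller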
tests/wpt/update/updatecommandline.py (new file, 40 lines)
@@ -0,0 +1,40 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

def create_parser():
    from wptrunner import wptcommandline

    parser = wptcommandline.create_parser_update()
    parser.add_argument("--upstream", dest="upstream", action="store_true", default=None,
                        help="Push local changes to upstream repository even when not syncing")
    parser.add_argument("--no-upstream", dest="upstream", action="store_false", default=None,
                        help="Don't push local changes to upstream repository when syncing")
    parser.add_argument("--token-file", action="store", type=wptcommandline.abs_path,
                        help="Path to file containing GitHub token")
    parser.add_argument("--token", action="store", help="GitHub token to use")
    return parser


def check_args(kwargs):
    from wptrunner import wptcommandline

    wptcommandline.set_from_config(kwargs)
    kwargs["upstream"] = kwargs["upstream"] if kwargs["upstream"] is not None else kwargs["sync"]

    if kwargs["upstream"]:
        if kwargs["rev"]:
            raise ValueError("Setting --rev with --upstream isn't supported")
        if kwargs["token"] is None:
            if kwargs["token_file"] is None:
                raise ValueError("Must supply either a token file or a token")
            with open(kwargs["token_file"]) as f:
                token = f.read().strip()
            kwargs["token"] = token
    del kwargs["token_file"]
    return kwargs

def parse_args():
    parser = create_parser()
    kwargs = vars(parser.parse_args())
    return check_args(kwargs)
tests/wpt/update/upstream.py (new file, 349 lines)
@@ -0,0 +1,349 @@
import os
import re
import subprocess
import sys
import urlparse

from wptrunner.update.sync import LoadManifest
from wptrunner.update.tree import get_unique_name
from wptrunner.update.base import Step, StepRunner, exit_clean, exit_unclean

from .tree import Commit, GitTree, Patch
import github
from .github import GitHub


def rewrite_patch(patch, strip_dir):
    """Take a Patch and convert to a different repository by stripping a prefix from the
    file paths. Also rewrite the message to remove the bug number and reviewer, but add
    a bugzilla link in the summary.

    :param patch: the Patch to convert
    :param strip_dir: the path prefix to remove
    """

    if not strip_dir.startswith("/"):
        strip_dir = "/%s" % strip_dir

    new_diff = []
    line_starts = ["diff ", "+++ ", "--- "]
    for line in patch.diff.split("\n"):
        for start in line_starts:
            if line.startswith(start):
                new_diff.append(line.replace(strip_dir, "").encode("utf8"))
                break
        else:
            new_diff.append(line)

    new_diff = "\n".join(new_diff)

    assert new_diff != patch.diff

    return Patch(patch.author, patch.email, rewrite_message(patch), new_diff)

def rewrite_message(patch):
    rest = patch.message.body

    if patch.message.bug is not None:
        return "\n".join([patch.message.summary,
                          patch.message.body,
                          "",
                          "Upstreamed from https://bugzilla.mozilla.org/show_bug.cgi?id=%s" %
                          patch.message.bug])

    return "\n".join([patch.message.full_summary, rest])

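To make the path rewriting concrete, a hedged example (the directory below is hypothetical; in this runner strip_dir is whatever MovePatches computes as the tests path relative to the repository root):

# Sketch only: given a Patch exported from a local commit, a header such as
#   diff --git a/tests/wpt/web-platform-tests/foo.html b/tests/wpt/web-platform-tests/foo.html
# becomes
#   diff --git a/foo.html b/foo.html
# while context and content lines are left untouched. The message is passed
# through rewrite_message(), which drops the bug number and reviewer from the
# summary and appends a bugzilla link when a bug number was parsed out.
upstream_patch = rewrite_patch(patch, "tests/wpt/web-platform-tests")
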
class SyncToUpstream(Step):
    """Sync local changes to upstream"""

    def create(self, state):
        if not state.kwargs["upstream"]:
            return

        if not isinstance(state.local_tree, GitTree):
            self.logger.error("Cannot sync with upstream from a non-Git checkout.")
            return exit_clean

        try:
            import requests
        except ImportError:
            self.logger.error("Upstream sync requires the requests module to be installed")
            return exit_clean

        if not state.sync_tree:
            os.makedirs(state.sync["path"])
            state.sync_tree = GitTree(root=state.sync["path"])

        kwargs = state.kwargs
        with state.push(["local_tree", "sync_tree", "tests_path", "metadata_path",
                         "sync"]):
            state.token = kwargs["token"]
            runner = SyncToUpstreamRunner(self.logger, state)
            runner.run()


class CheckoutBranch(Step):
    """Create a branch in the sync tree pointing at the last upstream sync commit
    and check it out"""

    provides = ["branch"]

    def create(self, state):
        self.logger.info("Updating sync tree from %s" % state.sync["remote_url"])
        state.branch = state.sync_tree.unique_branch_name(
            "outbound_update_%s" % state.test_manifest.rev)
        state.sync_tree.update(state.sync["remote_url"],
                               state.sync["branch"],
                               state.branch)
        state.sync_tree.checkout(state.test_manifest.rev, state.branch, force=True)


class GetLastSyncCommit(Step):
    """Find the gecko commit at which we last performed a sync with upstream."""

    provides = ["last_sync_path", "last_sync_commit"]

    def create(self, state):
        self.logger.info("Looking for last sync commit")
        state.last_sync_path = os.path.join(state.metadata_path, "mozilla-sync")
        with open(state.last_sync_path) as f:
            last_sync_sha1 = f.read().strip()

        state.last_sync_commit = Commit(state.local_tree, last_sync_sha1)

        if not state.local_tree.contains_commit(state.last_sync_commit):
            self.logger.error("Could not find last sync commit %s" % last_sync_sha1)
            return exit_clean

        self.logger.info("Last sync to web-platform-tests happened in %s" % state.last_sync_commit.sha1)


class GetBaseCommit(Step):
    """Find the latest upstream commit on the branch that we are syncing with"""

    provides = ["base_commit"]

    def create(self, state):
        state.base_commit = state.sync_tree.get_remote_sha1(state.sync["remote_url"],
                                                            state.sync["branch"])
        self.logger.debug("New base commit is %s" % state.base_commit.sha1)


class LoadCommits(Step):
    """Get a list of commits in the gecko tree that need to be upstreamed"""

    provides = ["source_commits"]

    def create(self, state):
        state.source_commits = state.local_tree.log(state.last_sync_commit,
                                                    state.tests_path)

        update_regexp = re.compile("Bug \d+ - Update web-platform-tests to revision [0-9a-f]{40}")

        for i, commit in enumerate(state.source_commits[:]):
            if update_regexp.match(commit.message.text):
                # This is a previous update commit so ignore it
                state.source_commits.remove(commit)
                continue

            if commit.message.backouts:
                #TODO: Add support for collapsing backouts
                raise NotImplementedError("Need to get the Git->Hg commits for backouts and remove the backed out patch")

            if not commit.message.bug:
                self.logger.error("Commit %i (%s) doesn't have an associated bug number." %
                                  (i + 1, commit.sha1))
                return exit_unclean

        self.logger.debug("Source commits: %s" % state.source_commits)

class MovePatches(Step):
    """Convert gecko commits into patches against upstream and commit these to the sync tree."""

    provides = ["commits_loaded"]

    def create(self, state):
        state.commits_loaded = 0

        strip_path = os.path.relpath(state.tests_path,
                                     state.local_tree.root)
        self.logger.debug("Stripping patch %s" % strip_path)

        for commit in state.source_commits[state.commits_loaded:]:
            i = state.commits_loaded + 1
            self.logger.info("Moving commit %i: %s" % (i, commit.message.full_summary))
            patch = commit.export_patch(state.tests_path)
            stripped_patch = rewrite_patch(patch, strip_path)
            try:
                state.sync_tree.import_patch(stripped_patch)
            except:
                print patch.diff
                raise
            state.commits_loaded = i

class RebaseCommits(Step):
    """Rebase commits from the current branch on top of the upstream destination branch.

    This step is particularly likely to fail if the rebase generates merge conflicts.
    In that case the conflicts can be fixed up locally and the sync process restarted
    with --continue.
    """

    provides = ["rebased_commits"]

    def create(self, state):
        self.logger.info("Rebasing local commits")
        continue_rebase = False
        # Check if there's a rebase in progress
        if (os.path.exists(os.path.join(state.sync_tree.root,
                                        ".git",
                                        "rebase-merge")) or
            os.path.exists(os.path.join(state.sync_tree.root,
                                        ".git",
                                        "rebase-apply"))):
            continue_rebase = True

        try:
            state.sync_tree.rebase(state.base_commit, continue_rebase=continue_rebase)
        except subprocess.CalledProcessError:
            self.logger.info("Rebase failed, fix merge and run %s again with --continue" % sys.argv[0])
            raise
        state.rebased_commits = state.sync_tree.log(state.base_commit)
        self.logger.info("Rebase successful")

class CheckRebase(Step):
    """Check if there are any commits remaining after rebase"""

    def create(self, state):
        if not state.rebased_commits:
            self.logger.info("Nothing to upstream, exiting")
            return exit_clean

class MergeUpstream(Step):
    """Run steps to push local commits as separate PRs and merge upstream."""

    provides = ["merge_index", "gh_repo"]

    def create(self, state):
        gh = GitHub(state.token)
        if "merge_index" not in state:
            state.merge_index = 0

        org, name = urlparse.urlsplit(state.sync["remote_url"]).path[1:].split("/")
        if name.endswith(".git"):
            name = name[:-4]
        state.gh_repo = gh.repo(org, name)
        for commit in state.rebased_commits[state.merge_index:]:
            with state.push(["gh_repo", "sync_tree"]):
                state.commit = commit
                pr_merger = PRMergeRunner(self.logger, state)
                rv = pr_merger.run()
                if rv is not None:
                    return rv
            state.merge_index += 1

class UpdateLastSyncCommit(Step):
    """Update the gecko commit at which we last performed a sync with upstream."""

    provides = []

    def create(self, state):
        self.logger.info("Updating last sync commit")
        with open(state.last_sync_path, "w") as f:
            f.write(state.local_tree.rev)
        # This gets added to the patch later on

class MergeLocalBranch(Step):
    """Create a local branch pointing at the commit to upstream"""

    provides = ["local_branch"]

    def create(self, state):
        branch_prefix = "sync_%s" % state.commit.sha1
        local_branch = state.sync_tree.unique_branch_name(branch_prefix)

        state.sync_tree.create_branch(local_branch, state.commit)
        state.local_branch = local_branch

class MergeRemoteBranch(Step):
    """Get an unused remote branch name to use for the PR"""
    provides = ["remote_branch"]

    def create(self, state):
        remote_branch = "sync_%s" % state.commit.sha1
        branches = [ref[len("refs/heads/"):] for sha1, ref in
                    state.sync_tree.list_remote(state.gh_repo.url)
                    if ref.startswith("refs/heads")]
        state.remote_branch = get_unique_name(branches, remote_branch)


class PushUpstream(Step):
    """Push local branch to remote"""
    def create(self, state):
        self.logger.info("Pushing commit upstream")
        state.sync_tree.push(state.gh_repo.url,
                             state.local_branch,
                             state.remote_branch)

class CreatePR(Step):
    """Create a PR for the remote branch"""

    provides = ["pr"]

    def create(self, state):
        self.logger.info("Creating a PR")
        commit = state.commit
        state.pr = state.gh_repo.create_pr(commit.message.full_summary,
                                           state.remote_branch,
                                           "master",
                                           commit.message.body if commit.message.body else "")


class PRAddComment(Step):
    """Add an issue comment indicating that the code has been reviewed already"""
    def create(self, state):
        state.pr.issue.add_comment("Code reviewed upstream.")


class MergePR(Step):
    """Merge the PR"""

    def create(self, state):
        self.logger.info("Merging PR")
        state.pr.merge()


class PRDeleteBranch(Step):
    """Delete the remote branch"""

    def create(self, state):
        self.logger.info("Deleting remote branch")
        state.sync_tree.push(state.gh_repo.url, "", state.remote_branch)


class SyncToUpstreamRunner(StepRunner):
    """Runner for syncing local changes to upstream"""
    steps = [LoadManifest,
             CheckoutBranch,
             GetLastSyncCommit,
             GetBaseCommit,
             LoadCommits,
             MovePatches,
             RebaseCommits,
             CheckRebase,
             MergeUpstream,
             UpdateLastSyncCommit]


class PRMergeRunner(StepRunner):
    """(Sub)Runner for creating and merging a PR"""
    steps = [
        MergeLocalBranch,
        MergeRemoteBranch,
        PushUpstream,
        CreatePR,
        PRAddComment,
        MergePR,
        PRDeleteBranch,
    ]