Reorganize Servo's WPT Python scripts

This change moves all of Servo's WPT Python support scripts into a
single directory; previously they were scattered throughout the source
tree. This should allow more code reuse and make it easier to
understand how everything fits together.

The changes:

- `tests/wpt/update` → `python/wpt/importer`
- `etc/ci/upstream-wpt-changes/wptupstreamer` → `python/wpt/exporter`
- `etc/ci/upstream-wpt-changes/test.py` → `python/wpt/test.py`
- `etc/ci/upstream-wpt-changes/tests` → `python/wpt/tests`
- `tests/wpt/servowpt.py` →
    - `python/wpt/update.py`
    - `python/wpt/run.py`
- `tests/wpt/manifestupdate.py` → `python/wpt/manifestupdate.py`

This change also removes:

- The ability to run the `update-wpt` and `test-wpt` commands without
  using `mach`. These didn't work very well because it was difficult to
  get all of the wptrunner and mach dependencies installed outside of
  the Python virtualenv. It's simpler if they are always run through
  `mach`.
- The old WPT change upstreaming script, which was no longer used.
Martin Robinson 2023-04-16 11:33:02 +02:00
parent 9acb9cc5cf
commit e2cf3e8d1a
52 changed files with 237 additions and 888 deletions

View file

@ -32,9 +32,9 @@ jobs:
# See https://github.com/actions/checkout/issues/162.
token: ${{ secrets.WPT_SYNC_TOKEN }}
- name: Install requirements
run: pip install -r servo/etc/ci/upstream-wpt-changes/requirements.txt
run: pip install -r servo/python/wpt/requirements.txt
- name: Process pull request
run: servo/etc/ci/upstream-wpt-changes/upstream-wpt-changes.py
run: servo/python/wpt/upstream.py
env:
GITHUB_CONTEXT: ${{ toJson(github) }}
WPT_SYNC_TOKEN: ${{ secrets.WPT_SYNC_TOKEN }}

View file

@ -2,7 +2,7 @@ name: WPT exporter test
on:
pull_request:
branches: ["**"]
paths: ["etc/ci/upstream-wpt-changes/**"]
paths: ["python/wpt/exporter/**"]
jobs:
test:
@ -12,7 +12,7 @@ jobs:
- uses: actions/checkout@v3
- name: Install dependencies
run: |
python3 -m pip install --upgrade -r etc/ci/upstream-wpt-changes/requirements-dev.txt
python3 -m pip install --upgrade -r python/wpt/requirements-dev.txt
- name: Running tests
run: |
python3 etc/ci/upstream-wpt-changes/test.py
python3 python/wpt/test.py

View file

@ -35,7 +35,7 @@ function unsafe_pull_from_upstream() {
# Fetch all changes from upstream WPT and automatically transpose them
# into a single servo commit.
./mach update-wpt --sync --no-upstream --patch || return 2
./mach update-wpt --sync --patch || return 2
# If there was no new commit created, there are no changes that need syncing.
# Skip the remaining steps.

View file

@ -10,3 +10,8 @@ is the canonical repository for this code.
servo-tidy is used to check licenses, line lengths, whitespace, flake8 on
Python files, lock file versions, and more.
# `wpt`
servo-wpt is a module with support scripts for running, importing, and
exporting WPT tests, as well as for updating their manifests and
expectations.
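
For orientation, a minimal sketch of how `mach` consumes the consolidated package
(assembled from the `mach_commands.py` hunk below; it assumes a Servo checkout with
`python/` on `sys.path`, and the arguments are placeholders):

```python
# Sketch only: mirrors the imports and calls made by mach_commands.py below.
import wpt          # importing this adds the vendored WPT tools to sys.path
import wpt.run

parser = wpt.create_parser()          # wptrunner's parser plus Servo-specific flags
kwargs = vars(parser.parse_args([]))  # mach normally supplies the real arguments
# wpt.run.run_tests(**kwargs)         # what `./mach test-wpt` ultimately calls
```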

View file

@ -22,6 +22,11 @@ import subprocess
from xml.etree.ElementTree import XML
from six import iteritems
import wpt
import wpt.manifestupdate
import wpt.run
import wpt.update
from mach.registrar import Registrar
from mach.decorators import (
CommandArgument,
@ -29,25 +34,19 @@ from mach.decorators import (
Command,
)
from servo_tidy import tidy
from servo.command_base import (
CommandBase,
call, check_call, check_output,
)
from servo.util import host_triple
from wptrunner import wptcommandline
from update import updatecommandline
from servo_tidy import tidy
from servo_tidy_tests import test_tidy
from servo.util import host_triple
SCRIPT_PATH = os.path.split(__file__)[0]
PROJECT_TOPLEVEL_PATH = os.path.abspath(os.path.join(SCRIPT_PATH, "..", ".."))
WEB_PLATFORM_TESTS_PATH = os.path.join("tests", "wpt", "web-platform-tests")
SERVO_TESTS_PATH = os.path.join("tests", "wpt", "mozilla", "tests")
sys.path.insert(0, os.path.join(PROJECT_TOPLEVEL_PATH, 'tests', 'wpt'))
import servowpt # noqa: E402
CLANGFMT_CPP_DIRS = ["support/hololens/"]
CLANGFMT_VERSION = "15"
@ -67,45 +66,6 @@ TEST_SUITES = OrderedDict([
TEST_SUITES_BY_PREFIX = {path: k for k, v in iteritems(TEST_SUITES) if "paths" in v for path in v["paths"]}
def create_parser_wpt():
import mozlog.commandline
parser = wptcommandline.create_parser()
parser.add_argument('--release', default=False, action="store_true",
help="Run with a release build of servo")
parser.add_argument('--rr-chaos', default=False, action="store_true",
help="Run under chaos mode in rr until a failure is captured")
parser.add_argument('--pref', default=[], action="append", dest="prefs",
help="Pass preferences to servo")
parser.add_argument('--layout-2020', '--with-layout-2020', default=False,
action="store_true", help="Use expected results for the 2020 layout engine")
parser.add_argument('--log-servojson', action="append", type=mozlog.commandline.log_file,
help="Servo's JSON logger of unexpected results")
parser.add_argument('--always-succeed', default=False, action="store_true",
help="Always yield exit code of zero")
parser.add_argument('--no-default-test-types', default=False, action="store_true",
help="Run all of the test types provided by wptrunner or specified explicitly by --test-types")
parser.add_argument('--filter-intermittents', default=None, action="store",
help="Filter intermittents against known intermittents "
"and save the filtered output to the given file.")
parser.add_argument('--log-raw-unexpected', default=None, action="store",
help="Raw structured log messages for unexpected results."
" '--log-raw' Must also be passed in order to use this.")
return parser
def create_parser_manifest_update():
import manifestupdate
return manifestupdate.create_parser()
def run_update(topdir, check_clean=False, rebuild=False, **kwargs):
import manifestupdate
from wptrunner import wptlogging
logger = wptlogging.setup(kwargs, {"mach": sys.stdout})
wpt_dir = os.path.abspath(os.path.join(topdir, 'tests', 'wpt'))
return manifestupdate.update(logger, wpt_dir, check_clean, rebuild)
@CommandProvider
class MachCommands(CommandBase):
DEFAULT_RENDER_MODE = "cpu"
@ -353,7 +313,7 @@ class MachCommands(CommandBase):
if no_wpt:
manifest_dirty = False
else:
manifest_dirty = run_update(self.context.topdir, check_clean=True)
manifest_dirty = wpt.manifestupdate.update(check_clean=True)
tidy_failed = tidy.scan(not all_files, not no_progress, stylo=stylo, no_wpt=no_wpt)
self.install_rustfmt()
rustfmt_failed = self.call_rustup_run(["cargo", "fmt", "--", "--check"])
@ -399,7 +359,7 @@ class MachCommands(CommandBase):
@Command('test-wpt-failure',
description='Run the tests harness that verifies that the test failures are reported correctly',
category='testing',
parser=create_parser_wpt)
parser=wpt.create_parser)
def test_wpt_failure(self, **kwargs):
kwargs["pause_after_test"] = False
kwargs["include"] = ["infrastructure/failing-test.html"]
@ -408,7 +368,7 @@ class MachCommands(CommandBase):
@Command('test-wpt',
description='Run the regular web platform test suite',
category='testing',
parser=create_parser_wpt)
parser=wpt.create_parser)
def test_wpt(self, **kwargs):
ret = self.run_test_list_or_dispatch(kwargs["test_list"], "wpt", self._test_wpt, **kwargs)
if kwargs["always_succeed"]:
@ -419,7 +379,7 @@ class MachCommands(CommandBase):
@Command('test-wpt-android',
description='Run the web platform test suite in an Android emulator',
category='testing',
parser=create_parser_wpt)
parser=wpt.create_parser)
def test_wpt_android(self, release=False, dev=False, binary_args=None, **kwargs):
kwargs.update(
release=release,
@ -433,7 +393,7 @@ class MachCommands(CommandBase):
def _test_wpt(self, android=False, **kwargs):
self.set_run_env(android)
return servowpt.run_tests(**kwargs)
return wpt.run.run_tests(**kwargs)
# Helper to ensure all specified paths are handled, otherwise dispatch to appropriate test suite.
def run_test_list_or_dispatch(self, requested_paths, correct_suite, correct_function, **kwargs):
@ -454,9 +414,9 @@ class MachCommands(CommandBase):
@Command('update-manifest',
description='Run test-wpt --manifest-update SKIP_TESTS to regenerate MANIFEST.json',
category='testing',
parser=create_parser_manifest_update)
parser=wpt.manifestupdate.create_parser)
def update_manifest(self, **kwargs):
return run_update(self.context.topdir, **kwargs)
return wpt.manifestupdate.update(check_clean=False)
@Command('fmt',
description='Format the Rust and CPP source files with rustfmt and clang-format',
@ -474,13 +434,13 @@ class MachCommands(CommandBase):
@Command('update-wpt',
description='Update the web platform tests',
category='testing',
parser=updatecommandline.create_parser())
parser=wpt.update.create_parser)
def update_wpt(self, **kwargs):
patch = kwargs.get("patch", False)
if not patch and kwargs["sync"]:
print("Are you sure you don't want a patch?")
return 1
return servowpt.update_tests(**kwargs)
return wpt.update.update_tests(**kwargs)
@Command('test-android-startup',
description='Extremely minimal testing of Servo for Android',
@ -826,7 +786,7 @@ testing/web-platform/mozilla/tests for Servo-only tests""" % reference_path)
proc = subprocess.Popen("%s %s" % (editor, test_path), shell=True)
if not kwargs["no_run"]:
p = create_parser_wpt()
p = wpt.create_parser()
args = []
if kwargs["release"]:
args.append("--release")

python/wpt/__init__.py Normal file, 61 lines
View file

@ -0,0 +1,61 @@
# Copyright 2023 The Servo Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
import os
import sys
import mozlog.commandline
SCRIPT_PATH = os.path.abspath(os.path.dirname(__file__))
SERVO_ROOT = os.path.abspath(os.path.join(SCRIPT_PATH, "..", ".."))
WPT_PATH = os.path.join(SERVO_ROOT, "tests", "wpt")
WPT_TOOLS_PATH = os.path.join(WPT_PATH, "web-platform-tests", "tools")
CERTS_PATH = os.path.join(WPT_TOOLS_PATH, "certs")
sys.path.insert(0, WPT_TOOLS_PATH)
import localpaths # noqa: F401,E402
import wptrunner.wptcommandline # noqa: E402
def create_parser():
parser = wptrunner.wptcommandline.create_parser()
parser.add_argument('--release', default=False, action="store_true",
help="Run with a release build of servo")
parser.add_argument('--rr-chaos', default=False, action="store_true",
help="Run under chaos mode in rr until a failure is captured")
parser.add_argument('--pref', default=[], action="append", dest="prefs",
help="Pass preferences to servo")
parser.add_argument('--layout-2020', '--with-layout-2020', default=False,
action="store_true", help="Use expected results for the 2020 layout engine")
parser.add_argument('--log-servojson', action="append", type=mozlog.commandline.log_file,
help="Servo's JSON logger of unexpected results")
parser.add_argument('--always-succeed', default=False, action="store_true",
help="Always yield exit code of zero")
parser.add_argument('--no-default-test-types', default=False, action="store_true",
help="Run all of the test types provided by wptrunner or specified explicitly by --test-types")
parser.add_argument('--filter-intermittents', default=None, action="store",
help="Filter intermittents against known intermittents "
"and save the filtered output to the given file.")
parser.add_argument('--log-raw-unexpected', default=None, action="store",
help="Raw structured log messages for unexpected results."
" '--log-raw' Must also be passed in order to use this.")
return parser
def update_args_for_layout_2020(kwargs: dict):
if kwargs.pop("layout_2020"):
kwargs["test_paths"]["/"]["metadata_path"] = os.path.join(
WPT_PATH, "metadata-layout-2020"
)
kwargs["test_paths"]["/_mozilla/"]["metadata_path"] = os.path.join(
WPT_PATH, "mozilla", "meta-layout-2020"
)
kwargs["include_manifest"] = os.path.join(
WPT_PATH, "include-layout-2020.ini"
)
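
A small illustration of what the `update_args_for_layout_2020` helper above does to
the argument dictionary (the dict shape mirrors wptrunner's `test_paths`; the values
are made up for the example):

```python
# Illustration only, assuming update_args_for_layout_2020 is imported from
# the wpt package above; the input dict stands in for real wptrunner kwargs.
from wpt import update_args_for_layout_2020

kwargs = {
    "layout_2020": True,
    "test_paths": {"/": {"metadata_path": ""}, "/_mozilla/": {"metadata_path": ""}},
}
update_args_for_layout_2020(kwargs)

assert "layout_2020" not in kwargs  # the flag is popped
assert kwargs["test_paths"]["/"]["metadata_path"].endswith("metadata-layout-2020")
assert kwargs["include_manifest"].endswith("include-layout-2020.ini")
```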

View file

@ -17,7 +17,7 @@ import logging
import os
import sys
from wptupstreamer import WPTSync
from exporter import WPTSync
def main() -> int:

View file

@ -0,0 +1,57 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
import os
import sys
from .tree import GitTree, GeckoCommit
from wptrunner.update import setup_logging, WPTUpdate # noqa: F401
from wptrunner.update.base import Step, StepRunner, exit_unclean # noqa: F401
from wptrunner.update.update import LoadConfig, SyncFromUpstream, UpdateMetadata # noqa: F401
from wptrunner import wptcommandline # noqa: F401
class LoadTrees(Step):
"""Load gecko tree and sync tree containing web-platform-tests"""
provides = ["local_tree", "sync_tree"]
def create(self, state):
if os.path.exists(state.sync["path"]):
sync_tree = GitTree(root=state.sync["path"])
else:
sync_tree = None
assert GitTree.is_type()
state.update({"local_tree": GitTree(commit_cls=GeckoCommit),
"sync_tree": sync_tree})
class UpdateRunner(StepRunner):
"""Overall runner for updating web-platform-tests in Gecko."""
steps = [LoadConfig,
LoadTrees,
SyncFromUpstream,
UpdateMetadata]
def run_update(**kwargs):
logger = setup_logging(kwargs, {"mach": sys.stdout})
updater = WPTUpdate(logger, runner_cls=UpdateRunner, **kwargs)
return updater.run() != exit_unclean
def create_parser():
parser = wptcommandline.create_parser_update()
parser.add_argument("--layout-2020", "--with-layout-2020", default=False, action="store_true",
help="Use expected results for the 2020 layout engine")
return parser
def check_args(kwargs):
wptcommandline.set_from_config(kwargs)
if hasattr(wptcommandline, 'check_paths'):
wptcommandline.check_paths(kwargs)
return kwargs

View file

@ -9,16 +9,8 @@ import sys
import tempfile
from wptrunner import update as wptupdate
from wptrunner.update.tree import Commit, CommitMessage, get_unique_name
class HgTree(wptupdate.tree.HgTree):
def __init__(self, *args, **kwargs):
self.commit_cls = kwargs.pop("commit_cls", Commit)
wptupdate.tree.HgTree.__init__(self, *args, **kwargs)
# TODO: The extra methods for upstreaming patches from a
# hg checkout
class GitTree(wptupdate.tree.GitTree):
def __init__(self, *args, **kwargs):
@ -117,6 +109,7 @@ class GitTree(wptupdate.tree.GitTree):
if ref.startswith("refs/heads/")]
return get_unique_name(branches, prefix)
class Patch(object):
def __init__(self, author, email, message, merge_message, diff):
self.author = author
@ -149,13 +142,13 @@ class GeckoCommitMessage(CommitMessage):
# slightly different because we need to parse out specific parts of the message rather
# than just enforce a general pattern.
_bug_re = re.compile("^Bug (\d+)[^\w]*(?:Part \d+[^\w]*)?(.*?)\s*(?:r=(\w*))?$",
_bug_re = re.compile(r"^Bug (\d+)[^\w]*(?:Part \d+[^\w]*)?(.*?)\s*(?:r=(\w*))?$",
re.IGNORECASE)
_merge_re = re.compile("^Auto merge of #(\d+) - [^:]+:[^,]+, r=(.+)$", re.IGNORECASE)
_merge_re = re.compile(r"^Auto merge of #(\d+) - [^:]+:[^,]+, r=(.+)$", re.IGNORECASE)
_backout_re = re.compile("^(?:Back(?:ing|ed)\s+out)|Backout|(?:Revert|(?:ed|ing))",
_backout_re = re.compile(r"^(?:Back(?:ing|ed)\s+out)|Backout|(?:Revert|(?:ed|ing))",
re.IGNORECASE)
_backout_sha1_re = re.compile("(?:\s|\:)(0-9a-f){12}")
_backout_sha1_re = re.compile(r"(?:\s|\:)(0-9a-f){12}")
def _parse_message(self):
CommitMessage._parse_message(self)
@ -188,7 +181,7 @@ class GeckoCommit(Commit):
merge_rev = self.git("when-merged", *args).strip()
except subprocess.CalledProcessError as exn:
if not find_executable('git-when-merged'):
print('Please add the `when-merged` git command to your PATH ' +
print('Please add the `when-merged` git command to your PATH '
'(https://github.com/mhagger/git-when-merged/).')
sys.exit(1)
raise exn
@ -206,4 +199,3 @@ class GeckoCommit(Commit):
merge_message = self.merge.message if self.merge else None
return Patch(self.author, self.email, self.message, merge_message, diff)

View file

@ -3,28 +3,21 @@
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
import argparse
import imp
import os
import sys
import tempfile
from collections import defaultdict
from six import iterkeys, iteritems
from . import SERVO_ROOT, WPT_PATH
from mozlog.structured import commandline
# This must happen after importing from "." since it adds WPT
# tools to the Python system path.
import manifest as wptmanifest
from wptrunner.wptcommandline import get_test_paths, set_from_config
manifest = None
servo_root = os.path.join(os.path.dirname(__file__),
os.pardir,
os.pardir)
def do_delayed_imports(wpt_dir):
global manifest
sys.path.insert(0, os.path.join(wpt_dir, "tools", "manifest"))
import manifest # noqa
from wptrunner import wptlogging
def create_parser():
@ -38,11 +31,10 @@ def create_parser():
return p
def update(logger, wpt_dir, check_clean=True, rebuild=False):
localpaths = imp.load_source("localpaths", # noqa
os.path.join(wpt_dir, "web-platform-tests", "tools", "localpaths.py"))
kwargs = {"config": os.path.join(wpt_dir, "config.ini"),
"manifest_path": os.path.join(wpt_dir, "metadata"),
def update(check_clean=True, rebuild=False, **kwargs):
logger = wptlogging.setup(kwargs, {"mach": sys.stdout})
kwargs = {"config": os.path.join(WPT_PATH, "config.ini"),
"manifest_path": os.path.join(WPT_PATH, "metadata"),
"tests_root": None,
"metadata_root": None}
@ -50,8 +42,6 @@ def update(logger, wpt_dir, check_clean=True, rebuild=False):
config = kwargs["config"]
test_paths = get_test_paths(config)
do_delayed_imports(wpt_dir)
if check_clean:
return _check_clean(logger, test_paths)
@ -63,12 +53,12 @@ def _update(logger, test_paths, rebuild):
manifest_path = os.path.join(paths["metadata_path"], "MANIFEST.json")
cache_subdir = os.path.relpath(os.path.dirname(manifest_path),
os.path.dirname(__file__))
manifest.manifest.load_and_update(paths["tests_path"],
wptmanifest.manifest.load_and_update(paths["tests_path"],
manifest_path,
url_base,
working_copy=True,
rebuild=rebuild,
cache_root=os.path.join(servo_root, ".wpt",
cache_root=os.path.join(SERVO_ROOT, ".wpt",
cache_subdir))
return 0
@ -80,19 +70,19 @@ def _check_clean(logger, test_paths):
tests_path = paths["tests_path"]
manifest_path = os.path.join(paths["metadata_path"], "MANIFEST.json")
old_manifest = manifest.manifest.load_and_update(tests_path,
old_manifest = wptmanifest.manifest.load_and_update(tests_path,
manifest_path,
url_base,
working_copy=False,
update=False,
write_manifest=False,)
write_manifest=False)
# Even if no cache is specified, one will be used automatically by the
# VCS integration. Create a brand new cache every time to ensure that
# the VCS integration always thinks that any file modifications in the
# working directory are new and interesting.
cache_root = tempfile.mkdtemp()
new_manifest = manifest.manifest.load_and_update(tests_path,
new_manifest = wptmanifest.manifest.load_and_update(tests_path,
manifest_path,
url_base,
working_copy=True,
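
The practical effect of this refactor is that `update()` no longer needs a `wpt_dir`
or a pre-built logger; a hedged sketch of how the mach commands shown earlier in this
diff now call it (paths are resolved from the package's own constants):

```python
# Sketch only: mirrors the calls made from mach_commands.py earlier in this diff.
import wpt.manifestupdate

manifest_dirty = wpt.manifestupdate.update(check_clean=True)     # `./mach test-tidy`
# wpt.manifestupdate.update(check_clean=False, rebuild=False)    # `./mach update-manifest`
```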

View file

@ -1,10 +1,11 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
# pylint: disable=missing-docstring
import dataclasses
import grouping_formatter
import json
import multiprocessing
import os
import re
import sys
@ -12,22 +13,21 @@ import urllib.error
import urllib.parse
import urllib.request
from typing import List, NamedTuple, Optional, Union
import mozlog
import mozlog.formatters
import multiprocessing
from typing import List, NamedTuple, Optional, Union
from grouping_formatter import UnexpectedResult, UnexpectedSubtestResult
from . import SERVO_ROOT, WPT_PATH, WPT_TOOLS_PATH, update_args_for_layout_2020
from .grouping_formatter import (
ServoFormatter, ServoHandler,
UnexpectedResult, UnexpectedSubtestResult
)
from wptrunner import wptcommandline
from wptrunner import wptrunner
SCRIPT_PATH = os.path.abspath(os.path.dirname(__file__))
SERVO_ROOT = os.path.abspath(os.path.join(SCRIPT_PATH, "..", ".."))
WPT_TOOLS_PATH = os.path.join(SCRIPT_PATH, "web-platform-tests", "tools")
CERTS_PATH = os.path.join(WPT_TOOLS_PATH, "certs")
sys.path.insert(0, WPT_TOOLS_PATH)
import localpaths # noqa: F401,E402
import update # noqa: F401,E402
TRACKER_API = "https://build.servo.org/intermittent-tracker"
TRACKER_API_ENV_VAR = "INTERMITTENT_TRACKER_API"
TRACKER_DASHBOARD_SECRET_ENV_VAR = "INTERMITTENT_TRACKER_DASHBOARD_SECRET"
@ -50,23 +50,7 @@ def set_if_none(args: dict, key: str, value):
args[key] = value
def update_args_for_layout_2020(kwargs: dict):
if kwargs.pop("layout_2020"):
kwargs["test_paths"]["/"]["metadata_path"] = os.path.join(
SCRIPT_PATH, "metadata-layout-2020"
)
kwargs["test_paths"]["/_mozilla/"]["metadata_path"] = os.path.join(
SCRIPT_PATH, "mozilla", "meta-layout-2020"
)
kwargs["include_manifest"] = os.path.join(
SCRIPT_PATH, "include-layout-2020.ini"
)
def run_tests(**kwargs):
from wptrunner import wptrunner
from wptrunner import wptcommandline
# By default, Rayon selects the number of worker threads based on the
# available CPU count. This doesn't work very well when running tests on CI,
# since we run so many Servo processes in parallel. The result is a lot of
@ -77,8 +61,8 @@ def run_tests(**kwargs):
os.environ["HOST_FILE"] = os.path.join(SERVO_ROOT, "tests", "wpt", "hosts")
set_if_none(kwargs, "product", "servo")
set_if_none(kwargs, "config", os.path.join(SCRIPT_PATH, "config.ini"))
set_if_none(kwargs, "include_manifest", os.path.join(SCRIPT_PATH, "include.ini"))
set_if_none(kwargs, "config", os.path.join(WPT_PATH, "config.ini"))
set_if_none(kwargs, "include_manifest", os.path.join(WPT_PATH, "include.ini"))
set_if_none(kwargs, "manifest_update", False)
set_if_none(kwargs, "processes", multiprocessing.cpu_count())
@ -131,7 +115,7 @@ def run_tests(**kwargs):
update_args_for_layout_2020(kwargs)
mozlog.commandline.log_formatters["servo"] = (
grouping_formatter.ServoFormatter,
ServoFormatter,
"Servo's grouping output formatter",
)
@ -146,7 +130,7 @@ def run_tests(**kwargs):
else:
logger = wptrunner.setup_logging(kwargs, {"servo": sys.stdout})
handler = grouping_formatter.ServoHandler()
handler = ServoHandler()
logger.add_handler(handler)
wptrunner.run_tests(**kwargs)
@ -200,21 +184,6 @@ def run_tests(**kwargs):
return return_value
def update_tests(**kwargs):
from update import updatecommandline
set_if_none(kwargs, "product", "servo")
set_if_none(kwargs, "config", os.path.join(SCRIPT_PATH, "config.ini"))
kwargs["store_state"] = False
updatecommandline.check_args(kwargs)
update_args_for_layout_2020(kwargs)
logger = update.setup_logging(kwargs, {"mach": sys.stdout})
return_value = update.run_update(logger, **kwargs)
return 1 if return_value is update.exit_unclean else 0
class GithubContextInformation(NamedTuple):
build_url: Optional[str]
pull_url: Optional[str]
@ -317,7 +286,7 @@ def filter_intermittents(
dashboard = TrackerDashboardFilter()
dashboard.report_failures(unexpected_results)
def add_result(output, text, results, filter_func) -> None:
def add_result(output, text, results: List[UnexpectedResult], filter_func) -> None:
filtered = [str(result) for result in filter(filter_func, results)]
if filtered:
output += [f"{text} ({len(results)}): ", *filtered]
@ -325,7 +294,7 @@ def filter_intermittents(
def is_stable_and_unexpected(result):
return not result.flaky and not result.issues
output = []
output: List[str] = []
add_result(output, "Flaky unexpected results", unexpected_results,
lambda result: result.flaky)
add_result(output, "Stable unexpected results that are known-intermittent",
@ -355,15 +324,3 @@ def write_unexpected_only_raw_log(
if data["action"] in ["suite_start", "suite_end"] or \
("test" in data and data["test"] in tests):
output.write(line)
def main():
from wptrunner import wptcommandline
parser = wptcommandline.create_parser()
kwargs = vars(parser.parse_args())
return run_tests(**kwargs)
if __name__ == "__main__":
sys.exit(0 if main() else 1)

View file

@ -1,3 +1,5 @@
#!/usr/bin/env python
# Copyright 2023 The Servo Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution.
#
@ -37,8 +39,8 @@ from wsgiref.simple_server import WSGIRequestHandler, make_server
import flask
import flask.cli
import requests
from wptupstreamer import SyncRun, WPTSync
from wptupstreamer.step import CreateOrUpdateBranchForPRStep
from exporter import SyncRun, WPTSync
from exporter.step import CreateOrUpdateBranchForPRStep
TESTS_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "tests")
SYNC: Optional[WPTSync] = None

python/wpt/update.py Normal file, 29 lines
View file

@ -0,0 +1,29 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
# pylint: disable=missing-docstring
import os
from . import WPT_PATH, update_args_for_layout_2020
from . import importer
def set_if_none(args: dict, key: str, value):
if key not in args or args[key] is None:
args[key] = value
def update_tests(**kwargs):
set_if_none(kwargs, "product", "servo")
set_if_none(kwargs, "config", os.path.join(WPT_PATH, "config.ini"))
kwargs["store_state"] = False
importer.check_args(kwargs)
update_args_for_layout_2020(kwargs)
return 1 if not importer.run_update(**kwargs) else 0
def create_parser(**kwargs):
return importer.create_parser()
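
A hedged sketch of how `./mach update-wpt` drives this module (mirroring the
`update_wpt` command in the `mach_commands.py` hunk earlier; the flags shown are the
ones the CI script above passes, and are assumptions outside of mach):

```python
# Sketch only: argument handling normally happens inside mach.
import wpt.update

parser = wpt.update.create_parser()
kwargs = vars(parser.parse_args(["--sync", "--patch"]))
# wpt.update.update_tests(**kwargs)  # returns 0 on success, 1 if the update was unclean
```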

View file

@ -136,11 +136,9 @@ directories = [
"./support/magicleap/Servo2D/code/src.gen",
"./support/magicleap/Servo2D/pipeline",
"./tests/wpt/harness",
"./tests/wpt/update",
"./tests/wpt/web-platform-tests",
"./tests/wpt/mozilla/tests/mozilla/referrer-policy",
"./tests/wpt/mozilla/tests/webgl",
"./tests/wpt/sync",
"./python/tidy/servo_tidy_tests",
"./components/script/dom/bindings/codegen/parser",
"./components/script/dom/bindings/codegen/ply",

View file

@ -9,7 +9,6 @@ In particular, this folder contains:
* `config.ini`: some configuration for the web-platform-tests.
* `include.ini`: the subset of web-platform-tests we currently run.
* `servowpt.py`: run the web-platform-tests in Servo.
* `web-platform-tests`: copy of the web-platform-tests.
* `metadata`: expected failures for the web-platform-tests we run.
* `mozilla`: web-platform-tests that cannot be upstreamed.
@ -78,12 +77,6 @@ testharnessreport.js may have been installed incorrectly (see
[**Running the tests manually**](#running-the-tests-manually)
for more details).
Running the tests without mach
------------------------------
When avoiding `mach` for some reason, one can run `servowpt.py`
directly. However, this requires that all the dependencies for
`wptrunner` are available in the current python environment.
Running the tests manually
--------------------------

View file

@ -1,41 +0,0 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
#!/usr/bin/env python
import os
import subprocess
import sys
from mozlog.structured import structuredlog
here = os.path.split(__file__)[0]
sys.path.insert(0, os.path.abspath(os.path.join(here, os.pardir, "web-platform-tests", "tools", "wptrunner")))
sys.path.insert(0, os.path.abspath(os.path.join(here, os.pardir, "web-platform-tests", "tools", "wptserve")))
sys.path.insert(0, os.path.abspath(os.path.join(here, os.pardir, "web-platform-tests", "tools")))
import localpaths
from wptrunner.update import setup_logging, WPTUpdate
from wptrunner.update.base import exit_unclean
from . import updatecommandline
from .update import UpdateRunner
def run_update(logger, **kwargs):
updater = WPTUpdate(logger, runner_cls=UpdateRunner, **kwargs)
return updater.run()
if __name__ == "__main__":
args = updatecommandline.parse_args()
logger = setup_logging(args, {"mach": sys.stdout})
assert structuredlog.get_default_logger() is not None
rv = run_update(logger, **args)
if rv is exit_unclean:
sys.exit(1)
else:
sys.exit(0)

View file

@ -1,179 +0,0 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
from __future__ import print_function
import json
import urllib
requests = None
class GitHubError(Exception):
def __init__(self, status, data):
self.status = status
self.data = data
class GitHub(object):
url_base = "https://api.github.com"
def __init__(self, token):
# Defer the import of requests since it isn't installed by default
global requests
if requests is None:
import requests
self.headers = {"Accept": "application/vnd.github.v3+json"}
self.auth = (token, "x-oauth-basic")
def get(self, path):
return self._request("GET", path)
def post(self, path, data):
return self._request("POST", path, data=data)
def put(self, path, data):
return self._request("PUT", path, data=data)
def _request(self, method, path, data=None):
url = urllib.parse.urljoin(self.url_base, path)
kwargs = {"headers": self.headers,
"auth": self.auth}
if data is not None:
kwargs["data"] = json.dumps(data)
resp = requests.request(method, url, **kwargs)
if 200 <= resp.status_code < 300:
return resp.json()
else:
print(resp.status_code, resp.json())
raise GitHubError(resp.status_code, resp.json())
def repo(self, owner, name):
"""GitHubRepo for a particular repository.
:param owner: String repository owner
:param name: String repository name
"""
return GitHubRepo.from_name(self, owner, name)
class GitHubRepo(object):
def __init__(self, github, data):
"""Object respresenting a GitHub respoitory"""
self.gh = github
self.owner = data["owner"]
self.name = data["name"]
self.url = data["ssh_url"]
self._data = data
@classmethod
def from_name(cls, github, owner, name):
data = github.get("/repos/%s/%s" % (owner, name))
return cls(github, data)
@property
def url_base(self):
return "/repos/%s/" % (self._data["full_name"])
def create_pr(self, title, head, base, body):
"""Create a Pull Request in the repository
:param title: Pull Request title
:param head: ref to the HEAD of the PR branch.
:param base: ref to the base branch for the Pull Request
:param body: Description of the PR
"""
return PullRequest.create(self, title, head, base, body)
def load_pr(self, number):
"""Load an existing Pull Request by number.
:param number: Pull Request number
"""
return PullRequest.from_number(self, number)
def path(self, suffix):
return urllib.parse.urljoin(self.url_base, suffix)
class PullRequest(object):
def __init__(self, repo, data):
"""Object representing a Pull Request"""
self.repo = repo
self._data = data
self.number = data["number"]
self.title = data["title"]
self.base = data["base"]["ref"]
self.base = data["head"]["ref"]
self._issue = None
@classmethod
def from_number(cls, repo, number):
data = repo.gh.get(repo.path("pulls/%i" % number))
return cls(repo, data)
@classmethod
def create(cls, repo, title, head, base, body):
data = repo.gh.post(repo.path("pulls"),
{"title": title,
"head": head,
"base": base,
"body": body})
return cls(repo, data)
def path(self, suffix):
return urllib.parse.urljoin(self.repo.path("pulls/%i/" % self.number), suffix)
@property
def issue(self):
"""Issue related to the Pull Request"""
if self._issue is None:
self._issue = Issue.from_number(self.repo, self.number)
return self._issue
def merge(self, commit_message=None):
"""Merge the Pull Request into its base branch.
:param commit_message: Message to use for the merge commit. If None a default
message is used instead
"""
if commit_message is None:
commit_message = "Merge pull request #%i from %s" % (self.number, self.base)
self.repo.gh.put(self.path("merge"),
{"commit_message": commit_message})
class Issue(object):
def __init__(self, repo, data):
"""Object representing a GitHub Issue"""
self.repo = repo
self._data = data
self.number = data["number"]
@classmethod
def from_number(cls, repo, number):
data = repo.gh.get(repo.path("issues/%i" % number))
return cls(repo, data)
def path(self, suffix):
return urllib.parse.urljoin(self.repo.path("issues/%i/" % self.number), suffix)
def add_label(self, label):
"""Add a label to the issue.
:param label: The name of the label
"""
self.repo.gh.post(self.path("labels"), [label])
def add_comment(self, message):
"""Add a comment to the issue
:param message: The text of the comment
"""
self.repo.gh.post(self.path("comments"),
{"body": message})

View file

@ -1,42 +0,0 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
import os
from wptrunner.update.base import Step, StepRunner
from wptrunner.update.update import LoadConfig, SyncFromUpstream, UpdateMetadata
from wptrunner.update.tree import NoVCSTree
from .tree import GitTree, HgTree, GeckoCommit
from .upstream import SyncToUpstream
class LoadTrees(Step):
"""Load gecko tree and sync tree containing web-platform-tests"""
provides = ["local_tree", "sync_tree"]
def create(self, state):
if os.path.exists(state.sync["path"]):
sync_tree = GitTree(root=state.sync["path"])
else:
sync_tree = None
if GitTree.is_type():
local_tree = GitTree(commit_cls=GeckoCommit)
elif HgTree.is_type():
local_tree = HgTree(commit_cls=GeckoCommit)
else:
local_tree = NoVCSTree()
state.update({"local_tree": local_tree,
"sync_tree": sync_tree})
class UpdateRunner(StepRunner):
"""Overall runner for updating web-platform-tests in Gecko."""
steps = [LoadConfig,
LoadTrees,
SyncToUpstream,
SyncFromUpstream,
UpdateMetadata]

View file

@ -1,44 +0,0 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
def create_parser():
from wptrunner import wptcommandline
parser = wptcommandline.create_parser_update()
parser.add_argument("--upstream", dest="upstream", action="store_true", default=None,
help="Push local changes to upstream repository even when not syncing")
parser.add_argument("--no-upstream", dest="upstream", action="store_false", default=None,
help="Dont't push local changes to upstream repository when syncing")
parser.add_argument("--token-file", action="store", type=wptcommandline.abs_path,
help="Path to file containing github token")
parser.add_argument("--token", action="store", help="GitHub token to use")
parser.add_argument("--layout-2020", "--with-layout-2020", default=False, action="store_true",
help="Use expected results for the 2020 layout engine")
return parser
def check_args(kwargs):
from wptrunner import wptcommandline
wptcommandline.set_from_config(kwargs)
if hasattr(wptcommandline, 'check_paths'):
wptcommandline.check_paths(kwargs)
kwargs["upstream"] = kwargs["upstream"] if kwargs["upstream"] is not None else kwargs["sync"]
if kwargs["upstream"]:
if kwargs["rev"]:
raise ValueError("Setting --rev with --upstream isn't supported")
if kwargs["token"] is None:
if kwargs["token_file"] is None:
raise ValueError("Must supply either a token file or a token")
with open(kwargs["token_file"]) as f:
token = f.read().strip()
kwargs["token"] = token
del kwargs["token_file"]
return kwargs
def parse_args():
parser = create_parser()
kwargs = vars(parser.parse_args())
return check_args(kwargs)

View file

@ -1,389 +0,0 @@
from __future__ import print_function
import os
import re
import subprocess
import sys
import urllib
from six.moves import input
from six import iteritems
from wptrunner.update.sync import UpdateCheckout
from wptrunner.update.tree import get_unique_name
from wptrunner.update.base import Step, StepRunner, exit_clean, exit_unclean
from .tree import Commit, GitTree, Patch
from .github import GitHub
def rewrite_patch(patch, strip_dir):
"""Take a Patch and rewrite the message to remove the bug number and reviewer, but add
a bugzilla link in the summary.
:param patch: the Patch to convert
"""
return Patch(patch.author, patch.email, rewrite_message(patch), None, patch.diff)
def rewrite_message(patch):
if patch.merge_message and patch.merge_message.bug:
bug = patch.merge_message.bug
else:
bug = patch.message.bug
if bug is not None:
return "\n".join([patch.message.summary,
patch.message.body,
"",
"Upstreamed from https://github.com/servo/servo/pull/%s [ci skip]" %
bug])
return "\n".join([patch.message.full_summary, "%s\n[ci skip]\n" % patch.message.body])
class SyncToUpstream(Step):
"""Sync local changes to upstream"""
def create(self, state):
if not state.kwargs["upstream"]:
return
if not isinstance(state.local_tree, GitTree):
self.logger.error("Cannot sync with upstream from a non-Git checkout.")
return exit_clean
try:
import requests
except ImportError:
self.logger.error("Upstream sync requires the requests module to be installed")
return exit_clean
if not state.sync_tree:
os.makedirs(state.sync["path"])
state.sync_tree = GitTree(root=state.sync["path"])
kwargs = state.kwargs
with state.push(["local_tree", "sync_tree", "tests_path", "metadata_path",
"sync"]):
state.token = kwargs["token"]
runner = SyncToUpstreamRunner(self.logger, state)
runner.run()
class GetLastSyncData(Step):
"""Find the gecko commit at which we last performed a sync with upstream and the upstream
commit that was synced."""
provides = ["sync_data_path", "last_sync_commit", "old_upstream_rev"]
def create(self, state):
self.logger.info("Looking for last sync commit")
state.sync_data_path = os.path.join(state.metadata_path, "mozilla-sync")
items = {}
with open(state.sync_data_path) as f:
for line in f.readlines():
key, value = [item.strip() for item in line.split(":", 1)]
items[key] = value
state.last_sync_commit = Commit(state.local_tree, items["local"])
state.old_upstream_rev = items["upstream"]
if not state.local_tree.contains_commit(state.last_sync_commit):
self.logger.error("Could not find last sync commit %s" % last_sync_sha1)
return exit_clean
self.logger.info("Last sync to web-platform-tests happened in %s" % state.last_sync_commit.sha1)
class CheckoutBranch(Step):
"""Create a branch in the sync tree pointing at the last upstream sync commit
and check it out"""
provides = ["branch"]
def create(self, state):
self.logger.info("Updating sync tree from %s" % state.sync["remote_url"])
state.branch = state.sync_tree.unique_branch_name(
"outbound_update_%s" % state.old_upstream_rev)
state.sync_tree.update(state.sync["remote_url"],
state.sync["branch"],
state.branch)
state.sync_tree.checkout(state.old_upstream_rev, state.branch, force=True)
class GetBaseCommit(Step):
"""Find the latest upstream commit on the branch that we are syncing with"""
provides = ["base_commit"]
def create(self, state):
state.base_commit = state.sync_tree.get_remote_sha1(state.sync["remote_url"],
state.sync["branch"])
self.logger.debug("New base commit is %s" % state.base_commit.sha1)
class LoadCommits(Step):
"""Get a list of commits in the gecko tree that need to be upstreamed"""
provides = ["source_commits"]
def create(self, state):
state.source_commits = state.local_tree.log(state.last_sync_commit,
state.tests_path)
update_regexp = re.compile("Update web-platform-tests to revision [0-9a-f]{40}")
for i, commit in enumerate(state.source_commits[:]):
if update_regexp.match(commit.message.text):
# This is a previous update commit so ignore it
state.source_commits.remove(commit)
continue
if commit.message.backouts:
#TODO: Add support for collapsing backouts
raise NotImplementedError("Need to get the Git->Hg commits for backouts and remove the backed out patch")
if not commit.message.bug and not (commit.merge and commit.merge.message.bug):
self.logger.error("Commit %i (%s) doesn't have an associated bug number." %
(i + 1, commit.sha1))
return exit_unclean
self.logger.debug("Source commits: %s" % state.source_commits)
class SelectCommits(Step):
"""Provide a UI to select which commits to upstream"""
def create(self, state):
if not state.source_commits:
return
while True:
commits = state.source_commits[:]
for i, commit in enumerate(commits):
print("%i:\t%s" % (i, commit.message.summary))
remove = input("Provide a space-separated list of any commits numbers to remove from the list to upstream:\n").strip()
remove_idx = set()
invalid = False
for item in remove.split(" "):
if not item:
continue
try:
item = int(item)
except:
invalid = True
break
if item < 0 or item >= len(commits):
invalid = True
break
remove_idx.add(item)
if invalid:
continue
keep_commits = [(i,cmt) for i,cmt in enumerate(commits) if i not in remove_idx]
#TODO: consider printed removed commits
print("Selected the following commits to keep:")
for i, commit in keep_commits:
print("%i:\t%s" % (i, commit.message.summary))
confirm = input("Keep the above commits? y/n\n").strip().lower()
if confirm == "y":
state.source_commits = [item[1] for item in keep_commits]
break
class MovePatches(Step):
"""Convert gecko commits into patches against upstream and commit these to the sync tree."""
provides = ["commits_loaded"]
def create(self, state):
state.commits_loaded = 0
strip_path = os.path.relpath(state.tests_path,
state.local_tree.root)
self.logger.debug("Stripping patch %s" % strip_path)
for commit in state.source_commits[state.commits_loaded:]:
i = state.commits_loaded + 1
self.logger.info("Moving commit %i: %s" % (i, commit.message.full_summary))
patch = commit.export_patch(state.tests_path)
stripped_patch = rewrite_patch(patch, strip_path)
strip_count = strip_path.count('/')
if strip_path[-1] != '/':
strip_count += 1
try:
state.sync_tree.import_patch(stripped_patch, 1 + strip_count)
except:
print(patch.diff)
raise
state.commits_loaded = i
class RebaseCommits(Step):
"""Rebase commits from the current branch on top of the upstream destination branch.
This step is particularly likely to fail if the rebase generates merge conflicts.
In that case the conflicts can be fixed up locally and the sync process restarted
with --continue.
"""
provides = ["rebased_commits"]
def create(self, state):
self.logger.info("Rebasing local commits")
continue_rebase = False
# Check if there's a rebase in progress
if (os.path.exists(os.path.join(state.sync_tree.root,
".git",
"rebase-merge")) or
os.path.exists(os.path.join(state.sync_tree.root,
".git",
"rebase-apply"))):
continue_rebase = True
try:
state.sync_tree.rebase(state.base_commit, continue_rebase=continue_rebase)
except subprocess.CalledProcessError:
self.logger.info("Rebase failed, fix merge and run %s again with --continue" % sys.argv[0])
raise
state.rebased_commits = state.sync_tree.log(state.base_commit)
self.logger.info("Rebase successful")
class CheckRebase(Step):
"""Check if there are any commits remaining after rebase"""
def create(self, state):
if not state.rebased_commits:
self.logger.info("Nothing to upstream, exiting")
return exit_clean
class MergeUpstream(Step):
"""Run steps to push local commits as seperate PRs and merge upstream."""
provides = ["merge_index", "gh_repo"]
def create(self, state):
gh = GitHub(state.token)
if "merge_index" not in state:
state.merge_index = 0
org, name = urllib.parse.urlsplit(state.sync["remote_url"]).path[1:].split("/")
if name.endswith(".git"):
name = name[:-4]
state.gh_repo = gh.repo(org, name)
for commit in state.rebased_commits[state.merge_index:]:
with state.push(["gh_repo", "sync_tree"]):
state.commit = commit
pr_merger = PRMergeRunner(self.logger, state)
rv = pr_merger.run()
if rv is not None:
return rv
state.merge_index += 1
class UpdateLastSyncData(Step):
"""Update the gecko commit at which we last performed a sync with upstream."""
provides = []
def create(self, state):
self.logger.info("Updating last sync commit")
data = {"local": state.local_tree.rev,
"upstream": state.sync_tree.rev}
with open(state.sync_data_path, "w") as f:
for key, value in iteritems(data):
f.write("%s: %s\n" % (key, value))
# This gets added to the patch later on
class MergeLocalBranch(Step):
"""Create a local branch pointing at the commit to upstream"""
provides = ["local_branch"]
def create(self, state):
branch_prefix = "sync_%s" % state.commit.sha1
local_branch = state.sync_tree.unique_branch_name(branch_prefix)
state.sync_tree.create_branch(local_branch, state.commit)
state.local_branch = local_branch
class MergeRemoteBranch(Step):
"""Get an unused remote branch name to use for the PR"""
provides = ["remote_branch"]
def create(self, state):
remote_branch = "sync_%s" % state.commit.sha1
branches = [ref[len("refs/heads/"):] for sha1, ref in
state.sync_tree.list_remote(state.gh_repo.url)
if ref.startswith("refs/heads")]
state.remote_branch = get_unique_name(branches, remote_branch)
class PushUpstream(Step):
"""Push local branch to remote"""
def create(self, state):
self.logger.info("Pushing commit upstream")
state.sync_tree.push(state.gh_repo.url,
state.local_branch,
state.remote_branch)
class CreatePR(Step):
"""Create a PR for the remote branch"""
provides = ["pr"]
def create(self, state):
self.logger.info("Creating a PR")
commit = state.commit
state.pr = state.gh_repo.create_pr(commit.message.full_summary,
state.remote_branch,
"master",
commit.message.body if commit.message.body else "")
class PRAddComment(Step):
"""Add an issue comment indicating that the code has been reviewed already"""
def create(self, state):
state.pr.issue.add_comment("Code reviewed upstream.")
state.pr.issue.add_label("servo-export")
class MergePR(Step):
"""Merge the PR"""
def create(self, state):
self.logger.info("Merging PR")
state.pr.merge()
class PRDeleteBranch(Step):
"""Delete the remote branch"""
def create(self, state):
self.logger.info("Deleting remote branch")
state.sync_tree.push(state.gh_repo.url, "", state.remote_branch)
class SyncToUpstreamRunner(StepRunner):
"""Runner for syncing local changes to upstream"""
steps = [GetLastSyncData,
UpdateCheckout,
CheckoutBranch,
GetBaseCommit,
LoadCommits,
SelectCommits,
MovePatches,
RebaseCommits,
CheckRebase,
MergeUpstream,
UpdateLastSyncData]
class PRMergeRunner(StepRunner):
"""(Sub)Runner for creating and merging a PR"""
steps = [
MergeLocalBranch,
MergeRemoteBranch,
PushUpstream,
CreatePR,
PRAddComment,
MergePR,
PRDeleteBranch,
]