Update web-platform-tests to revision 78f764c05c229883e87ad135c7153051a66e2851

WPT Sync Bot 2019-03-06 20:32:15 -05:00
parent 55347aa39f
commit bf84a079f9
1983 changed files with 58006 additions and 31437 deletions

View file

@ -22,9 +22,9 @@ then
echo Unrecognized release channel: $CHANNEL >&2
exit 1
fi
wget https://dl.google.com/linux/direct/$deb_archive
wget -O /tmp/$deb_archive https://dl.google.com/linux/direct/$deb_archive
sudo apt-get -qqy update && sudo gdebi -n $deb_archive
sudo apt-get -qqy update && sudo gdebi -n /tmp/$deb_archive
fi
sudo Xvfb $DISPLAY -screen 0 ${SCREEN_WIDTH}x${SCREEN_HEIGHT}x${SCREEN_DEPTH} &

View file

@ -1,24 +1,8 @@
from copy import copy
from six.moves.urllib.parse import urljoin, urlparse
from abc import ABCMeta, abstractproperty
class SourceFileCache(object):
def __init__(self):
self.source_files = {}
def make_new(self, tests_root, path, url_base):
from .sourcefile import SourceFile
return SourceFile(tests_root, path, url_base)
def get(self, tests_root, manifest, path):
if path not in self.source_files:
self.source_files[path] = self.make_new(tests_root, path, manifest.url_base)
return self.source_files[path]
item_types = {}
@ -38,39 +22,26 @@ class ManifestItemMeta(ABCMeta):
class ManifestItem(object):
__metaclass__ = ManifestItemMeta
__slots__ = ("_tests_root", "path")
item_type = None
source_file_cache = SourceFileCache()
def __init__(self, source_file, manifest=None):
self.source_file = source_file
def __init__(self, tests_root=None, path=None):
self._tests_root = tests_root
self.path = path
@abstractproperty
def id(self):
"""The test's id (usually its url)"""
pass
@property
def meta_flags(self):
return set(self.source_file.meta_flags)
@property
def path(self):
"""The test path relative to the test_root"""
return self.source_file.rel_path
@property
def https(self):
flags = self.meta_flags
return ("https" in flags or "serviceworker" in flags)
def key(self):
"""A unique identifier for the test"""
return (self.item_type, self.id)
def meta_key(self):
"""Extra metadata that doesn't form part of the test identity, but for
which changes mean regenerating the manifest (e.g. the test timeout."""
which changes mean regenerating the manifest (e.g. the test timeout)."""
return ()
def __eq__(self, other):
@ -88,55 +59,80 @@ class ManifestItem(object):
return [{}]
@classmethod
def from_json(cls, manifest, tests_root, path, obj):
source_file = cls.source_file_cache.get(tests_root, manifest, path)
return cls(source_file,
manifest=manifest)
def from_json(cls, manifest, path, obj):
return cls(manifest.tests_root, path)
class URLManifestItem(ManifestItem):
def __init__(self, source_file, url, url_base="/", manifest=None):
ManifestItem.__init__(self, source_file, manifest=manifest)
self._url = url
__slots__ = ("url_base", "_url", "_extras")
def __init__(self, tests_root, path, url_base, url, **extras):
super(URLManifestItem, self).__init__(tests_root, path)
self.url_base = url_base
self._url = url
self._extras = extras or {}
@property
def _source_file(self):
"""create a SourceFile for the item"""
from .sourcefile import SourceFile
return SourceFile(self._tests_root, self.path, self.url_base)
@property
def id(self):
return self.url
@property
def meta_flags(self):
return set(urlparse(self.url).path.rsplit("/", 1)[1].split(".")[1:-1])
@property
def url(self):
return urljoin(self.url_base, self._url)
@property
def https(self):
flags = set(urlparse(self.url).path.rsplit("/", 1)[1].split(".")[1:-1])
return ("https" in flags or "serviceworker" in flags)
def to_json(self):
rv = [self._url, {}]
return rv
@classmethod
def from_json(cls, manifest, tests_root, path, obj):
source_file = cls.source_file_cache.get(tests_root, manifest, path)
def from_json(cls, manifest, path, obj):
url, extras = obj
return cls(source_file,
return cls(manifest.tests_root,
path,
manifest.url_base,
url,
url_base=manifest.url_base,
manifest=manifest)
**extras)
class TestharnessTest(URLManifestItem):
item_type = "testharness"
def __init__(self, source_file, url, url_base="/", timeout=None, testdriver=False, jsshell=False, manifest=None):
URLManifestItem.__init__(self, source_file, url, url_base=url_base, manifest=manifest)
self.timeout = timeout
self.testdriver = testdriver
self.jsshell = jsshell
@property
def timeout(self):
return self._extras.get("timeout")
@property
def testdriver(self):
return self._extras.get("testdriver")
@property
def jsshell(self):
return self._extras.get("jsshell")
@property
def script_metadata(self):
if "script_metadata" in self._extras:
return self._extras["script_metadata"]
else:
# this branch should go when the manifest version is bumped
return self._source_file.script_metadata
def meta_key(self):
return (self.timeout, self.testdriver)
script_metadata = self.script_metadata
if script_metadata is not None:
script_metadata = tuple(tuple(x) for x in script_metadata)
return (self.timeout, self.testdriver, self.jsshell, script_metadata)
def to_json(self):
rv = URLManifestItem.to_json(self)
@ -146,35 +142,32 @@ class TestharnessTest(URLManifestItem):
rv[-1]["testdriver"] = self.testdriver
if self.jsshell:
rv[-1]["jsshell"] = True
if self.script_metadata is not None:
# we store this even if it is [] to avoid having to read the source file
rv[-1]["script_metadata"] = self.script_metadata
return rv
@classmethod
def from_json(cls, manifest, tests_root, path, obj):
source_file = cls.source_file_cache.get(tests_root, manifest, path)
url, extras = obj
return cls(source_file,
url,
url_base=manifest.url_base,
timeout=extras.get("timeout"),
testdriver=bool(extras.get("testdriver")),
jsshell=bool(extras.get("jsshell")),
manifest=manifest)
class RefTestBase(URLManifestItem):
__slots__ = ("references",)
item_type = "reftest_base"
class RefTestNode(URLManifestItem):
item_type = "reftest_node"
def __init__(self, tests_root, path, url_base, url, references=None, **extras):
super(RefTestBase, self).__init__(tests_root, path, url_base, url, **extras)
self.references = references or []
def __init__(self, source_file, url, references, url_base="/", timeout=None,
viewport_size=None, dpi=None, manifest=None):
URLManifestItem.__init__(self, source_file, url, url_base=url_base, manifest=manifest)
for _, ref_type in references:
if ref_type not in ["==", "!="]:
raise ValueError("Unrecognised ref_type %s" % ref_type)
self.references = tuple(references)
self.timeout = timeout
self.viewport_size = viewport_size
self.dpi = dpi
@property
def timeout(self):
return self._extras.get("timeout")
@property
def viewport_size(self):
return self._extras.get("viewport_size")
@property
def dpi(self):
return self._extras.get("dpi")
def meta_key(self):
return (self.timeout, self.viewport_size, self.dpi)
@ -191,34 +184,35 @@ class RefTestNode(URLManifestItem):
return rv
@classmethod
def from_json(cls, manifest, tests_root, path, obj):
source_file = cls.source_file_cache.get(tests_root, manifest, path)
def from_json(cls, manifest, path, obj):
url, references, extras = obj
return cls(source_file,
return cls(manifest.tests_root,
path,
manifest.url_base,
url,
references,
url_base=manifest.url_base,
timeout=extras.get("timeout"),
viewport_size=extras.get("viewport_size"),
dpi=extras.get("dpi"),
manifest=manifest)
**extras)
def to_RefTest(self):
if type(self) == RefTest:
return self
rv = RefTest.__new__(RefTest)
rv.__dict__.update(self.__dict__)
rv = copy(self)
rv.__class__ = RefTest
return rv
def to_RefTestNode(self):
if type(self) == RefTestNode:
return self
rv = RefTestNode.__new__(RefTestNode)
rv.__dict__.update(self.__dict__)
rv = copy(self)
rv.__class__ = RefTestNode
return rv
class RefTest(RefTestNode):
class RefTestNode(RefTestBase):
item_type = "reftest_node"
class RefTest(RefTestBase):
item_type = "reftest"
@ -241,9 +235,9 @@ class Stub(URLManifestItem):
class WebDriverSpecTest(URLManifestItem):
item_type = "wdspec"
def __init__(self, source_file, url, url_base="/", timeout=None, manifest=None):
URLManifestItem.__init__(self, source_file, url, url_base=url_base, manifest=manifest)
self.timeout = timeout
@property
def timeout(self):
return self._extras.get("timeout")
def to_json(self):
rv = URLManifestItem.to_json(self)
@ -251,21 +245,10 @@ class WebDriverSpecTest(URLManifestItem):
rv[-1]["timeout"] = self.timeout
return rv
@classmethod
def from_json(cls, manifest, tests_root, path, obj):
source_file = cls.source_file_cache.get(tests_root, manifest, path)
url, extras = obj
return cls(source_file,
url,
url_base=manifest.url_base,
timeout=extras.get("timeout"),
manifest=manifest)
class SupportFile(ManifestItem):
item_type = "support"
@property
def id(self):
return self.source_file.rel_path
return self.path
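
A minimal usage sketch of the constructor signature this file moves to (not part of the commit; the "/foobar" tests_root and the file/URL values are arbitrary examples borrowed from the item tests later in this commit, and the tools.manifest import path is assumed):

from tools.manifest.item import URLManifestItem, TestharnessTest

# items are now built from (tests_root, path, url_base, url, **extras)
item = URLManifestItem("/foobar", "a.https.c", "/", "/foo.bar/a.https.c")
assert item.url == "/foo.bar/a.https.c"   # urljoin(url_base, _url)
assert item.https is True                 # derived from the ".https." filename flag, no SourceFile needed

# extras such as timeout/jsshell/script_metadata live in _extras and are
# exposed as read-only properties
test = TestharnessTest("/foobar", "t.any.js", "/", "t.any.html",
                       timeout="long", jsshell=False, script_metadata=[])
assert test.timeout == "long"
assert test.meta_key() == ("long", None, False, ())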

View file

@ -4,7 +4,7 @@ from collections import defaultdict
from six import iteritems, iterkeys, itervalues, string_types
from . import vcs
from .item import (ManualTest, WebDriverSpecTest, Stub, RefTestNode, RefTest,
from .item import (ManualTest, WebDriverSpecTest, Stub, RefTestNode, RefTest, RefTestBase,
TestharnessTest, SupportFile, ConformanceCheckerTest, VisualTest)
from .log import get_logger
from .utils import from_os_path, to_os_path
@ -37,6 +37,7 @@ def iterfilter(filters, iter):
item_classes = {"testharness": TestharnessTest,
"reftest": RefTest,
"reftest_node": RefTestNode,
"reftest_base": RefTestBase,
"manual": ManualTest,
"stub": Stub,
"wdspec": WebDriverSpecTest,
@ -132,10 +133,7 @@ class TypeData(object):
data = set()
path = from_os_path(key)
for test in iterfilter(self.meta_filters, self.json_data.get(path, [])):
manifest_item = self.type_cls.from_json(self.manifest,
self.tests_root,
path,
test)
manifest_item = self.type_cls.from_json(self.manifest, path, test)
data.add(manifest_item)
try:
del self.json_data[path]
@ -154,10 +152,7 @@ class TypeData(object):
continue
data = set()
for test in iterfilter(self.meta_filters, self.json_data.get(path, [])):
manifest_item = self.type_cls.from_json(self.manifest,
self.tests_root,
path,
test)
manifest_item = self.type_cls.from_json(self.manifest, path, test)
data.add(manifest_item)
self.data[key] = data
self.json_data = None
@ -202,11 +197,12 @@ class ManifestData(dict):
class Manifest(object):
def __init__(self, url_base="/", meta_filters=None):
def __init__(self, tests_root=None, url_base="/", meta_filters=None):
assert url_base is not None
self._path_hash = {}
self._data = ManifestData(self, meta_filters)
self._reftest_nodes_by_url = None
self.tests_root = tests_root
self.url_base = url_base
def __iter__(self):
@ -285,22 +281,21 @@ class Manifest(object):
new_type, manifest_items = source_file.manifest_items()
hash_changed = True
if new_type != old_type:
try:
del self._data[old_type][rel_path]
except KeyError:
pass
del self._data[old_type][rel_path]
if old_type in reftest_types:
reftest_changes = True
else:
new_type, manifest_items = old_type, self._data[old_type][rel_path]
if old_type in reftest_types and new_type != old_type:
reftest_changes = True
new_type = old_type
if old_type in reftest_types:
manifest_items = self._data[old_type][rel_path]
else:
new_type, manifest_items = source_file.manifest_items()
if new_type in ("reftest", "reftest_node"):
reftest_nodes.extend(manifest_items)
if new_type in reftest_types:
reftest_nodes.extend((item, file_hash) for item in manifest_items)
if is_new or hash_changed:
reftest_changes = True
elif new_type:
elif is_new or hash_changed:
self._data[new_type][rel_path] = set(manifest_items)
self._path_hash[rel_path] = (file_hash, new_type)
@ -337,7 +332,7 @@ class Manifest(object):
def _compute_reftests(self, reftest_nodes):
self._reftest_nodes_by_url = {}
has_inbound = set()
for item in reftest_nodes:
for item, _ in reftest_nodes:
for ref_url, ref_type in item.references:
has_inbound.add(ref_url)
@ -345,20 +340,20 @@ class Manifest(object):
references = defaultdict(set)
changed_hashes = {}
for item in reftest_nodes:
for item, file_hash in reftest_nodes:
if item.url in has_inbound:
# This is a reference
if isinstance(item, RefTest):
item = item.to_RefTestNode()
changed_hashes[item.source_file.rel_path] = (item.source_file.hash,
item.item_type)
references[item.source_file.rel_path].add(item)
changed_hashes[item.path] = (file_hash,
item.item_type)
references[item.path].add(item)
else:
if isinstance(item, RefTestNode):
item = item.to_RefTest()
changed_hashes[item.source_file.rel_path] = (item.source_file.hash,
item.item_type)
reftests[item.source_file.rel_path].add(item)
changed_hashes[item.path] = (file_hash,
item.item_type)
reftests[item.path].add(item)
self._reftest_nodes_by_url[item.url] = item
return reftests, references, changed_hashes
@ -384,7 +379,7 @@ class Manifest(object):
if version != CURRENT_VERSION:
raise ManifestVersionMismatch
self = cls(url_base=obj.get("url_base", "/"), meta_filters=meta_filters)
self = cls(tests_root, url_base=obj.get("url_base", "/"), meta_filters=meta_filters)
if not hasattr(obj, "items") and hasattr(obj, "paths"):
raise ManifestError
@ -473,7 +468,7 @@ def load_and_update(tests_root,
logger.info("Manifest url base did not match, rebuilding")
if manifest is None:
manifest = Manifest(url_base, meta_filters=meta_filters)
manifest = Manifest(tests_root, url_base, meta_filters=meta_filters)
update = True
if update:

View file

@ -13,7 +13,7 @@ import html5lib
from . import XMLParser
from .item import Stub, ManualTest, WebDriverSpecTest, RefTestNode, TestharnessTest, SupportFile, ConformanceCheckerTest, VisualTest
from .utils import rel_path_to_url, ContextManagerBytesIO, cached_property
from .utils import ContextManagerBytesIO, cached_property
wd_pattern = "*.py"
js_meta_re = re.compile(b"//\s*META:\s*(\w*)=(.*)$")
@ -232,9 +232,14 @@ class SourceFile(object):
def path(self):
return os.path.join(self.tests_root, self.rel_path)
@cached_property
def rel_url(self):
assert not os.path.isabs(self.rel_path), self.rel_path
return self.rel_path.replace(os.sep, "/")
@cached_property
def url(self):
return rel_path_to_url(self.rel_path, self.url_base)
return urljoin(self.url_base, self.rel_url)
@cached_property
def hash(self):
@ -599,22 +604,54 @@ class SourceFile(object):
return self.items_cache
if self.name_is_non_test:
rv = "support", [SupportFile(self)]
rv = "support", [
SupportFile(
self.tests_root,
self.rel_path
)]
elif self.name_is_stub:
rv = Stub.item_type, [Stub(self, self.url)]
rv = Stub.item_type, [
Stub(
self.tests_root,
self.rel_path,
self.url_base,
self.rel_url
)]
elif self.name_is_manual:
rv = ManualTest.item_type, [ManualTest(self, self.url)]
rv = ManualTest.item_type, [
ManualTest(
self.tests_root,
self.rel_path,
self.url_base,
self.rel_url
)]
elif self.name_is_conformance:
rv = ConformanceCheckerTest.item_type, [ConformanceCheckerTest(self, self.url)]
rv = ConformanceCheckerTest.item_type, [
ConformanceCheckerTest(
self.tests_root,
self.rel_path,
self.url_base,
self.rel_url
)]
elif self.name_is_conformance_support:
rv = "support", [SupportFile(self)]
rv = "support", [
SupportFile(
self.tests_root,
self.rel_path
)]
elif self.name_is_visual:
rv = VisualTest.item_type, [VisualTest(self, self.url)]
rv = VisualTest.item_type, [
VisualTest(
self.tests_root,
self.rel_path,
self.url_base,
self.rel_url
)]
elif self.name_is_multi_global:
globals = b""
@ -624,53 +661,112 @@ class SourceFile(object):
break
tests = [
TestharnessTest(self, global_variant_url(self.url, suffix) + variant, timeout=self.timeout,
jsshell=jsshell)
TestharnessTest(
self.tests_root,
self.rel_path,
self.url_base,
global_variant_url(self.rel_url, suffix) + variant,
timeout=self.timeout,
jsshell=jsshell,
script_metadata=self.script_metadata
)
for (suffix, jsshell) in sorted(global_suffixes(globals))
for variant in self.test_variants
]
rv = TestharnessTest.item_type, tests
elif self.name_is_worker:
test_url = replace_end(self.url, ".worker.js", ".worker.html")
test_url = replace_end(self.rel_url, ".worker.js", ".worker.html")
tests = [
TestharnessTest(self, test_url + variant, timeout=self.timeout)
TestharnessTest(
self.tests_root,
self.rel_path,
self.url_base,
test_url + variant,
timeout=self.timeout,
script_metadata=self.script_metadata
)
for variant in self.test_variants
]
rv = TestharnessTest.item_type, tests
elif self.name_is_window:
test_url = replace_end(self.url, ".window.js", ".window.html")
test_url = replace_end(self.rel_url, ".window.js", ".window.html")
tests = [
TestharnessTest(self, test_url + variant, timeout=self.timeout)
TestharnessTest(
self.tests_root,
self.rel_path,
self.url_base,
test_url + variant,
timeout=self.timeout,
script_metadata=self.script_metadata
)
for variant in self.test_variants
]
rv = TestharnessTest.item_type, tests
elif self.name_is_webdriver:
rv = WebDriverSpecTest.item_type, [WebDriverSpecTest(self, self.url,
timeout=self.timeout)]
rv = WebDriverSpecTest.item_type, [
WebDriverSpecTest(
self.tests_root,
self.rel_path,
self.url_base,
self.rel_url,
timeout=self.timeout
)]
elif self.content_is_css_manual and not self.name_is_reference:
rv = ManualTest.item_type, [ManualTest(self, self.url)]
rv = ManualTest.item_type, [
ManualTest(
self.tests_root,
self.rel_path,
self.url_base,
self.rel_url
)]
elif self.content_is_testharness:
rv = TestharnessTest.item_type, []
testdriver = self.has_testdriver
for variant in self.test_variants:
url = self.url + variant
rv[1].append(TestharnessTest(self, url, timeout=self.timeout, testdriver=testdriver))
url = self.rel_url + variant
rv[1].append(TestharnessTest(
self.tests_root,
self.rel_path,
self.url_base,
url,
timeout=self.timeout,
testdriver=testdriver,
script_metadata=self.script_metadata
))
elif self.content_is_ref_node:
rv = (RefTestNode.item_type,
[RefTestNode(self, self.url, self.references, timeout=self.timeout,
viewport_size=self.viewport_size, dpi=self.dpi)])
rv = RefTestNode.item_type, [
RefTestNode(
self.tests_root,
self.rel_path,
self.url_base,
self.rel_url,
references=self.references,
timeout=self.timeout,
viewport_size=self.viewport_size,
dpi=self.dpi
)]
elif self.content_is_css_visual and not self.name_is_reference:
rv = VisualTest.item_type, [VisualTest(self, self.url)]
rv = VisualTest.item_type, [
VisualTest(
self.tests_root,
self.rel_path,
self.url_base,
self.rel_url
)]
else:
rv = "support", [SupportFile(self)]
rv = "support", [
SupportFile(
self.tests_root,
self.rel_path
)]
self.items_cache = rv

View file

@ -1,23 +1,66 @@
from ..item import SupportFile, URLManifestItem
from ..sourcefile import SourceFile
import pytest
from ..item import URLManifestItem, TestharnessTest
def test_base_meta_flags():
s = SourceFile("/", "a.b.c.d", "/", contents="")
m = SupportFile(s)
@pytest.mark.parametrize("path", [
"a.https.c",
"a.b.https.c",
"a.https.b.c",
"a.b.https.c.d",
"a.serviceworker.c",
"a.b.serviceworker.c",
"a.serviceworker.b.c",
"a.b.serviceworker.c.d",
])
def test_url_https(path):
m = URLManifestItem("/foobar", "/" + path, "/", "/foo.bar/" + path)
assert m.meta_flags == {"b", "c"}
assert m.https is True
def test_url_meta_flags():
s = SourceFile("/", "a.b.c", "/", contents="")
m = URLManifestItem(s, "/foo.bar/a.b.d.e")
@pytest.mark.parametrize("path", [
"https",
"a.https",
"a.b.https",
"https.a",
"https.a.b",
"a.bhttps.c",
"a.httpsb.c",
"serviceworker",
"a.serviceworker",
"a.b.serviceworker",
"serviceworker.a",
"serviceworker.a.b",
"a.bserviceworker.c",
"a.serviceworkerb.c",
])
def test_url_not_https(path):
m = URLManifestItem("/foobar", "/" + path, "/", "/foo.bar/" + path)
assert m.meta_flags == {"b", "d"}
assert m.https is False
def test_url_empty_meta_flags():
s = SourceFile("/", "a.b.c", "/", contents="")
m = URLManifestItem(s, "/foo.bar/abcde")
def test_testharness_meta_key_includes_jsshell():
a = TestharnessTest("/foobar", "/foo", "/foo.bar", "/foo.bar/foo",
jsshell=False, script_metadata=[])
b = TestharnessTest("/foobar", "/foo", "/foo.bar", "/foo.bar/foo",
jsshell=True, script_metadata=[])
assert m.meta_flags == set()
assert a.meta_key() != b.meta_key()
@pytest.mark.parametrize("script_metadata", [
None,
[],
[('script', '/resources/WebIDLParser.js'), ('script', '/resources/idlharness.js')],
[[u'script', u'/resources/WebIDLParser.js'], [u'script', u'/resources/idlharness.js']],
])
def test_testharness_hashable_script_metadata(script_metadata):
a = TestharnessTest("/",
"BackgroundSync/interfaces.https.any.js",
"/",
"/BackgroundSync/interfaces.https.any.js",
script_metadata=script_metadata)
assert hash(a) is not None
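
The hashability test above works because the new meta_key() converts script_metadata into nested tuples before using it; the underlying point, in plain Python:

script_metadata = [[u'script', u'/resources/WebIDLParser.js'],
                   [u'script', u'/resources/idlharness.js']]
# lists are unhashable, so meta_key() tuple-izes them first
hashable = tuple(tuple(x) for x in script_metadata)
hash(hashable)            # fine
# hash(script_metadata)   # would raise TypeError: unhashable type: 'list'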

View file

@ -10,15 +10,18 @@ import pytest
from .. import manifest, item, utils
def SourceFileWithTest(path, hash, cls, *args):
def SourceFileWithTest(path, hash, cls, **kwargs):
s = mock.Mock(rel_path=path, hash=hash)
test = cls(s, utils.rel_path_to_url(path), *args)
if cls == item.SupportFile:
test = cls("/foobar", path, **kwargs)
else:
test = cls("/foobar", path, "/", utils.rel_path_to_url(path), **kwargs)
s.manifest_items = mock.Mock(return_value=(cls.item_type, [test]))
return s
def SourceFileWithTests(path, hash, cls, variants):
s = mock.Mock(rel_path=path, hash=hash)
tests = [cls(s, item[0], *item[1:]) for item in variants]
tests = [cls("/foobar", path, "/", item[0], *item[1:]) for item in variants]
s.manifest_items = mock.Mock(return_value=(cls.item_type, tests))
return s
@ -42,7 +45,7 @@ def rel_dir_file_path(draw):
@hs.composite
def sourcefile_strategy(draw):
item_classes = [item.TestharnessTest, item.RefTest, item.RefTestNode,
item_classes = [item.TestharnessTest, item.RefTestNode,
item.ManualTest, item.Stub, item.WebDriverSpecTest,
item.ConformanceCheckerTest, item.SupportFile]
cls = draw(hs.sampled_from(item_classes))
@ -51,15 +54,15 @@ def sourcefile_strategy(draw):
hash = draw(hs.text(alphabet="0123456789abcdef", min_size=40, max_size=40))
s = mock.Mock(rel_path=path, hash=hash)
if cls in (item.RefTest, item.RefTestNode):
if cls is item.RefTestNode:
ref_path = draw(rel_dir_file_path())
h.assume(path != ref_path)
ref_eq = draw(hs.sampled_from(["==", "!="]))
test = cls(s, utils.rel_path_to_url(path), [(utils.rel_path_to_url(ref_path), ref_eq)])
test = cls("/foobar", path, "/", utils.rel_path_to_url(path), references=[(utils.rel_path_to_url(ref_path), ref_eq)])
elif cls is item.SupportFile:
test = cls(s)
test = cls("/foobar", path)
else:
test = cls(s, utils.rel_path_to_url(path))
test = cls("/foobar", path, "/", utils.rel_path_to_url(path))
s.manifest_items = mock.Mock(return_value=(cls.item_type, [test]))
return s
@ -84,7 +87,7 @@ def test_manifest_to_json(s):
@h.given(hs.lists(sourcefile_strategy(),
min_size=1, unique_by=lambda x: x.rel_path))
@h.example([SourceFileWithTest("a", "0"*40, item.TestharnessTest)])
@h.example([SourceFileWithTest("a", "0"*40, item.RefTest, [("/aa", "==")])])
@h.example([SourceFileWithTest("a", "0"*40, item.RefTestNode, references=[("/aa", "==")])])
def test_manifest_idempotent(s):
m = manifest.Manifest()
@ -167,72 +170,68 @@ def test_manifest_from_json_backslash():
def test_reftest_computation_chain():
m = manifest.Manifest()
s1 = SourceFileWithTest("test1", "0"*40, item.RefTest, [("/test2", "==")])
s2 = SourceFileWithTest("test2", "0"*40, item.RefTest, [("/test3", "==")])
s1 = SourceFileWithTest("test1", "0"*40, item.RefTestNode, references=[("/test2", "==")])
s2 = SourceFileWithTest("test2", "0"*40, item.RefTestNode, references=[("/test3", "==")])
m.update([(s1, True), (s2, True)])
test1 = s1.manifest_items()[1][0]
test2 = s2.manifest_items()[1][0]
test2_node = test2.to_RefTestNode()
assert list(m) == [("reftest", test1.path, {test1}),
("reftest_node", test2.path, {test2_node})]
assert list(m) == [("reftest", test1.path, {test1.to_RefTest()}),
("reftest_node", test2.path, {test2})]
def test_reftest_computation_chain_update_add():
m = manifest.Manifest()
s2 = SourceFileWithTest("test2", "0"*40, item.RefTest, [("/test3", "==")])
s2 = SourceFileWithTest("test2", "0"*40, item.RefTestNode, references=[("/test3", "==")])
test2 = s2.manifest_items()[1][0]
assert m.update([(s2, True)]) is True
assert list(m) == [("reftest", test2.path, {test2})]
assert list(m) == [("reftest", test2.path, {test2.to_RefTest()})]
s1 = SourceFileWithTest("test1", "0"*40, item.RefTest, [("/test2", "==")])
s1 = SourceFileWithTest("test1", "0"*40, item.RefTestNode, references=[("/test2", "==")])
test1 = s1.manifest_items()[1][0]
# s2's hash is unchanged, but it has gone from a test to a node
assert m.update([(s1, True), (s2, True)]) is True
test2_node = test2.to_RefTestNode()
assert list(m) == [("reftest", test1.path, {test1}),
("reftest_node", test2.path, {test2_node})]
assert list(m) == [("reftest", test1.path, {test1.to_RefTest()}),
("reftest_node", test2.path, {test2})]
def test_reftest_computation_chain_update_remove():
m = manifest.Manifest()
s1 = SourceFileWithTest("test1", "0"*40, item.RefTest, [("/test2", "==")])
s2 = SourceFileWithTest("test2", "0"*40, item.RefTest, [("/test3", "==")])
s1 = SourceFileWithTest("test1", "0"*40, item.RefTestNode, references=[("/test2", "==")])
s2 = SourceFileWithTest("test2", "0"*40, item.RefTestNode, references=[("/test3", "==")])
assert m.update([(s1, True), (s2, True)]) is True
test1 = s1.manifest_items()[1][0]
test2 = s2.manifest_items()[1][0]
test2_node = test2.to_RefTestNode()
assert list(m) == [("reftest", test1.path, {test1}),
("reftest_node", test2.path, {test2_node})]
assert list(m) == [("reftest", test1.path, {test1.to_RefTest()}),
("reftest_node", test2.path, {test2})]
# s2's hash is unchanged, but it has gone from a node to a test
assert m.update([(s2, True)]) is True
assert list(m) == [("reftest", test2.path, {test2})]
assert list(m) == [("reftest", test2.path, {test2.to_RefTest()})]
def test_reftest_computation_chain_update_test_type():
m = manifest.Manifest()
s1 = SourceFileWithTest("test", "0"*40, item.RefTest, [("/test-ref", "==")])
s1 = SourceFileWithTest("test", "0"*40, item.RefTestNode, references=[("/test-ref", "==")])
assert m.update([(s1, True)]) is True
test1 = s1.manifest_items()[1][0]
assert list(m) == [("reftest", test1.path, {test1})]
assert list(m) == [("reftest", test1.path, {test1.to_RefTest()})]
# test becomes a testharness test (hash change because that is determined
# based on the file contents). The updated manifest should not include the
@ -248,15 +247,15 @@ def test_reftest_computation_chain_update_test_type():
def test_reftest_computation_chain_update_node_change():
m = manifest.Manifest()
s1 = SourceFileWithTest("test1", "0"*40, item.RefTest, [("/test2", "==")])
s2 = SourceFileWithTest("test2", "0"*40, item.RefTestNode, [("/test3", "==")])
s1 = SourceFileWithTest("test1", "0"*40, item.RefTestNode, references=[("/test2", "==")])
s2 = SourceFileWithTest("test2", "0"*40, item.RefTestNode, references=[("/test3", "==")])
assert m.update([(s1, True), (s2, True)]) is True
test1 = s1.manifest_items()[1][0]
test2 = s2.manifest_items()[1][0]
assert list(m) == [("reftest", test1.path, {test1}),
assert list(m) == [("reftest", test1.path, {test1.to_RefTest()}),
("reftest_node", test2.path, {test2})]
#test2 changes to support type
@ -265,24 +264,20 @@ def test_reftest_computation_chain_update_node_change():
assert m.update([(s1, True), (s2, True)]) is True
test3 = s2.manifest_items()[1][0]
assert list(m) == [("reftest", test1.path, {test1}),
assert list(m) == [("reftest", test1.path, {test1.to_RefTest()}),
("support", test3.path, {test3})]
def test_iterpath():
m = manifest.Manifest()
# This has multiple test types from the same file, which isn't really supported,
# so pretend they have different hashes
sources = [SourceFileWithTest("test1", "0"*40, item.RefTest, [("/test1-ref", "==")]),
SourceFileWithTest("test2", "0"*40, item.RefTest, [("/test2-ref", "==")]),
sources = [SourceFileWithTest("test1", "0"*40, item.RefTestNode, references=[("/test1-ref", "==")]),
SourceFileWithTests("test2", "1"*40, item.TestharnessTest, [("/test2-1.html",),
("/test2-2.html",)]),
SourceFileWithTest("test3", "0"*40, item.TestharnessTest)]
m.update([(s, True) for s in sources])
assert set(item.url for item in m.iterpath("test2")) == set(["/test2",
"/test2-1.html",
assert set(item.url for item in m.iterpath("test2")) == set(["/test2-1.html",
"/test2-2.html"])
assert set(m.iterpath("missing")) == set()
@ -290,10 +285,7 @@ def test_iterpath():
def test_filter():
m = manifest.Manifest()
# This has multiple test types from the same file, which isn't really supported,
# so pretend they have different hashes
sources = [SourceFileWithTest("test1", "0"*40, item.RefTest, [("/test1-ref", "==")]),
SourceFileWithTest("test2", "1"*40, item.RefTest, [("/test2-ref", "==")]),
sources = [SourceFileWithTest("test1", "0"*40, item.RefTestNode, references=[("/test1-ref", "==")]),
SourceFileWithTests("test2", "0"*40, item.TestharnessTest, [("/test2-1.html",),
("/test2-2.html",)]),
SourceFileWithTest("test3", "0"*40, item.TestharnessTest)]
@ -321,20 +313,19 @@ def test_filter():
def test_reftest_node_by_url():
m = manifest.Manifest()
s1 = SourceFileWithTest("test1", "0"*40, item.RefTest, [("/test2", "==")])
s2 = SourceFileWithTest("test2", "0"*40, item.RefTest, [("/test3", "==")])
s1 = SourceFileWithTest("test1", "0"*40, item.RefTestNode, references=[("/test2", "==")])
s2 = SourceFileWithTest("test2", "0"*40, item.RefTestNode, references=[("/test3", "==")])
m.update([(s1, True), (s2, True)])
test1 = s1.manifest_items()[1][0]
test2 = s2.manifest_items()[1][0]
test2_node = test2.to_RefTestNode()
assert m.reftest_nodes_by_url == {"/test1": test1,
"/test2": test2_node}
assert m.reftest_nodes_by_url == {"/test1": test1.to_RefTest(),
"/test2": test2}
m._reftest_nodes_by_url = None
assert m.reftest_nodes_by_url == {"/test1": test1,
"/test2": test2_node}
assert m.reftest_nodes_by_url == {"/test1": test1.to_RefTest(),
"/test2": test2}
def test_no_update():

View file

@ -766,3 +766,26 @@ def test_spec_links_whitespace(url):
content = b"<link rel=help href='%s'>" % url
s = create("foo/test.html", content)
assert s.spec_links == {"http://example.com/"}
def test_url_base():
contents = b"""// META: global=window,worker
// META: variant=
// META: variant=?wss
test()"""
s = SourceFile("/", "html/test.any.js", "/_fake_base/", contents=contents)
item_type, items = s.manifest_items()
assert item_type == "testharness"
assert [item.url for item in items] == [u'/_fake_base/html/test.any.html',
u'/_fake_base/html/test.any.html?wss',
u'/_fake_base/html/test.any.serviceworker.html',
u'/_fake_base/html/test.any.serviceworker.html?wss',
u'/_fake_base/html/test.any.sharedworker.html',
u'/_fake_base/html/test.any.sharedworker.html?wss',
u'/_fake_base/html/test.any.worker.html',
u'/_fake_base/html/test.any.worker.html?wss']
assert items[0].url_base == "/_fake_base/"
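
The URLs above come from the new rel_url handling in sourcefile.py together with the urljoin-based url property in item.py; a quick illustration of that join (using the same urljoin import item.py uses above, with example paths):

from six.moves.urllib.parse import urljoin

# rel_url is rel_path with "/" separators and no leading slash, so the
# url_base prefix is preserved:
assert urljoin("/_fake_base/", "html/test.any.html") == "/_fake_base/html/test.any.html"
# an absolute reference would replace the base instead:
assert urljoin("/_fake_base/", "/html/test.any.html") == "/html/test.any.html"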

View file

@ -0,0 +1,14 @@
import os
import subprocess
import mock
from .. import vcs
def test_git_for_path_no_git():
this_dir = os.path.dirname(__file__)
with mock.patch(
"subprocess.check_output",
side_effect=subprocess.CalledProcessError(1, "foo")):
assert vcs.Git.for_path(this_dir, "/", this_dir) is None
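
This test matches the for_path change in vcs.py below, which now only probes with "git rev-parse --show-toplevel" and returns None when that fails; the detection idea in isolation (a sketch with a hypothetical helper name, not the commit's API):

import subprocess

def in_git_repo(path):
    # any git command raises CalledProcessError outside a repository, and
    # OSError if git itself is missing; for_path treats both as "no git"
    try:
        subprocess.check_output(["git", "rev-parse", "--show-toplevel"], cwd=path)
    except (subprocess.CalledProcessError, OSError):
        return False
    return True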

View file

@ -60,10 +60,13 @@ class Git(object):
def for_path(cls, path, url_base, cache_path, manifest_path=None, rebuild=False):
git = Git.get_func(path)
try:
return cls(git("rev-parse", "--show-toplevel").rstrip(), url_base, cache_path,
manifest_path=manifest_path, rebuild=rebuild)
# this needs to be a command that fails if we aren't in a git repo
git("rev-parse", "--show-toplevel")
except (subprocess.CalledProcessError, OSError):
return None
else:
return cls(path, url_base, cache_path,
manifest_path=manifest_path, rebuild=rebuild)
def _local_changes(self):
changes = {}
@ -97,16 +100,15 @@ class Git(object):
for result in self.git(*cmd).split("\0")[:-1]:
rel_path = result.split("\t")[-1]
hash = result.split()[2]
if not os.path.isdir(os.path.join(self.root, rel_path)):
if rel_path in local_changes:
contents = self._show_file(rel_path)
else:
contents = None
yield SourceFile(self.root,
rel_path,
self.url_base,
hash,
contents=contents), True
if rel_path in local_changes:
contents = self._show_file(rel_path)
else:
contents = None
yield SourceFile(self.root,
rel_path,
self.url_base,
hash,
contents=contents), True
def dump_caches(self):
pass

View file

@ -60,7 +60,7 @@ class MemorizingFile(object):
def __getattribute__(self, name):
"""Return a file attribute.
Returns the value overridden by this class for some attributes,
and forwards the call to _file for the other attributes.
"""

View file

@ -1,4 +1,4 @@
flake8==3.7.7
pycodestyle==2.5.0
pyflakes==2.1.0
pyflakes==2.1.1
pep8-naming==0.7.0

View file

@ -2,119 +2,119 @@
function __result_handler() {
function __get_metadata() {
var obj = new Object();
var author = [];
var assert = [];
var help = [];
var match = [];
var mismatch = [];
var flags = [];
var nodes;
var obj = new Object();
var author = [];
var assert = [];
var help = [];
var match = [];
var mismatch = [];
var flags = [];
var nodes;
nodes = document.querySelectorAll('link[rel="author"]');
for (var i = 0; i < nodes.length; i++) {
var href = nodes[i].getAttribute("href");
var title = nodes[i].getAttribute("title");
var s = title;
if (href != null) {
s += " <" + href + ">";
}
author.push(s);
}
if (nodes.length > 0) obj.author = author;
nodes = document.querySelectorAll('meta[name="assert"]');
for (var i = 0; i < nodes.length; i++) {
assert.push(nodes[i].getAttribute("content"));
}
if (nodes.length > 0) obj.assert = assert;
nodes = document.querySelectorAll('link[rel="help"]');
for (var i = 0; i < nodes.length; i++) {
help.push(nodes[i].getAttribute("href"));
}
if (nodes.length > 0) obj.help = help;
nodes = document.querySelectorAll('link[rel="match"]');
for (var i = 0; i < nodes.length; i++) {
match.push(nodes[i].getAttribute("href"));
}
if (nodes.length > 0) obj.match = match;
nodes = document.querySelectorAll('link[rel="mismatch"]');
for (var i = 0; i < nodes.length; i++) {
mismatch.push(nodes[i].getAttribute("href"));
}
if (nodes.length > 0) obj.match = mismatch;
nodes = document.querySelectorAll('meta[name="flags"]');
for (var i = 0; i < nodes.length; i++) {
flags.push(nodes[i].getAttribute("content"));
}
if (nodes.length > 0) obj.flags = flags;
nodes = document.querySelectorAll('link[rel="author"]');
for (var i = 0; i < nodes.length; i++) {
var href = nodes[i].getAttribute("href");
var title = nodes[i].getAttribute("title");
var s = title;
if (href != null) {
s += " <" + href + ">";
}
author.push(s);
}
if (nodes.length > 0) obj.author = author;
nodes = document.querySelectorAll('meta[name="assert"]');
for (var i = 0; i < nodes.length; i++) {
assert.push(nodes[i].getAttribute("content"));
}
if (nodes.length > 0) obj.assert = assert;
nodes = document.querySelectorAll('link[rel="help"]');
for (var i = 0; i < nodes.length; i++) {
help.push(nodes[i].getAttribute("href"));
}
if (nodes.length > 0) obj.help = help;
nodes = document.querySelectorAll('link[rel="match"]');
for (var i = 0; i < nodes.length; i++) {
match.push(nodes[i].getAttribute("href"));
}
if (nodes.length > 0) obj.match = match;
nodes = document.querySelectorAll('link[rel="mismatch"]');
for (var i = 0; i < nodes.length; i++) {
mismatch.push(nodes[i].getAttribute("href"));
}
if (nodes.length > 0) obj.match = mismatch;
nodes = document.querySelectorAll('meta[name="flags"]');
for (var i = 0; i < nodes.length; i++) {
flags.push(nodes[i].getAttribute("content"));
}
if (nodes.length > 0) obj.flags = flags;
return obj;
return obj;
}
var meta = __get_metadata();
var nodes;
function copy(obj, prop, arr) {
if (typeof arr !== "undefined") {
var a = [];
for (var i = 0; i<arr.length;i++) {
a[i] = arr[i];
}
obj[prop] = a;
}
if (typeof arr !== "undefined") {
var a = [];
for (var i = 0; i<arr.length;i++) {
a[i] = arr[i];
}
obj[prop] = a;
}
}
var ret = new Object();
ret.location = document.location.href;
ret.type = "manual";
ret.type = "manual";
ret.tests = new Object();
var node = document.querySelector('script[src$="/resources/testharness.js"]');
if (node !== null) {
ret.type = "script";
ret.type = "script";
}
if (ret.type === "script") {
if (typeof metadata_generator === "undefined"
|| Object.keys(metadata_generator.currentMetadata).length === 0)
return "WRAPPER:TRY_AGAIN";
else {
for (var key in metadata_generator.currentMetadata) {
var obj = metadata_generator.currentMetadata[key];
var newtest = new Object();
ret.tests[key]= newtest;
if (typeof obj.help === "undefined") {
copy(newtest, "help", meta.help);
} else if (typeof obj.help === "string") {
newtest.help = [ obj.help ];
}
if (typeof obj.author === "undefined") {
copy(newtest, "author", meta.author);
} else if (typeof obj.author === "string") {
newtest.author = [ obj.author ];
}
if (typeof obj.assert === "undefined") {
copy(newtest, "assert", meta.assert);
} else if (typeof obj.assert === "string") {
newtest.assert = [ obj.assert ];
}
copy(newtest, "match", meta.match);
copy(newtest, "mismatch", meta.mismatch);
copy(newtest, "flags", meta.flags);
}
return ret;
}
if (typeof metadata_generator === "undefined"
|| Object.keys(metadata_generator.currentMetadata).length === 0)
return "WRAPPER:TRY_AGAIN";
else {
for (var key in metadata_generator.currentMetadata) {
var obj = metadata_generator.currentMetadata[key];
var newtest = new Object();
ret.tests[key]= newtest;
if (typeof obj.help === "undefined") {
copy(newtest, "help", meta.help);
} else if (typeof obj.help === "string") {
newtest.help = [ obj.help ];
}
if (typeof obj.author === "undefined") {
copy(newtest, "author", meta.author);
} else if (typeof obj.author === "string") {
newtest.author = [ obj.author ];
}
if (typeof obj.assert === "undefined") {
copy(newtest, "assert", meta.assert);
} else if (typeof obj.assert === "string") {
newtest.assert = [ obj.assert ];
}
copy(newtest, "match", meta.match);
copy(newtest, "mismatch", meta.mismatch);
copy(newtest, "flags", meta.flags);
}
return ret;
}
} else {
var newtest = meta;
ret.tests[document.title]= newtest;
var newtest = meta;
ret.tests[document.title]= newtest;
if (typeof newtest.match !== "undefined"
|| typeof newtest.mismatch !== "undefined") {
ret.type = "reftest";
}
if (typeof newtest.match !== "undefined"
|| typeof newtest.mismatch !== "undefined") {
ret.type = "reftest";
}
return ret;
return ret;
}
}
@ -122,19 +122,19 @@ function __result_handler() {
function __give_up() {
var ret = new Object();
ret.location = document.location.href;
ret.type = "manual";
ret.type = "manual";
ret.tests = new Object();
var node = document.querySelector('script[src$="/resources/testharness.js"]');
if (node !== null) {
ret.type = "script";
ret.type = "script";
} else if (typeof newtest.match !== "undefined"
|| typeof newtest.mismatch !== "undefined") {
ret.type = "reftest";
|| typeof newtest.mismatch !== "undefined") {
ret.type = "reftest";
}
var newtest = __get_metadata();
ret.tests[document.title]= newtest;
ret.tests[document.title]= newtest;
return ret;
}

View file

@ -1,106 +1,106 @@
// grab the table of contents filled with all the anchors
function __result_handler() {
function getMap() {
var toc_element = document.getElementById("contents").nextElementSibling;
var toc_element = document.getElementById("contents").nextElementSibling;
function getSection() {
function getIds(node) {
var a = [];
var nodes = node.querySelectorAll('*[id]');
for (var i = 0; i < nodes.length; i++) {
a.push(nodes[i].getAttribute("id"));
}
return a;
}
function getTOCIds() {
var a = [];
var nodes = toc_element.querySelectorAll('li');
for (var i = 0; i < nodes.length; i++) {
var href = nodes[i].firstElementChild.getAttribute("href");
a.push(href.substring(1));
}
return a;
}
function getSection() {
function getIds(node) {
var a = [];
var obj = new Object();
var ids = getIds(document);
var toc = getTOCIds();
for (var i = 1; i < toc.length; i++) {
var key1 = toc[i-1];
var key2 = toc[i];
var map = [];
var nodes = node.querySelectorAll('*[id]');
for (var i = 0; i < nodes.length; i++) {
a.push(nodes[i].getAttribute("id"));
}
return a;
}
var index1 = ids.indexOf(key1);
var index2 = ids.indexOf(key2);
function getTOCIds() {
var a = [];
if ((index2-index1) > 1) {
for (var j = index1+1; j < index2;j++) {
map.push(ids[j]);
}
}
var nodes = toc_element.querySelectorAll('li');
for (var i = 0; i < nodes.length; i++) {
var href = nodes[i].firstElementChild.getAttribute("href");
a.push(href.substring(1));
}
return a;
}
obj[key1] = map;
}
{
var key = toc[toc.length-1];
var index = ids.indexOf(key);
var map = [];
var obj = new Object();
var ids = getIds(document);
var toc = getTOCIds();
for (var j = index+1; j < ids.length;j++) {
map.push(ids[j]);
}
obj[key] = map;
}
return obj;
}
for (var i = 1; i < toc.length; i++) {
var key1 = toc[i-1];
var key2 = toc[i];
var map = [];
function section(id) {
this.id = id;
}
function addSubSection(section, sub) {
if (typeof section.sections === "undefined") {
section.sections = [];
}
section.sections.push(sub);
}
var index1 = ids.indexOf(key1);
var index2 = ids.indexOf(key2);
function li(el, map) {
var obj = new section(el.firstElementChild.getAttribute("href").substring(1));
obj.title = el.firstElementChild.textContent;
var child = el.firstElementChild;
if ((index2-index1) > 1) {
for (var j = index1+1; j < index2;j++) {
map.push(ids[j]);
}
}
var m = map[obj.id];
for (var i = 0; i < m.length; i++) {
var sub = new section(m[i]);
addSubSection(obj, sub);
}
while (child !== null) {
if (child.nodeName === "OL") ol(child, obj, map);
child = child.nextElementSibling;
}
return obj;
}
obj[key1] = map;
}
{
var key = toc[toc.length-1];
var index = ids.indexOf(key);
var map = [];
function ol(el, section, map) {
var child = el.firstElementChild;
while (child !== null) {
addSubSection(section, li(child, map));
child = child.nextElementSibling;
}
}
for (var j = index+1; j < ids.length;j++) {
map.push(ids[j]);
}
obj[key] = map;
}
var map = getSection();
var main = new section("___main___");
main.title = document.title;
return obj;
}
ol(toc_element, main, map);
function section(id) {
this.id = id;
}
function addSubSection(section, sub) {
if (typeof section.sections === "undefined") {
section.sections = [];
}
section.sections.push(sub);
}
return main;
function li(el, map) {
var obj = new section(el.firstElementChild.getAttribute("href").substring(1));
obj.title = el.firstElementChild.textContent;
var child = el.firstElementChild;
var m = map[obj.id];
for (var i = 0; i < m.length; i++) {
var sub = new section(m[i]);
addSubSection(obj, sub);
}
while (child !== null) {
if (child.nodeName === "OL") ol(child, obj, map);
child = child.nextElementSibling;
}
return obj;
}
function ol(el, section, map) {
var child = el.firstElementChild;
while (child !== null) {
addSubSection(section, li(child, map));
child = child.nextElementSibling;
}
}
var map = getSection();
var main = new section("___main___");
main.title = document.title;
ol(toc_element, main, map);
return main;
}
return getMap();

View file

@ -0,0 +1,4 @@
{
"tc-verify": {"path": "verify.py", "script": "run", "parser": "create_parser", "help": "Verify .taskcluster.yml file is parsable",
"virtualenv": true, "install": ["json-e", "pyyaml"]}
}

View file

@ -0,0 +1,214 @@
{
"ref": "refs/heads/master",
"before": "a4bfa25bfc35e6dd8aabf9bc5af714bf3d70d712",
"after": "5baef702c26b8580f5a4e5e1a34ac75bb9d496ae",
"created": false,
"deleted": false,
"forced": false,
"base_ref": null,
"compare": "https://github.com/web-platform-tests/wpt/compare/a4bfa25bfc35...5baef702c26b",
"commits": [
{
"id": "5baef702c26b8580f5a4e5e1a34ac75bb9d496ae",
"tree_id": "045949cd04598b19f5ed1bebf2d5cbed647f3c86",
"distinct": true,
"message": "Add support for verifying taskcluster config (#15593)\n\nAdds as wpt tc-verify command that verifies that the TaskCluster\r\nconfig is a valid yaml file and computes the tasks that will run on a\r\nPR synchronize event. This can be expanded to more events and pushes\r\nin the future.",
"timestamp": "2019-03-01T14:43:07Z",
"url": "https://github.com/web-platform-tests/wpt/commit/5baef702c26b8580f5a4e5e1a34ac75bb9d496ae",
"author": {
"name": "jgraham",
"email": "james@hoppipolla.co.uk",
"username": "jgraham"
},
"committer": {
"name": "GitHub",
"email": "noreply@github.com",
"username": "web-flow"
},
"added": [
"tools/taskcluster/__init__.py",
"tools/taskcluster/commands.json",
"tools/taskcluster/testdata/pr_event.json",
"tools/taskcluster/verify.py"
],
"removed": [
],
"modified": [
"tools/wpt/paths"
]
}
],
"head_commit": {
"id": "5baef702c26b8580f5a4e5e1a34ac75bb9d496ae",
"tree_id": "045949cd04598b19f5ed1bebf2d5cbed647f3c86",
"distinct": true,
"message": "Add support for verifying taskcluster config (#15593)\n\nAdds as wpt tc-verify command that verifies that the TaskCluster\r\nconfig is a valid yaml file and computes the tasks that will run on a\r\nPR synchronize event. This can be expanded to more events and pushes\r\nin the future.",
"timestamp": "2019-03-01T14:43:07Z",
"url": "https://github.com/web-platform-tests/wpt/commit/5baef702c26b8580f5a4e5e1a34ac75bb9d496ae",
"author": {
"name": "jgraham",
"email": "james@hoppipolla.co.uk",
"username": "jgraham"
},
"committer": {
"name": "GitHub",
"email": "noreply@github.com",
"username": "web-flow"
},
"added": [
"tools/taskcluster/__init__.py",
"tools/taskcluster/commands.json",
"tools/taskcluster/testdata/pr_event.json",
"tools/taskcluster/verify.py"
],
"removed": [
],
"modified": [
"tools/wpt/paths"
]
},
"repository": {
"id": 3618133,
"node_id": "MDEwOlJlcG9zaXRvcnkzNjE4MTMz",
"name": "wpt",
"full_name": "web-platform-tests/wpt",
"private": false,
"owner": {
"name": "web-platform-tests",
"email": "",
"login": "web-platform-tests",
"id": 37226233,
"node_id": "MDEyOk9yZ2FuaXphdGlvbjM3MjI2MjMz",
"avatar_url": "https://avatars0.githubusercontent.com/u/37226233?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/web-platform-tests",
"html_url": "https://github.com/web-platform-tests",
"followers_url": "https://api.github.com/users/web-platform-tests/followers",
"following_url": "https://api.github.com/users/web-platform-tests/following{/other_user}",
"gists_url": "https://api.github.com/users/web-platform-tests/gists{/gist_id}",
"starred_url": "https://api.github.com/users/web-platform-tests/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/web-platform-tests/subscriptions",
"organizations_url": "https://api.github.com/users/web-platform-tests/orgs",
"repos_url": "https://api.github.com/users/web-platform-tests/repos",
"events_url": "https://api.github.com/users/web-platform-tests/events{/privacy}",
"received_events_url": "https://api.github.com/users/web-platform-tests/received_events",
"type": "Organization",
"site_admin": false
},
"html_url": "https://github.com/web-platform-tests/wpt",
"description": "Test suites for Web platform specs — including WHATWG, W3C, and others",
"fork": false,
"url": "https://github.com/web-platform-tests/wpt",
"forks_url": "https://api.github.com/repos/web-platform-tests/wpt/forks",
"keys_url": "https://api.github.com/repos/web-platform-tests/wpt/keys{/key_id}",
"collaborators_url": "https://api.github.com/repos/web-platform-tests/wpt/collaborators{/collaborator}",
"teams_url": "https://api.github.com/repos/web-platform-tests/wpt/teams",
"hooks_url": "https://api.github.com/repos/web-platform-tests/wpt/hooks",
"issue_events_url": "https://api.github.com/repos/web-platform-tests/wpt/issues/events{/number}",
"events_url": "https://api.github.com/repos/web-platform-tests/wpt/events",
"assignees_url": "https://api.github.com/repos/web-platform-tests/wpt/assignees{/user}",
"branches_url": "https://api.github.com/repos/web-platform-tests/wpt/branches{/branch}",
"tags_url": "https://api.github.com/repos/web-platform-tests/wpt/tags",
"blobs_url": "https://api.github.com/repos/web-platform-tests/wpt/git/blobs{/sha}",
"git_tags_url": "https://api.github.com/repos/web-platform-tests/wpt/git/tags{/sha}",
"git_refs_url": "https://api.github.com/repos/web-platform-tests/wpt/git/refs{/sha}",
"trees_url": "https://api.github.com/repos/web-platform-tests/wpt/git/trees{/sha}",
"statuses_url": "https://api.github.com/repos/web-platform-tests/wpt/statuses/{sha}",
"languages_url": "https://api.github.com/repos/web-platform-tests/wpt/languages",
"stargazers_url": "https://api.github.com/repos/web-platform-tests/wpt/stargazers",
"contributors_url": "https://api.github.com/repos/web-platform-tests/wpt/contributors",
"subscribers_url": "https://api.github.com/repos/web-platform-tests/wpt/subscribers",
"subscription_url": "https://api.github.com/repos/web-platform-tests/wpt/subscription",
"commits_url": "https://api.github.com/repos/web-platform-tests/wpt/commits{/sha}",
"git_commits_url": "https://api.github.com/repos/web-platform-tests/wpt/git/commits{/sha}",
"comments_url": "https://api.github.com/repos/web-platform-tests/wpt/comments{/number}",
"issue_comment_url": "https://api.github.com/repos/web-platform-tests/wpt/issues/comments{/number}",
"contents_url": "https://api.github.com/repos/web-platform-tests/wpt/contents/{+path}",
"compare_url": "https://api.github.com/repos/web-platform-tests/wpt/compare/{base}...{head}",
"merges_url": "https://api.github.com/repos/web-platform-tests/wpt/merges",
"archive_url": "https://api.github.com/repos/web-platform-tests/wpt/{archive_format}{/ref}",
"downloads_url": "https://api.github.com/repos/web-platform-tests/wpt/downloads",
"issues_url": "https://api.github.com/repos/web-platform-tests/wpt/issues{/number}",
"pulls_url": "https://api.github.com/repos/web-platform-tests/wpt/pulls{/number}",
"milestones_url": "https://api.github.com/repos/web-platform-tests/wpt/milestones{/number}",
"notifications_url": "https://api.github.com/repos/web-platform-tests/wpt/notifications{?since,all,participating}",
"labels_url": "https://api.github.com/repos/web-platform-tests/wpt/labels{/name}",
"releases_url": "https://api.github.com/repos/web-platform-tests/wpt/releases{/id}",
"deployments_url": "https://api.github.com/repos/web-platform-tests/wpt/deployments",
"created_at": 1330865891,
"updated_at": "2019-03-01T14:16:52Z",
"pushed_at": 1551451389,
"git_url": "git://github.com/web-platform-tests/wpt.git",
"ssh_url": "git@github.com:web-platform-tests/wpt.git",
"clone_url": "https://github.com/web-platform-tests/wpt.git",
"svn_url": "https://github.com/web-platform-tests/wpt",
"homepage": "http://irc.w3.org/?channels=testing",
"size": 324722,
"stargazers_count": 2060,
"watchers_count": 2060,
"language": "HTML",
"has_issues": true,
"has_projects": true,
"has_downloads": true,
"has_wiki": true,
"has_pages": true,
"forks_count": 1605,
"mirror_url": null,
"archived": false,
"open_issues_count": 1355,
"license": {
"key": "other",
"name": "Other",
"spdx_id": "NOASSERTION",
"url": null,
"node_id": "MDc6TGljZW5zZTA="
},
"forks": 1605,
"open_issues": 1366,
"watchers": 2060,
"default_branch": "master",
"stargazers": 2060,
"master_branch": "master",
"organization": "web-platform-tests"
},
"pusher": {
"name": "jgraham",
"email": "james@hoppipolla.co.uk"
},
"organization": {
"login": "web-platform-tests",
"id": 37226233,
"node_id": "MDEyOk9yZ2FuaXphdGlvbjM3MjI2MjMz",
"url": "https://api.github.com/orgs/web-platform-tests",
"repos_url": "https://api.github.com/orgs/web-platform-tests/repos",
"events_url": "https://api.github.com/orgs/web-platform-tests/events",
"hooks_url": "https://api.github.com/orgs/web-platform-tests/hooks",
"issues_url": "https://api.github.com/orgs/web-platform-tests/issues",
"members_url": "https://api.github.com/orgs/web-platform-tests/members{/member}",
"public_members_url": "https://api.github.com/orgs/web-platform-tests/public_members{/member}",
"avatar_url": "https://avatars0.githubusercontent.com/u/37226233?v=4",
"description": ""
},
"sender": {
"login": "jgraham",
"id": 294864,
"node_id": "MDQ6VXNlcjI5NDg2NA==",
"avatar_url": "https://avatars1.githubusercontent.com/u/294864?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/jgraham",
"html_url": "https://github.com/jgraham",
"followers_url": "https://api.github.com/users/jgraham/followers",
"following_url": "https://api.github.com/users/jgraham/following{/other_user}",
"gists_url": "https://api.github.com/users/jgraham/gists{/gist_id}",
"starred_url": "https://api.github.com/users/jgraham/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/jgraham/subscriptions",
"organizations_url": "https://api.github.com/users/jgraham/orgs",
"repos_url": "https://api.github.com/users/jgraham/repos",
"events_url": "https://api.github.com/users/jgraham/events{/privacy}",
"received_events_url": "https://api.github.com/users/jgraham/received_events",
"type": "User",
"site_admin": false
}
}

View file

@ -0,0 +1,577 @@
{
"action": "synchronize",
"number": 15574,
"pull_request": {
"url": "https://api.github.com/repos/web-platform-tests/wpt/pulls/15574",
"id": 256653065,
"node_id": "MDExOlB1bGxSZXF1ZXN0MjU2NjUzMDY1",
"html_url": "https://github.com/web-platform-tests/wpt/pull/15574",
"diff_url": "https://github.com/web-platform-tests/wpt/pull/15574.diff",
"patch_url": "https://github.com/web-platform-tests/wpt/pull/15574.patch",
"issue_url": "https://api.github.com/repos/web-platform-tests/wpt/issues/15574",
"number": 15574,
"state": "open",
"locked": false,
"title": "Move the lint from Travis to TaskCluster",
"user": {
"login": "jgraham",
"id": 294864,
"node_id": "MDQ6VXNlcjI5NDg2NA==",
"avatar_url": "https://avatars1.githubusercontent.com/u/294864?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/jgraham",
"html_url": "https://github.com/jgraham",
"followers_url": "https://api.github.com/users/jgraham/followers",
"following_url": "https://api.github.com/users/jgraham/following{/other_user}",
"gists_url": "https://api.github.com/users/jgraham/gists{/gist_id}",
"starred_url": "https://api.github.com/users/jgraham/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/jgraham/subscriptions",
"organizations_url": "https://api.github.com/users/jgraham/orgs",
"repos_url": "https://api.github.com/users/jgraham/repos",
"events_url": "https://api.github.com/users/jgraham/events{/privacy}",
"received_events_url": "https://api.github.com/users/jgraham/received_events",
"type": "User",
"site_admin": false
},
"body": "",
"created_at": "2019-02-27T12:03:38Z",
"updated_at": "2019-02-28T13:43:17Z",
"closed_at": null,
"merged_at": null,
"merge_commit_sha": "70a272296dad0db4f0be1133a59aa97f0a72d9ac",
"assignee": {
"login": "gsnedders",
"id": 176218,
"node_id": "MDQ6VXNlcjE3NjIxOA==",
"avatar_url": "https://avatars2.githubusercontent.com/u/176218?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/gsnedders",
"html_url": "https://github.com/gsnedders",
"followers_url": "https://api.github.com/users/gsnedders/followers",
"following_url": "https://api.github.com/users/gsnedders/following{/other_user}",
"gists_url": "https://api.github.com/users/gsnedders/gists{/gist_id}",
"starred_url": "https://api.github.com/users/gsnedders/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/gsnedders/subscriptions",
"organizations_url": "https://api.github.com/users/gsnedders/orgs",
"repos_url": "https://api.github.com/users/gsnedders/repos",
"events_url": "https://api.github.com/users/gsnedders/events{/privacy}",
"received_events_url": "https://api.github.com/users/gsnedders/received_events",
"type": "User",
"site_admin": false
},
"assignees": [
{
"login": "gsnedders",
"id": 176218,
"node_id": "MDQ6VXNlcjE3NjIxOA==",
"avatar_url": "https://avatars2.githubusercontent.com/u/176218?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/gsnedders",
"html_url": "https://github.com/gsnedders",
"followers_url": "https://api.github.com/users/gsnedders/followers",
"following_url": "https://api.github.com/users/gsnedders/following{/other_user}",
"gists_url": "https://api.github.com/users/gsnedders/gists{/gist_id}",
"starred_url": "https://api.github.com/users/gsnedders/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/gsnedders/subscriptions",
"organizations_url": "https://api.github.com/users/gsnedders/orgs",
"repos_url": "https://api.github.com/users/gsnedders/repos",
"events_url": "https://api.github.com/users/gsnedders/events{/privacy}",
"received_events_url": "https://api.github.com/users/gsnedders/received_events",
"type": "User",
"site_admin": false
}
],
"requested_reviewers": [
{
"login": "gsnedders",
"id": 176218,
"node_id": "MDQ6VXNlcjE3NjIxOA==",
"avatar_url": "https://avatars2.githubusercontent.com/u/176218?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/gsnedders",
"html_url": "https://github.com/gsnedders",
"followers_url": "https://api.github.com/users/gsnedders/followers",
"following_url": "https://api.github.com/users/gsnedders/following{/other_user}",
"gists_url": "https://api.github.com/users/gsnedders/gists{/gist_id}",
"starred_url": "https://api.github.com/users/gsnedders/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/gsnedders/subscriptions",
"organizations_url": "https://api.github.com/users/gsnedders/orgs",
"repos_url": "https://api.github.com/users/gsnedders/repos",
"events_url": "https://api.github.com/users/gsnedders/events{/privacy}",
"received_events_url": "https://api.github.com/users/gsnedders/received_events",
"type": "User",
"site_admin": false
},
{
"login": "jugglinmike",
"id": 677252,
"node_id": "MDQ6VXNlcjY3NzI1Mg==",
"avatar_url": "https://avatars2.githubusercontent.com/u/677252?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/jugglinmike",
"html_url": "https://github.com/jugglinmike",
"followers_url": "https://api.github.com/users/jugglinmike/followers",
"following_url": "https://api.github.com/users/jugglinmike/following{/other_user}",
"gists_url": "https://api.github.com/users/jugglinmike/gists{/gist_id}",
"starred_url": "https://api.github.com/users/jugglinmike/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/jugglinmike/subscriptions",
"organizations_url": "https://api.github.com/users/jugglinmike/orgs",
"repos_url": "https://api.github.com/users/jugglinmike/repos",
"events_url": "https://api.github.com/users/jugglinmike/events{/privacy}",
"received_events_url": "https://api.github.com/users/jugglinmike/received_events",
"type": "User",
"site_admin": false
}
],
"requested_teams": [
],
"labels": [
{
"id": 1012999603,
"node_id": "MDU6TGFiZWwxMDEyOTk5NjAz",
"url": "https://api.github.com/repos/web-platform-tests/wpt/labels/ci",
"name": "ci",
"color": "fef2c0",
"default": false
},
{
"id": 45230790,
"node_id": "MDU6TGFiZWw0NTIzMDc5MA==",
"url": "https://api.github.com/repos/web-platform-tests/wpt/labels/infra",
"name": "infra",
"color": "fbca04",
"default": false
}
],
"milestone": null,
"commits_url": "https://api.github.com/repos/web-platform-tests/wpt/pulls/15574/commits",
"review_comments_url": "https://api.github.com/repos/web-platform-tests/wpt/pulls/15574/comments",
"review_comment_url": "https://api.github.com/repos/web-platform-tests/wpt/pulls/comments{/number}",
"comments_url": "https://api.github.com/repos/web-platform-tests/wpt/issues/15574/comments",
"statuses_url": "https://api.github.com/repos/web-platform-tests/wpt/statuses/fef69c5d47196433234d6c37a0ff987491bd2dfc",
"head": {
"label": "web-platform-tests:taskcluster_lint",
"ref": "taskcluster_lint",
"sha": "fef69c5d47196433234d6c37a0ff987491bd2dfc",
"user": {
"login": "web-platform-tests",
"id": 37226233,
"node_id": "MDEyOk9yZ2FuaXphdGlvbjM3MjI2MjMz",
"avatar_url": "https://avatars0.githubusercontent.com/u/37226233?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/web-platform-tests",
"html_url": "https://github.com/web-platform-tests",
"followers_url": "https://api.github.com/users/web-platform-tests/followers",
"following_url": "https://api.github.com/users/web-platform-tests/following{/other_user}",
"gists_url": "https://api.github.com/users/web-platform-tests/gists{/gist_id}",
"starred_url": "https://api.github.com/users/web-platform-tests/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/web-platform-tests/subscriptions",
"organizations_url": "https://api.github.com/users/web-platform-tests/orgs",
"repos_url": "https://api.github.com/users/web-platform-tests/repos",
"events_url": "https://api.github.com/users/web-platform-tests/events{/privacy}",
"received_events_url": "https://api.github.com/users/web-platform-tests/received_events",
"type": "Organization",
"site_admin": false
},
"repo": {
"id": 3618133,
"node_id": "MDEwOlJlcG9zaXRvcnkzNjE4MTMz",
"name": "wpt",
"full_name": "web-platform-tests/wpt",
"private": false,
"owner": {
"login": "web-platform-tests",
"id": 37226233,
"node_id": "MDEyOk9yZ2FuaXphdGlvbjM3MjI2MjMz",
"avatar_url": "https://avatars0.githubusercontent.com/u/37226233?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/web-platform-tests",
"html_url": "https://github.com/web-platform-tests",
"followers_url": "https://api.github.com/users/web-platform-tests/followers",
"following_url": "https://api.github.com/users/web-platform-tests/following{/other_user}",
"gists_url": "https://api.github.com/users/web-platform-tests/gists{/gist_id}",
"starred_url": "https://api.github.com/users/web-platform-tests/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/web-platform-tests/subscriptions",
"organizations_url": "https://api.github.com/users/web-platform-tests/orgs",
"repos_url": "https://api.github.com/users/web-platform-tests/repos",
"events_url": "https://api.github.com/users/web-platform-tests/events{/privacy}",
"received_events_url": "https://api.github.com/users/web-platform-tests/received_events",
"type": "Organization",
"site_admin": false
},
"html_url": "https://github.com/web-platform-tests/wpt",
"description": "Test suites for Web platform specs — including WHATWG, W3C, and others",
"fork": false,
"url": "https://api.github.com/repos/web-platform-tests/wpt",
"forks_url": "https://api.github.com/repos/web-platform-tests/wpt/forks",
"keys_url": "https://api.github.com/repos/web-platform-tests/wpt/keys{/key_id}",
"collaborators_url": "https://api.github.com/repos/web-platform-tests/wpt/collaborators{/collaborator}",
"teams_url": "https://api.github.com/repos/web-platform-tests/wpt/teams",
"hooks_url": "https://api.github.com/repos/web-platform-tests/wpt/hooks",
"issue_events_url": "https://api.github.com/repos/web-platform-tests/wpt/issues/events{/number}",
"events_url": "https://api.github.com/repos/web-platform-tests/wpt/events",
"assignees_url": "https://api.github.com/repos/web-platform-tests/wpt/assignees{/user}",
"branches_url": "https://api.github.com/repos/web-platform-tests/wpt/branches{/branch}",
"tags_url": "https://api.github.com/repos/web-platform-tests/wpt/tags",
"blobs_url": "https://api.github.com/repos/web-platform-tests/wpt/git/blobs{/sha}",
"git_tags_url": "https://api.github.com/repos/web-platform-tests/wpt/git/tags{/sha}",
"git_refs_url": "https://api.github.com/repos/web-platform-tests/wpt/git/refs{/sha}",
"trees_url": "https://api.github.com/repos/web-platform-tests/wpt/git/trees{/sha}",
"statuses_url": "https://api.github.com/repos/web-platform-tests/wpt/statuses/{sha}",
"languages_url": "https://api.github.com/repos/web-platform-tests/wpt/languages",
"stargazers_url": "https://api.github.com/repos/web-platform-tests/wpt/stargazers",
"contributors_url": "https://api.github.com/repos/web-platform-tests/wpt/contributors",
"subscribers_url": "https://api.github.com/repos/web-platform-tests/wpt/subscribers",
"subscription_url": "https://api.github.com/repos/web-platform-tests/wpt/subscription",
"commits_url": "https://api.github.com/repos/web-platform-tests/wpt/commits{/sha}",
"git_commits_url": "https://api.github.com/repos/web-platform-tests/wpt/git/commits{/sha}",
"comments_url": "https://api.github.com/repos/web-platform-tests/wpt/comments{/number}",
"issue_comment_url": "https://api.github.com/repos/web-platform-tests/wpt/issues/comments{/number}",
"contents_url": "https://api.github.com/repos/web-platform-tests/wpt/contents/{+path}",
"compare_url": "https://api.github.com/repos/web-platform-tests/wpt/compare/{base}...{head}",
"merges_url": "https://api.github.com/repos/web-platform-tests/wpt/merges",
"archive_url": "https://api.github.com/repos/web-platform-tests/wpt/{archive_format}{/ref}",
"downloads_url": "https://api.github.com/repos/web-platform-tests/wpt/downloads",
"issues_url": "https://api.github.com/repos/web-platform-tests/wpt/issues{/number}",
"pulls_url": "https://api.github.com/repos/web-platform-tests/wpt/pulls{/number}",
"milestones_url": "https://api.github.com/repos/web-platform-tests/wpt/milestones{/number}",
"notifications_url": "https://api.github.com/repos/web-platform-tests/wpt/notifications{?since,all,participating}",
"labels_url": "https://api.github.com/repos/web-platform-tests/wpt/labels{/name}",
"releases_url": "https://api.github.com/repos/web-platform-tests/wpt/releases{/id}",
"deployments_url": "https://api.github.com/repos/web-platform-tests/wpt/deployments",
"created_at": "2012-03-04T12:58:11Z",
"updated_at": "2019-02-28T12:41:33Z",
"pushed_at": "2019-02-28T13:43:16Z",
"git_url": "git://github.com/web-platform-tests/wpt.git",
"ssh_url": "git@github.com:web-platform-tests/wpt.git",
"clone_url": "https://github.com/web-platform-tests/wpt.git",
"svn_url": "https://github.com/web-platform-tests/wpt",
"homepage": "http://irc.w3.org/?channels=testing",
"size": 324641,
"stargazers_count": 2058,
"watchers_count": 2058,
"language": "HTML",
"has_issues": true,
"has_projects": true,
"has_downloads": true,
"has_wiki": true,
"has_pages": true,
"forks_count": 1604,
"mirror_url": null,
"archived": false,
"open_issues_count": 1354,
"license": {
"key": "other",
"name": "Other",
"spdx_id": "NOASSERTION",
"url": null,
"node_id": "MDc6TGljZW5zZTA="
},
"forks": 1604,
"open_issues": 1354,
"watchers": 2058,
"default_branch": "master"
}
},
"base": {
"label": "web-platform-tests:master",
"ref": "master",
"sha": "bb657c4bd0cc4729daa27c1f3a1e1f86ef5a1dc0",
"user": {
"login": "web-platform-tests",
"id": 37226233,
"node_id": "MDEyOk9yZ2FuaXphdGlvbjM3MjI2MjMz",
"avatar_url": "https://avatars0.githubusercontent.com/u/37226233?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/web-platform-tests",
"html_url": "https://github.com/web-platform-tests",
"followers_url": "https://api.github.com/users/web-platform-tests/followers",
"following_url": "https://api.github.com/users/web-platform-tests/following{/other_user}",
"gists_url": "https://api.github.com/users/web-platform-tests/gists{/gist_id}",
"starred_url": "https://api.github.com/users/web-platform-tests/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/web-platform-tests/subscriptions",
"organizations_url": "https://api.github.com/users/web-platform-tests/orgs",
"repos_url": "https://api.github.com/users/web-platform-tests/repos",
"events_url": "https://api.github.com/users/web-platform-tests/events{/privacy}",
"received_events_url": "https://api.github.com/users/web-platform-tests/received_events",
"type": "Organization",
"site_admin": false
},
"repo": {
"id": 3618133,
"node_id": "MDEwOlJlcG9zaXRvcnkzNjE4MTMz",
"name": "wpt",
"full_name": "web-platform-tests/wpt",
"private": false,
"owner": {
"login": "web-platform-tests",
"id": 37226233,
"node_id": "MDEyOk9yZ2FuaXphdGlvbjM3MjI2MjMz",
"avatar_url": "https://avatars0.githubusercontent.com/u/37226233?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/web-platform-tests",
"html_url": "https://github.com/web-platform-tests",
"followers_url": "https://api.github.com/users/web-platform-tests/followers",
"following_url": "https://api.github.com/users/web-platform-tests/following{/other_user}",
"gists_url": "https://api.github.com/users/web-platform-tests/gists{/gist_id}",
"starred_url": "https://api.github.com/users/web-platform-tests/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/web-platform-tests/subscriptions",
"organizations_url": "https://api.github.com/users/web-platform-tests/orgs",
"repos_url": "https://api.github.com/users/web-platform-tests/repos",
"events_url": "https://api.github.com/users/web-platform-tests/events{/privacy}",
"received_events_url": "https://api.github.com/users/web-platform-tests/received_events",
"type": "Organization",
"site_admin": false
},
"html_url": "https://github.com/web-platform-tests/wpt",
"description": "Test suites for Web platform specs — including WHATWG, W3C, and others",
"fork": false,
"url": "https://api.github.com/repos/web-platform-tests/wpt",
"forks_url": "https://api.github.com/repos/web-platform-tests/wpt/forks",
"keys_url": "https://api.github.com/repos/web-platform-tests/wpt/keys{/key_id}",
"collaborators_url": "https://api.github.com/repos/web-platform-tests/wpt/collaborators{/collaborator}",
"teams_url": "https://api.github.com/repos/web-platform-tests/wpt/teams",
"hooks_url": "https://api.github.com/repos/web-platform-tests/wpt/hooks",
"issue_events_url": "https://api.github.com/repos/web-platform-tests/wpt/issues/events{/number}",
"events_url": "https://api.github.com/repos/web-platform-tests/wpt/events",
"assignees_url": "https://api.github.com/repos/web-platform-tests/wpt/assignees{/user}",
"branches_url": "https://api.github.com/repos/web-platform-tests/wpt/branches{/branch}",
"tags_url": "https://api.github.com/repos/web-platform-tests/wpt/tags",
"blobs_url": "https://api.github.com/repos/web-platform-tests/wpt/git/blobs{/sha}",
"git_tags_url": "https://api.github.com/repos/web-platform-tests/wpt/git/tags{/sha}",
"git_refs_url": "https://api.github.com/repos/web-platform-tests/wpt/git/refs{/sha}",
"trees_url": "https://api.github.com/repos/web-platform-tests/wpt/git/trees{/sha}",
"statuses_url": "https://api.github.com/repos/web-platform-tests/wpt/statuses/{sha}",
"languages_url": "https://api.github.com/repos/web-platform-tests/wpt/languages",
"stargazers_url": "https://api.github.com/repos/web-platform-tests/wpt/stargazers",
"contributors_url": "https://api.github.com/repos/web-platform-tests/wpt/contributors",
"subscribers_url": "https://api.github.com/repos/web-platform-tests/wpt/subscribers",
"subscription_url": "https://api.github.com/repos/web-platform-tests/wpt/subscription",
"commits_url": "https://api.github.com/repos/web-platform-tests/wpt/commits{/sha}",
"git_commits_url": "https://api.github.com/repos/web-platform-tests/wpt/git/commits{/sha}",
"comments_url": "https://api.github.com/repos/web-platform-tests/wpt/comments{/number}",
"issue_comment_url": "https://api.github.com/repos/web-platform-tests/wpt/issues/comments{/number}",
"contents_url": "https://api.github.com/repos/web-platform-tests/wpt/contents/{+path}",
"compare_url": "https://api.github.com/repos/web-platform-tests/wpt/compare/{base}...{head}",
"merges_url": "https://api.github.com/repos/web-platform-tests/wpt/merges",
"archive_url": "https://api.github.com/repos/web-platform-tests/wpt/{archive_format}{/ref}",
"downloads_url": "https://api.github.com/repos/web-platform-tests/wpt/downloads",
"issues_url": "https://api.github.com/repos/web-platform-tests/wpt/issues{/number}",
"pulls_url": "https://api.github.com/repos/web-platform-tests/wpt/pulls{/number}",
"milestones_url": "https://api.github.com/repos/web-platform-tests/wpt/milestones{/number}",
"notifications_url": "https://api.github.com/repos/web-platform-tests/wpt/notifications{?since,all,participating}",
"labels_url": "https://api.github.com/repos/web-platform-tests/wpt/labels{/name}",
"releases_url": "https://api.github.com/repos/web-platform-tests/wpt/releases{/id}",
"deployments_url": "https://api.github.com/repos/web-platform-tests/wpt/deployments",
"created_at": "2012-03-04T12:58:11Z",
"updated_at": "2019-02-28T12:41:33Z",
"pushed_at": "2019-02-28T13:43:16Z",
"git_url": "git://github.com/web-platform-tests/wpt.git",
"ssh_url": "git@github.com:web-platform-tests/wpt.git",
"clone_url": "https://github.com/web-platform-tests/wpt.git",
"svn_url": "https://github.com/web-platform-tests/wpt",
"homepage": "http://irc.w3.org/?channels=testing",
"size": 324641,
"stargazers_count": 2058,
"watchers_count": 2058,
"language": "HTML",
"has_issues": true,
"has_projects": true,
"has_downloads": true,
"has_wiki": true,
"has_pages": true,
"forks_count": 1604,
"mirror_url": null,
"archived": false,
"open_issues_count": 1354,
"license": {
"key": "other",
"name": "Other",
"spdx_id": "NOASSERTION",
"url": null,
"node_id": "MDc6TGljZW5zZTA="
},
"forks": 1604,
"open_issues": 1354,
"watchers": 2058,
"default_branch": "master"
}
},
"_links": {
"self": {
"href": "https://api.github.com/repos/web-platform-tests/wpt/pulls/15574"
},
"html": {
"href": "https://github.com/web-platform-tests/wpt/pull/15574"
},
"issue": {
"href": "https://api.github.com/repos/web-platform-tests/wpt/issues/15574"
},
"comments": {
"href": "https://api.github.com/repos/web-platform-tests/wpt/issues/15574/comments"
},
"review_comments": {
"href": "https://api.github.com/repos/web-platform-tests/wpt/pulls/15574/comments"
},
"review_comment": {
"href": "https://api.github.com/repos/web-platform-tests/wpt/pulls/comments{/number}"
},
"commits": {
"href": "https://api.github.com/repos/web-platform-tests/wpt/pulls/15574/commits"
},
"statuses": {
"href": "https://api.github.com/repos/web-platform-tests/wpt/statuses/fef69c5d47196433234d6c37a0ff987491bd2dfc"
}
},
"author_association": "CONTRIBUTOR",
"draft": false,
"merged": false,
"mergeable": null,
"rebaseable": null,
"mergeable_state": "unknown",
"merged_by": null,
"comments": 2,
"review_comments": 3,
"maintainer_can_modify": false,
"commits": 2,
"additions": 55,
"deletions": 7,
"changed_files": 3
},
"before": "46d2f316ae10b83726dfb43150b321533bc9539f",
"after": "fef69c5d47196433234d6c37a0ff987491bd2dfc",
"repository": {
"id": 3618133,
"node_id": "MDEwOlJlcG9zaXRvcnkzNjE4MTMz",
"name": "wpt",
"full_name": "web-platform-tests/wpt",
"private": false,
"owner": {
"login": "web-platform-tests",
"id": 37226233,
"node_id": "MDEyOk9yZ2FuaXphdGlvbjM3MjI2MjMz",
"avatar_url": "https://avatars0.githubusercontent.com/u/37226233?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/web-platform-tests",
"html_url": "https://github.com/web-platform-tests",
"followers_url": "https://api.github.com/users/web-platform-tests/followers",
"following_url": "https://api.github.com/users/web-platform-tests/following{/other_user}",
"gists_url": "https://api.github.com/users/web-platform-tests/gists{/gist_id}",
"starred_url": "https://api.github.com/users/web-platform-tests/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/web-platform-tests/subscriptions",
"organizations_url": "https://api.github.com/users/web-platform-tests/orgs",
"repos_url": "https://api.github.com/users/web-platform-tests/repos",
"events_url": "https://api.github.com/users/web-platform-tests/events{/privacy}",
"received_events_url": "https://api.github.com/users/web-platform-tests/received_events",
"type": "Organization",
"site_admin": false
},
"html_url": "https://github.com/web-platform-tests/wpt",
"description": "Test suites for Web platform specs — including WHATWG, W3C, and others",
"fork": false,
"url": "https://api.github.com/repos/web-platform-tests/wpt",
"forks_url": "https://api.github.com/repos/web-platform-tests/wpt/forks",
"keys_url": "https://api.github.com/repos/web-platform-tests/wpt/keys{/key_id}",
"collaborators_url": "https://api.github.com/repos/web-platform-tests/wpt/collaborators{/collaborator}",
"teams_url": "https://api.github.com/repos/web-platform-tests/wpt/teams",
"hooks_url": "https://api.github.com/repos/web-platform-tests/wpt/hooks",
"issue_events_url": "https://api.github.com/repos/web-platform-tests/wpt/issues/events{/number}",
"events_url": "https://api.github.com/repos/web-platform-tests/wpt/events",
"assignees_url": "https://api.github.com/repos/web-platform-tests/wpt/assignees{/user}",
"branches_url": "https://api.github.com/repos/web-platform-tests/wpt/branches{/branch}",
"tags_url": "https://api.github.com/repos/web-platform-tests/wpt/tags",
"blobs_url": "https://api.github.com/repos/web-platform-tests/wpt/git/blobs{/sha}",
"git_tags_url": "https://api.github.com/repos/web-platform-tests/wpt/git/tags{/sha}",
"git_refs_url": "https://api.github.com/repos/web-platform-tests/wpt/git/refs{/sha}",
"trees_url": "https://api.github.com/repos/web-platform-tests/wpt/git/trees{/sha}",
"statuses_url": "https://api.github.com/repos/web-platform-tests/wpt/statuses/{sha}",
"languages_url": "https://api.github.com/repos/web-platform-tests/wpt/languages",
"stargazers_url": "https://api.github.com/repos/web-platform-tests/wpt/stargazers",
"contributors_url": "https://api.github.com/repos/web-platform-tests/wpt/contributors",
"subscribers_url": "https://api.github.com/repos/web-platform-tests/wpt/subscribers",
"subscription_url": "https://api.github.com/repos/web-platform-tests/wpt/subscription",
"commits_url": "https://api.github.com/repos/web-platform-tests/wpt/commits{/sha}",
"git_commits_url": "https://api.github.com/repos/web-platform-tests/wpt/git/commits{/sha}",
"comments_url": "https://api.github.com/repos/web-platform-tests/wpt/comments{/number}",
"issue_comment_url": "https://api.github.com/repos/web-platform-tests/wpt/issues/comments{/number}",
"contents_url": "https://api.github.com/repos/web-platform-tests/wpt/contents/{+path}",
"compare_url": "https://api.github.com/repos/web-platform-tests/wpt/compare/{base}...{head}",
"merges_url": "https://api.github.com/repos/web-platform-tests/wpt/merges",
"archive_url": "https://api.github.com/repos/web-platform-tests/wpt/{archive_format}{/ref}",
"downloads_url": "https://api.github.com/repos/web-platform-tests/wpt/downloads",
"issues_url": "https://api.github.com/repos/web-platform-tests/wpt/issues{/number}",
"pulls_url": "https://api.github.com/repos/web-platform-tests/wpt/pulls{/number}",
"milestones_url": "https://api.github.com/repos/web-platform-tests/wpt/milestones{/number}",
"notifications_url": "https://api.github.com/repos/web-platform-tests/wpt/notifications{?since,all,participating}",
"labels_url": "https://api.github.com/repos/web-platform-tests/wpt/labels{/name}",
"releases_url": "https://api.github.com/repos/web-platform-tests/wpt/releases{/id}",
"deployments_url": "https://api.github.com/repos/web-platform-tests/wpt/deployments",
"created_at": "2012-03-04T12:58:11Z",
"updated_at": "2019-02-28T12:41:33Z",
"pushed_at": "2019-02-28T13:43:16Z",
"git_url": "git://github.com/web-platform-tests/wpt.git",
"ssh_url": "git@github.com:web-platform-tests/wpt.git",
"clone_url": "https://github.com/web-platform-tests/wpt.git",
"svn_url": "https://github.com/web-platform-tests/wpt",
"homepage": "http://irc.w3.org/?channels=testing",
"size": 324641,
"stargazers_count": 2058,
"watchers_count": 2058,
"language": "HTML",
"has_issues": true,
"has_projects": true,
"has_downloads": true,
"has_wiki": true,
"has_pages": true,
"forks_count": 1604,
"mirror_url": null,
"archived": false,
"open_issues_count": 1354,
"license": {
"key": "other",
"name": "Other",
"spdx_id": "NOASSERTION",
"url": null,
"node_id": "MDc6TGljZW5zZTA="
},
"forks": 1604,
"open_issues": 1354,
"watchers": 2058,
"default_branch": "master"
},
"organization": {
"login": "web-platform-tests",
"id": 37226233,
"node_id": "MDEyOk9yZ2FuaXphdGlvbjM3MjI2MjMz",
"url": "https://api.github.com/orgs/web-platform-tests",
"repos_url": "https://api.github.com/orgs/web-platform-tests/repos",
"events_url": "https://api.github.com/orgs/web-platform-tests/events",
"hooks_url": "https://api.github.com/orgs/web-platform-tests/hooks",
"issues_url": "https://api.github.com/orgs/web-platform-tests/issues",
"members_url": "https://api.github.com/orgs/web-platform-tests/members{/member}",
"public_members_url": "https://api.github.com/orgs/web-platform-tests/public_members{/member}",
"avatar_url": "https://avatars0.githubusercontent.com/u/37226233?v=4",
"description": ""
},
"sender": {
"login": "jgraham",
"id": 294864,
"node_id": "MDQ6VXNlcjI5NDg2NA==",
"avatar_url": "https://avatars1.githubusercontent.com/u/294864?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/jgraham",
"html_url": "https://github.com/jgraham",
"followers_url": "https://api.github.com/users/jgraham/followers",
"following_url": "https://api.github.com/users/jgraham/following{/other_user}",
"gists_url": "https://api.github.com/users/jgraham/gists{/gist_id}",
"starred_url": "https://api.github.com/users/jgraham/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/jgraham/subscriptions",
"organizations_url": "https://api.github.com/users/jgraham/orgs",
"repos_url": "https://api.github.com/users/jgraham/repos",
"events_url": "https://api.github.com/users/jgraham/events{/privacy}",
"received_events_url": "https://api.github.com/users/jgraham/received_events",
"type": "User",
"site_admin": false
}
}
View file
@@ -0,0 +1,37 @@
import argparse
import json
import os
import jsone
import yaml
here = os.path.dirname(__file__)
root = os.path.abspath(os.path.join(here, "..", ".."))
def create_parser():
return argparse.ArgumentParser()
def run(venv, **kwargs):
with open(os.path.join(root, ".taskcluster.yml")) as f:
template = yaml.safe_load(f)
events = [("pr_event.json", "github-pull-request", "Pull Request"),
("master_push_event.json", "github-push", "Push to master")]
for filename, tasks_for, title in events:
with open(os.path.join(here, "testdata", filename)) as f:
event = json.load(f)
context = {"tasks_for": tasks_for,
"event": event,
"as_slugid": lambda x: x}
data = jsone.render(template, context)
heading = "Got %s tasks for %s" % (len(data["tasks"]), title)
print(heading)
print("=" * len(heading))
for item in data["tasks"]:
print(json.dumps(item, indent=2))
print("")
View file
@@ -16,9 +16,10 @@ class Response(object):
body has been read and parsed as appropriate.
"""
def __init__(self, status, body):
def __init__(self, status, body, headers):
self.status = status
self.body = body
self.headers = headers
def __repr__(self):
cls_name = self.__class__.__name__
@@ -39,11 +40,12 @@ class Response(object):
def from_http(cls, http_response, decoder=json.JSONDecoder, **kwargs):
try:
body = json.load(http_response, cls=decoder, **kwargs)
headers = dict(http_response.getheaders())
except ValueError:
raise ValueError("Failed to decode response body as JSON:\n" +
http_response.read())
return cls(http_response.status, body)
return cls(http_response.status, body, headers)
class HTTPWireProtocol(object):
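The Response change above threads the HTTP headers through from_http alongside the decoded JSON body. A tiny illustration, assuming the Response class shown here is in scope (all values are invented):

resp = Response(200, {"value": None}, {"content-type": "application/json"})
assert resp.status == 200
assert resp.headers["content-type"] == "application/json"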
View file
@@ -2,4 +2,5 @@ tools/ci/
tools/lint/
tools/manifest/
tools/serve/
tools/taskcluster/
tools/wpt/
View file
@@ -49,7 +49,13 @@ class Virtualenv(object):
self.activate()
def install(self, *requirements):
call(self.pip_path, "install", *requirements)
# `--prefer-binary` guards against race conditions when installation
# occurs while packages are in the process of being published.
call(self.pip_path, "install", "--prefer-binary", *requirements)
def install_requirements(self, requirements_path):
call(self.pip_path, "install", "-r", requirements_path)
# `--prefer-binary` guards against race conditions when installation
# occurs while packages are in the process of being published.
call(
self.pip_path, "install", "--prefer-binary", "-r", requirements_path
)
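For context, --prefer-binary makes pip choose an older wheel over a newer source-only release, which closes the window where a new version exists on PyPI as an sdist but its wheels have not been uploaded yet. The calls above boil down to something like the following (the venv path is hypothetical):

import subprocess

subprocess.check_call(["/path/to/venv/bin/pip", "install",
                       "--prefer-binary", "-r", "requirements.txt"])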
View file
@@ -2,7 +2,7 @@ marionette_driver==2.7.0
mozprofile==2.2.0
mozprocess==1.0.0
mozcrash==1.1.0
mozrunner==7.3.0
mozrunner==7.4.0
mozleak==0.2
mozinstall==2.0.0
mozdownload==1.25
View file
@@ -1,3 +1,4 @@
import base64
import hashlib
import httplib
import os
@@ -76,11 +77,34 @@ class TestharnessResultConverter(object):
testharness_result_converter = TestharnessResultConverter()
def hash_screenshot(data):
"""Computes the sha1 checksum of a base64-encoded screenshot."""
return hashlib.sha1(base64.b64decode(data)).hexdigest()
def _ensure_hash_in_reftest_screenshots(extra):
"""Make sure reftest_screenshots have hashes.
Marionette internal reftest runner does not produce hashes.
"""
log_data = extra.get("reftest_screenshots")
if not log_data:
return
for item in log_data:
if type(item) != dict:
# Skip relation strings.
continue
if "hash" not in item:
item["hash"] = hash_screenshot(item["screenshot"])
def reftest_result_converter(self, test, result):
extra = result.get("extra", {})
_ensure_hash_in_reftest_screenshots(extra)
return (test.result_cls(
result["status"],
result["message"],
extra=result.get("extra", {}),
extra=extra,
stack=result.get("stack")), [])
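A short usage sketch of the two helpers added above, assuming both are in scope; the screenshot payload is fabricated. Relation strings such as "==" are skipped, and every dict entry ends up with a "hash" key:

import base64

fake_png = base64.b64encode(b"\x89PNG fake bytes").decode("ascii")
extra = {"reftest_screenshots": [
    {"url": "/foo/test.html", "screenshot": fake_png},
    "==",
    {"url": "/foo/test-ref.html", "screenshot": fake_png},
]}
_ensure_hash_in_reftest_screenshots(extra)
assert extra["reftest_screenshots"][0]["hash"] == hash_screenshot(fake_png)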
@@ -152,6 +176,11 @@ class TestExecutor(object):
if self.protocol is not None:
self.protocol.teardown()
def reset(self):
"""Re-initialize internal state to facilitate repeated test execution
as implemented by the `--rerun` command-line argument."""
pass
def run_test(self, test):
"""Run a particular test.
@@ -256,7 +285,7 @@ class RefTestImplementation(object):
return False, data
screenshot = data
hash_value = hashlib.sha1(screenshot).hexdigest()
hash_value = hash_screenshot(data)
self.screenshot_cache[key] = (hash_value, None)
@@ -267,6 +296,9 @@ class RefTestImplementation(object):
self.message.append("%s %s" % (test.url, rv[0]))
return True, rv
def reset(self):
self.screenshot_cache.clear()
def is_pass(self, lhs_hash, rhs_hash, relation):
assert relation in ("==", "!=")
self.message.append("Testing %s %s %s" % (lhs_hash, relation, rhs_hash))
@@ -310,8 +342,11 @@ class RefTestImplementation(object):
if success:
screenshots[i] = screenshot
log_data = [{"url": nodes[0].url, "screenshot": screenshots[0]}, relation,
{"url": nodes[1].url, "screenshot": screenshots[1]}]
log_data = [
{"url": nodes[0].url, "screenshot": screenshots[0], "hash": hashes[0]},
relation,
{"url": nodes[1].url, "screenshot": screenshots[1], "hash": hashes[1]},
]
return {"status": "FAIL",
"message": "\n".join(self.message),
View file
@@ -780,6 +780,9 @@ class MarionetteRefTestExecutor(RefTestExecutor):
self.logger.warning("Exception during reftest teardown:\n%s" %
traceback.format_exc(e))
def reset(self):
self.implementation.reset(**self.implementation_kwargs)
def is_alive(self):
return self.protocol.is_alive
@@ -861,6 +864,11 @@ class InternalRefTestImplementation(object):
self.executor.protocol.marionette.set_context(self.executor.protocol.marionette.CONTEXT_CHROME)
self.executor.protocol.marionette._send_message("reftest:setup", data)
def reset(self, screenshot=None):
# this is obviously wrong; it shouldn't be a no-op
# see https://github.com/web-platform-tests/wpt/issues/15604
pass
def run_test(self, test):
references = self.get_references(test)
timeout = (test.timeout * 1000) * self.timeout_multiplier
@@ -884,6 +892,11 @@ class InternalRefTestImplementation(object):
if self.executor.protocol.marionette and self.executor.protocol.marionette.session_id:
self.executor.protocol.marionette._send_message("reftest:teardown", {})
self.executor.protocol.marionette.set_context(self.executor.protocol.marionette.CONTEXT_CONTENT)
# the reftest runner opens/closes a window with focus, so, as
# after closing any window, we need to give a new window
# focus
handles = self.executor.protocol.marionette.window_handles
self.executor.protocol.marionette.switch_to_window(handles[0])
except Exception as e:
# Ignore errors during teardown
self.logger.warning(traceback.format_exc(e))
View file
@@ -359,6 +359,9 @@ class SeleniumRefTestExecutor(RefTestExecutor):
with open(os.path.join(here, "reftest-wait_webdriver.js")) as f:
self.wait_script = f.read()
def reset(self):
self.implementation.reset()
def is_alive(self):
return self.protocol.is_alive()
@@ -369,7 +372,8 @@ class SeleniumRefTestExecutor(RefTestExecutor):
"""return [window.outerWidth - window.innerWidth,
window.outerHeight - window.innerHeight];"""
)
self.protocol.webdriver.set_window_rect(0, 0, 800 + width_offset, 600 + height_offset)
self.protocol.webdriver.set_window_position(0, 0)
self.protocol.webdriver.set_window_size(800 + width_offset, 600 + height_offset)
result = self.implementation.run_test(test)
View file
@@ -181,6 +181,9 @@ class ServoRefTestExecutor(ProcessTestExecutor):
self.tempdir = tempfile.mkdtemp()
self.hosts_path = write_hosts_file(server_config)
def reset(self):
self.implementation.reset()
def teardown(self):
try:
os.unlink(self.hosts_path)
View file
@@ -260,6 +260,9 @@ class ServoWebDriverRefTestExecutor(RefTestExecutor):
with open(os.path.join(here, "reftest-wait_webdriver.js")) as f:
self.wait_script = f.read()
def reset(self):
self.implementation.reset()
def is_alive(self):
return self.protocol.is_alive()
View file
@@ -405,6 +405,9 @@ class WebDriverRefTestExecutor(RefTestExecutor):
with open(os.path.join(here, "reftest-wait_webdriver.js")) as f:
self.wait_script = f.read()
def reset(self):
self.implementation.reset()
def is_alive(self):
return self.protocol.is_alive()
View file
@@ -3,6 +3,7 @@ import re
import sys
from mozlog.structured.formatters.base import BaseFormatter
from ..executors.base import strip_server
LONE_SURROGATE_RE = re.compile(u"[\uD800-\uDFFF]")
@@ -115,6 +116,12 @@ class WptreportFormatter(BaseFormatter):
test["expected"] = data["expected"]
if "message" in data:
test["message"] = replace_lone_surrogate(data["message"])
if "reftest_screenshots" in data.get("extra", {}):
test["screenshots"] = {
strip_server(item["url"]): "sha1:" + item["hash"]
for item in data["extra"]["reftest_screenshots"]
if type(item) == dict
}
def assertion_count(self, data):
test = self.find_or_create_test(data)
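To illustrate the mapping built above (URLs and hashes are made up): a test_end entry whose "extra" carries reftest_screenshots gains a screenshots dict keyed by the server-stripped URL, with "sha1:"-prefixed hashes; relation strings are dropped by the type check.

extra = {"reftest_screenshots": [
    {"url": "http://web-platform.test:8000/css/test.html",
     "hash": "0f1e2d", "screenshot": "..."},
    "==",
    {"url": "http://web-platform.test:8000/css/test-ref.html",
     "hash": "4b5a69", "screenshot": "..."},
]}
# resulting entry in the report:
# test["screenshots"] == {"/css/test.html": "sha1:0f1e2d",
#                         "/css/test-ref.html": "sha1:4b5a69"}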
View file
@@ -0,0 +1,23 @@
from mozlog.structured.formatters.base import BaseFormatter
class WptscreenshotFormatter(BaseFormatter):
"""Formatter that outputs screenshots in the format expected by wpt.fyi."""
def __init__(self):
self.cache = set()
def suite_start(self, data):
# TODO: ask wpt.fyi for known hashes.
pass
def test_end(self, data):
if "reftest_screenshots" not in data.get("extra", {}):
return
output = ""
for item in data["extra"]["reftest_screenshots"]:
if type(item) != dict or item["hash"] in self.cache:
continue
self.cache.add(item["hash"])
output += "data:image/png;base64,{}\n".format(item["screenshot"])
return output if output else None
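A quick sketch of what the new formatter returns from a single test_end call (hash and screenshot values are invented): one newline-terminated data: URI per previously unseen screenshot, with duplicates within the run suppressed by the cache:

formatter = WptscreenshotFormatter()
output = formatter.test_end({"extra": {"reftest_screenshots": [
    {"url": "/a.html", "hash": "aaa111", "screenshot": "iVBORw0KGgoAAA"},
    "==",
    {"url": "/a-ref.html", "hash": "aaa111", "screenshot": "iVBORw0KGgoAAA"},
]}})
# output == "data:image/png;base64,iVBORw0KGgoAAA\n"
# (the reference shares the same hash, so it is emitted only once)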
View file
@@ -55,7 +55,7 @@ def update_expected(test_paths, serve_root, log_file_names,
print("disabled: %s" % test.root.test_path)
def do_delayed_imports(serve_root):
def do_delayed_imports(serve_root=None):
global manifest, manifestitem
from manifest import manifest, item as manifestitem
@@ -443,6 +443,7 @@ class ExpectedUpdater(object):
def create_test_tree(metadata_path, test_manifest):
"""Create a map of test_id to TestFileData for that test.
"""
do_delayed_imports()
id_test_map = {}
exclude_types = frozenset(["stub", "helper", "manual", "support", "conformancechecker"])
all_types = manifestitem.item_types.keys()
View file
@@ -3,7 +3,7 @@ import os
import urlparse
from abc import ABCMeta, abstractmethod
from Queue import Empty
from collections import defaultdict, OrderedDict, deque
from collections import defaultdict, deque
from multiprocessing import Queue
import manifestinclude
@@ -68,275 +68,6 @@ class DirectoryHashChunker(TestChunker):
yield test_type, test_path, tests
class EqualTimeChunker(TestChunker):
def _group_by_directory(self, manifest_items):
"""Split the list of manifest items into a ordered dict that groups tests in
so that anything in the same subdirectory beyond a depth of 3 is in the same
group. So all tests in a/b/c, a/b/c/d and a/b/c/e will be grouped together
and separate from tests in a/b/f
Returns: tuple (ordered dict of {test_dir: PathData}, total estimated runtime)
"""
class PathData(object):
def __init__(self, path):
self.path = path
self.time = 0
self.tests = []
by_dir = OrderedDict()
total_time = 0
for i, (test_type, test_path, tests) in enumerate(manifest_items):
test_dir = tuple(os.path.split(test_path)[0].split(os.path.sep)[:3])
if test_dir not in by_dir:
by_dir[test_dir] = PathData(test_dir)
data = by_dir[test_dir]
time = sum(test.default_timeout if test.timeout !=
"long" else test.long_timeout for test in tests)
data.time += time
total_time += time
data.tests.append((test_type, test_path, tests))
return by_dir, total_time
def _maybe_remove(self, chunks, i, direction):
"""Trial removing a chunk from one chunk to an adjacent one.
:param chunks: - the list of all chunks
:param i: - the chunk index in the list of chunks to try removing from
:param direction: either "next" if we are going to move from the end to
the subsequent chunk, or "prev" if we are going to move
from the start into the previous chunk.
:returns bool: Did a chunk get moved?"""
source_chunk = chunks[i]
if direction == "next":
target_chunk = chunks[i+1]
path_index = -1
move_func = lambda: target_chunk.appendleft(source_chunk.pop())
elif direction == "prev":
target_chunk = chunks[i-1]
path_index = 0
move_func = lambda: target_chunk.append(source_chunk.popleft())
else:
raise ValueError("Unexpected move direction %s" % direction)
return self._maybe_move(source_chunk, target_chunk, path_index, move_func)
def _maybe_add(self, chunks, i, direction):
"""Trial adding a chunk from one chunk to an adjacent one.
:param chunks: - the list of all chunks
:param i: - the chunk index in the list of chunks to try adding to
:param direction: either "next" if we are going to remove from the
subsequent chunk, or "prev" if we are going to remove
from the previous chunk.
:returns bool: Did a chunk get moved?"""
target_chunk = chunks[i]
if direction == "next":
source_chunk = chunks[i+1]
path_index = 0
move_func = lambda: target_chunk.append(source_chunk.popleft())
elif direction == "prev":
source_chunk = chunks[i-1]
path_index = -1
move_func = lambda: target_chunk.appendleft(source_chunk.pop())
else:
raise ValueError("Unexpected move direction %s" % direction)
return self._maybe_move(source_chunk, target_chunk, path_index, move_func)
def _maybe_move(self, source_chunk, target_chunk, path_index, move_func):
"""Move from one chunk to another, assess the change in badness,
and keep the move iff it decreases the badness score.
:param source_chunk: chunk to move from
:param target_chunk: chunk to move to
:param path_index: 0 if we are moving from the start or -1 if we are moving from the
end
:param move_func: Function that actually moves between chunks"""
if len(source_chunk.paths) <= 1:
return False
move_time = source_chunk.paths[path_index].time
new_source_badness = self._badness(source_chunk.time - move_time)
new_target_badness = self._badness(target_chunk.time + move_time)
delta_badness = ((new_source_badness + new_target_badness) -
(source_chunk.badness + target_chunk.badness))
if delta_badness < 0:
move_func()
return True
return False
def _badness(self, time):
"""Metric of badness for a specific chunk
:param time: the time for a specific chunk"""
return (time - self.expected_time)**2
def _get_chunk(self, manifest_items):
by_dir, total_time = self._group_by_directory(manifest_items)
if len(by_dir) < self.total_chunks:
raise ValueError("Tried to split into %i chunks, but only %i subdirectories included" % (
self.total_chunks, len(by_dir)))
self.expected_time = float(total_time) / self.total_chunks
chunks = self._create_initial_chunks(by_dir)
while True:
# Move a test from one chunk to the next until doing so no longer
# reduces the badness
got_improvement = self._update_chunks(chunks)
if not got_improvement:
break
self.logger.debug(self.expected_time)
for i, chunk in chunks.iteritems():
self.logger.debug("%i: %i, %i" % (i + 1, chunk.time, chunk.badness))
assert self._all_tests(by_dir) == self._chunked_tests(chunks)
return self._get_tests(chunks)
@staticmethod
def _all_tests(by_dir):
"""Return a set of all tests in the manifest from a grouping by directory"""
return set(x[0] for item in by_dir.itervalues()
for x in item.tests)
@staticmethod
def _chunked_tests(chunks):
"""Return a set of all tests in the manifest from the chunk list"""
return set(x[0] for chunk in chunks.itervalues()
for path in chunk.paths
for x in path.tests)
def _create_initial_chunks(self, by_dir):
"""Create an initial unbalanced list of chunks.
:param by_dir: All tests in the manifest grouped by subdirectory
:returns list: A list of Chunk objects"""
class Chunk(object):
def __init__(self, paths, index):
"""List of PathData objects that together form a single chunk of
tests"""
self.paths = deque(paths)
self.time = sum(item.time for item in paths)
self.index = index
def appendleft(self, path):
"""Add a PathData object to the start of the chunk"""
self.paths.appendleft(path)
self.time += path.time
def append(self, path):
"""Add a PathData object to the end of the chunk"""
self.paths.append(path)
self.time += path.time
def pop(self):
"""Remove PathData object from the end of the chunk"""
assert len(self.paths) > 1
self.time -= self.paths[-1].time
return self.paths.pop()
def popleft(self):
"""Remove PathData object from the start of the chunk"""
assert len(self.paths) > 1
self.time -= self.paths[0].time
return self.paths.popleft()
@property
def badness(self_): # noqa: N805
"""Badness metric for this chunk"""
return self._badness(self_.time)
initial_size = len(by_dir) / self.total_chunks
chunk_boundaries = [initial_size * i
for i in xrange(self.total_chunks)] + [len(by_dir)]
chunks = OrderedDict()
for i, lower in enumerate(chunk_boundaries[:-1]):
upper = chunk_boundaries[i + 1]
paths = by_dir.values()[lower:upper]
chunks[i] = Chunk(paths, i)
assert self._all_tests(by_dir) == self._chunked_tests(chunks)
return chunks
def _update_chunks(self, chunks):
"""Run a single iteration of the chunk update algorithm.
:param chunks: - List of chunks
"""
#TODO: consider replacing this with a heap
sorted_chunks = sorted(chunks.values(), key=lambda x:-x.badness)
got_improvement = False
for chunk in sorted_chunks:
if chunk.time < self.expected_time:
f = self._maybe_add
else:
f = self._maybe_remove
if chunk.index == 0:
order = ["next"]
elif chunk.index == self.total_chunks - 1:
order = ["prev"]
else:
if chunk.time < self.expected_time:
# First try to add a test from the neighboring chunk with the
# greatest total time
if chunks[chunk.index + 1].time > chunks[chunk.index - 1].time:
order = ["next", "prev"]
else:
order = ["prev", "next"]
else:
# First try to remove a test and add to the neighboring chunk with the
# lowest total time
if chunks[chunk.index + 1].time > chunks[chunk.index - 1].time:
order = ["prev", "next"]
else:
order = ["next", "prev"]
for direction in order:
if f(chunks, chunk.index, direction):
got_improvement = True
break
if got_improvement:
break
return got_improvement
def _get_tests(self, chunks):
"""Return the list of tests corresponding to the chunk number we are running.
:param chunks: List of chunks"""
tests = []
for path in chunks[self.chunk_number - 1].paths:
tests.extend(path.tests)
return tests
def __call__(self, manifest_iter):
manifest = list(manifest_iter)
tests = self._get_chunk(manifest)
for item in tests:
yield item
class TestFilter(object):
def __init__(self, test_manifests, include=None, exclude=None, manifest_path=None, explicit=False):
if manifest_path is None or include or explicit:
@@ -445,9 +176,8 @@ class TestLoader(object):
self.chunker = {"none": Unchunked,
"hash": HashChunker,
"dir_hash": DirectoryHashChunker,
"equal_time": EqualTimeChunker}[chunk_type](total_chunks,
chunk_number)
"dir_hash": DirectoryHashChunker}[chunk_type](total_chunks,
chunk_number)
self._test_ids = None
View file
@@ -91,6 +91,7 @@ class TestRunner(object):
the associated methods"""
self.setup()
commands = {"run_test": self.run_test,
"reset": self.reset,
"stop": self.stop,
"wait": self.wait}
while True:
@@ -108,6 +109,9 @@ class TestRunner(object):
def stop(self):
return Stop
def reset(self):
self.executor.reset()
def run_test(self, test):
try:
return self.executor.run_test(test)
@@ -540,6 +544,7 @@ class TestRunnerManager(threading.Thread):
self.logger.test_start(self.state.test.id)
if self.rerun > 1:
self.logger.info("Run %d/%d" % (self.run_count, self.rerun))
self.send_message("reset")
self.run_count += 1
self.send_message("run_test", self.state.test)
View file
@@ -1,98 +0,0 @@
import unittest
import sys
from os.path import join, dirname
from mozlog import structured
sys.path.insert(0, join(dirname(__file__), "..", "..", ".."))
from wptrunner.testloader import EqualTimeChunker
from manifest.sourcefile import SourceFile
structured.set_default_logger(structured.structuredlog.StructuredLogger("TestChunker"))
testharness_test = """<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>"""
class MockTest(object):
default_timeout = 10
def __init__(self, id, path, timeout=10, contents=testharness_test):
self.id = id
self.url = "/" + path
self.item_type = "testharness"
self.timeout = timeout
self.source_file = SourceFile("/", path, "/", contents=contents)
def make_mock_manifest(*items):
rv = []
for test_type, dir_path, num_tests in items:
for i in range(num_tests):
filename = "/%i.html" % i
rv.append((test_type,
dir_path + filename,
set([MockTest("%i.html" % i, dir_path + filename)])))
return rv
class TestEqualTimeChunker(unittest.TestCase):
def test_include_all(self):
tests = make_mock_manifest(("test", "a", 10), ("test", "a/b", 10),
("test", "c", 10))
chunk_1 = list(EqualTimeChunker(3, 1)(tests))
chunk_2 = list(EqualTimeChunker(3, 2)(tests))
chunk_3 = list(EqualTimeChunker(3, 3)(tests))
self.assertEquals(tests[:10], chunk_1)
self.assertEquals(tests[10:20], chunk_2)
self.assertEquals(tests[20:], chunk_3)
def test_include_all_1(self):
tests = make_mock_manifest(("test", "a", 5), ("test", "a/b", 5),
("test", "c", 10), ("test", "d", 10))
chunk_1 = list(EqualTimeChunker(3, 1)(tests))
chunk_2 = list(EqualTimeChunker(3, 2)(tests))
chunk_3 = list(EqualTimeChunker(3, 3)(tests))
self.assertEquals(tests[:10], chunk_1)
self.assertEquals(tests[10:20], chunk_2)
self.assertEquals(tests[20:], chunk_3)
def test_long(self):
tests = make_mock_manifest(("test", "a", 100), ("test", "a/b", 1),
("test", "c", 1))
chunk_1 = list(EqualTimeChunker(3, 1)(tests))
chunk_2 = list(EqualTimeChunker(3, 2)(tests))
chunk_3 = list(EqualTimeChunker(3, 3)(tests))
self.assertEquals(tests[:100], chunk_1)
self.assertEquals(tests[100:101], chunk_2)
self.assertEquals(tests[101:102], chunk_3)
def test_long_1(self):
tests = make_mock_manifest(("test", "a", 1), ("test", "a/b", 100),
("test", "c", 1))
chunk_1 = list(EqualTimeChunker(3, 1)(tests))
chunk_2 = list(EqualTimeChunker(3, 2)(tests))
chunk_3 = list(EqualTimeChunker(3, 3)(tests))
self.assertEquals(tests[:1], chunk_1)
self.assertEquals(tests[1:101], chunk_2)
self.assertEquals(tests[101:102], chunk_3)
def test_too_few_dirs(self):
with self.assertRaises(ValueError):
tests = make_mock_manifest(("test", "a", 1), ("test", "a/b", 100),
("test", "c", 1))
list(EqualTimeChunker(4, 1)(tests))
if __name__ == "__main__":
unittest.main()
View file
@@ -10,8 +10,8 @@ from mozlog import handlers, structuredlog
sys.path.insert(0, join(dirname(__file__), "..", ".."))
from wptrunner import formatters
from wptrunner.formatters import WptreportFormatter
from wptrunner.formatters import wptreport
from wptrunner.formatters.wptreport import WptreportFormatter
def test_wptreport_runtime(capfd):
@@ -108,7 +108,7 @@ def test_wptreport_lone_surrogate_ucs2(capfd):
logger = structuredlog.StructuredLogger("test_a")
logger.add_handler(handlers.StreamHandler(output, WptreportFormatter()))
with mock.patch.object(formatters, 'surrogate_replacement', formatters.SurrogateReplacementUcs2()):
with mock.patch.object(wptreport, 'surrogate_replacement', wptreport.SurrogateReplacementUcs2()):
# output a bunch of stuff
logger.suite_start(["test-id-1"]) # no run_info arg!
logger.test_start("test-id-1")
View file
@@ -10,7 +10,7 @@ sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", ".."))
from mozlog import structured
from wptrunner.testloader import TestFilter as Filter
from .test_chunker import make_mock_manifest
from .test_wpttest import make_mock_manifest
structured.set_default_logger(structured.structuredlog.StructuredLogger("TestLoader"))
View file
@@ -5,6 +5,8 @@ import sys
from io import BytesIO
from .. import metadata, manifestupdate
from ..update import WPTUpdate
from ..update.base import StepRunner, Step
from mozlog import structuredlog, handlers, formatters
sys.path.insert(0, os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, os.pardir))
@@ -22,7 +24,7 @@ def rel_path_to_url(rel_path, url_base="/"):
def SourceFileWithTest(path, hash, cls, *args):
s = mock.Mock(rel_path=path, hash=hash)
test = cls(s, rel_path_to_url(path), *args)
test = cls("/foobar", path, "/", rel_path_to_url(path), *args)
s.manifest_items = mock.Mock(return_value=(cls.item_type, [test]))
return s
@@ -56,18 +58,13 @@ def create_updater(tests, url_base="/", **kwargs):
expected_data = {}
metadata.load_expected = lambda _, __, test_path, *args: expected_data[test_path]
id_test_map = metadata.create_test_tree(None, m)
for test_path, test_ids, test_type, manifest_str in tests:
tests = list(m.iterpath(test_path))
if isinstance(test_ids, (str, unicode)):
test_ids = [test_ids]
test_data = metadata.TestFileData("/", "testharness", None, test_path, tests)
expected_data[test_path] = manifestupdate.compile(BytesIO(manifest_str),
test_path,
url_base)
for test_id in test_ids:
id_test_map[test_id] = test_data
return id_test_map, metadata.ExpectedUpdater(id_test_map, **kwargs)
@@ -595,7 +592,8 @@ def test_update_wptreport_0():
def test_update_wptreport_1():
tests = [("path/to/__dir__", ["path/to/__dir__"], None, "")]
tests = [("path/to/test.htm", ["/path/to/test.htm"], "testharness", ""),
("path/to/__dir__", ["path/to/__dir__"], None, "")]
log = {"run_info": {},
"results": [],
@@ -701,3 +699,29 @@ leak-total: 110""")]
assert not new_manifest.is_empty
assert new_manifest.has_key("leak-threshold") is False
class TestStep(Step):
def create(self, state):
test_id = "/path/to/test.htm"
tests = [("path/to/test.htm", [test_id], "testharness", "")]
state.foo = create_test_manifest(tests)
class UpdateRunner(StepRunner):
steps = [TestStep]
def test_update_pickle():
logger = structuredlog.StructuredLogger("expected_test")
args = {
"test_paths": {
"/": {"tests_path": ""},
},
"abort": False,
"continue": False,
"sync": False,
}
args2 = args.copy()
args2["abort"] = True
wptupdate = WPTUpdate(logger, **args2)
wptupdate = WPTUpdate(logger, runner_cls=UpdateRunner, **args)
wptupdate.run()
View file
@@ -2,10 +2,12 @@ import os
import sys
from io import BytesIO
from mock import Mock
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", ".."))
from manifest.item import TestharnessTest
from wptrunner import manifestexpected, wpttest
from .test_chunker import make_mock_manifest
dir_ini_0 = """\
prefs: [a:b]
@@ -43,6 +45,24 @@ test_2 = """\
"""
testharness_test = """<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>"""
def make_mock_manifest(*items):
rv = Mock(tests_root="/foobar")
tests = []
rv.__iter__ = lambda self: iter(tests)
rv.__getitem__ = lambda self, k: tests[k]
for test_type, dir_path, num_tests in items:
for i in range(num_tests):
filename = dir_path + "/%i.html" % i
tests.append((test_type,
filename,
set([TestharnessTest("/foo.bar", filename, "/", filename)])))
return rv
def test_metadata_inherit():
tests = make_mock_manifest(("test", "a", 10), ("test", "a/b", 10),
("test", "c", 10))
@@ -56,10 +76,10 @@ def test_metadata_inherit():
test_metadata = manifestexpected.static.compile(BytesIO(test_0),
{},
data_cls_getter=manifestexpected.data_cls_getter,
test_path="a",
url_base="")
test_path="a/0.html",
url_base="/")
test = tests[0][2].pop()
test = next(iter(tests[0][2]))
test_obj = wpttest.from_manifest(tests, test, inherit_metadata, test_metadata.get_test(test.id))
assert test_obj.max_assertion_count == 3
assert test_obj.min_assertion_count == 1
@@ -74,10 +94,10 @@ def test_conditional():
test_metadata = manifestexpected.static.compile(BytesIO(test_1),
{"os": "win"},
data_cls_getter=manifestexpected.data_cls_getter,
test_path="a",
url_base="")
test_path="a/1.html",
url_base="/")
test = tests[1][2].pop()
test = next(iter(tests[1][2]))
test_obj = wpttest.from_manifest(tests, test, [], test_metadata.get_test(test.id))
assert test_obj.prefs == {"a": "b", "c": "d"}
assert test_obj.expected() == "FAIL"
@@ -89,15 +109,15 @@ def test_metadata_lsan_stack_depth():
test_metadata = manifestexpected.static.compile(BytesIO(test_2),
{},
data_cls_getter=manifestexpected.data_cls_getter,
test_path="a",
url_base="")
test_path="a/2.html",
url_base="/")
test = tests[2][2].pop()
test = next(iter(tests[2][2]))
test_obj = wpttest.from_manifest(tests, test, [], test_metadata.get_test(test.id))
assert test_obj.lsan_max_stack_depth == 42
test = tests[1][2].pop()
test = next(iter(tests[1][2]))
test_obj = wpttest.from_manifest(tests, test, [], test_metadata.get_test(test.id))
assert test_obj.lsan_max_stack_depth is None
@@ -105,8 +125,8 @@ def test_metadata_lsan_stack_depth():
test_metadata = manifestexpected.static.compile(BytesIO(test_0),
{},
data_cls_getter=manifestexpected.data_cls_getter,
test_path="a",
url_base="")
test_path="a/0.html",
url_base="/")
inherit_metadata = [
manifestexpected.static.compile(
View file
@@ -77,7 +77,7 @@ class BaseState(object):
@classmethod
def load(self):
def load(cls):
raise NotImplementedError
def save(self):
View file
@@ -149,7 +149,7 @@ class WPTUpdate(object):
# If the sync path doesn't exist we defer this until it does
setup_paths(kwargs["sync_path"])
if kwargs["store_state"]:
if kwargs.get("store_state", False):
self.state = SavedState(logger)
else:
self.state = UnsavedState(logger)
View file
@@ -8,7 +8,7 @@ from datetime import timedelta
import config
import wpttest
import formatters
from formatters import wptreport, wptscreenshot
def abs_path(path):
@@ -230,7 +230,7 @@ scheme host and port.""")
help="Total number of chunks to use")
chunking_group.add_argument("--this-chunk", action="store", type=int, default=1,
help="Chunk number to run")
chunking_group.add_argument("--chunk-type", action="store", choices=["none", "equal_time", "hash", "dir_hash"],
chunking_group.add_argument("--chunk-type", action="store", choices=["none", "hash", "dir_hash"],
default=None, help="Chunking type to use")
ssl_group = parser.add_argument_group("SSL/TLS")
@@ -326,7 +326,8 @@ scheme host and port.""")
help="List of URLs for tests to run, or paths including tests to run. "
"(equivalent to --include)")
commandline.log_formatters["wptreport"] = (formatters.WptreportFormatter, "wptreport format")
commandline.log_formatters["wptreport"] = (wptreport.WptreportFormatter, "wptreport format")
commandline.log_formatters["wptscreenshot"] = (wptscreenshot.WptscreenshotFormatter, "wpt.fyi screenshots")
commandline.add_logging_group(parser)
return parser
View file
@@ -149,12 +149,12 @@ class Test(object):
def from_manifest(cls, manifest_file, manifest_item, inherit_metadata, test_metadata):
timeout = cls.long_timeout if manifest_item.timeout == "long" else cls.default_timeout
protocol = "https" if hasattr(manifest_item, "https") and manifest_item.https else "http"
return cls(manifest_item.source_file.tests_root,
return cls(manifest_file.tests_root,
manifest_item.url,
inherit_metadata,
test_metadata,
timeout=timeout,
path=manifest_item.source_file.path,
path=os.path.join(manifest_file.tests_root, manifest_item.path),
protocol=protocol)
@property
@@ -327,18 +327,19 @@ class TestharnessTest(Test):
protocol = "https" if hasattr(manifest_item, "https") and manifest_item.https else "http"
testdriver = manifest_item.testdriver if hasattr(manifest_item, "testdriver") else False
jsshell = manifest_item.jsshell if hasattr(manifest_item, "jsshell") else False
script_metadata = manifest_item.source_file.script_metadata or []
script_metadata = manifest_item.script_metadata or []
scripts = [v for (k, v) in script_metadata if k == b"script"]
return cls(manifest_item.source_file.tests_root,
return cls(manifest_file.tests_root,
manifest_item.url,
inherit_metadata,
test_metadata,
timeout=timeout,
path=manifest_item.source_file.path,
path=os.path.join(manifest_file.tests_root, manifest_item.path),
protocol=protocol,
testdriver=testdriver,
jsshell=jsshell,
scripts=scripts)
scripts=scripts
)
@property
def id(self):
@@ -388,7 +389,7 @@ class ReftestTest(Test):
url = manifest_test.url
node = cls(manifest_test.source_file.tests_root,
node = cls(manifest_file.tests_root,
manifest_test.url,
inherit_metadata,
test_metadata,
@@ -424,7 +425,7 @@ class ReftestTest(Test):
nodes,
references_seen)
else:
reference = ReftestTest(manifest_test.source_file.tests_root,
reference = ReftestTest(manifest_file.tests_root,
ref_url,
[],
None,